Initial version of the MSM camera HAL

Change-Id: I1de77a18f9740259910e5ddee2aa0a14b7fd843b
diff --git a/camera/Android.mk b/camera/Android.mk
new file mode 100644
index 0000000..6b87ec0
--- /dev/null
+++ b/camera/Android.mk
@@ -0,0 +1,94 @@
+ifneq ($(USE_CAMERA_STUB),true)
+    # When 0, we link directly against libmmcamera; when 1, we dlopen libmmcamera.
+    DLOPEN_LIBMMCAMERA:=1
+    ifneq ($(BUILD_TINY_ANDROID),true)
+      V4L2_BASED_LIBCAM := true
+
+      LOCAL_PATH:= $(call my-dir)
+
+      include $(CLEAR_VARS)
+
+      LOCAL_CFLAGS:= -DDLOPEN_LIBMMCAMERA=$(DLOPEN_LIBMMCAMERA)
+
+      #define BUILD_UNIFIED_CODE
+      BUILD_UNIFIED_CODE := false
+
+      LOCAL_CFLAGS += -DUSE_ION
+
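+      # ION/gralloc heap selection for camera buffers: allocate from the MM
+      # heap first and fall back to the IOMMU heap when it is unavailable.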
+      LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_CP_MM_HEAP_ID # 8660=SMI, Rest=EBI
+      LOCAL_CFLAGS += -DCAMERA_ZSL_ION_HEAP_ID=ION_CP_MM_HEAP_ID
+      LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_MM_HEAP
+      LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+      LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+      LOCAL_CFLAGS += -DCAMERA_ZSL_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+      LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+
+      ifeq ($(V4L2_BASED_LIBCAM),true)
+          LOCAL_HAL_FILES := QCameraHAL.cpp QCameraHWI_Parm.cpp\
+            QCameraHWI.cpp QCameraHWI_Preview.cpp \
+            QCameraHWI_Record.cpp QCameraHWI_Still.cpp \
+            QCameraHWI_Mem.cpp QCameraHWI_Display.cpp \
+            QCameraStream.cpp QualcommCamera2.cpp QCameraParameters.cpp
+      else
+        LOCAL_HAL_FILES := QualcommCamera.cpp QualcommCameraHardware.cpp
+      endif
+
+      LOCAL_CFLAGS+= -DHW_ENCODE
+
+      # If debugging the service layer and up, use the stub camera!
+      LOCAL_C_INCLUDES += \
+        frameworks/base/services/camera/libcameraservice #
+
+      LOCAL_SRC_FILES := $(MM_CAM_FILES) $(LOCAL_HAL_FILES)
+
+      LOCAL_CFLAGS+= -DNUM_PREVIEW_BUFFERS=4 -D_ANDROID_
+
+      # To choose NEON/C routines for YV12 conversion
+      LOCAL_CFLAGS+= -DUSE_NEON_CONVERSION
+      # Uncomment the line below to enable smooth zoom
+      #LOCAL_CFLAGS+= -DCAMERA_SMOOTH_ZOOM
+
+       LOCAL_C_INCLUDES+= \
+        $(TARGET_OUT_HEADERS)/mm-camera \
+        $(TARGET_OUT_HEADERS)/mm-camera/common \
+        $(TARGET_OUT_HEADERS)/mm-still \
+        $(TARGET_OUT_HEADERS)/mm-still/jpeg
+
+      ifeq ($(V4L2_BASED_LIBCAM),true)
+        LOCAL_C_INCLUDES+= hardware/qcom/media/mm-core/inc
+        LOCAL_C_INCLUDES+= $(TARGET_OUT_HEADERS)/mm-still/mm-omx
+        LOCAL_C_INCLUDES+= $(LOCAL_PATH)/mm-camera-interface
+      endif
+
+      LOCAL_C_INCLUDES+= hardware/qcom/display/libgralloc
+      LOCAL_C_INCLUDES+= hardware/qcom/display/libgenlock
+      LOCAL_C_INCLUDES+= hardware/qcom/media/libstagefrighthw
+
+
+      ifeq ($(V4L2_BASED_LIBCAM),true)
+        LOCAL_SHARED_LIBRARIES:= libutils libui libcamera_client liblog libcutils libmmjpeg libmmstillomx libimage-jpeg-enc-omx-comp
+        LOCAL_SHARED_LIBRARIES += libmmcamera_interface2
+      else
+         LOCAL_SHARED_LIBRARIES:= libutils libui libcamera_client liblog libcutils libmmjpeg
+      endif
+
+      LOCAL_SHARED_LIBRARIES+= libgenlock libbinder
+      ifneq ($(DLOPEN_LIBMMCAMERA),1)
+        LOCAL_SHARED_LIBRARIES+= liboemcamera
+      else
+        LOCAL_SHARED_LIBRARIES+= libdl
+      endif
+
+      LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/socket.h
+
+      LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+      LOCAL_MODULE:= camera.$(TARGET_BOARD_PLATFORM)
+      LOCAL_MODULE_TAGS := optional
+      include $(BUILD_SHARED_LIBRARY)
+
+    endif # BUILD_TINY_ANDROID
+endif # USE_CAMERA_STUB
+
+ifeq ($(V4L2_BASED_LIBCAM),true)
+include $(LOCAL_PATH)/mm-camera-interface/Android.mk
+endif
diff --git a/camera/MODULE_LICENSE_BSD b/camera/MODULE_LICENSE_BSD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/camera/MODULE_LICENSE_BSD
diff --git a/camera/Makefile.am b/camera/Makefile.am
new file mode 100644
index 0000000..751e362
--- /dev/null
+++ b/camera/Makefile.am
@@ -0,0 +1,75 @@
+ACLOCAL_AMFLAGS = -I m4
+
+#libcamera.so
+lib_LTLIBRARIES = libcamera.la
+
+libcamera_la_CFLAGS := $(DEBUG_CFLAGS)
+libcamera_la_CFLAGS += -DAMSS_VERSION=$(AMSS_VERSION)
+libcamera_la_CFLAGS += -DMSM_CAMERA_GCC
+libcamera_la_CFLAGS += -DLINUX_ENABLED
+libcamera_la_CFLAGS += -Dstrlcpy=g_strlcpy
+libcamera_la_CFLAGS += -fPIC
+
+libcamera_la_CFLAGS += -DDLOPEN_LIBMMCAMERA=1
+
+libcamera_la_CFLAGS += -DHW_ENCODE
+
+if MSM7X27A
+libcamera_la_CFLAGS+= -DNUM_PREVIEW_BUFFERS=6
+libcamera_la_CFLAGS+= -DVFE_7X27A
+else
+libcamera_la_CFLAGS+= -DNUM_PREVIEW_BUFFERS=4
+endif
+
+# To choose NEON/C routines for YV12 conversion
+libcamera_la_CFLAGS+= -DUSE_NEON_CONVERSION
+# Uncomment the line below to enable smooth zoom
+#libcamera_la_CFLAGS+= -DCAMERA_SMOOTH_ZOOM
+
+libcamera_la_SOURCES := mm_camera.c
+libcamera_la_SOURCES += mm_camera_channel.c
+libcamera_la_SOURCES += mm_camera_helper.c
+libcamera_la_SOURCES += mm_camera_interface2.c
+libcamera_la_SOURCES += mm_camera_notify.c
+libcamera_la_SOURCES += mm_camera_poll_thread.c
+libcamera_la_SOURCES += mm_camera_sock.c
+libcamera_la_SOURCES += mm_camera_stream.c
+
+if BUILD_JPEG
+libcamera_la_SOURCES += mm_jpeg_encoder.c
+libcamera_la_SOURCES += mm_omx_jpeg_encoder.c
+endif
+
+if BUILD_UNIFIED_CODE
+if MSM8960
+libcamera_la_SOURCES += QCameraHAL.cpp
+libcamera_la_SOURCES += QCameraHWI_Parm.cpp
+libcamera_la_SOURCES += QCameraHWI.cpp
+libcamera_la_SOURCES += QCameraHWI_Preview.cpp
+libcamera_la_SOURCES += QCameraHWI_Record.cpp
+libcamera_la_SOURCES += QCameraHWI_Still.cpp
+libcamera_la_SOURCES += QCameraHWI_Mem.cpp
+libcamera_la_SOURCES += QCameraHWI_Display.cpp
+libcamera_la_SOURCES += QCameraStream.cpp
+libcamera_la_SOURCES += QualcommCamera2.cpp
+else
+if MSM7X27A
+libcamera_la_SOURCES += QCameraHAL.cpp
+libcamera_la_SOURCES += QCameraHWI_Parm.cpp
+libcamera_la_SOURCES += QCameraHWI.cpp
+libcamera_la_SOURCES += QCameraHWI_Preview_7x27A.cpp
+libcamera_la_SOURCES += QCameraHWI_Record_7x27A.cpp
+libcamera_la_SOURCES += QCameraHWI_Still.cpp
+libcamera_la_SOURCES += QCameraHWI_Mem.cpp
+libcamera_la_SOURCES += QCameraHWI_Display.cpp
+libcamera_la_SOURCES += QCameraStream.cpp
+libcamera_la_SOURCES += QualcommCamera2.cpp
+endif
+endif
+endif
+
+libcamera_la_LDFLAGS := $(DEBUG_LDFLAGS)
+libcamera_la_LDFLAGS += -shared
+libcamera_la_LIBADD = -ldl
+
+dirs :=
+SUBDIRS = $(dirs)
+
diff --git a/camera/QCameraHAL.cpp b/camera/QCameraHAL.cpp
new file mode 100644
index 0000000..9863683
--- /dev/null
+++ b/camera/QCameraHAL.cpp
@@ -0,0 +1,133 @@
+/*
+** Copyright (c) 2011 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHAL"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+
+/* include QCamera Hardware Interface Header*/
+#include "QCameraHAL.h"
+
+int HAL_numOfCameras = 0;
+camera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+mm_camera_t * HAL_camerahandle[MSM_MAX_CAMERA_SENSORS];
+int HAL_currentCameraMode;
+
+namespace android {
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+extern "C" int HAL_getNumberOfCameras()
+{
+    /* try to query every time we get the call!*/
+    uint8_t num_camera = 0;
+    mm_camera_t * handle_base = 0;
+    ALOGV("%s: E", __func__);
+
+    handle_base= mm_camera_query(&num_camera);
+
+    if (!handle_base) {
+        HAL_numOfCameras = 0;
+    }
+    else
+    {
+        camera_info_t* p_camera_info = 0;
+        HAL_numOfCameras=num_camera;
+
+        ALOGI("Handle base =0x%p",handle_base);
+        ALOGI("getCameraInfo: numOfCameras = %d", HAL_numOfCameras);
+        for(int i = 0; i < HAL_numOfCameras; i++) {
+            ALOGI("Handle [%d]=0x%p",i,handle_base+i);
+            HAL_camerahandle[i]=handle_base + i;
+            p_camera_info = &(HAL_camerahandle[i]->camera_info);
+            if (p_camera_info) {
+                ALOGI("Camera sensor %d info:", i);
+                ALOGI("camera_id: %d", p_camera_info->camera_id);
+                ALOGI("modes_supported: %x", p_camera_info->modes_supported);
+                ALOGI("position: %d", p_camera_info->position);
+                ALOGI("sensor_mount_angle: %d", p_camera_info->sensor_mount_angle);
+            }
+        }
+    }
+
+    ALOGV("%s: X", __func__);
+
+    return HAL_numOfCameras;
+}
+
+extern "C" int HAL_isIn3DMode()
+{
+    return HAL_currentCameraMode == CAMERA_MODE_3D;
+}
+
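+/* Translate the cached mm-camera sensor info into the Android CameraInfo
+ * structure: sensor position maps to facing, sensor_mount_angle maps to
+ * orientation. */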
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
+{
+    mm_camera_t *mm_camer_obj = 0;
+    ALOGV("%s: E", __func__);
+
+    if (!HAL_numOfCameras || cameraId < 0 || cameraId >= HAL_numOfCameras || !cameraInfo)
+        return;
+    else
+        mm_camer_obj = HAL_camerahandle[cameraId];
+
+    if (!mm_camer_obj)
+        return;
+    else {
+        cameraInfo->facing =
+            (FRONT_CAMERA == mm_camer_obj->camera_info.position)?
+            CAMERA_FACING_FRONT : CAMERA_FACING_BACK;
+
+        cameraInfo->orientation = mm_camer_obj->camera_info.sensor_mount_angle;
+#if 0
+        // TODO: fix me
+        /* We always support ZSL in our stack */
+        cameraInfo->mode = CAMERA_SUPPORT_MODE_ZSL;
+        if (mm_camer_obj->camera_info.modes_supported & CAMERA_MODE_2D) {
+            cameraInfo->mode |= CAMERA_SUPPORT_MODE_2D;
+        }
+        if (mm_camer_obj->camera_info.modes_supported & CAMERA_MODE_3D) {
+            cameraInfo->mode |= CAMERA_SUPPORT_MODE_3D;
+        }
+#endif
+    }
+    ALOGV("%s: X", __func__);
+    return;
+}
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" void * HAL_openCameraHardware(int cameraId, int mode)
+{
+    ALOGV("%s: E", __func__);
+    if (!HAL_numOfCameras || cameraId >= HAL_numOfCameras || cameraId < 0) {
+      return NULL;
+    }
+    return QCameraHAL_openCameraHardware(cameraId, mode);
+}
+
+
+}; // namespace android
diff --git a/camera/QCameraHAL.h b/camera/QCameraHAL.h
new file mode 100644
index 0000000..c13d9bc
--- /dev/null
+++ b/camera/QCameraHAL.h
@@ -0,0 +1,37 @@
+/*
+** Copyright (c) 2011 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_HAL_H
+#define ANDROID_HARDWARE_QCAMERA_HAL_H
+
+
+#include "QCameraHWI.h"
+
+extern "C" {
+#include <mm_camera_interface2.h>
+}
+namespace android {
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" void *
+       QCameraHAL_openCameraHardware(int  cameraId, int mode);
+extern "C" int HAL_getNumberOfCameras();
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo);
+
+}; // namespace android
+
+#endif
+
diff --git a/camera/QCameraHWI.cpp b/camera/QCameraHWI.cpp
new file mode 100644
index 0000000..54b4098
--- /dev/null
+++ b/camera/QCameraHWI.cpp
@@ -0,0 +1,2502 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_NIDEBUG 0
+
+#define LOG_TAG "QCameraHWI"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+
+/* QCameraHardwareInterface class implementation goes here */
+/* The following code implements the control logic of this class */
+
+namespace android {
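+/* Event callback registered with mm-camera; forwards each event to the
+ * owning QCameraHardwareInterface instance via processEvent(). */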
+static void HAL_event_cb(mm_camera_event_t *evt, void *user_data)
+{
+  QCameraHardwareInterface *obj = (QCameraHardwareInterface *)user_data;
+  if (obj) {
+    obj->processEvent(evt);
+  } else {
+    ALOGE("%s: NULL user_data", __func__);
+  }
+}
+
+int32_t QCameraHardwareInterface::createRecord()
+{
+    int32_t ret = MM_CAMERA_OK;
+    ALOGV("%s : BEGIN",__func__);
+
+    /*
+    * Creating Instance of record stream.
+    */
+    ALOGE("Mymode Record = %d",myMode);
+    mStreamRecord = QCameraStream_record::createInstance(mCameraId,
+                                                         myMode);
+
+    if (!mStreamRecord) {
+        ALOGE("%s: error - can't create record stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    /* Store HAL object in record stream Object */
+    mStreamRecord->setHALCameraControl(this);
+
+    /*Init Channel */
+    ret = mStreamRecord->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Record channel!", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s : END",__func__);
+    return ret;
+}
+
+int32_t QCameraHardwareInterface::createSnapshot()
+{
+    int32_t ret = MM_CAMERA_OK;
+    ALOGV("%s : BEGIN",__func__);
+
+    /*
+    * Creating Instance of Snapshot stream.
+    */
+    ALOGE("Mymode Snap = %d",myMode);
+    mStreamSnap = QCameraStream_Snapshot::createInstance(mCameraId,
+                                                         myMode);
+    if (!mStreamSnap) {
+        ALOGE("%s: error - can't create snapshot stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    /* Store HAL object in Snapshot stream Object */
+    mStreamSnap->setHALCameraControl(this);
+
+    /*Init Channel */
+    ret = mStreamSnap->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Snapshot channel!", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s : END",__func__);
+    return ret;
+}
+
+int32_t QCameraHardwareInterface::createPreview()
+{
+    int32_t ret = MM_CAMERA_OK;
+    ALOGV("%s : BEGIN",__func__);
+
+    ALOGE("Mymode Preview = %d",myMode);
+    mStreamDisplay = QCameraStream_preview::createInstance(mCameraId,
+                                                           myMode);
+    if (!mStreamDisplay) {
+        ALOGE("%s: error - can't create preview stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    mStreamDisplay->setHALCameraControl(this);
+
+    /* now init all the buffers and send them to the stream object */
+    ret = mStreamDisplay->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Preview channel!", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s : END",__func__);
+    return ret;
+}
+
+/* constructor */
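+/* Opens the mm-camera stack for this camera id, registers for all supported
+ * events, loads the picture/preview/video size tables, and creates the
+ * preview, record and snapshot stream objects. mCameraState becomes
+ * CAMERA_STATE_READY only if every step succeeds. */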
+QCameraHardwareInterface::
+QCameraHardwareInterface(int cameraId, int mode)
+                  : mCameraId(cameraId),
+                    mParameters(),
+                    mMsgEnabled(0),
+                    mNotifyCb(0),
+                    mDataCb(0),
+                    mDataCbTimestamp(0),
+                    mCallbackCookie(0),
+                    //mPreviewHeap(0),
+                    mStreamDisplay (NULL), mStreamRecord(NULL), mStreamSnap(NULL),
+                    mFps(0),
+                    mDebugFps(0),
+                    mMaxZoom(0),
+                    mCurrentZoom(0),
+                    mSupportedPictureSizesCount(15),
+                    mDumpFrmCnt(0), mDumpSkipCnt(0),
+                    mPictureSizeCount(15),
+                    mPreviewSizeCount(13),
+                    mVideoSizeCount(0),
+                    mAutoFocusRunning(false),
+                    mHasAutoFocusSupport(false),
+                    mInitialized(false),
+                    mIs3DModeOn(0),
+                    mSmoothZoomRunning(false),
+                    mParamStringInitialized(false),
+                    mFaceDetectOn(0),
+                    mDisEnabled(0),
+                    mZoomSupported(false),
+                    mFullLiveshotEnabled(true),
+                    mRecordingHint(0),
+                    mStatsOn(0), mCurrentHisto(-1), mSendData(false), mStatHeap(NULL),
+                    mZslLookBackMode(0),
+                    mZslLookBackValue(0),
+                    mZslEmptyQueueFlag(FALSE),
+                    mPictureSizes(NULL),
+                    mVideoSizes(NULL),
+                    mCameraState(CAMERA_STATE_UNINITED),
+                    mPostPreviewHeap(NULL),
+                    mHdrMode(HDR_BRACKETING_OFF),
+                    mStreamLiveSnap(NULL),
+                    mExifTableNumEntries(0),
+                    mDenoiseValue(0),
+                    mSnapshotFormat(0),
+                    mStartRecording(0),
+                    mZslInterval(1),
+                    mNoDisplayMode(0),
+                    mBrightness(0),
+                    mContrast(0),
+                    mEffects(0),
+                    mBestShotMode(0),
+                    mHJR(0),
+                    mSkinToneEnhancement(0),
+                    mRotation(0),
+                    mFocusMode(AF_MODE_MAX),
+                    mPreviewFormat(CAMERA_YUV_420_NV21),
+                    mRestartPreview(false),
+                    mReleasedRecordingFrame(false)
+{
+    ALOGI("QCameraHardwareInterface: E");
+    int32_t result = MM_CAMERA_E_GENERAL;
+    char value[PROPERTY_VALUE_MAX];
+
+    pthread_mutex_init(&mAsyncCmdMutex, NULL);
+    pthread_cond_init(&mAsyncCmdWait, NULL);
+
+    property_get("persist.debug.sf.showfps", value, "0");
+    mDebugFps = atoi(value);
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+    mPreviewWindow = NULL;
+    property_get("camera.hal.fps", value, "0");
+    mFps = atoi(value);
+
+    ALOGI("Init mPreviewState = %d", mPreviewState);
+
+    property_get("persist.camera.hal.multitouchaf", value, "0");
+    mMultiTouch = atoi(value);
+
+    property_get("persist.camera.full.liveshot", value, "1");
+    mFullLiveshotEnabled = atoi(value);
+
+    property_get("persist.camera.hal.dis", value, "0");
+    mDisEnabled = atoi(value);
+
+    /* Open camera stack! */
+    result=cam_ops_open(mCameraId, MM_CAMERA_OP_MODE_NOTUSED);
+    if (result == MM_CAMERA_OK) {
+      int i;
+      mm_camera_event_type_t evt;
+      for (i = 0; i < MM_CAMERA_EVT_TYPE_MAX; i++) {
+        evt = (mm_camera_event_type_t) i;
+        if (cam_evt_is_event_supported(mCameraId, evt)){
+            cam_evt_register_event_notify(mCameraId,
+              HAL_event_cb, (void *)this, evt);
+        }
+      }
+    }
+    ALOGV("Cam open returned %d",result);
+    if(MM_CAMERA_OK != result) {
+          ALOGE("startCamera: cam_ops_open failed: id = %d", mCameraId);
+          return;
+    }
+
+    loadTables();
+    /* Setup Picture Size and Preview size tables */
+    setPictureSizeTable();
+    ALOGD("%s: Picture table size: %d", __func__, mPictureSizeCount);
+    ALOGD("%s: Picture table: ", __func__);
+    for(unsigned int i=0; i < mPictureSizeCount;i++) {
+      ALOGD(" %d  %d", mPictureSizes[i].width, mPictureSizes[i].height);
+    }
+
+    setPreviewSizeTable();
+    ALOGD("%s: Preview table size: %d", __func__, mPreviewSizeCount);
+    ALOGD("%s: Preview table: ", __func__);
+    for(unsigned int i=0; i < mPreviewSizeCount;i++) {
+      ALOGD(" %d  %d", mPreviewSizes[i].width, mPreviewSizes[i].height);
+    }
+
+    setVideoSizeTable();
+    ALOGD("%s: Video table size: %d", __func__, mVideoSizeCount);
+    ALOGD("%s: Video table: ", __func__);
+    for(unsigned int i=0; i < mVideoSizeCount;i++) {
+      ALOGD(" %d  %d", mVideoSizes[i].width, mVideoSizes[i].height);
+    }
+
+    /* Set my mode - update the myMode member variable; the enum definitions
+       differ between the upper and lower layers */
+    setMyMode(mode);
+    initDefaultParameters();
+
+    //Create Stream Objects
+    //Preview
+    result = createPreview();
+    if(result != MM_CAMERA_OK) {
+        ALOGE("%s X: Failed to create Preview Object",__func__);
+        return;
+    }
+
+    //Record
+    result = createRecord();
+    if(result != MM_CAMERA_OK) {
+        ALOGE("%s X: Failed to create Record Object",__func__);
+        return;
+    }
+
+    //Snapshot
+    result = createSnapshot();
+    if(result != MM_CAMERA_OK) {
+        ALOGE("%s X: Failed to create Snapshot Object",__func__);
+        return;
+    }
+    mCameraState = CAMERA_STATE_READY;
+
+    ALOGI("QCameraHardwareInterface: X");
+}
+
+QCameraHardwareInterface::~QCameraHardwareInterface()
+{
+    ALOGI("~QCameraHardwareInterface: E");
+    int result;
+
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        stopPreview();
+    break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        stopRecordingInternal();
+        stopPreview();
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        cancelPictureInternal();
+        break;
+    default:
+        break;
+    }
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+
+    freePictureTable();
+    freeVideoSizeTable();
+    if(mStatHeap != NULL) {
+      mStatHeap.clear( );
+      mStatHeap = NULL;
+    }
+    /* Join the threads, complete operations and then delete
+       the instances. */
+    cam_ops_close(mCameraId);
+    if(mStreamDisplay){
+        QCameraStream_preview::deleteInstance (mStreamDisplay);
+        mStreamDisplay = NULL;
+    }
+    if(mStreamRecord) {
+        QCameraStream_record::deleteInstance (mStreamRecord);
+        mStreamRecord = NULL;
+    }
+    if(mStreamSnap) {
+        QCameraStream_Snapshot::deleteInstance (mStreamSnap);
+        mStreamSnap = NULL;
+    }
+
+    if (mStreamLiveSnap){
+        QCameraStream_Snapshot::deleteInstance (mStreamLiveSnap);
+        mStreamLiveSnap = NULL;
+    }
+
+    pthread_mutex_destroy(&mAsyncCmdMutex);
+    pthread_cond_destroy(&mAsyncCmdWait);
+
+    ALOGI("~QCameraHardwareInterface: X");
+}
+
+bool QCameraHardwareInterface::isCameraReady()
+{
+    ALOGE("isCameraReady mCameraState %d", mCameraState);
+    return (mCameraState == CAMERA_STATE_READY);
+}
+
+void QCameraHardwareInterface::release()
+{
+    ALOGI("release: E");
+    Mutex::Autolock l(&mLock);
+
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        stopPreviewInternal();
+    break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        stopRecordingInternal();
+        stopPreviewInternal();
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        cancelPictureInternal();
+        break;
+    default:
+        break;
+    }
+#if 0
+    if (isRecordingRunning()) {
+        stopRecordingInternal();
+        ALOGI("release: stopRecordingInternal done.");
+    }
+    if (isPreviewRunning()) {
+        stopPreview(); //stopPreviewInternal();
+        ALOGI("release: stopPreviewInternal done.");
+    }
+    if (isSnapshotRunning()) {
+        cancelPictureInternal();
+        ALOGI("release: cancelPictureInternal done.");
+    }
+    if (mCameraState == CAMERA_STATE_ERROR) {
+        //TBD: If Error occurs then tear down
+        ALOGI("release: Tear down.");
+    }
+#endif
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+    ALOGI("release: X");
+}
+
+void QCameraHardwareInterface::setCallbacks(
+    camera_notify_callback notify_cb,
+    camera_data_callback data_cb,
+    camera_data_timestamp_callback data_cb_timestamp,
+    camera_request_memory get_memory,
+    void *user)
+{
+    ALOGE("setCallbacks: E");
+    Mutex::Autolock lock(mLock);
+    mNotifyCb        = notify_cb;
+    mDataCb          = data_cb;
+    mDataCbTimestamp = data_cb_timestamp;
+    mGetMemory       = get_memory;
+    mCallbackCookie  = user;
+    ALOGI("setCallbacks: X");
+}
+
+void QCameraHardwareInterface::enableMsgType(int32_t msgType)
+{
+    ALOGI("enableMsgType: E, msgType =0x%x", msgType);
+    Mutex::Autolock lock(mLock);
+    mMsgEnabled |= msgType;
+    ALOGI("enableMsgType: X, msgType =0x%x, mMsgEnabled=0x%x", msgType, mMsgEnabled);
+}
+
+void QCameraHardwareInterface::disableMsgType(int32_t msgType)
+{
+    ALOGI("disableMsgType: E");
+    Mutex::Autolock lock(mLock);
+    mMsgEnabled &= ~msgType;
+    ALOGI("disableMsgType: X, msgType =0x%x, mMsgEnabled=0x%x", msgType, mMsgEnabled);
+}
+
+int QCameraHardwareInterface::msgTypeEnabled(int32_t msgType)
+{
+    ALOGI("msgTypeEnabled: E");
+    Mutex::Autolock lock(mLock);
+    int enabled = (mMsgEnabled & msgType);
+    ALOGI("msgTypeEnabled: X");
+    return enabled;
+}
+#if 0
+status_t QCameraHardwareInterface::dump(int fd, const Vector<String16>& args) const
+{
+    ALOGI("dump: E");
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    AutoMutex lock(&mLock);
+    write(fd, result.string(), result.size());
+    ALOGI("dump: X");
+    return NO_ERROR;
+}
+#endif
+
+int QCameraHardwareInterface::dump(int fd)
+{
+    ALOGE("%s: not supported yet", __func__);
+    return -1;
+}
+
+status_t QCameraHardwareInterface::sendCommand(int32_t command, int32_t arg1,
+                                         int32_t arg2)
+{
+    ALOGI("sendCommand: E");
+    status_t rc = NO_ERROR;
+    Mutex::Autolock l(&mLock);
+
+    switch (command) {
+    #if 0
+        case CAMERA_CMD_HISTOGRAM_ON:
+            ALOGE("histogram set to on");
+            rc = setHistogram(1);
+            break;
+        case CAMERA_CMD_HISTOGRAM_OFF:
+            ALOGE("histogram set to off");
+            rc = setHistogram(0);
+            break;
+        case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+            ALOGE("histogram send data");
+            mSendData = true;
+            rc = NO_ERROR;
+            break;
+        case CAMERA_CMD_START_FACE_DETECTION:
+           if(supportsFaceDetection() == false){
+                ALOGE("Face detection support is not available");
+                return NO_ERROR;
+           }
+           setFaceDetection("on");
+           return runFaceDetection();
+        case CAMERA_CMD_STOP_FACE_DETECTION:
+           if(supportsFaceDetection() == false){
+                ALOGE("Face detection support is not available");
+                return NO_ERROR;
+           }
+           setFaceDetection("off");
+           return runFaceDetection();
+        case CAMERA_CMD_SEND_META_DATA:
+           mMetaDataWaitLock.lock();
+           if(mFaceDetectOn == true) {
+               mSendMetaData = true;
+           }
+           mMetaDataWaitLock.unlock();
+           return NO_ERROR;
+#endif
+#if 0 /* To Do: will enable it later */
+        case CAMERA_CMD_START_SMOOTH_ZOOM :
+            ALOGV("HAL sendcmd start smooth zoom %d %d", arg1 , arg2);
+            /*TO DO: get MaxZoom from parameter*/
+            int MaxZoom = 100;
+
+            switch(mCameraState ) {
+                case CAMERA_STATE_PREVIEW:
+                case CAMERA_STATE_RECORD_CMD_SENT:
+                case CAMERA_STATE_RECORD:
+                    mTargetSmoothZoom = arg1;
+                    mCurrentZoom = mParameters.getInt("zoom");
+                    mSmoothZoomStep = (mCurrentZoom > mTargetSmoothZoom)? -1: 1;
+                   if(mCurrentZoom == mTargetSmoothZoom) {
+                        ALOGV("Smoothzoom target zoom value is same as "
+                        "current zoom value, return...");
+                        mNotifyCallback(CAMERA_MSG_ZOOM,
+                        mCurrentZoom, 1, mCallbackCookie);
+                    } else if(mCurrentZoom < 0 || mCurrentZoom > MaxZoom ||
+                        mTargetSmoothZoom < 0 || mTargetSmoothZoom > MaxZoom)  {
+                        ALOGE(" ERROR : beyond supported zoom values, break..");
+                        mNotifyCallback(CAMERA_MSG_ZOOM,
+                        mCurrentZoom, 0, mCallbackCookie);
+                    } else {
+                        mSmoothZoomRunning = true;
+                        mCurrentZoom += mSmoothZoomStep;
+                        if ((mSmoothZoomStep < 0 && mCurrentZoom < mTargetSmoothZoom)||
+                        (mSmoothZoomStep > 0 && mCurrentZoom > mTargetSmoothZoom )) {
+                            mCurrentZoom = mTargetSmoothZoom;
+                        }
+                        mParameters.set("zoom", mCurrentZoom);
+                        setZoom(mParameters);
+                    }
+                    break;
+                default:
+                    ALOGV(" No preview, no smoothzoom ");
+                    break;
+            }
+            rc = NO_ERROR;
+            break;
+
+        case CAMERA_CMD_STOP_SMOOTH_ZOOM:
+            if(mSmoothZoomRunning) {
+                mSmoothZoomRunning = false;
+                /*To Do: send cmd to stop zooming*/
+            }
+            ALOGV("HAL sendcmd stop smooth zoom");
+            rc = NO_ERROR;
+            break;
+#endif
+        default:
+            break;
+    }
+    ALOGI("sendCommand: X");
+    return rc;
+}
+
+void QCameraHardwareInterface::setMyMode(int mode)
+{
+    ALOGI("setMyMode: E");
+    //if (mode & CAMERA_SUPPORT_MODE_3D) {
+    //    myMode = CAMERA_MODE_3D;
+    //}else {
+        /* default mode is 2D */
+        myMode = CAMERA_MODE_2D;
+    //}
+
+    //if (mode & CAMERA_SUPPORT_MODE_ZSL) {
+    //    myMode = (camera_mode_t)(myMode |CAMERA_ZSL_MODE);
+    //}else {
+       myMode = (camera_mode_t) (myMode | CAMERA_NONZSL_MODE);
+    //}
+    ALOGI("setMyMode: Set mode to %d (passed mode: %d)", myMode, mode);
+    ALOGI("setMyMode: X");
+}
+/* static factory function */
+QCameraHardwareInterface *QCameraHardwareInterface::createInstance(int cameraId, int mode)
+{
+    ALOGI("createInstance: E");
+    QCameraHardwareInterface *cam = new QCameraHardwareInterface(cameraId, mode);
+    if (cam ) {
+      if (cam->mCameraState != CAMERA_STATE_READY) {
+        ALOGE("createInstance: Failed");
+        delete cam;
+        cam = NULL;
+      }
+    }
+
+    if (cam) {
+      //sp<CameraHardwareInterface> hardware(cam);
+      ALOGI("createInstance: X");
+      return cam;
+    } else {
+      return NULL;
+    }
+}
+/* external plug in function */
+extern "C" void *
+QCameraHAL_openCameraHardware(int  cameraId, int mode)
+{
+    ALOGI("QCameraHAL_openCameraHardware: E");
+    return (void *) QCameraHardwareInterface::createInstance(cameraId, mode);
+}
+
+#if 0
+bool QCameraHardwareInterface::useOverlay(void)
+{
+    ALOGI("useOverlay: E");
+    mUseOverlay = TRUE;
+    ALOGI("useOverlay: X");
+    return mUseOverlay;
+}
+#endif
+
+bool QCameraHardwareInterface::isPreviewRunning() {
+    ALOGI("isPreviewRunning: E");
+    bool ret = false;
+    ALOGI("isPreviewRunning: camera state:%d", mCameraState);
+
+    if((mCameraState == CAMERA_STATE_PREVIEW) ||
+       (mCameraState == CAMERA_STATE_PREVIEW_START_CMD_SENT) ||
+       (mCameraState == CAMERA_STATE_RECORD) ||
+       (mCameraState == CAMERA_STATE_RECORD_START_CMD_SENT) ||
+       (mCameraState == CAMERA_STATE_ZSL) ||
+       (mCameraState == CAMERA_STATE_ZSL_START_CMD_SENT)){
+       return true;
+    }
+    ALOGI("isPreviewRunning: X");
+    return ret;
+}
+
+bool QCameraHardwareInterface::isRecordingRunning() {
+    ALOGE("isRecordingRunning: E");
+    bool ret = false;
+    if(QCAMERA_HAL_RECORDING_STARTED == mPreviewState)
+      ret = true;
+    //if((mCameraState == CAMERA_STATE_RECORD) ||
+    //   (mCameraState == CAMERA_STATE_RECORD_START_CMD_SENT)) {
+    //   return true;
+    //}
+    ALOGE("isRecordingRunning: X");
+    return ret;
+}
+
+bool QCameraHardwareInterface::isSnapshotRunning() {
+    ALOGE("isSnapshotRunning: E");
+    bool ret = false;
+    //if((mCameraState == CAMERA_STATE_SNAP_CMD_ACKED) ||
+    //   (mCameraState == CAMERA_STATE_SNAP_START_CMD_SENT)) {
+    //    return true;
+    //}
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_PREVIEW_START:
+    case QCAMERA_HAL_PREVIEW_STARTED:
+    case QCAMERA_HAL_RECORDING_STARTED:
+    default:
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        ret = true;
+        break;
+    }
+    ALOGI("isSnapshotRunning: X");
+    return ret;
+}
+
+bool QCameraHardwareInterface::isZSLMode() {
+    return (myMode & CAMERA_ZSL_MODE);
+}
+
+int QCameraHardwareInterface::getHDRMode() {
+    return mHdrMode;
+}
+
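+/* Computes and logs the preview frame rate roughly every 250 ms, based on
+ * the number of frames counted since the last log. */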
+void QCameraHardwareInterface::debugShowPreviewFPS() const
+{
+    static int mFrameCount;
+    static int mLastFrameCount = 0;
+    static nsecs_t mLastFpsTime = 0;
+    static float mFps = 0;
+    mFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - mLastFpsTime;
+    if (diff > ms2ns(250)) {
+        mFps =  ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+        ALOGI("Preview Frames Per Second: %.4f", mFps);
+        mLastFpsTime = now;
+        mLastFrameCount = mFrameCount;
+    }
+}
+
+void QCameraHardwareInterface::
+processPreviewChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+    ALOGI("processPreviewChannelEvent: E");
+    switch(channelEvent) {
+        case MM_CAMERA_CH_EVT_STREAMING_ON:
+            mCameraState =
+                isZSLMode() ? CAMERA_STATE_ZSL : CAMERA_STATE_PREVIEW;
+            break;
+        case MM_CAMERA_CH_EVT_STREAMING_OFF:
+            mCameraState = CAMERA_STATE_READY;
+            break;
+        case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+            break;
+        default:
+            break;
+    }
+    ALOGI("processPreviewChannelEvent: X");
+    return;
+}
+
+void QCameraHardwareInterface::processRecordChannelEvent(
+  mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+    ALOGI("processRecordChannelEvent: E");
+    switch(channelEvent) {
+        case MM_CAMERA_CH_EVT_STREAMING_ON:
+            mCameraState = CAMERA_STATE_RECORD;
+            break;
+        case MM_CAMERA_CH_EVT_STREAMING_OFF:
+            mCameraState = CAMERA_STATE_PREVIEW;
+            break;
+        case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+            break;
+        default:
+            break;
+    }
+    ALOGI("processRecordChannelEvent: X");
+    return;
+}
+
+void QCameraHardwareInterface::
+processSnapshotChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+    ALOGI("processSnapshotChannelEvent: E evt=%d state=%d", channelEvent,
+      mCameraState);
+    switch(channelEvent) {
+        case MM_CAMERA_CH_EVT_STREAMING_ON:
+          if (!mFullLiveshotEnabled) {
+            mCameraState =
+              isZSLMode() ? CAMERA_STATE_ZSL : CAMERA_STATE_SNAP_CMD_ACKED;
+          }
+          break;
+        case MM_CAMERA_CH_EVT_STREAMING_OFF:
+          if (!mFullLiveshotEnabled) {
+            mCameraState = CAMERA_STATE_READY;
+          }
+          break;
+        case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+            break;
+        case MM_CAMERA_CH_EVT_DATA_REQUEST_MORE:
+            if (isZSLMode()) {
+                /* ZSL Mode: In ZSL burst mode, users may request more
+                snapshots than the internal ZSL queue can hold, so we need to
+                process the remaining frames as they become available.
+                In that case, we get this event */
+                if(NULL != mStreamSnap)
+                  mStreamSnap->takePictureZSL();
+            }
+            break;
+        default:
+            break;
+    }
+    ALOGI("processSnapshotChannelEvent: X");
+    return;
+}
+
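+/* Routes per-channel stream events (preview, video, snapshot) to the
+ * corresponding handler, which updates mCameraState accordingly. */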
+void QCameraHardwareInterface::processChannelEvent(
+  mm_camera_ch_event_t *event, app_notify_cb_t *app_cb)
+{
+    ALOGI("processChannelEvent: E");
+    Mutex::Autolock lock(mLock);
+    switch(event->ch) {
+        case MM_CAMERA_CH_PREVIEW:
+            processPreviewChannelEvent(event->evt, app_cb);
+            break;
+        case MM_CAMERA_CH_VIDEO:
+            processRecordChannelEvent(event->evt, app_cb);
+            break;
+        case MM_CAMERA_CH_SNAPSHOT:
+            processSnapshotChannelEvent(event->evt, app_cb);
+            break;
+        default:
+            break;
+    }
+    ALOGI("processChannelEvent: X");
+    return;
+}
+
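+/* Handles control events from the lower layer: zoom done, auto-focus done,
+ * wavelet denoise done, and errors, which are reported to the application
+ * as CAMERA_MSG_ERROR. */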
+void QCameraHardwareInterface::processCtrlEvent(mm_camera_ctrl_event_t *event, app_notify_cb_t *app_cb)
+{
+    ALOGI("processCtrlEvent: %d, E",event->evt);
+    Mutex::Autolock lock(mLock);
+    switch(event->evt)
+    {
+        case MM_CAMERA_CTRL_EVT_ZOOM_DONE:
+            zoomEvent(&event->status, app_cb);
+            break;
+        case MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE:
+            autoFocusEvent(&event->status, app_cb);
+            break;
+        case MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT:
+            break;
+        case MM_CAMERA_CTRL_EVT_WDN_DONE:
+            wdenoiseEvent(event->status, (void *)(event->cookie));
+            break;
+        case MM_CAMERA_CTRL_EVT_ERROR:
+            app_cb->notifyCb  = mNotifyCb;
+            app_cb->argm_notify.msg_type = CAMERA_MSG_ERROR;
+            app_cb->argm_notify.ext1 = CAMERA_ERROR_UNKNOWN;
+            app_cb->argm_notify.cookie =  mCallbackCookie;
+            break;
+       default:
+            break;
+    }
+    ALOGI("processCtrlEvent: X");
+    return;
+}
+
+void  QCameraHardwareInterface::processStatsEvent(
+  mm_camera_stats_event_t *event, app_notify_cb_t *app_cb)
+{
+    ALOGI("processStatsEvent: E");
+    if (!isPreviewRunning( )) {
+        ALOGE("preview is not running");
+        return;
+    }
+
+    switch (event->event_id) {
+
+        case MM_CAMERA_STATS_EVT_HISTO:
+        {
+        #if 0
+            ALOGE("HAL process Histo: mMsgEnabled=0x%x, mStatsOn=%d, mSendData=%d, mDataCb=%p ",
+            (mMsgEnabled & CAMERA_MSG_STATS_DATA), mStatsOn, mSendData, mDataCb);
+            int msgEnabled = mMsgEnabled;
+            /*get stats buffer based on index*/
+            camera_preview_histogram_info* hist_info =
+                (camera_preview_histogram_info*) mHistServer.camera_memory[event->e.stats_histo.index]->data;
+
+            if(mStatsOn == QCAMERA_PARM_ENABLE && mSendData &&
+                            mDataCb && (msgEnabled & CAMERA_MSG_STATS_DATA) ) {
+                uint32_t *dest;
+                mSendData = false;
+                mCurrentHisto = (mCurrentHisto + 1) % 3;
+                // The first element of the array will contain the maximum hist value provided by driver.
+                *(uint32_t *)((unsigned int)(mStatsMapped[mCurrentHisto]->data)) = hist_info->max_value;
+                memcpy((uint32_t *)((unsigned int)mStatsMapped[mCurrentHisto]->data + sizeof(int32_t)),
+                                                    (uint32_t *)hist_info->buffer,(sizeof(int32_t) * 256));
+
+                app_cb->dataCb  = mDataCb;
+                app_cb->argm_data_cb.msg_type = CAMERA_MSG_STATS_DATA;
+                app_cb->argm_data_cb.data = mStatsMapped[mCurrentHisto];
+                app_cb->argm_data_cb.index = 0;
+                app_cb->argm_data_cb.metadata = NULL;
+                app_cb->argm_data_cb.cookie =  mCallbackCookie;
+            }
+        #endif
+            break;
+
+        }
+        default:
+        break;
+    }
+  ALOGV("receiveCameraStats X");
+}
+
+void  QCameraHardwareInterface::processInfoEvent(
+  mm_camera_info_event_t *event, app_notify_cb_t *app_cb) {
+    ALOGI("processInfoEvent: %d, E",event->event_id);
+    //Mutex::Autolock lock(eventLock);
+    switch(event->event_id)
+    {
+        case MM_CAMERA_INFO_EVT_ROI:
+            roiEvent(event->e.roi, app_cb);
+            break;
+        default:
+            break;
+    }
+    ALOGI("processInfoEvent: X");
+    return;
+}
+
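+/* Top-level event dispatcher: ignores events once preview is stopped,
+ * routes channel/ctrl/stats/info events to their handlers, then fires any
+ * notify/data callback the handlers filled into app_cb. */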
+void  QCameraHardwareInterface::processEvent(mm_camera_event_t *event)
+{
+    app_notify_cb_t app_cb;
+    ALOGE("processEvent: type :%d E",event->event_type);
+    if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED){
+        ALOGE("Stop recording issued. Returning from processEvent");
+        return;
+    }
+    memset(&app_cb, 0, sizeof(app_notify_cb_t));
+    switch(event->event_type)
+    {
+        case MM_CAMERA_EVT_TYPE_CH:
+            processChannelEvent(&event->e.ch, &app_cb);
+            break;
+        case MM_CAMERA_EVT_TYPE_CTRL:
+            processCtrlEvent(&event->e.ctrl, &app_cb);
+            break;
+        case MM_CAMERA_EVT_TYPE_STATS:
+            processStatsEvent(&event->e.stats, &app_cb);
+            break;
+        case MM_CAMERA_EVT_TYPE_INFO:
+            processInfoEvent(&event->e.info, &app_cb);
+            break;
+        default:
+            break;
+    }
+    ALOGE(" App_cb Notify %p, datacb=%p", app_cb.notifyCb, app_cb.dataCb);
+    if (app_cb.notifyCb) {
+      app_cb.notifyCb(app_cb.argm_notify.msg_type,
+        app_cb.argm_notify.ext1, app_cb.argm_notify.ext2,
+        app_cb.argm_notify.cookie);
+    }
+    if (app_cb.dataCb) {
+      app_cb.dataCb(app_cb.argm_data_cb.msg_type,
+        app_cb.argm_data_cb.data, app_cb.argm_data_cb.index,
+        app_cb.argm_data_cb.metadata, app_cb.argm_data_cb.cookie);
+    }
+    ALOGI("processEvent: X");
+    return;
+}
+
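+/* Compares the requested preview size/format against the current mm-camera
+ * dimension config and updates *dim with the requested values. Returns true
+ * when the display dimensions already match the requested ones. */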
+bool QCameraHardwareInterface::preview_parm_config (cam_ctrl_dimension_t* dim,
+                                   QCameraParameters& parm)
+{
+    ALOGI("preview_parm_config: E");
+    bool matching = true;
+    int display_width = 0;  /* width of display      */
+    int display_height = 0; /* height of display */
+    uint16_t video_width = 0;  /* width of the video  */
+    uint16_t video_height = 0; /* height of the video */
+
+    /* First check if the preview resolution is the same, if not, change it*/
+    parm.getPreviewSize(&display_width,  &display_height);
+    if (display_width && display_height) {
+        matching = (display_width == dim->display_width) &&
+            (display_height == dim->display_height);
+
+        if (!matching) {
+            dim->display_width  = display_width;
+            dim->display_height = display_height;
+        }
+    }
+    else
+        matching = false;
+
+    cam_format_t value = getPreviewFormat();
+
+    if(value != NOT_FOUND && value != dim->prev_format ) {
+        //Setting to Parameter requested by the Upper layer
+        dim->prev_format = value;
+    }else{
+        //Setting to default Format.
+        dim->prev_format = CAMERA_YUV_420_NV21;
+    }
+    mPreviewFormat = dim->prev_format;
+
+    dim->prev_padding_format =  getPreviewPadding( );
+
+    dim->enc_format = CAMERA_YUV_420_NV12;
+    dim->orig_video_width = mDimension.orig_video_width;
+    dim->orig_video_height = mDimension.orig_video_height;
+    dim->video_width = mDimension.video_width;
+    dim->video_height = mDimension.video_height;
+    dim->video_chroma_width = mDimension.video_width;
+    dim->video_chroma_height  = mDimension.video_height;
+    /* Reset the Main image and thumbnail formats here,
+     * since they might have been changed when video size
+     * livesnapshot was taken. */
+    if (mSnapshotFormat == 1)
+      dim->main_img_format = CAMERA_YUV_422_NV61;
+    else
+      dim->main_img_format = CAMERA_YUV_420_NV21;
+    dim->thumb_format = CAMERA_YUV_420_NV21;
+    ALOGI("preview_parm_config: X");
+    return matching;
+}
+
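+/* Public preview entry point: drives the HAL preview state machine and
+ * defers the actual start to startPreview2() once a preview window is set
+ * or no-display mode is enabled. */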
+status_t QCameraHardwareInterface::startPreview()
+{
+    status_t retVal = NO_ERROR;
+
+    ALOGE("%s: mPreviewState =%d", __func__, mPreviewState);
+    Mutex::Autolock lock(mLock);
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_TAKE_PICTURE:
+        mPreviewState = QCAMERA_HAL_PREVIEW_START;
+        ALOGE("%s:  HAL::startPreview begin", __func__);
+
+        if(QCAMERA_HAL_PREVIEW_START == mPreviewState &&
+           (mPreviewWindow || isNoDisplayMode())) {
+            ALOGE("%s:  start preview now", __func__);
+            retVal = startPreview2();
+            if(retVal == NO_ERROR)
+                mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+        } else {
+            ALOGE("%s:  received startPreview, but preview window = null", __func__);
+        }
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+    case QCAMERA_HAL_PREVIEW_STARTED:
+    break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        ALOGE("%s: cannot start preview in recording state", __func__);
+        break;
+    default:
+        ALOGE("%s: unknow state %d received", __func__, mPreviewState);
+        retVal = UNKNOWN_ERROR;
+        break;
+    }
+    return retVal;
+}
+
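+/* Does the actual preview start: queries the current dimension config,
+ * reconfigures it if the requested parameters differ, then starts the
+ * display stream (and the snapshot stream as well in ZSL mode). */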
+status_t QCameraHardwareInterface::startPreview2()
+{
+    ALOGI("startPreview2: E");
+    status_t ret = NO_ERROR;
+
+    cam_ctrl_dimension_t dim;
+    mm_camera_dimension_t maxDim;
+    bool initPreview = false;
+
+    if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED) { //isPreviewRunning()){
+        ALOGE("%s:Preview already started  mCameraState = %d!", __func__, mCameraState);
+        ALOGE("%s: X", __func__);
+        return NO_ERROR;
+    }
+
+    /* get existing preview information by querying mm_camera */
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+    if (MM_CAMERA_OK != ret) {
+      ALOGE("%s: error - can't get preview dimension!", __func__);
+      ALOGE("%s: X", __func__);
+      return BAD_VALUE;
+    }
+
+    /* configure the parameters and see if we need to re-init the stream */
+    initPreview = preview_parm_config (&dim, mParameters);
+
+    if (mRecordingHint && mFullLiveshotEnabled) {
+#if 0
+      /* Camcorder mode and Full resolution liveshot enabled
+       * TBD lookup table for correct aspect ratio matching size */
+      memset(&maxDim, 0, sizeof(mm_camera_dimension_t));
+      getMaxPictureDimension(&maxDim);
+      if (!maxDim.width || !maxDim.height) {
+        maxDim.width  = DEFAULT_LIVESHOT_WIDTH;
+        maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+      }
+      /* TODO Remove this hack after adding code to get live shot dimension */
+      if (!mCameraId) {
+        maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+        maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+      }
+      dim.picture_width = maxDim.width;
+      dim.picture_height = maxDim.height;
+      mParameters.setPictureSize(dim.picture_width, dim.picture_height);
+      ALOGI("%s Setting Liveshot dimension as %d x %d", __func__,
+           maxDim.width, maxDim.height);
+#endif
+        int mPictureWidth, mPictureHeight;
+        bool matching;
+        /* First check if the picture resolution is the same, if not, change it*/
+        getPictureSize(&mPictureWidth, &mPictureHeight);
+
+        matching = (mPictureWidth == dim.picture_width) &&
+            (mPictureHeight == dim.picture_height);
+
+        if (!matching) {
+            dim.picture_width  = mPictureWidth;
+            dim.picture_height = mPictureHeight;
+            dim.ui_thumbnail_height = dim.display_height;
+            dim.ui_thumbnail_width = dim.display_width;
+        }
+        ALOGE("%s: Fullsize Liveshot picture size to set: %d x %d", __func__,
+             dim.picture_width, dim.picture_height);
+        mParameters.setPictureSize(dim.picture_width, dim.picture_height);
+    }
+
+    ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+    if (MM_CAMERA_OK != ret) {
+      ALOGE("%s X: error - can't config preview parms!", __func__);
+      return BAD_VALUE;
+    }
+
+    mStreamDisplay->setMode(myMode & CAMERA_ZSL_MODE);
+    mStreamSnap->setMode(myMode & CAMERA_ZSL_MODE);
+    mStreamRecord->setMode(myMode & CAMERA_ZSL_MODE);
+    ALOGE("%s: myMode = %d", __func__, myMode);
+
+    ALOGE("%s: setPreviewWindow", __func__);
+    mStreamDisplay->setPreviewWindow(mPreviewWindow);
+
+    if(isZSLMode()) {
+        /* Start preview streaming */
+        ret = mStreamDisplay->start();
+        if (MM_CAMERA_OK != ret){
+            ALOGE("%s: X -error - can't start preview stream in ZSL mode!", __func__);
+            return BAD_VALUE;
+        }
+
+        /* Start ZSL stream */
+        ret =  mStreamSnap->start();
+        if (MM_CAMERA_OK != ret){
+            ALOGE("%s: error - can't start Snapshot stream!", __func__);
+            mStreamDisplay->stop();
+            return BAD_VALUE;
+        }
+    }else{
+        ret = mStreamDisplay->start();
+    }
+
+    /*call QCameraStream_noneZSL::start() */
+    if (MM_CAMERA_OK != ret){
+      ALOGE("%s: X error - can't start stream!", __func__);
+      return BAD_VALUE;
+    }
+    if(MM_CAMERA_OK == ret)
+        mCameraState = CAMERA_STATE_PREVIEW_START_CMD_SENT;
+    else
+        mCameraState = CAMERA_STATE_ERROR;
+
+    if(mPostPreviewHeap != NULL) {
+        mPostPreviewHeap.clear();
+        mPostPreviewHeap = NULL;
+    }
+
+    ALOGI("startPreview: X");
+    return ret;
+}
+
+void QCameraHardwareInterface::stopPreview()
+{
+    ALOGI("%s: stopPreview: E", __func__);
+    Mutex::Autolock lock(mLock);
+    mm_camera_util_profile("HAL: stopPreview(): E");
+    mFaceDetectOn = false;
+
+    // reset recording hint to the value passed from Apps
+    const char * str = mParameters.get(QCameraParameters::KEY_RECORDING_HINT);
+    if((str != NULL) && !strcmp(str, "true")){
+        mRecordingHint = TRUE;
+    } else {
+        mRecordingHint = FALSE;
+    }
+
+    switch(mPreviewState) {
+      case QCAMERA_HAL_PREVIEW_START:
+          //mPreviewWindow = NULL;
+          mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+          break;
+      case QCAMERA_HAL_PREVIEW_STARTED:
+          stopPreviewInternal();
+          mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+          break;
+      case QCAMERA_HAL_RECORDING_STARTED:
+            stopRecordingInternal();
+            stopPreviewInternal();
+            mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+            break;
+      case QCAMERA_HAL_TAKE_PICTURE:
+      case QCAMERA_HAL_PREVIEW_STOPPED:
+      default:
+            break;
+    }
+    ALOGI("stopPreview: X, mPreviewState = %d", mPreviewState);
+}
+
+#if 0 //mzhu
+void QCameraHardwareInterface::stopPreviewZSL()
+{
+    ALOGI("stopPreviewZSL: E");
+
+    if(!mStreamDisplay || !mStreamSnap) {
+        ALOGE("mStreamDisplay/mStreamSnap is null");
+        return;
+    }
+    ALOGI("stopPreview: X, mPreviewState = %d", mPreviewState);
+}
+#endif
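+/* Stops the display stream (stopping the snapshot stream first in ZSL mode)
+ * and marks the camera state as preview-stop-command-sent. */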
+void QCameraHardwareInterface::stopPreviewInternal()
+{
+    ALOGI("stopPreviewInternal: E");
+    status_t ret = NO_ERROR;
+
+    if(!mStreamDisplay) {
+        ALOGE("mStreamDisplay is null");
+        return;
+    }
+
+    if(isZSLMode()) {
+        /* take care snapshot object for ZSL mode */
+        mStreamSnap->stop();
+    }
+    mStreamDisplay->stop();
+
+    mCameraState = CAMERA_STATE_PREVIEW_STOP_CMD_SENT;
+    ALOGI("stopPreviewInternal: X");
+}
+
+int QCameraHardwareInterface::previewEnabled()
+{
+    ALOGI("previewEnabled: E");
+    Mutex::Autolock lock(mLock);
+    ALOGE("%s: mCameraState = %d", __func__, mCameraState);
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_TAKE_PICTURE:
+    default:
+        return false;
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+    case QCAMERA_HAL_PREVIEW_STARTED:
+    case QCAMERA_HAL_RECORDING_STARTED:
+        return true;
+        break;
+    }
+    return false;
+}
+
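+/* Starts the video stream. If the recording hint was not set (or a preview
+ * restart is pending), preview is stopped, the hint is applied, and preview
+ * is restarted before recording begins. */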
+status_t QCameraHardwareInterface::startRecording()
+{
+    ALOGI("startRecording: E");
+    status_t ret = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        ALOGE("%s: preview has not been started", __func__);
+        ret = UNKNOWN_ERROR;
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        ALOGE("%s: no preview native window", __func__);
+        ret = UNKNOWN_ERROR;
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        if (mRecordingHint == FALSE || mRestartPreview) {
+            ALOGE("%s: start recording when hint is false, stop preview first", __func__);
+            stopPreviewInternal();
+            mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+
+            // Set recording hint to TRUE
+            mRecordingHint = TRUE;
+            setRecordingHintValue(mRecordingHint);
+
+            // start preview again
+            mPreviewState = QCAMERA_HAL_PREVIEW_START;
+            if (startPreview2() == NO_ERROR)
+                mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+            mRestartPreview = false;
+        }
+        ret =  mStreamRecord->start();
+        if (MM_CAMERA_OK != ret){
+            ALOGE("%s: error - mStreamRecord->start!", __func__);
+            ret = BAD_VALUE;
+            break;
+        }
+        if(MM_CAMERA_OK == ret)
+            mCameraState = CAMERA_STATE_RECORD_START_CMD_SENT;
+        else
+            mCameraState = CAMERA_STATE_ERROR;
+        mPreviewState = QCAMERA_HAL_RECORDING_STARTED;
+        break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        ALOGE("%s: recording already started", __func__);
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+    default:
+       ret = BAD_VALUE;
+       break;
+    }
+    ALOGI("startRecording: X");
+    return ret;
+}
+
+void QCameraHardwareInterface::stopRecording()
+{
+    ALOGI("stopRecording: E");
+    Mutex::Autolock lock(mLock);
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_PREVIEW_START:
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        stopRecordingInternal();
+        mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+    default:
+        break;
+    }
+    ALOGI("stopRecording: X");
+
+}
+void QCameraHardwareInterface::stopRecordingInternal()
+{
+    ALOGI("stopRecordingInternal: E");
+    status_t ret = NO_ERROR;
+
+    if(!mStreamRecord) {
+        ALOGE("mStreamRecord is null");
+        return;
+    }
+
+    /*
+    * call QCameraStream_record::stop()
+    * Unregister Callback, action stop
+    */
+    mStreamRecord->stop();
+    mCameraState = CAMERA_STATE_PREVIEW;  //TODO : Apurva : Hacked for 2nd time Recording
+    mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+    ALOGI("stopRecordingInternal: X");
+    return;
+}
+
+int QCameraHardwareInterface::recordingEnabled()
+{
+    int ret = 0;
+    Mutex::Autolock lock(mLock);
+    ALOGV("%s: E", __func__);
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_PREVIEW_START:
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        ret = 1;
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+    default:
+        break;
+    }
+    ALOGV("%s: X, ret = %d", __func__, ret);
+    return ret;   //isRecordingRunning();
+}
+
+/**
+* Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+*/
+void QCameraHardwareInterface::releaseRecordingFrame(const void *opaque)
+{
+    ALOGV("%s : BEGIN",__func__);
+    if(mStreamRecord == NULL) {
+        ALOGE("Record stream Not Initialized");
+        return;
+    }
+    mStreamRecord->releaseRecordingFrame(opaque);
+    ALOGV("%s : END",__func__);
+    return;
+}
+
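+/* Handles the auto-focus-done ctrl event: clears mAutoFocusRunning under
+ * mAutofocusLock, refreshes the focus distances, and prepares a
+ * CAMERA_MSG_FOCUS notify callback reporting success or failure. */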
+status_t QCameraHardwareInterface::autoFocusEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+    ALOGE("autoFocusEvent: E");
+    int ret = NO_ERROR;
+/************************************************************
+  BEGIN MUTEX CODE
+*************************************************************/
+
+    ALOGE("%s:%d: Trying to acquire AF bit lock",__func__,__LINE__);
+    mAutofocusLock.lock();
+    ALOGE("%s:%d: Acquired AF bit lock",__func__,__LINE__);
+
+    if(mAutoFocusRunning==false) {
+      ALOGE("%s:AF not running, discarding stale event",__func__);
+      mAutofocusLock.unlock();
+      return ret;
+    }
+
+    mAutoFocusRunning = false;
+    mAutofocusLock.unlock();
+
+/************************************************************
+  END MUTEX CODE
+*************************************************************/
+    if(status==NULL) {
+      ALOGE("%s:NULL ptr received for status",__func__);
+      return BAD_VALUE;
+    }
+
+    /* update focus distances after autofocus is done */
+    if(updateFocusDistances() != NO_ERROR) {
+       ALOGE("%s: updateFocusDistances failed for %d", __FUNCTION__, mFocusMode);
+    }
+
+    /* We need to make sure that the callback is the last possible step in
+       this execution flow: if a failure triggers another round of AF in the
+       same context, the validity of mAutoFocusRunning and the other state
+       variables would otherwise be in question. */
+
+    if (mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS)){
+      ALOGE("%s:Issuing callback to service",__func__);
+
+      /* The "Accepted" status is not appropriate here; it should only be
+         used for the initial command. Event reporting should only give us
+         SUCCESS/FAIL. */
+
+      app_cb->notifyCb  = mNotifyCb;
+      app_cb->argm_notify.msg_type = CAMERA_MSG_FOCUS;
+      app_cb->argm_notify.ext2 = 0;
+      app_cb->argm_notify.cookie =  mCallbackCookie;
+
+      ALOGE("Auto focus state = %d", *status);
+      if(*status==CAM_CTRL_SUCCESS) {
+        app_cb->argm_notify.ext1 = true;
+      }
+      else if(*status==CAM_CTRL_FAILED){
+        app_cb->argm_notify.ext1 = false;
+      }
+      else{
+        app_cb->notifyCb  = NULL;
+        ALOGE("%s:Unknown AF status (%d) received",__func__,*status);
+      }
+
+    }/*(mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS))*/
+    else{
+      ALOGE("%s:Call back not enabled",__func__);
+    }
+
+    ALOGE("autoFocusEvent: X");
+    return ret;
+
+}
+
+status_t QCameraHardwareInterface::cancelPicture()
+{
+    ALOGI("cancelPicture: E");
+    status_t ret = MM_CAMERA_OK;
+    Mutex::Autolock lock(mLock);
+
+    switch(mPreviewState) {
+        case QCAMERA_HAL_PREVIEW_STOPPED:
+        case QCAMERA_HAL_PREVIEW_START:
+        case QCAMERA_HAL_PREVIEW_STARTED:
+        case QCAMERA_HAL_RECORDING_STARTED:
+        default:
+            break;
+        case QCAMERA_HAL_TAKE_PICTURE:
+            ret = cancelPictureInternal();
+            break;
+    }
+    ALOGI("cancelPicture: X");
+    return ret;
+}
+
+status_t QCameraHardwareInterface::cancelPictureInternal()
+{
+    ALOGI("cancelPictureInternal: E");
+    status_t ret = MM_CAMERA_OK;
+    if(mCameraState != CAMERA_STATE_READY) {
+        if(mStreamSnap) {
+            mStreamSnap->stop();
+            mCameraState = CAMERA_STATE_SNAP_STOP_CMD_SENT;
+        }
+    } else {
+        ALOGE("%s: Cannot process cancel picture as snapshot is already done",__func__);
+    }
+    ALOGI("cancelPictureInternal: X");
+    return ret;
+}
+
+void QCameraHardwareInterface::pausePreviewForSnapshot()
+{
+    stopPreviewInternal( );
+}
+status_t QCameraHardwareInterface::resumePreviewAfterSnapshot()
+{
+    status_t ret = NO_ERROR;
+    ret = mStreamDisplay->start();
+    return ret;
+}
+
+void liveshot_callback(mm_camera_ch_data_buf_t *recvd_frame,
+                                void *user_data)
+{
+    QCameraHardwareInterface *pme = (QCameraHardwareInterface *)user_data;
+    cam_ctrl_dimension_t dim;
+    int mJpegMaxSize;
+    status_t ret;
+    ALOGE("%s: E", __func__);
+
+
+    mm_camera_ch_data_buf_t* frame =
+         (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        cam_evt_buf_done(pme->mCameraId, recvd_frame);
+        return;
+    }
+    memcpy(frame, recvd_frame, sizeof(mm_camera_ch_data_buf_t));
+
+    ALOGE("%s: Liveshot buffer idx: %d", __func__, frame->video.video.idx);
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(pme->mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (MM_CAMERA_OK != ret) {
+        ALOGE("%s: error - can't get dimension!", __func__);
+        ALOGE("%s: X", __func__);
+    }
+
+#if 1
+    ALOGE("Live Snapshot Enabled");
+    frame->snapshot.main.frame = frame->video.video.frame;
+    frame->snapshot.main.idx = frame->video.video.idx;
+    frame->snapshot.thumbnail.frame = frame->video.video.frame;
+    frame->snapshot.thumbnail.idx = frame->video.video.idx;
+
+    dim.picture_width = pme->mDimension.video_width;
+    dim.picture_height = pme->mDimension.video_height;
+    dim.ui_thumbnail_width = pme->mDimension.video_width;
+    dim.ui_thumbnail_height = pme->mDimension.video_height;
+    dim.main_img_format = pme->mDimension.enc_format;
+    dim.thumb_format = pme->mDimension.enc_format;
+
+    mJpegMaxSize = pme->mDimension.video_width * pme->mDimension.video_height * 1.5;
+
+    ALOGE("Picture w = %d , h = %d, size = %d",dim.picture_width,dim.picture_height,mJpegMaxSize);
+     if (pme->mStreamLiveSnap){
+        ALOGE("%s:Deleting old Snapshot stream instance",__func__);
+        QCameraStream_Snapshot::deleteInstance (pme->mStreamLiveSnap);
+        pme->mStreamLiveSnap = NULL;
+    }
+
+    pme->mStreamLiveSnap = (QCameraStream_Snapshot*)QCameraStream_Snapshot::createInstance(pme->mCameraId,
+                                                       pme->myMode);
+
+    if (!pme->mStreamLiveSnap) {
+        ALOGE("%s: error - can't create snapshot stream!", __func__);
+        return ;
+    }
+    pme->mStreamLiveSnap->setModeLiveSnapshot(true);
+    pme->mStreamLiveSnap->setHALCameraControl(pme);
+    pme->mStreamLiveSnap->initSnapshotBuffers(&dim,1);
+    ALOGE("Calling live shot");
+    ((QCameraStream_Snapshot*)(pme->mStreamLiveSnap))->takePictureLiveshot(frame, &dim, mJpegMaxSize);
+
+#else
+  if (MM_CAMERA_OK != cam_evt_buf_done(pme->mCameraId, frame)) {
+    ALOGE("%s: buf done failed", __func__);
+  }
+#endif
+  ALOGE("%s: X", __func__);
+
+}
+
+status_t  QCameraHardwareInterface::takePicture()
+{
+    ALOGI("takePicture: E");
+    status_t ret = MM_CAMERA_OK;
+    Mutex::Autolock lock(mLock);
+
+    mStreamSnap->resetSnapshotCounters( );
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        mStreamSnap->setFullSizeLiveshot(false);
+        if (isZSLMode()) {
+            if (mStreamSnap != NULL) {
+                pausePreviewForZSL();
+                ret = mStreamSnap->takePictureZSL();
+                if (ret != MM_CAMERA_OK) {
+                    ALOGE("%s: Error taking ZSL snapshot!", __func__);
+                    ret = BAD_VALUE;
+                }
+            }
+            else {
+                ALOGE("%s: ZSL stream not active! Failure!!", __func__);
+                ret = BAD_VALUE;
+            }
+            return ret;
+        }
+
+        /*prepare snapshot, e.g LED*/
+        takePicturePrepareHardware( );
+        /* There's an issue where we have a glimpse of corrupted data between
+           a time we stop a preview and display the postview. It happens because
+           when we call stopPreview we deallocate the preview buffers hence overlay
+           displays garbage value till we enqueue postview buffer to be displayed.
+           Hence for temporary fix, we'll do memcopy of the last frame displayed and
+           queue it to overlay*/
+        // mzhu storePreviewFrameForPostview();
+
+        /* stop preview */
+        pausePreviewForSnapshot();
+
+        /* call Snapshot start() :*/
+        ret =  mStreamSnap->start();
+        if (MM_CAMERA_OK != ret){
+            /* mzhu: fix me, restore preview */
+            ALOGE("%s: error - can't start Snapshot stream!", __func__);
+            return BAD_VALUE;
+        }
+
+        /* start() succeeded (the error path returns above) */
+        mCameraState = CAMERA_STATE_SNAP_START_CMD_SENT;
+        mPreviewState = QCAMERA_HAL_TAKE_PICTURE;
+        break;
+      case QCAMERA_HAL_TAKE_PICTURE:
+          break;
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_PREVIEW_START:
+      ret = UNKNOWN_ERROR;
+      break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+      if (canTakeFullSizeLiveshot()) {
+        ALOGD(" Calling takeFullSizeLiveshot");
+        takeFullSizeLiveshot();
+      }else{
+          (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                                    liveshot_callback,
+                                                    MM_CAMERA_REG_BUF_CB_COUNT,
+                                                    1,
+                                                    this);
+      }
+
+      break;
+    default:
+        ret = UNKNOWN_ERROR;
+        break;
+    }
+    ALOGI("takePicture: X");
+    return ret;
+}
+
+bool QCameraHardwareInterface::canTakeFullSizeLiveshot() {
+    bool ret;
+    if (mFullLiveshotEnabled && !isLowPowerCamcorder()) {
+      /* Full size liveshot enabled. */
+
+      /* If Picture size is same as video size, switch to Video size
+       * live snapshot */
+      if ((mDimension.picture_width == mDimension.video_width) &&
+          (mDimension.picture_height == mDimension.video_height)) {
+        return FALSE;
+      }
+
+      if (mDisEnabled) {
+       /* If DIS is enabled and Picture size is
+        * less than (video size + 10% DIS Margin)
+        * then fall back to Video size liveshot. */
+        if ((mDimension.picture_width <
+               (int)(mDimension.video_width * 1.1)) ||
+             (mDimension.picture_height <
+               (int)(mDimension.video_height * 1.1))) {
+          ret = FALSE;
+        } else {
+          /* Go with Full size live snapshot. */
+          ret = TRUE;
+        }
+      } else {
+        /* DIS Disabled. Go with Full size live snapshot */
+        ret = TRUE;
+      }
+    } else {
+      /* Full size liveshot disabled. Fallback to Video size liveshot. */
+      ret = FALSE;
+    }
+
+    return ret;
+}
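+/* For example, with DIS enabled a 1920x1080 video stream needs a picture
+ * size of at least 2112x1188 (a 10% margin on each dimension) before the
+ * full-size liveshot path is chosen; anything smaller falls back to the
+ * video-size liveshot. */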
+
+status_t QCameraHardwareInterface::takeFullSizeLiveshot()
+{
+    status_t ret = NO_ERROR;
+    if (mStreamLiveSnap){
+        ALOGE("%s:Deleting old Snapshot stream instance",__func__);
+        QCameraStream_Snapshot::deleteInstance (mStreamLiveSnap);
+        mStreamLiveSnap = NULL;
+    }
+    mStreamLiveSnap = QCameraStream_Snapshot::createInstance(mCameraId, myMode);
+
+    if (!mStreamLiveSnap) {
+        ALOGE("%s: error - can't create snapshot stream!", __func__);
+        /* mzhu: fix me, restore preview */
+        return BAD_VALUE;
+    }
+
+    /* Store HAL object in snapshot stream Object */
+    mStreamLiveSnap->setHALCameraControl(this);
+
+    mStreamLiveSnap->setFullSizeLiveshot(true);
+
+    /* Call snapshot init*/
+    ret =  mStreamLiveSnap->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Snapshot stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    /* call Snapshot start() :*/
+    mStreamLiveSnap->resetSnapshotCounters( );
+    ret =  mStreamLiveSnap->start();
+    if (MM_CAMERA_OK != ret){
+        /* mzhu: fix me, restore preview */
+        ALOGE("%s: error - can't start Snapshot stream!", __func__);
+        return BAD_VALUE;
+    }
+    return ret;
+}
+
+status_t  QCameraHardwareInterface::takeLiveSnapshot()
+{
+    status_t ret = NO_ERROR;
+    ALOGI("takeLiveSnapshot: E");
+    mStreamRecord->takeLiveSnapshot();
+    ALOGI("takeLiveSnapshot: X");
+    return ret;
+}
+
+status_t QCameraHardwareInterface::autoFocus()
+{
+    ALOGI("autoFocus: E");
+    status_t ret = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+    ALOGI("autoFocus: Got lock");
+    bool status = true;
+    isp3a_af_mode_t afMode = getAutoFocusMode(mParameters);
+
+    if(mAutoFocusRunning==true){
+      ALOGE("%s: AF already running, should not have received this call", __func__);
+      return NO_ERROR;
+    }
+
+    if (afMode == AF_MODE_MAX) {
+      /* This should never happen. We cannot send a
+       * callback notifying error from this place because
+       * the CameraService has called this function after
+       * acquiring the lock. So if we try to issue a callback
+       * from this place, the callback will try to acquire
+       * the same lock in CameraService and it will result
+       * in deadlock. So, let the call go in to the lower
+       * layer. The lower layer will anyway return error if
+       * the autofocus is not supported or if the focus
+       * value is invalid.
+       * Just print out the error. */
+      ALOGE("%s:Invalid AF mode (%d)", __func__, afMode);
+    }
+
+    ALOGI("%s:AF start (mode %d)", __func__, afMode);
+    if(MM_CAMERA_OK != cam_ops_action(mCameraId, TRUE,
+                                    MM_CAMERA_OPS_FOCUS, &afMode)) {
+      ALOGE("%s: AF command failed err:%d error %s",
+           __func__, errno, strerror(errno));
+      return UNKNOWN_ERROR;
+    }
+
+    mAutoFocusRunning = true;
+    ALOGI("autoFocus: X");
+    return ret;
+}
+
+status_t QCameraHardwareInterface::cancelAutoFocus()
+{
+    ALOGE("cancelAutoFocus: E");
+    status_t ret = NO_ERROR;
+    Mutex::Autolock lock(mLock);
+
+/**************************************************************
+  BEGIN MUTEX CODE
+*************************************************************/
+
+    mAutofocusLock.lock();
+    if(mAutoFocusRunning) {
+
+      mAutoFocusRunning = false;
+      mAutofocusLock.unlock();
+
+    }else/*(!mAutoFocusRunning)*/{
+
+      mAutofocusLock.unlock();
+      ALOGE("%s: AF not running", __func__);
+      return NO_ERROR;
+    }
+/**************************************************************
+  END MUTEX CODE
+*************************************************************/
+
+
+    if(MM_CAMERA_OK!=cam_ops_action(mCameraId,FALSE,MM_CAMERA_OPS_FOCUS,NULL )) {
+      ALOGE("%s: AF command failed err:%d error %s",__func__, errno,strerror(errno));
+    }
+
+    ALOGE("cancelAutoFocus: X");
+    return NO_ERROR;
+}
+
+#if 0 //mzhu
+/*==========================================================================
+ * FUNCTION    - prepareSnapshotAndWait -
+ *
+ * DESCRIPTION:  invoke preparesnapshot and wait for it done
+                 it can be called within takepicture, so no need
+                 to grab mLock.
+ *=========================================================================*/
+void QCameraHardwareInterface::prepareSnapshotAndWait()
+{
+    ALOGI("prepareSnapshotAndWait: E");
+    int rc = 0;
+    /*To Do: call mm camera preparesnapshot */
+    if(!rc ) {
+        mPreparingSnapshot = true;
+        pthread_mutex_lock(&mAsyncCmdMutex);
+        pthread_cond_wait(&mAsyncCmdWait, &mAsyncCmdMutex);
+        pthread_mutex_unlock(&mAsyncCmdMutex);
+        mPreparingSnapshot = false;
+    }
+    ALOGI("prepareSnapshotAndWait: X");
+}
+#endif //mzhu
+
+/*==========================================================================
+ * FUNCTION    - processprepareSnapshotEvent -
+ *
+ * DESCRIPTION:  Process the event of preparesnapshot done msg
+                 unblock prepareSnapshotAndWait( )
+ *=========================================================================*/
+void QCameraHardwareInterface::processprepareSnapshotEvent(cam_ctrl_status_t *status)
+{
+    ALOGI("processprepareSnapshotEvent: E");
+    pthread_mutex_lock(&mAsyncCmdMutex);
+    pthread_cond_signal(&mAsyncCmdWait);
+    pthread_mutex_unlock(&mAsyncCmdMutex);
+    ALOGI("processprepareSnapshotEvent: X");
+}
+
+void QCameraHardwareInterface::roiEvent(fd_roi_t roi,app_notify_cb_t *app_cb)
+{
+    ALOGE("roiEvent: E");
+
+    if(mStreamDisplay) mStreamDisplay->notifyROIEvent(roi);
+#if 0 //TODO: move to preview obj
+    mCallbackLock.lock();
+    data_callback mcb = mDataCb;
+    void *mdata = mCallbackCookie;
+    int msgEnabled = mMsgEnabled;
+    mCallbackLock.unlock();
+
+    mMetaDataWaitLock.lock();
+    if (mFaceDetectOn == true && mSendMetaData == true) {
+        mSendMetaData = false;
+        int faces_detected = roi.rect_num;
+        int max_faces_detected = MAX_ROI * 4;
+        int array[max_faces_detected + 1];
+
+        array[0] = faces_detected * 4;
+        for (int i = 1, j = 0;j < MAX_ROI; j++, i = i + 4) {
+            if (j < faces_detected) {
+                array[i]   = roi.faces[j].x;
+                array[i+1] = roi.faces[j].y;
+                array[i+2] = roi.faces[j].dx;
+                array[i+3] = roi.faces[j].dy;
+            } else {
+                array[i]   = -1;
+                array[i+1] = -1;
+                array[i+2] = -1;
+                array[i+3] = -1;
+            }
+        }
+        if(mMetaDataHeap != NULL){
+            ALOGV("mMetaDataHeap is non-NULL");
+            memcpy((uint32_t *)mMetaDataHeap->mHeap->base(), (uint32_t *)array, (sizeof(int)*(MAX_ROI*4+1)));
+            mMetaDataWaitLock.unlock();
+
+            if  (mcb != NULL && (msgEnabled & CAMERA_MSG_META_DATA)) {
+                mcb(CAMERA_MSG_META_DATA, mMetaDataHeap->mBuffers[0], mdata);
+            }
+        } else {
+            mMetaDataWaitLock.unlock();
+            ALOGE("runPreviewThread mMetaDataHeap is NULL");
+        }
+    } else {
+        mMetaDataWaitLock.unlock();
+    }
+#endif // mzhu
+    ALOGE("roiEvent: X");
+}
+
+
+void QCameraHardwareInterface::handleZoomEventForSnapshot(void)
+{
+    mm_camera_ch_crop_t v4l2_crop;
+
+
+    ALOGI("%s: E", __func__);
+
+    memset(&v4l2_crop,0,sizeof(v4l2_crop));
+    v4l2_crop.ch_type=MM_CAMERA_CH_SNAPSHOT;
+
+    ALOGI("%s: Fetching crop info", __func__);
+    cam_config_get_parm(mCameraId,MM_CAMERA_PARM_CROP,&v4l2_crop);
+
+    ALOGI("%s: Crop info received for main: %d, %d, %d, %d ", __func__,
+         v4l2_crop.snapshot.main_crop.left,
+         v4l2_crop.snapshot.main_crop.top,
+         v4l2_crop.snapshot.main_crop.width,
+         v4l2_crop.snapshot.main_crop.height);
+    ALOGI("%s: Crop info received for thumbnail: %d, %d, %d, %d ",__func__,
+         v4l2_crop.snapshot.thumbnail_crop.left,
+         v4l2_crop.snapshot.thumbnail_crop.top,
+         v4l2_crop.snapshot.thumbnail_crop.width,
+         v4l2_crop.snapshot.thumbnail_crop.height);
+
+    if(mStreamSnap) {
+        ALOGD("%s: Setting crop info for snapshot", __func__);
+        memcpy(&(mStreamSnap->mCrop), &v4l2_crop, sizeof(v4l2_crop));
+    }
+    if(mFullLiveshotEnabled && mStreamLiveSnap){
+        ALOGD("%s: Setting crop info for snapshot", __func__);
+        memcpy(&(mStreamLiveSnap->mCrop), &v4l2_crop, sizeof(v4l2_crop));
+    }
+    ALOGD("%s: X", __func__);
+}
+
+void QCameraHardwareInterface::handleZoomEventForPreview(app_notify_cb_t *app_cb)
+{
+    mm_camera_ch_crop_t v4l2_crop;
+
+    ALOGI("%s: E", __func__);
+
+    /*regular zooming or smooth zoom stopped*/
+    if (!mSmoothZoomRunning && mPreviewWindow) {
+        memset(&v4l2_crop, 0, sizeof(v4l2_crop));
+        v4l2_crop.ch_type = MM_CAMERA_CH_PREVIEW;
+
+        ALOGI("%s: Fetching crop info", __func__);
+        cam_config_get_parm(mCameraId,MM_CAMERA_PARM_CROP,&v4l2_crop);
+
+        ALOGI("%s: Crop info received: %d, %d, %d, %d ", __func__,
+             v4l2_crop.crop.left,
+             v4l2_crop.crop.top,
+             v4l2_crop.crop.width,
+             v4l2_crop.crop.height);
+
+        mPreviewWindow->set_crop(mPreviewWindow,
+                        v4l2_crop.crop.left,
+                        v4l2_crop.crop.top,
+                        v4l2_crop.crop.left + v4l2_crop.crop.width,
+                        v4l2_crop.crop.top + v4l2_crop.crop.height);
+        ALOGI("%s: Done setting crop", __func__);
+        ALOGI("%s: Current zoom: %d", __func__, mCurrentZoom);
+    }
+
+    ALOGI("%s: X", __func__);
+}
+
+void QCameraHardwareInterface::zoomEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+    ALOGI("zoomEvent: state:%d E",mPreviewState);
+    switch (mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        handleZoomEventForPreview(app_cb);
+        if (isZSLMode())
+          handleZoomEventForSnapshot();
+        break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        handleZoomEventForPreview(app_cb);
+        if (mFullLiveshotEnabled)
+            handleZoomEventForSnapshot();
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        if(isZSLMode())
+            handleZoomEventForPreview(app_cb);
+        handleZoomEventForSnapshot();
+        break;
+    default:
+        break;
+    }
+    ALOGI("zoomEvent: X");
+}
+
+void QCameraHardwareInterface::dumpFrameToFile(const void * data, uint32_t size, char* name, char* ext, int index)
+{
+    char buf[32];
+    int file_fd;
+    if (data != NULL) {
+        snprintf(buf, sizeof(buf), "/data/%s_%d.%s", name, index, ext);
+        ALOGE("%s: dumping %s, size = %u", __func__, buf, size);
+        file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            ALOGE("%s: cannot open %s", __func__, buf);
+            return;
+        }
+        write(file_fd, data, size);
+        close(file_fd);
+    }
+}
+
+void QCameraHardwareInterface::dumpFrameToFile(struct msm_frame* newFrame,
+  HAL_cam_dump_frm_type_t frm_type)
+{
+  int32_t enabled = 0;
+  int frm_num;
+  uint32_t  skip_mode;
+  char value[PROPERTY_VALUE_MAX];
+  char buf[32];
+  int main_422 = 1;
+  property_get("persist.camera.dumpimg", value, "0");
+  enabled = atoi(value);
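+  /* The property value packs all dump controls into one integer: bits 0-7
+   * select the frame types (HAL_cam_dump_frm_type_t), bits 8-15 the skip
+   * interval and bits 16-31 the frame count. For example,
+   * "setprop persist.camera.dumpimg 655873" (0x000A0201) dumps every 2nd
+   * preview frame, up to 10 frames. */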
+
+  ALOGV(" newFrame =%p, frm_type = %d", newFrame, frm_type);
+  if(enabled & HAL_DUMP_FRM_MASK_ALL) {
+    if((enabled & frm_type) && newFrame) {
+      frm_num = ((enabled & 0xffff0000) >> 16);
+      if(frm_num == 0) frm_num = 10; /*default 10 frames*/
+      if(frm_num > 256) frm_num = 256; /*256 buffers cycle around*/
+      skip_mode = ((enabled & 0x0000ff00) >> 8);
+      if(skip_mode == 0) skip_mode = 1; /*no -skip */
+
+      if( mDumpSkipCnt % skip_mode == 0) {
+        if (mDumpFrmCnt >= 0 && mDumpFrmCnt <= frm_num) {
+          int w, h;
+          int file_fd;
+          switch (frm_type) {
+          case  HAL_DUMP_FRM_PREVIEW:
+            w = mDimension.display_width;
+            h = mDimension.display_height;
+            snprintf(buf, sizeof(buf), "/data/%dp_%dx%d.yuv", mDumpFrmCnt, w, h);
+            file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+            break;
+          case HAL_DUMP_FRM_VIDEO:
+            w = mDimension.video_width;
+            h = mDimension.video_height;
+            snprintf(buf, sizeof(buf),"/data/%dv_%dx%d.yuv", mDumpFrmCnt, w, h);
+            file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+            break;
+          case HAL_DUMP_FRM_MAIN:
+            w = mDimension.picture_width;
+            h = mDimension.picture_height;
+            snprintf(buf, sizeof(buf), "/data/%dm_%dx%d.yuv", mDumpFrmCnt, w, h);
+            file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+            if (mDimension.main_img_format == CAMERA_YUV_422_NV16 ||
+                mDimension.main_img_format == CAMERA_YUV_422_NV61)
+              main_422 = 2;
+            break;
+          case HAL_DUMP_FRM_THUMBNAIL:
+            w = mDimension.ui_thumbnail_width;
+            h = mDimension.ui_thumbnail_height;
+            snprintf(buf, sizeof(buf),"/data/%dt_%dx%d.yuv", mDumpFrmCnt, w, h);
+            file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+            break;
+          default:
+            w = h = 0;
+            file_fd = -1;
+            break;
+          }
+
+          if (file_fd < 0) {
+            ALOGE("%s: cannot open file:type=%d\n", __func__, frm_type);
+          } else {
+            ALOGE("%s: y_off = %d, cbcr_off = %d", __func__, newFrame->y_off, newFrame->cbcr_off);
+            write(file_fd, (const void *)(newFrame->buffer+newFrame->y_off), w * h);
+            write(file_fd, (const void *)
+              (newFrame->buffer + newFrame->cbcr_off), w * h / 2 * main_422);
+            close(file_fd);
+            ALOGE("dump %s", buf);
+          }
+        } else if(frm_num == 256){
+          mDumpFrmCnt = 0;
+        }
+        mDumpFrmCnt++;
+      }
+      mDumpSkipCnt++;
+    }
+  }  else {
+    mDumpFrmCnt = 0;
+  }
+}
+
+status_t QCameraHardwareInterface::setPreviewWindow(preview_stream_ops_t* window)
+{
+    status_t retVal = NO_ERROR;
+    ALOGE(" %s: E mPreviewState = %d, mStreamDisplay = %p", __FUNCTION__, mPreviewState, mStreamDisplay);
+    if( window == NULL) {
+        ALOGE("%s: Received NULL preview window", __func__);
+    }
+    Mutex::Autolock lock(mLock);
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_START:
+        mPreviewWindow = window;
+        if(mPreviewWindow) {
+            /* we have valid surface now, start preview */
+            ALOGE("%s:  calling startPreview2", __func__);
+            retVal = startPreview2();
+            if(retVal == NO_ERROR)
+                mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+            ALOGE("%s:  startPreview2 done, mPreviewState = %d", __func__, mPreviewState);
+        } else
+            ALOGE("%s: null window received, mPreviewState = %d", __func__, mPreviewState);
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        /* new window comes */
+        ALOGE("%s: bug, cannot handle new window in started state", __func__);
+        //retVal = UNKNOWN_ERROR;
+        break;
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+    case QCAMERA_HAL_TAKE_PICTURE:
+        mPreviewWindow = window;
+        ALOGE("%s: mPreviewWindow = 0x%p, mStreamDisplay = 0x%p",
+                                    __func__, mPreviewWindow, mStreamDisplay);
+        if(mStreamDisplay)
+            retVal = mStreamDisplay->setPreviewWindow(window);
+        break;
+    default:
+        ALOGE("%s: bug, cannot handle new window in state %d", __func__, mPreviewState);
+        retVal = UNKNOWN_ERROR;
+        break;
+    }
+    ALOGE(" %s : X, mPreviewState = %d", __FUNCTION__, mPreviewState);
+    return retVal;
+}
+
+int QCameraHardwareInterface::storeMetaDataInBuffers(int enable)
+{
+    /* this is a dummy func now. fix me later */
+    mStoreMetaDataInFrame = enable;
+    return 0;
+}
+
+status_t QCameraHardwareInterface::sendMappingBuf(int ext_mode, int idx, int fd,
+                                                  uint32_t size, int cameraid,
+                                                  mm_camera_socket_msg_type msg_type)
+{
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = msg_type;
+    packet.payload.frame_fd_map.ext_mode = ext_mode;
+    packet.payload.frame_fd_map.frame_idx = idx;
+    packet.payload.frame_fd_map.fd = fd;
+    packet.payload.frame_fd_map.size = size;
+
+    if ( cam_ops_sendmsg(cameraid, &packet, sizeof(cam_sock_packet_t), packet.payload.frame_fd_map.fd) <= 0 ) {
+        ALOGE("%s: sending frame mapping buf msg Failed", __func__);
+        return FAILED_TRANSACTION;
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::sendUnMappingBuf(int ext_mode, int idx, int cameraid,
+                                                    mm_camera_socket_msg_type msg_type)
+{
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = msg_type;
+    packet.payload.frame_fd_unmap.ext_mode = ext_mode;
+    packet.payload.frame_fd_unmap.frame_idx = idx;
+    if ( cam_ops_sendmsg(cameraid, &packet, sizeof(cam_sock_packet_t), 0) <= 0 ) {
+        ALOGE("%s: sending frame unmapping buf msg Failed", __func__);
+        return FAILED_TRANSACTION;
+    }
+    return NO_ERROR;
+}
+
+int QCameraHardwareInterface::allocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt, int ion_type)
+{
+  int rc = 0;
+  struct ion_handle_data handle_data;
+
+  p_camera_memory->main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+  if (p_camera_memory->main_ion_fd[cnt] < 0) {
+    ALOGE("Ion dev open failed\n");
+    ALOGE("Error is %s\n", strerror(errno));
+    goto ION_OPEN_FAILED;
+  }
+  p_camera_memory->alloc[cnt].len = p_camera_memory->size;
+  /* to make it page size aligned */
+  p_camera_memory->alloc[cnt].len = (p_camera_memory->alloc[cnt].len + 4095) & (~4095);
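+  /* e.g. a 4097-byte request is rounded up to 8192, while 4096 stays 4096 */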
+  p_camera_memory->alloc[cnt].align = 4096;
+  p_camera_memory->alloc[cnt].flags = ion_type;
+
+  rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_ALLOC, &p_camera_memory->alloc[cnt]);
+  if (rc < 0) {
+    ALOGE("ION allocation failed\n");
+    goto ION_ALLOC_FAILED;
+  }
+
+  p_camera_memory->ion_info_fd[cnt].handle = p_camera_memory->alloc[cnt].handle;
+  rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_SHARE, &p_camera_memory->ion_info_fd[cnt]);
+  if (rc < 0) {
+    ALOGE("ION map failed %s\n", strerror(errno));
+    goto ION_MAP_FAILED;
+  }
+  p_camera_memory->fd[cnt] = p_camera_memory->ion_info_fd[cnt].fd;
+  return 0;
+
+ION_MAP_FAILED:
+  handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+  ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+  close(p_camera_memory->main_ion_fd[cnt]);
+  p_camera_memory->main_ion_fd[cnt] = -1;
+ION_OPEN_FAILED:
+  return -1;
+}
+
+int QCameraHardwareInterface::deallocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt)
+{
+  struct ion_handle_data handle_data;
+  int rc = 0;
+
+  if (p_camera_memory->main_ion_fd[cnt] > 0) {
+      handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+      ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+      close(p_camera_memory->main_ion_fd[cnt]);
+      p_camera_memory->main_ion_fd[cnt] = -1;
+  }
+  return rc;
+}
+
+int QCameraHardwareInterface::allocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt, int ion_type)
+{
+  int rc = 0;
+  struct ion_handle_data handle_data;
+
+  p_camera_memory->main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+  if (p_camera_memory->main_ion_fd[cnt] < 0) {
+    ALOGE("Ion dev open failed\n");
+    ALOGE("Error is %s\n", strerror(errno));
+    goto ION_OPEN_FAILED;
+  }
+  p_camera_memory->alloc[cnt].len = p_camera_memory->size;
+  /* to make it page size aligned */
+  p_camera_memory->alloc[cnt].len = (p_camera_memory->alloc[cnt].len + 4095) & (~4095);
+  p_camera_memory->alloc[cnt].align = 4096;
+  p_camera_memory->alloc[cnt].flags = (0x1 << ion_type | 0x1 << ION_IOMMU_HEAP_ID);
+
+  rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_ALLOC, &p_camera_memory->alloc[cnt]);
+  if (rc < 0) {
+    ALOGE("ION allocation failed\n");
+    goto ION_ALLOC_FAILED;
+  }
+
+  p_camera_memory->ion_info_fd[cnt].handle = p_camera_memory->alloc[cnt].handle;
+  rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_SHARE, &p_camera_memory->ion_info_fd[cnt]);
+  if (rc < 0) {
+    ALOGE("ION map failed %s\n", strerror(errno));
+    goto ION_MAP_FAILED;
+  }
+  p_camera_memory->fd[cnt] = p_camera_memory->ion_info_fd[cnt].fd;
+  return 0;
+
+ION_MAP_FAILED:
+  handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+  ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+  close(p_camera_memory->main_ion_fd[cnt]);
+  p_camera_memory->main_ion_fd[cnt] = -1;
+ION_OPEN_FAILED:
+  return -1;
+}
+
+int QCameraHardwareInterface::cache_ops(int ion_fd,
+  struct ion_flush_data *cache_data, int type)
+{
+  int rc = 0;
+
+  rc = ioctl(ion_fd, type, cache_data);
+  if (rc < 0)
+    ALOGE("%s: Cache operation (type %d) failed\n", __func__, type);
+  else
+    ALOGV("%s: Cache operation (type %d) succeeded", __func__, type);
+
+  return rc;
+}
+
+int QCameraHardwareInterface::deallocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt)
+{
+  struct ion_handle_data handle_data;
+  int rc = 0;
+
+  if (p_camera_memory->main_ion_fd[cnt] > 0) {
+      handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+      ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+      close(p_camera_memory->main_ion_fd[cnt]);
+      p_camera_memory->main_ion_fd[cnt] = -1;
+  }
+  return rc;
+}
+
+int QCameraHardwareInterface::initHeapMem( QCameraHalHeap_t *heap,
+                            int num_of_buf,
+                            int buf_len,
+                            int y_off,
+                            int cbcr_off,
+                            int pmem_type,
+                            mm_cameara_stream_buf_t *StreamBuf,
+                            mm_camera_buf_def_t *buf_def,
+                            uint8_t num_planes,
+                            uint32_t *planes
+)
+{
+    int rc = 0;
+    int i;
+    int path;
+    struct msm_frame *frame;
+    ALOGE("Init Heap =%p. stream_buf =%p, pmem_type =%d, num_of_buf=%d. buf_len=%d, cbcr_off=%d",
+         heap, StreamBuf, pmem_type, num_of_buf, buf_len, cbcr_off);
+    if(num_of_buf > MM_CAMERA_MAX_NUM_FRAMES || heap == NULL ||
+       mGetMemory == NULL ) {
+        ALOGE("Init Heap error");
+        rc = -1;
+        return rc;
+    }
+    memset(heap, 0, sizeof(QCameraHalHeap_t));
+    for (i=0; i<MM_CAMERA_MAX_NUM_FRAMES;i++) {
+        heap->main_ion_fd[i] = -1;
+        heap->fd[i] = -1;
+    }
+    heap->buffer_count = num_of_buf;
+    heap->size = buf_len;
+    heap->y_offset = y_off;
+    heap->cbcr_offset = cbcr_off;
+
+    if (StreamBuf != NULL) {
+        StreamBuf->num = num_of_buf;
+        StreamBuf->frame_len = buf_len;
+        switch (pmem_type) {
+            case  MSM_PMEM_MAINIMG:
+            case  MSM_PMEM_RAW_MAINIMG:
+                path = OUTPUT_TYPE_S;
+                break;
+
+            case  MSM_PMEM_THUMBNAIL:
+                path = OUTPUT_TYPE_T;
+                break;
+
+            default:
+                rc = -1;
+                return rc;
+        }
+    }
+
+
+    for(i = 0; i < num_of_buf; i++) {
+#ifdef USE_ION
+      if (isZSLMode())
+        rc = allocate_ion_memory(heap, i, ((0x1 << CAMERA_ZSL_ION_HEAP_ID) |
+         (0x1 << CAMERA_ZSL_ION_FALLBACK_HEAP_ID)));
+      else
+        rc = allocate_ion_memory(heap, i, ((0x1 << CAMERA_ION_HEAP_ID) |
+         (0x1 << CAMERA_ION_FALLBACK_HEAP_ID)));
+
+      if (rc < 0) {
+        ALOGE("%s: ION allocation failed\n", __func__);
+        break;
+      }
+#else
+        if (pmem_type == MSM_PMEM_MAX)
+            heap->fd[i] = -1;
+        else {
+            heap->fd[i] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+            if ( heap->fd[i] <= 0) {
+                rc = -1;
+                ALOGE("Open fail: heap->fd[%d] =%d", i, heap->fd[i]);
+                break;
+            }
+        }
+#endif
+        heap->camera_memory[i] =  mGetMemory( heap->fd[i], buf_len, 1, (void *)this);
+
+        if (heap->camera_memory[i] == NULL ) {
+            ALOGE("%s: mGetMemory failed for buffer %d", __func__, i);
+            rc = -1;
+            break;
+        }
+        if (StreamBuf != NULL) {
+            frame = &(StreamBuf->frame[i]);
+            memset(frame, 0, sizeof(struct msm_frame));
+            frame->fd = heap->fd[i];
+            frame->phy_offset = 0;
+            frame->buffer = (uint32_t) heap->camera_memory[i]->data;
+            frame->path = path;
+            frame->cbcr_off =  planes[0]+heap->cbcr_offset;
+            frame->y_off =  heap->y_offset;
+            frame->fd_data = heap->ion_info_fd[i];
+            frame->ion_alloc = heap->alloc[i];
+            frame->ion_dev_fd = heap->main_ion_fd[i];
+            ALOGD("%s: Buffer idx: %d  addr: %x fd: %d phy_offset: %d"
+                 "cbcr_off: %d y_off: %d frame_len: %d", __func__,
+                 i, (unsigned int)frame->buffer, frame->fd,
+                 frame->phy_offset, cbcr_off, y_off, frame->ion_alloc.len);
+
+            buf_def->buf.mp[i].frame = *frame;
+            buf_def->buf.mp[i].frame_offset = 0;
+            buf_def->buf.mp[i].num_planes = num_planes;
+            /* Plane 0 needs to be set separately. Set other planes
+             * in a loop. */
+            buf_def->buf.mp[i].planes[0].length = planes[0];
+            buf_def->buf.mp[i].planes[0].m.userptr = frame->fd;
+            buf_def->buf.mp[i].planes[0].data_offset = y_off;
+            buf_def->buf.mp[i].planes[0].reserved[0] =
+              buf_def->buf.mp[i].frame_offset;
+            for (int j = 1; j < num_planes; j++) {
+                 buf_def->buf.mp[i].planes[j].length = planes[j];
+                 buf_def->buf.mp[i].planes[j].m.userptr = frame->fd;
+                 buf_def->buf.mp[i].planes[j].data_offset = cbcr_off;
+                 buf_def->buf.mp[i].planes[j].reserved[0] =
+                     buf_def->buf.mp[i].planes[j-1].reserved[0] +
+                     buf_def->buf.mp[i].planes[j-1].length;
+            }
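+            /* With two planes (Y then CbCr) this lays the planes out back to
+             * back in the same ION buffer: plane 0 starts at frame_offset and
+             * plane 1's reserved[0] becomes frame_offset + planes[0]. */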
+        }
+
+        ALOGE("heap->fd[%d] =%d, camera_memory=%p", i, heap->fd[i], heap->camera_memory[i]);
+        heap->local_flag[i] = 1;
+    }
+    if( rc < 0) {
+        releaseHeapMem(heap);
+    }
+    return rc;
+
+}
+
+int QCameraHardwareInterface::releaseHeapMem(QCameraHalHeap_t *heap)
+{
+    int rc = 0;
+    ALOGE("Release %p", heap);
+    if (heap != NULL) {
+        for (int i = 0; i < heap->buffer_count; i++) {
+            if (heap->camera_memory[i] != NULL) {
+                heap->camera_memory[i]->release(heap->camera_memory[i]);
+                heap->camera_memory[i] = NULL;
+            } else if (heap->fd[i] <= 0) {
+                ALOGE("impossible: camera_memory[%d] = %p, fd = %d",
+                     i, heap->camera_memory[i], heap->fd[i]);
+            }
+
+            if (heap->fd[i] > 0) {
+                close(heap->fd[i]);
+                heap->fd[i] = -1;
+            }
+#ifdef USE_ION
+            deallocate_ion_memory(heap, i);
+#endif
+        }
+        heap->buffer_count = 0;
+        heap->size = 0;
+        heap->y_offset = 0;
+        heap->cbcr_offset = 0;
+    }
+    return rc;
+}
+
+preview_format_info_t  QCameraHardwareInterface::getPreviewFormatInfo( )
+{
+  return mPreviewFormatInfo;
+}
+
+void QCameraHardwareInterface::wdenoiseEvent(cam_ctrl_status_t status, void *cookie)
+{
+    ALOGI("wdnEvent: preview state:%d E",mPreviewState);
+    if (mStreamSnap != NULL) {
+        ALOGI("notifyWDNEvent to snapshot stream");
+        mStreamSnap->notifyWDenoiseEvent(status, cookie);
+    }
+}
+
+bool QCameraHardwareInterface::isWDenoiseEnabled()
+{
+    return mDenoiseValue;
+}
+
+void QCameraHardwareInterface::takePicturePrepareHardware()
+{
+    ALOGV("%s: E", __func__);
+
+    /* Prepare snapshot*/
+    cam_ops_action(mCameraId,
+                  TRUE,
+                  MM_CAMERA_OPS_PREPARE_SNAPSHOT,
+                  this);
+    ALOGV("%s: X", __func__);
+}
+
+bool QCameraHardwareInterface::isNoDisplayMode()
+{
+  return (mNoDisplayMode != 0);
+}
+
+void QCameraHardwareInterface::pausePreviewForZSL()
+{
+    if(mRestartPreview) {
+        stopPreviewInternal();
+        mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+        startPreview2();
+        mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+        mRestartPreview = false;
+    }
+}
+}; // namespace android
+
diff --git a/camera/QCameraHWI.h b/camera/QCameraHWI.h
new file mode 100644
index 0000000..3c61698
--- /dev/null
+++ b/camera/QCameraHWI.h
@@ -0,0 +1,833 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_HARDWARE_INTERFACE_H
+#define ANDROID_HARDWARE_QCAMERA_HARDWARE_INTERFACE_H
+
+
+#include <utils/threads.h>
+//#include <camera/CameraHardwareInterface.h>
+#include <hardware/camera.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+//#include <binder/MemoryHeapPmem.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <camera/Camera.h>
+#include "QCameraParameters.h"
+#include <system/window.h>
+#include <system/camera.h>
+#include <hardware/camera.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/ion.h>
+#include <mm_camera_interface2.h>
+#include "mm_omx_jpeg_encoder.h"
+} //extern C
+
+#include "QCameraHWI_Mem.h"
+#include "QCameraStream.h"
+#include "QCamera_Intf.h"
+
+//Error codes
+#define  NOT_FOUND -1
+#define MAX_ZOOM_RATIOS 62
+
+#ifdef Q12
+#undef Q12
+#endif
+
+#define Q12 4096
+#define QCAMERA_PARM_ENABLE   1
+#define QCAMERA_PARM_DISABLE  0
+#define PREVIEW_TBL_MAX_SIZE  14
+#define VIDEO_TBL_MAX_SIZE    14
+#define THUMB_TBL_MAX_SIZE    16
+#define HFR_TBL_MAX_SIZE      2
+
+struct str_map {
+    const char *const desc;
+    int val;
+};
+
+struct preview_format_info_t {
+   int Hal_format;
+   cam_format_t mm_cam_format;
+   cam_pad_format_t padding;
+   int num_planar;
+};
+
+typedef enum {
+  CAMERA_STATE_UNINITED,
+  CAMERA_STATE_READY,
+  CAMERA_STATE_PREVIEW_START_CMD_SENT,
+  CAMERA_STATE_PREVIEW_STOP_CMD_SENT,
+  CAMERA_STATE_PREVIEW,
+  CAMERA_STATE_RECORD_START_CMD_SENT,  /*5*/
+  CAMERA_STATE_RECORD_STOP_CMD_SENT,
+  CAMERA_STATE_RECORD,
+  CAMERA_STATE_SNAP_START_CMD_SENT,
+  CAMERA_STATE_SNAP_STOP_CMD_SENT,
+  CAMERA_STATE_SNAP_CMD_ACKED,  /*10 - snapshot comd acked, snapshot not done yet*/
+  CAMERA_STATE_ZSL_START_CMD_SENT,
+  CAMERA_STATE_ZSL,
+  CAMERA_STATE_AF_START_CMD_SENT,
+  CAMERA_STATE_AF_STOP_CMD_SENT,
+  CAMERA_STATE_ERROR, /*15*/
+
+  /*Add any new state above*/
+  CAMERA_STATE_MAX
+} HAL_camera_state_type_t;
+
+enum {
+  BUFFER_NOT_OWNED,
+  BUFFER_UNLOCKED,
+  BUFFER_LOCKED,
+};
+
+typedef enum {
+  HAL_DUMP_FRM_PREVIEW = 1,
+  HAL_DUMP_FRM_VIDEO = 1<<1,
+  HAL_DUMP_FRM_MAIN = 1<<2,
+  HAL_DUMP_FRM_THUMBNAIL = 1<<3,
+
+  /*8 bits mask*/
+  HAL_DUMP_FRM_MAX = 1 << 8
+} HAL_cam_dump_frm_type_t;
+
+
+typedef enum {
+  HAL_CAM_MODE_ZSL = 1,
+
+  /*add new entry before and update the max entry*/
+  HAL_CAM_MODE_MAX = HAL_CAM_MODE_ZSL << 1,
+} qQamera_mode_t;
+
+#define HAL_DUMP_FRM_MASK_ALL ( HAL_DUMP_FRM_PREVIEW + HAL_DUMP_FRM_VIDEO + \
+    HAL_DUMP_FRM_MAIN + HAL_DUMP_FRM_THUMBNAIL)
+#define QCAMERA_HAL_PREVIEW_STOPPED    0
+#define QCAMERA_HAL_PREVIEW_START      1
+#define QCAMERA_HAL_PREVIEW_STARTED    2
+#define QCAMERA_HAL_RECORDING_STARTED  3
+#define QCAMERA_HAL_TAKE_PICTURE       4
+
+
+typedef struct {
+     int                     buffer_count;
+     buffer_handle_t        *buffer_handle[MM_CAMERA_MAX_NUM_FRAMES];
+     struct private_handle_t *private_buffer_handle[MM_CAMERA_MAX_NUM_FRAMES];
+     int                     stride[MM_CAMERA_MAX_NUM_FRAMES];
+     uint32_t                addr_offset[MM_CAMERA_MAX_NUM_FRAMES];
+     uint8_t                 local_flag[MM_CAMERA_MAX_NUM_FRAMES];
+     camera_memory_t        *camera_memory[MM_CAMERA_MAX_NUM_FRAMES];
+     int                     main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+     struct ion_fd_data      ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+} QCameraHalMemory_t;
+
+
+typedef struct {
+     int                     buffer_count;
+     uint32_t                size;
+     uint32_t                y_offset;
+     uint32_t                cbcr_offset;
+     int                     fd[MM_CAMERA_MAX_NUM_FRAMES];
+     int                     local_flag[MM_CAMERA_MAX_NUM_FRAMES];
+     camera_memory_t*        camera_memory[MM_CAMERA_MAX_NUM_FRAMES];
+     camera_memory_t*        metadata_memory[MM_CAMERA_MAX_NUM_FRAMES];
+     int main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+     struct ion_allocation_data alloc[MM_CAMERA_MAX_NUM_FRAMES];
+     struct ion_fd_data ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+} QCameraHalHeap_t;
+
+typedef struct {
+     camera_memory_t*  camera_memory[3];
+     int main_ion_fd[3];
+     struct ion_allocation_data alloc[3];
+     struct ion_fd_data ion_info_fd[3];
+     int fd[3];
+     int size;
+} QCameraStatHeap_t;
+
+typedef struct {
+  int32_t msg_type;
+  int32_t ext1;
+  int32_t ext2;
+  void    *cookie;
+} argm_notify_t;
+
+typedef struct {
+  int32_t                  msg_type;
+  camera_memory_t         *data;
+  unsigned int             index;
+  camera_frame_metadata_t *metadata;
+  void                    *cookie;
+} argm_data_cb_t;
+
+typedef struct {
+  camera_notify_callback notifyCb;
+  camera_data_callback   dataCb;
+  argm_notify_t argm_notify;
+  argm_data_cb_t        argm_data_cb;
+} app_notify_cb_t;
+
+/* camera_area_t
+ * rectangle with weight to store the focus and metering areas.
+ * x1, y1, x2, y2: from -1000 to 1000
+ * weight: 0 to 1000
+ */
+typedef struct {
+    int x1, y1, x2, y2;
+    int weight;
+} camera_area_t;
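+/* Example: { -250, -250, 250, 250, 800 } is a centered rectangle spanning a
+   quarter of the frame in each dimension with weight 800. */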
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
+
+//EXIF defines
+#define MAX_EXIF_TABLE_ENTRIES           14
+#define GPS_PROCESSING_METHOD_SIZE       101
+#define FOCAL_LENGTH_DECIMAL_PRECISION   100
+#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
+
+typedef struct{
+    //GPS tags
+    rat_t       latitude[3];
+    rat_t       longitude[3];
+    char        lonRef[2];
+    char        latRef[2];
+    rat_t       altitude;
+    rat_t       gpsTimeStamp[3];
+    char        gpsDateStamp[20];
+    char        gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE+GPS_PROCESSING_METHOD_SIZE];
+    //Other tags
+    char        dateTime[20];
+    rat_t       focalLength;
+    uint16_t    flashMode;
+    uint16_t    isoSpeed;
+
+    bool        mAltitude;
+    bool        mLongitude;
+    bool        mLatitude;
+    bool        mTimeStamp;
+    bool        mGpsProcess;
+
+    int         mAltitude_ref;
+    long        mGPSTimestamp;
+
+} exif_values_t;
+
+namespace android {
+
+class QCameraStream;
+
+class QCameraHardwareInterface : public virtual RefBase {
+public:
+
+    QCameraHardwareInterface(int  cameraId, int mode);
+
+    /** Set the ANativeWindow to which preview frames are sent */
+    int setPreviewWindow(preview_stream_ops_t* window);
+
+    /** Set the notification and data callbacks */
+    void setCallbacks(camera_notify_callback notify_cb,
+            camera_data_callback data_cb,
+            camera_data_timestamp_callback data_cb_timestamp,
+            camera_request_memory get_memory,
+            void *user);
+
+    /**
+     * The following three functions all take a msg_type, which is a bitmask of
+     * the messages defined in include/ui/Camera.h
+     */
+
+    /**
+     * Enable a message, or set of messages.
+     */
+    void enableMsgType(int32_t msg_type);
+
+    /**
+     * Disable a message, or a set of messages.
+     *
+     * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera
+     * HAL should not rely on its client to call releaseRecordingFrame() to
+     * release video recording frames sent out by the camera HAL before and
+     * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
+     * clients must not modify/access any video recording frame after calling
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+     */
+    void disableMsgType(int32_t msg_type);
+
+    /**
+     * Query whether a message, or a set of messages, is enabled.  Note that
+     * this operates as an AND; if any of the messages queried are off, this
+     * will return false.
+     */
+    int msgTypeEnabled(int32_t msg_type);
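+    /* For example, a client may enable several messages in one call, e.g.
+     * enableMsgType(CAMERA_MSG_PREVIEW_FRAME | CAMERA_MSG_FOCUS); a later
+     * msgTypeEnabled() query for that same mask succeeds only while every
+     * bit in it is still enabled. */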
+
+    /**
+     * Start preview mode.
+     */
+    int startPreview();
+    int startPreview2();
+
+    /**
+     * Stop a previously started preview.
+     */
+    void stopPreview();
+
+    /**
+     * Returns true if preview is enabled.
+     */
+    int previewEnabled();
+
+
+    /**
+     * Request the camera HAL to store meta data or real YUV data in the video
+     * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
+     * it is not called, the default camera HAL behavior is to store real YUV
+     * data in the video buffers.
+     *
+     * This method should be called before startRecording() in order to be
+     * effective.
+     *
+     * If meta data is stored in the video buffers, it is up to the receiver of
+     * the video buffers to interpret the contents and to find the actual frame
+     * data with the help of the meta data in the buffer. How this is done is
+     * outside of the scope of this method.
+     *
+     * Some camera HALs may not support storing meta data in the video buffers,
+     * but all camera HALs should support storing real YUV data in the video
+     * buffers. If the camera HAL does not support storing the meta data in the
+     * video buffers when it is requested to do so, INVALID_OPERATION must be
+     * returned. It is very useful for the camera HAL to pass meta data rather
+     * than the actual frame data directly to the video encoder, since the
+     * amount of the uncompressed frame data can be very large if video size is
+     * large.
+     *
+     * @param enable if true to instruct the camera HAL to store
+     *        meta data in the video buffers; false to instruct
+     *        the camera HAL to store real YUV data in the video
+     *        buffers.
+     *
+     * @return OK on success.
+     */
+    int storeMetaDataInBuffers(int enable);
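+    /* Typical sequence (sketch): storeMetaDataInBuffers(1) is issued before
+     * startRecording(); a HAL that cannot store meta data returns
+     * INVALID_OPERATION, and the client then records with real YUV buffers. */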
+
+    /**
+     * Start record mode. When a record image is available, a
+     * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
+     * frame. Every record frame must be released by a camera HAL client via
+     * releaseRecordingFrame() before the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+     * responsibility to manage the life-cycle of the video recording frames,
+     * and the client must not modify/access any video recording frames.
+     */
+    int startRecording();
+
+    /**
+     * Stop a previously started recording.
+     */
+    void stopRecording();
+
+    /**
+     * Returns true if recording is enabled.
+     */
+    int recordingEnabled();
+
+    /**
+     * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+     *
+     * It is camera HAL client's responsibility to release video recording
+     * frames sent out by the camera HAL before the camera HAL receives a call
+     * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
+     * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+     * responsibility to manage the life-cycle of the video recording frames.
+     */
+    void releaseRecordingFrame(const void *opaque);
+
+    /**
+     * Start auto focus, the notification callback routine is called with
+     * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
+     * called again if another auto focus is needed.
+     */
+    int autoFocus();
+
+    /**
+     * Cancels auto-focus function. If the auto-focus is still in progress,
+     * this function will cancel it. Whether the auto-focus is in progress or
+     * not, this function will return the focus position to the default.  If
+     * the camera does not support auto-focus, this is a no-op.
+     */
+    int cancelAutoFocus();
+
+    /**
+     * Take a picture.
+     */
+    int takePicture();
+
+    /**
+     * Cancel a picture that was started with takePicture. Calling this method
+     * when no picture is being taken is a no-op.
+     */
+    int cancelPicture();
+
+    /**
+     * Set the camera parameters. This returns BAD_VALUE if any parameter is
+     * invalid or not supported.
+     */
+    int setParameters(const char *parms);
+
+    //status_t setParameters(const QCameraParameters& params);
+    /** Retrieve the camera parameters.  The buffer returned by the camera HAL
+        must be returned back to it with put_parameters, if put_parameters
+        is not NULL.
+     */
+    int getParameters(char **parms);
+
+    /** The camera HAL uses its own memory to pass us the parameters when we
+        call get_parameters.  Use this function to return the memory back to
+        the camera HAL, if put_parameters is not NULL.  If put_parameters
+        is NULL, then you have to use free() to release the memory.
+    */
+    void putParameters(char *);
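+    /* Typical ownership sequence (sketch):
+     *   char *parms = NULL;
+     *   getParameters(&parms);   // HAL allocates and fills the string
+     *   ...parse parms...
+     *   putParameters(parms);    // hand the buffer back for the HAL to free
+     */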
+
+    /**
+     * Send command to camera driver.
+     */
+    int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+    /**
+     * Release the hardware resources owned by this object.  Note that this is
+     * *not* done in the destructor.
+     */
+    void release();
+
+    /**
+     * Dump state of the camera hardware
+     */
+    int dump(int fd);
+
+    //virtual sp<IMemoryHeap> getPreviewHeap() const;
+    //virtual sp<IMemoryHeap> getRawHeap() const;
+
+
+    status_t    takeLiveSnapshot();
+    status_t    takeFullSizeLiveshot();
+    bool        canTakeFullSizeLiveshot();
+
+    //virtual status_t          getBufferInfo( sp<IMemory>& Frame,
+    //size_t *alignedSize);
+    void         getPictureSize(int *picture_width, int *picture_height) const;
+    void         getPreviewSize(int *preview_width, int *preview_height) const;
+    cam_format_t getPreviewFormat() const;
+
+    cam_pad_format_t getPreviewPadding() const;
+
+    //bool     useOverlay(void);
+    //virtual status_t setOverlay(const sp<Overlay> &overlay);
+    void processEvent(mm_camera_event_t *);
+    int  getJpegQuality() const;
+    int  getNumOfSnapshots(void) const;
+    int  getNumOfSnapshots(const QCameraParameters& params);
+    int  getThumbSizesFromAspectRatio(uint32_t aspect_ratio,
+                                     int *picture_width,
+                                     int *picture_height);
+    bool isRawSnapshot();
+    bool mShutterSoundPlayed;
+    void dumpFrameToFile(struct msm_frame*, HAL_cam_dump_frm_type_t);
+
+    static QCameraHardwareInterface *createInstance(int, int);
+    status_t setZSLBurstLookBack(const QCameraParameters& params);
+    status_t setZSLBurstInterval(const QCameraParameters& params);
+    int getZSLBurstInterval(void);
+    int getZSLQueueDepth(void) const;
+    int getZSLBackLookCount(void) const;
+
+    ~QCameraHardwareInterface();
+    int initHeapMem(QCameraHalHeap_t *heap, int num_of_buf, int pmem_type,
+      int frame_len, int cbcr_off, int y_off, mm_cameara_stream_buf_t *StreamBuf,
+      mm_camera_buf_def_t *buf_def, uint8_t num_planes, uint32_t *planes);
+
+    int releaseHeapMem( QCameraHalHeap_t *heap);
+    status_t sendMappingBuf(int ext_mode, int idx, int fd, uint32_t size,
+      int cameraid, mm_camera_socket_msg_type msg_type);
+    status_t sendUnMappingBuf(int ext_mode, int idx, int cameraid,
+      mm_camera_socket_msg_type msg_type);
+
+    int allocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt,
+      int ion_type);
+    int deallocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt);
+
+    int allocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt,
+      int ion_type);
+    int deallocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt);
+
+    int cache_ops(int ion_fd, struct ion_flush_data *cache_inv_data, int type);
+
+    void dumpFrameToFile(const void * data, uint32_t size, char* name,
+      char* ext, int index);
+    preview_format_info_t getPreviewFormatInfo( );
+    bool isCameraReady();
+    bool isNoDisplayMode();
+
+private:
+    int16_t  zoomRatios[MAX_ZOOM_RATIOS];
+    struct camera_size_type default_preview_sizes[PREVIEW_TBL_MAX_SIZE];
+    struct camera_size_type default_video_sizes[VIDEO_TBL_MAX_SIZE];
+    struct camera_size_type default_hfr_sizes[HFR_TBL_MAX_SIZE];
+    struct camera_size_type default_thumbnail_sizes[THUMB_TBL_MAX_SIZE];
+    unsigned int preview_sizes_count;
+    unsigned int video_sizes_count;
+    unsigned int thumbnail_sizes_count;
+    unsigned int hfr_sizes_count;
+
+
+    bool mUseOverlay;
+
+    void loadTables();
+    void initDefaultParameters();
+    bool getMaxPictureDimension(mm_camera_dimension_t *dim);
+
+    status_t updateFocusDistances();
+
+    bool native_set_parms(mm_camera_parm_type_t type, uint16_t length, void *value);
+    bool native_set_parms( mm_camera_parm_type_t type, uint16_t length, void *value, int *result);
+
+    void hasAutoFocusSupport();
+    void debugShowPreviewFPS() const;
+    //void prepareSnapshotAndWait();
+
+    bool isPreviewRunning();
+    bool isRecordingRunning();
+    bool isSnapshotRunning();
+
+    void processChannelEvent(mm_camera_ch_event_t *, app_notify_cb_t *);
+    void processPreviewChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+    void processRecordChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+    void processSnapshotChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+    void processCtrlEvent(mm_camera_ctrl_event_t *, app_notify_cb_t *);
+    void processStatsEvent(mm_camera_stats_event_t *, app_notify_cb_t *);
+    void processInfoEvent(mm_camera_info_event_t *event, app_notify_cb_t *);
+    void processprepareSnapshotEvent(cam_ctrl_status_t *);
+    void roiEvent(fd_roi_t roi, app_notify_cb_t *);
+    void zoomEvent(cam_ctrl_status_t *status, app_notify_cb_t *);
+    void autofocusevent(cam_ctrl_status_t *status, app_notify_cb_t *);
+    void handleZoomEventForPreview(app_notify_cb_t *);
+    void handleZoomEventForSnapshot(void);
+    status_t autoFocusEvent(cam_ctrl_status_t *, app_notify_cb_t *);
+
+    void filterPictureSizes();
+    bool supportsSceneDetection();
+    bool supportsSelectableZoneAf();
+    bool supportsFaceDetection();
+    bool supportsRedEyeReduction();
+    bool preview_parm_config (cam_ctrl_dimension_t* dim,QCameraParameters& parm);
+
+    void stopPreviewInternal();
+    void stopRecordingInternal();
+    //void stopPreviewZSL();
+    status_t cancelPictureInternal();
+    //status_t startPreviewZSL();
+    void pausePreviewForSnapshot();
+    void pausePreviewForZSL();
+    status_t resumePreviewAfterSnapshot();
+
+    status_t runFaceDetection();
+
+    status_t           setParameters(const QCameraParameters& params);
+    QCameraParameters&  getParameters();
+
+    status_t setCameraMode(const QCameraParameters& params);
+    status_t setPictureSizeTable(void);
+    status_t setPreviewSizeTable(void);
+    status_t setVideoSizeTable(void);
+    status_t setPreviewSize(const QCameraParameters& params);
+    status_t setJpegThumbnailSize(const QCameraParameters& params);
+    status_t setPreviewFpsRange(const QCameraParameters& params);
+    status_t setPreviewFrameRate(const QCameraParameters& params);
+    status_t setPreviewFrameRateMode(const QCameraParameters& params);
+    status_t setVideoSize(const QCameraParameters& params);
+    status_t setPictureSize(const QCameraParameters& params);
+    status_t setJpegQuality(const QCameraParameters& params);
+    status_t setNumOfSnapshot(const QCameraParameters& params);
+    status_t setJpegRotation(int isZSL);
+    int getJpegRotation(void);
+    int getISOSpeedValue();
+    status_t setAntibanding(const QCameraParameters& params);
+    status_t setEffect(const QCameraParameters& params);
+    status_t setExposureCompensation(const QCameraParameters &params);
+    status_t setAutoExposure(const QCameraParameters& params);
+    status_t setWhiteBalance(const QCameraParameters& params);
+    status_t setFlash(const QCameraParameters& params);
+    status_t setGpsLocation(const QCameraParameters& params);
+    status_t setRotation(const QCameraParameters& params);
+    status_t setZoom(const QCameraParameters& params);
+    status_t setFocusMode(const QCameraParameters& params);
+    status_t setBrightness(const QCameraParameters& params);
+    status_t setSkinToneEnhancement(const QCameraParameters& params);
+    status_t setOrientation(const QCameraParameters& params);
+    status_t setLensshadeValue(const QCameraParameters& params);
+    status_t setMCEValue(const QCameraParameters& params);
+    status_t setISOValue(const QCameraParameters& params);
+    status_t setPictureFormat(const QCameraParameters& params);
+    status_t setSharpness(const QCameraParameters& params);
+    status_t setContrast(const QCameraParameters& params);
+    status_t setSaturation(const QCameraParameters& params);
+    status_t setWaveletDenoise(const QCameraParameters& params);
+    status_t setSceneMode(const QCameraParameters& params);
+    status_t setContinuousAf(const QCameraParameters& params);
+    status_t setFaceDetection(const char *str);
+    status_t setSceneDetect(const QCameraParameters& params);
+    status_t setStrTextures(const QCameraParameters& params);
+    status_t setPreviewFormat(const QCameraParameters& params);
+    status_t setSelectableZoneAf(const QCameraParameters& params);
+    status_t setOverlayFormats(const QCameraParameters& params);
+    status_t setHighFrameRate(const QCameraParameters& params);
+    status_t setRedeyeReduction(const QCameraParameters& params);
+    status_t setAEBracket(const QCameraParameters& params);
+    status_t setFaceDetect(const QCameraParameters& params);
+    status_t setDenoise(const QCameraParameters& params);
+    status_t setAecAwbLock(const QCameraParameters & params);
+    status_t setHistogram(int histogram_en);
+    status_t setRecordingHint(const QCameraParameters& params);
+    status_t setRecordingHintValue(const int32_t value);
+    status_t setFocusAreas(const QCameraParameters& params);
+    status_t setMeteringAreas(const QCameraParameters& params);
+    status_t setFullLiveshot(void);
+    status_t setDISMode(void);
+    status_t setCaptureBurstExp(void);
+    status_t setPowerMode(const QCameraParameters& params);
+    void takePicturePrepareHardware( );
+    status_t setNoDisplayMode(const QCameraParameters& params);
+
+    isp3a_af_mode_t getAutoFocusMode(const QCameraParameters& params);
+    bool isValidDimension(int w, int h);
+
+    String8 create_values_str(const str_map *values, int len);
+
+    void setMyMode(int mode);
+    bool isZSLMode();
+    bool isWDenoiseEnabled();
+    void wdenoiseEvent(cam_ctrl_status_t status, void *cookie);
+    bool isLowPowerCamcorder();
+    void freePictureTable(void);
+    void freeVideoSizeTable(void);
+
+    int32_t createPreview();
+    int32_t createRecord();
+    int32_t createSnapshot();
+
+    int getHDRMode();
+    //EXIF
+    void addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+                        uint32_t count, uint8_t copy, void *data);
+    void setExifTags();
+    void initExifData();
+    void deinitExifData();
+    void setExifTagsGPS();
+    exif_tags_info_t* getExifData(){ return mExifData; }
+    int getExifTableNumEntries() { return mExifTableNumEntries; }
+    void parseGPSCoordinate(const char *latlonString, rat_t* coord);
+
+    int           mCameraId;
+    camera_mode_t myMode;
+
+    QCameraParameters    mParameters;
+    //sp<Overlay>         mOverlay;
+    int32_t             mMsgEnabled;
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    camera_request_memory          mGetMemory;
+    void                           *mCallbackCookie;
+
+    //sp<MemoryHeapBase>  mPreviewHeap;  //@Guru : Need to remove
+    sp<AshmemPool>      mMetaDataHeap;
+
+    mutable Mutex       mLock;
+    //mutable Mutex       eventLock;
+    Mutex         mCallbackLock;
+    Mutex         mPreviewMemoryLock;
+    Mutex         mRecordingMemoryLock;
+    Mutex         mAutofocusLock;
+    Mutex         mMetaDataWaitLock;
+    Mutex         mRecordFrameLock;
+    Mutex         mRecordLock;
+    Condition     mRecordWait;
+    pthread_mutex_t     mAsyncCmdMutex;
+    pthread_cond_t      mAsyncCmdWait;
+
+    QCameraStream       *mStreamDisplay;
+    QCameraStream       *mStreamRecord;
+    QCameraStream       *mStreamSnap;
+    QCameraStream       *mStreamLiveSnap;
+
+    cam_ctrl_dimension_t mDimension;
+    int  mPreviewWidth, mPreviewHeight;
+    int  videoWidth, videoHeight;
+    int  thumbnailWidth, thumbnailHeight;
+    int  maxSnapshotWidth, maxSnapshotHeight;
+    int  mPreviewFormat;
+    int  mFps;
+    int  mDebugFps;
+    int  mBrightness;
+    int  mContrast;
+    int  mBestShotMode;
+    int  mEffects;
+    int  mSkinToneEnhancement;
+    int  mDenoiseValue;
+    int  mHJR;
+    int  mRotation;
+    int  mJpegQuality;
+    int  mThumbnailQuality;
+    int  mTargetSmoothZoom;
+    int  mSmoothZoomStep;
+    int  mMaxZoom;
+    int  mCurrentZoom;
+    int  mSupportedPictureSizesCount;
+    int  mFaceDetectOn;
+    int  mDumpFrmCnt;
+    int  mDumpSkipCnt;
+    int  mFocusMode;
+
+    unsigned int mPictureSizeCount;
+    unsigned int mPreviewSizeCount;
+    int mPowerMode;
+    unsigned int mVideoSizeCount;
+
+    bool mAutoFocusRunning;
+    bool mMultiTouch;
+    bool mHasAutoFocusSupport;
+    bool mInitialized;
+    bool mDisEnabled;
+    bool strTexturesOn;
+    bool mIs3DModeOn;
+    bool mSmoothZoomRunning;
+    bool mPreparingSnapshot;
+    bool mParamStringInitialized;
+    bool mZoomSupported;
+    bool mSendMetaData;
+    bool mFullLiveshotEnabled;
+    bool mRecordingHint;
+    bool mStartRecording;
+    bool mReleasedRecordingFrame;
+    int mHdrMode;
+    int mSnapshotFormat;
+    int mZslInterval;
+    bool mRestartPreview;
+
+/*for histogram*/
+    int            mStatsOn;
+    int            mCurrentHisto;
+    bool           mSendData;
+    sp<AshmemPool> mStatHeap;
+    camera_memory_t *mStatsMapped[3];
+    QCameraStatHeap_t mHistServer;
+    int32_t        mStatSize;
+
+    bool mZslLookBackMode;
+    int mZslLookBackValue;
+    int mHFRLevel;
+    bool mZslEmptyQueueFlag;
+    String8 mEffectValues;
+    String8 mIsoValues;
+    String8 mSceneModeValues;
+    String8 mSceneDetectValues;
+    String8 mFocusModeValues;
+    String8 mSelectableZoneAfValues;
+    String8 mAutoExposureValues;
+    String8 mWhitebalanceValues;
+    String8 mAntibandingValues;
+    String8 mFrameRateModeValues;
+    String8 mTouchAfAecValues;
+    String8 mPreviewSizeValues;
+    String8 mPictureSizeValues;
+    String8 mVideoSizeValues;
+    String8 mFlashValues;
+    String8 mLensShadeValues;
+    String8 mMceValues;
+    String8 mHistogramValues;
+    String8 mSkinToneEnhancementValues;
+    String8 mPictureFormatValues;
+    String8 mDenoiseValues;
+    String8 mZoomRatioValues;
+    String8 mPreviewFrameRateValues;
+    String8 mPreviewFormatValues;
+    String8 mFaceDetectionValues;
+    String8 mHfrValues;
+    String8 mHfrSizeValues;
+    String8 mRedeyeReductionValues;
+    String8 denoise_value;
+    String8 mFpsRangesSupportedValues;
+    String8 mZslValues;
+    String8 mFocusDistance;
+
+    friend class QCameraStream;
+    friend class QCameraStream_record;
+    friend class QCameraStream_preview;
+    friend class QCameraStream_Snapshot;
+
+    camera_size_type* mPictureSizes;
+    camera_size_type* mPreviewSizes;
+    camera_size_type* mVideoSizes;
+    const camera_size_type * mPictureSizesPtr;
+    HAL_camera_state_type_t mCameraState;
+
+     /* Temporary - can be removed after Honeycomb*/
+#ifdef USE_ION
+    sp<IonPool>  mPostPreviewHeap;
+#else
+    sp<PmemPool> mPostPreviewHeap;
+#endif
+     mm_cameara_stream_buf_t mPrevForPostviewBuf;
+     int mStoreMetaDataInFrame;
+     preview_stream_ops_t *mPreviewWindow;
+     Mutex                mStateLock;
+     int                  mPreviewState;
+     /* preview memory for the display case: buffers are allocated and freed
+        via gralloc */
+     QCameraHalMemory_t   mPreviewMemory;
+
+     /* preview memory for the no-display case: buffers are allocated
+        directly by the camera HAL */
+     QCameraHalHeap_t     mNoDispPreviewMemory;
+
+     QCameraHalHeap_t     mSnapshotMemory;
+     QCameraHalHeap_t     mThumbnailMemory;
+     QCameraHalHeap_t     mRecordingMemory;
+     QCameraHalHeap_t     mJpegMemory;
+     QCameraHalHeap_t     mRawMemory;
+     camera_frame_metadata_t mMetadata;
+     camera_face_t           mFace[MAX_ROI];
+     preview_format_info_t  mPreviewFormatInfo;
+     friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+
+     //EXIF
+     exif_tags_info_t       mExifData[MAX_EXIF_TABLE_ENTRIES];  //Exif tags for JPEG encoder
+     exif_values_t          mExifValues;                        //Exif values in usable format
+     int                    mExifTableNumEntries;            //Number of entries in mExifData
+     int                 mNoDisplayMode;
+};
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCameraHWI_Display.cpp b/camera/QCameraHWI_Display.cpp
new file mode 100644
index 0000000..55bec50
--- /dev/null
+++ b/camera/QCameraHWI_Display.cpp
@@ -0,0 +1,76 @@
+/*
+** Copyright (c) 2011 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QCameraHWI_Display"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include "QCameraHWI_Display.h"
+
+
+namespace android {
+
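+// Note: the QCameraDisplay_Overlay methods below are placeholder stubs in this
+// initial drop; they perform no work and return immediately (0 for the
+// int-returning methods).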
+int QCameraDisplay_Overlay::Display_prepare_buffers()
+{
+	return 0;
+}
+
+
+int QCameraDisplay_Overlay::Display_set_crop()
+{
+	return 0;
+}
+
+
+int QCameraDisplay_Overlay::Display_set_geometry()
+{
+	return 0;
+}
+
+
+void QCameraDisplay_Overlay::Display_enqueue()
+{
+	return ;
+}
+
+
+
+void QCameraDisplay_Overlay::Display_dequeue()
+{
+	return ;
+}
+
+
+void QCameraDisplay_Overlay::Display_release_buffers()
+{
+	return ;
+}
+
+QCameraDisplay::~QCameraDisplay(){}
+
+QCameraDisplay_Overlay::~QCameraDisplay_Overlay()
+{
+	return ;
+}
+}; // namespace android
diff --git a/camera/QCameraHWI_Display.h b/camera/QCameraHWI_Display.h
new file mode 100644
index 0000000..8298b6b
--- /dev/null
+++ b/camera/QCameraHWI_Display.h
@@ -0,0 +1,69 @@
+/*
+** Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERAHWI_DISPLAY_H
+#define ANDROID_HARDWARE_QCAMERAHWI_DISPLAY_H
+
+
+#include <utils/threads.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include "QCamera_Intf.h"
+extern "C" {
+#include <mm_camera_interface2.h>
+}
+
+namespace android {
+
+/*===============================
+	Base Display Class
+================================*/
+
+class QCameraDisplay {
+
+public:
+	virtual int Display_prepare_buffers() = 0;
+	virtual int Display_set_crop( ) = 0;
+	virtual int Display_set_geometry( ) =0;
+	virtual void Display_enqueue( ) = 0;
+	virtual void Display_dequeue( ) = 0;
+	virtual void Display_release_buffers( ) =0;
+	virtual ~QCameraDisplay( );
+};
+
+/*================================
+	Overlay Derivative
+==================================*/
+class QCameraDisplay_Overlay: public QCameraDisplay {
+
+public:
+	int Display_prepare_buffers();
+	int Display_set_crop( );
+	int Display_set_geometry( );
+	void Display_enqueue( );
+	void Display_dequeue( );
+	void Display_release_buffers( );
+	virtual ~QCameraDisplay_Overlay( );
+
+
+};
+
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCameraHWI_Mem.cpp b/camera/QCameraHWI_Mem.cpp
new file mode 100644
index 0000000..5afa7aa
--- /dev/null
+++ b/camera/QCameraHWI_Mem.cpp
@@ -0,0 +1,405 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QCameraHWI_Mem"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+//#include <binder/MemoryHeapPmem.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include "QCameraParameters.h"
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "QCameraHWI_Mem.h"
+
+#define CAMERA_HAL_UNUSED(expr) do { (void)(expr); } while (0)
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* The following code implements the control logic of this class. */
+
+namespace android {
+
+
+static bool register_buf(int size,
+                         int frame_size,
+                         int cbcr_offset,
+                         int yoffset,
+                         int pmempreviewfd,
+                         uint32_t offset,
+                         uint8_t *buf,
+                         int pmem_type,
+                         bool vfe_can_write,
+                         bool register_buffer = true);
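+// The trailing register_buffer flag selects register vs. unregister of the
+// same pmem buffer (see the commented-out body further down); it defaults to
+// registration.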
+
+#if 0
+MMCameraDL::MMCameraDL(){
+    ALOGV("MMCameraDL: E");
+    libmmcamera = NULL;
+#if DLOPEN_LIBMMCAMERA
+    libmmcamera = ::dlopen("liboemcamera.so", RTLD_NOW);
+#endif
+    ALOGV("Open MM camera DL libeomcamera loaded at %p ", libmmcamera);
+    ALOGV("MMCameraDL: X");
+}
+
+void * MMCameraDL::pointer(){
+    return libmmcamera;
+}
+
+MMCameraDL::~MMCameraDL(){
+    ALOGV("~MMCameraDL: E");
+    LINK_mm_camera_destroy();
+    if (libmmcamera != NULL) {
+        ::dlclose(libmmcamera);
+        ALOGV("closed MM Camera DL ");
+    }
+    libmmcamera = NULL;
+    ALOGV("~MMCameraDL: X");
+}
+
+
+wp<MMCameraDL> MMCameraDL::instance;
+Mutex MMCameraDL::singletonLock;
+
+
+sp<MMCameraDL> MMCameraDL::getInstance(){
+    Mutex::Autolock instanceLock(singletonLock);
+    sp<MMCameraDL> mmCamera = instance.promote();
+    if(mmCamera == NULL){
+        mmCamera = new MMCameraDL();
+        instance = mmCamera;
+    }
+    return mmCamera;
+}
+#endif
+
+MemPool::MemPool(int buffer_size, int num_buffers,
+                                         int frame_size,
+                                         const char *name) :
+    mBufferSize(buffer_size),
+    mNumBuffers(num_buffers),
+    mFrameSize(frame_size),
+    mBuffers(NULL), mName(name)
+{
+    int page_size_minus_1 = getpagesize() - 1;
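+    // Round each buffer up to the next page boundary (add page_size-1, then
+    // mask off the low bits) so every buffer starts at a page-aligned offset
+    // within the heap.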
+    mAlignedBufferSize = (buffer_size + page_size_minus_1) & (~page_size_minus_1);
+}
+
+void MemPool::completeInitialization()
+{
+    // If we do not know how big the frame will be, we wait to allocate
+    // the buffers describing the individual frames until we do know their
+    // size.
+
+    if (mFrameSize > 0) {
+        mBuffers = new sp<MemoryBase>[mNumBuffers];
+        for (int i = 0; i < mNumBuffers; i++) {
+            mBuffers[i] = new
+                MemoryBase(mHeap,
+                           i * mAlignedBufferSize,
+                           mFrameSize);
+        }
+    }
+}
+
+AshmemPool::AshmemPool(int buffer_size, int num_buffers,
+                                               int frame_size,
+                                               const char *name) :
+    MemPool(buffer_size,
+                                    num_buffers,
+                                    frame_size,
+                                    name)
+{
+    ALOGV("constructing MemPool %s backed by ashmem: "
+         "%d frames @ %d uint8_ts, "
+         "buffer size %d",
+         mName,
+         num_buffers, frame_size, buffer_size);
+
+    int page_mask = getpagesize() - 1;
+    int ashmem_size = buffer_size * num_buffers;
+    ashmem_size += page_mask;
+    ashmem_size &= ~page_mask;
+
+    mHeap = new MemoryHeapBase(ashmem_size);
+
+    completeInitialization();
+}
+
+static bool register_buf(int size,
+                         int frame_size,
+                         int cbcr_offset,
+                         int yoffset,
+                         int pmempreviewfd,
+                         uint32_t offset,
+                         uint8_t *buf,
+                         int pmem_type,
+                         bool vfe_can_write,
+                         bool register_buffer)
+{
+    /*TODO*/
+    /*
+    struct msm_pmem_info pmemBuf;
+    CAMERA_HAL_UNUSED(frame_size);
+
+    pmemBuf.type     = pmem_type;
+    pmemBuf.fd       = pmempreviewfd;
+    pmemBuf.offset   = offset;
+    pmemBuf.len      = size;
+    pmemBuf.vaddr    = buf;
+    pmemBuf.y_off    = yoffset;
+    pmemBuf.cbcr_off = cbcr_offset;
+
+    pmemBuf.active   = vfe_can_write;
+
+    ALOGV("register_buf:  reg = %d buffer = %p",
+         !register_buffer, buf);
+    if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+        CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+         ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM  error %s",
+               strerror(errno));
+         return false;
+         }*/
+
+    return true;
+
+}
+
+#if 0
+bool register_record_buffers(bool register_buffer) {
+    ALOGI("%s: (%d) E", __FUNCTION__, register_buffer);
+    struct msm_pmem_info pmemBuf;
+
+    for (int cnt = 0; cnt < VIDEO_BUFFER_COUNT; ++cnt) {
+        pmemBuf.type     = MSM_PMEM_VIDEO;
+        pmemBuf.fd       = mRecordHeap->mHeap->getHeapID();
+        pmemBuf.offset   = mRecordHeap->mAlignedBufferSize * cnt;
+        pmemBuf.len      = mRecordHeap->mBufferSize;
+        pmemBuf.vaddr    = (uint8_t *)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+        pmemBuf.y_off    = 0;
+        pmemBuf.cbcr_off = recordframes[0].cbcr_off;
+        if(register_buffer == true) {
+            pmemBuf.active   = (cnt<ACTIVE_VIDEO_BUFFERS);
+            if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+                pmemBuf.type = MSM_PMEM_VIDEO_VPE;
+                pmemBuf.active = 1;
+            }
+        } else {
+            pmemBuf.active   = false;
+        }
+
+        ALOGV("register_buf:  reg = %d buffer = %p", !register_buffer,
+          (void *)pmemBuf.vaddr);
+        if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+                CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+            ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM  error %s",
+                strerror(errno));
+            return false;
+        }
+    }
+    return true;
+}
+#endif
+#if 0
+PmemPool::PmemPool(const char *pmem_pool,
+                                           int flags,
+                                           int pmem_type,
+                                           int buffer_size, int num_buffers,
+                                           int frame_size, int cbcr_offset,
+                                           int yOffset, const char *name) :
+    MemPool(buffer_size,num_buffers,frame_size,name),
+    mPmemType(pmem_type),
+    mCbCrOffset(cbcr_offset),
+    myOffset(yOffset)
+{
+    ALOGI("constructing MemPool %s backed by pmem pool %s: "
+         "%d frames @ %d bytes, buffer size %d",
+         mName,
+         pmem_pool, num_buffers, frame_size,
+         buffer_size);
+
+    //mMMCameraDLRef = MMCameraDL::getInstance();
+
+
+    // Make a new mmap'ed heap that can be shared across processes.
+    // mAlignedBufferSize is already 4K aligned. (Do we also need the total size to be a power of 2?)
+    mAlignedSize = mAlignedBufferSize * num_buffers;
+
+    sp<MemoryHeapBase> masterHeap =
+        new MemoryHeapBase(pmem_pool, mAlignedSize, flags);
+
+    if (masterHeap->getHeapID() < 0) {
+        ALOGE("failed to construct master heap for pmem pool %s", pmem_pool);
+        masterHeap.clear();
+        return;
+    }
+
+    sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(masterHeap, flags);
+    if (pmemHeap->getHeapID() >= 0) {
+        pmemHeap->slap();
+        masterHeap.clear();
+        mHeap = pmemHeap;
+        pmemHeap.clear();
+
+        mFd = mHeap->getHeapID();
+        if (::ioctl(mFd, PMEM_GET_SIZE, &mSize)) {
+            ALOGE("pmem pool %s ioctl(PMEM_GET_SIZE) error %s (%d)",
+                 pmem_pool,
+                 ::strerror(errno), errno);
+            mHeap.clear();
+            return;
+        }
+
+        ALOGE("pmem pool %s ioctl(fd = %d, PMEM_GET_SIZE) is %ld",
+             pmem_pool,
+             mFd,
+             mSize.len);
+        ALOGE("mBufferSize=%d, mAlignedBufferSize=%d\n", mBufferSize, mAlignedBufferSize);
+
+#if 0
+        // Unregister preview buffers with the camera drivers.  Allow the VFE to write
+        // to all preview buffers except for the last one.
+        // Only Register the preview, snapshot and thumbnail buffers with the kernel.
+        if( (strcmp("postview", mName) != 0) ){
+            int num_buf = num_buffers;
+            if(!strcmp("preview", mName)) num_buf = kPreviewBufferCount;
+            ALOGD("num_buffers = %d", num_buf);
+            for (int cnt = 0; cnt < num_buf; ++cnt) {
+                int active = 1;
+                if(pmem_type == MSM_PMEM_VIDEO){
+                     active = (cnt<ACTIVE_VIDEO_BUFFERS);
+                     //When VPE is enabled, set the last record
+                     //buffer as active and pmem type as PMEM_VIDEO_VPE
+                     //as this is a requirement from VPE operation.
+                     //No need to set this pmem type to VIDEO_VPE while unregistering,
+                     //because as per camera stack design: "the VPE AXI is also configured
+                     //when VFE is configured for VIDEO, which is as part of preview
+                     //initialization/start. So during this VPE AXI config camera stack
+                     //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+                     //change it's type to PMEM_VIDEO".
+                     if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+                         active = 1;
+                         pmem_type = MSM_PMEM_VIDEO_VPE;
+                     }
+                     ALOGV(" pmempool creating video buffers : active %d ", active);
+                }
+                else if (pmem_type == MSM_PMEM_PREVIEW){
+                    active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+                }
+                else if ((pmem_type == MSM_PMEM_MAINIMG)
+                     || (pmem_type == MSM_PMEM_THUMBNAIL)){
+                    active = (cnt < ACTIVE_ZSL_BUFFERS);
+                }
+                register_buf(mBufferSize,
+                         mFrameSize, mCbCrOffset, myOffset,
+                         mHeap->getHeapID(),
+                         mAlignedBufferSize * cnt,
+                         (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+                         pmem_type,
+                         active);
+            }
+        }
+#endif
+        completeInitialization();
+    }
+    else ALOGE("pmem pool %s error: could not create master heap!",
+              pmem_pool);
+    ALOGI("%s: (%s) X ", __FUNCTION__, mName);
+}
+#endif
+
+PmemPool::~PmemPool()
+{
+    ALOGI("%s: %s E", __FUNCTION__, mName);
+#if 0
+    if (mHeap != NULL) {
+        // Unregister preview buffers with the camera drivers.
+        //  Only Unregister the preview, snapshot and thumbnail
+        //  buffers with the kernel.
+        if( (strcmp("postview", mName) != 0) ){
+            int num_buffers = mNumBuffers;
+            if(!strcmp("preview", mName)) num_buffers = PREVIEW_BUFFER_COUNT;
+            for (int cnt = 0; cnt < num_buffers; ++cnt) {
+                register_buf(mBufferSize,
+                         mFrameSize,
+                         mCbCrOffset,
+                         myOffset,
+                         mHeap->getHeapID(),
+                         mAlignedBufferSize * cnt,
+                         (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+                         mPmemType,
+                         false,
+                         false /* unregister */);
+            }
+        }
+    }
+    mMMCameraDLRef.clear();
+#endif
+    ALOGI("%s: %s X", __FUNCTION__, mName);
+}
+MemPool::~MemPool()
+{
+    ALOGV("destroying MemPool %s", mName);
+    if (mFrameSize > 0)
+        delete [] mBuffers;
+    mHeap.clear();
+    ALOGV("destroying MemPool %s completed", mName);
+}
+
+
+status_t MemPool::dump(int fd, const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    CAMERA_HAL_UNUSED(args);
+    snprintf(buffer, 255, "QualcommCameraHardware::AshmemPool::dump\n");
+    result.append(buffer);
+    if (mName) {
+        snprintf(buffer, 255, "mem pool name (%s)\n", mName);
+        result.append(buffer);
+    }
+    if (mHeap != 0) {
+        snprintf(buffer, 255, "heap base(%p), size(%d), flags(%d), device(%s)\n",
+                 mHeap->getBase(), mHeap->getSize(),
+                 mHeap->getFlags(), mHeap->getDevice());
+        result.append(buffer);
+    }
+    snprintf(buffer, 255,
+             "buffer size (%d), number of buffers (%d), frame size(%d)",
+             mBufferSize, mNumBuffers, mFrameSize);
+    result.append(buffer);
+    write(fd, result.string(), result.size());
+    return NO_ERROR;
+}
+
+};
diff --git a/camera/QCameraHWI_Mem.h b/camera/QCameraHWI_Mem.h
new file mode 100644
index 0000000..473e583
--- /dev/null
+++ b/camera/QCameraHWI_Mem.h
@@ -0,0 +1,109 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef __QCAMERAHWI_MEM_H
+#define __QCAMERAHWI_MEM_H
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include <stdint.h>
+#include "QCamera_Intf.h"
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/ion.h>
+}
+
+
+#define VIDEO_BUFFER_COUNT 5
+#define VIDEO_BUFFER_COUNT_LOW_POWER_CAMCORDER 9
+
+#define PREVIEW_BUFFER_COUNT 5
+
+namespace android {
+
+// This class represents a heap which maintains several contiguous
+// buffers.  The heap may be backed by pmem (when pmem_pool contains
+// the name of a /dev/pmem* file), or by ashmem (when pmem_pool == NULL).
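+// Typical usage (as in AshmemPool below): a subclass constructor creates
+// mHeap for its backing store and then calls completeInitialization(), which
+// carves the heap into mNumBuffers MemoryBase slices of mFrameSize bytes.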
+
+struct MemPool : public RefBase {
+    MemPool(int buffer_size, int num_buffers,
+            int frame_size,
+            const char *name);
+
+    virtual ~MemPool() = 0;
+
+    void completeInitialization();
+    bool initialized() const {
+        return mHeap != NULL && mHeap->base() != MAP_FAILED;
+    }
+
+    virtual status_t dump(int fd, const Vector<String16>& args) const;
+
+    int mBufferSize;
+    int mAlignedBufferSize;
+    int mNumBuffers;
+    int mFrameSize;
+    sp<MemoryHeapBase> mHeap;
+    sp<MemoryBase> *mBuffers;
+
+    const char *mName;
+};
+
+class AshmemPool : public MemPool {
+public:
+    AshmemPool(int buffer_size, int num_buffers,
+               int frame_size,
+               const char *name);
+};
+
+class PmemPool : public MemPool {
+public:
+    PmemPool(const char *pmem_pool,
+             int flags, int pmem_type,
+             int buffer_size, int num_buffers,
+             int frame_size, int cbcr_offset,
+             int yoffset, const char *name);
+    virtual ~PmemPool();
+    int mFd;
+    int mPmemType;
+    int mCbCrOffset;
+    int myOffset;
+    int mCameraControlFd;
+    uint32_t mAlignedSize;
+    struct pmem_region mSize;
+};
+
+class IonPool : public MemPool {
+public:
+    IonPool( int flags,
+             int buffer_size, int num_buffers,
+             int frame_size, int cbcr_offset,
+             int yoffset, const char *name);
+    virtual ~IonPool();
+    int mFd;
+    int mCbCrOffset;
+    int myOffset;
+    int mCameraControlFd;
+    uint32_t mAlignedSize;
+private:
+    static const char mIonDevName[];
+};
+
+};
+#endif
diff --git a/camera/QCameraHWI_Parm.cpp b/camera/QCameraHWI_Parm.cpp
new file mode 100644
index 0000000..e01bb05
--- /dev/null
+++ b/camera/QCameraHWI_Parm.cpp
@@ -0,0 +1,4081 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QCameraHWI_Parm"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+//#include <binder/MemoryHeapPmem.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include "QCameraParameters.h"
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "linux/msm_mdp.h"
+#include <linux/fb.h>
+#include <limits.h>
+
+
+extern "C" {
+#include <fcntl.h>
+#include <time.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/system_properties.h>
+#include <sys/time.h>
+#include <linux/ion.h>
+#include <camera.h>
+#include <cam_fifo.h>
+#include <jpege.h>
+
+} // extern "C"
+
+#include "QCameraHWI.h"
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* The following code implements the parameter logic of this class. */
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+#define EXPOSURE_COMPENSATION_STEP ((float (1))/EXPOSURE_COMPENSATION_DENOMINATOR)
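+// With a step of 1/6 EV, the [-12, 12] numerator range spans +/-2 EV.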
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+
+#define HDR_HAL_FRAME 2
+
+#define BURST_INTREVAL_MIN 1
+#define BURST_INTREVAL_MAX 10
+#define BURST_INTREVAL_DEFAULT 1
+
+//Default FPS
+#define MINIMUM_FPS 5
+#define MAXIMUM_FPS 120
+#define DEFAULT_FIXED_FPS 30
+#define DEFAULT_FPS MAXIMUM_FPS
+
+//Default Picture Width
+#define DEFAULT_PICTURE_WIDTH  640
+#define DEFAULT_PICTURE_HEIGHT 480
+
+//Default Video Width
+#define DEFAULT_VIDEO_WIDTH 1920
+#define DEFAULT_VIDEO_HEIGHT 1088
+
+#define THUMBNAIL_SIZE_COUNT (sizeof(thumbnail_sizes)/sizeof(thumbnail_size_type))
+#define DEFAULT_THUMBNAIL_SETTING 4
+#define THUMBNAIL_WIDTH_STR "512"
+#define THUMBNAIL_HEIGHT_STR "384"
+#define THUMBNAIL_SMALL_HEIGHT 144
+
+#define DONT_CARE_COORDINATE -1
+
+//for histogram stats
+#define HISTOGRAM_STATS_SIZE 257
+
+//Supported preview fps ranges should be added to this array in the form (minFps,maxFps)
+static  android::FPSRange FpsRangesSupported[] = {
+            android::FPSRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000)
+        };
+#define FPS_RANGES_SUPPORTED_COUNT (sizeof(FpsRangesSupported)/sizeof(FpsRangesSupported[0]))
+
+
+typedef struct {
+    uint32_t aspect_ratio;
+    uint32_t width;
+    uint32_t height;
+} thumbnail_size_type;
+
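+/* aspect_ratio is stored in Q12 fixed point (width * 4096 / height, assuming
+ * Q12 == 1 << 12), e.g. 16:9 -> 7281 and 4:3 -> 5461; isValidDimension()
+ * compares picture sizes against this table. */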
+static thumbnail_size_type thumbnail_sizes[] = {
+{ 7281, 512, 288 }, //1.777778
+{ 6826, 480, 288 }, //1.666667
+{ 6808, 256, 154 }, //1.66233
+{ 6144, 432, 288 }, //1.5
+{ 5461, 512, 384 }, //1.333333
+{ 5006, 352, 288 }, //1.222222
+{ 5461, 320, 240 }, //1.33333
+{ 5006, 176, 144 }, //1.222222
+
+};
+
+static struct camera_size_type zsl_picture_sizes[] = {
+  { 1024, 768}, // 1MP XGA
+  { 800, 600}, //SVGA
+  { 800, 480}, // WVGA
+  { 640, 480}, // VGA
+  { 352, 288}, //CIF
+  { 320, 240}, // QVGA
+  { 176, 144} // QCIF
+};
+
+static camera_size_type default_picture_sizes[] = {
+  { 4000, 3000}, // 12MP
+  { 3200, 2400}, // 8MP
+  { 2592, 1944}, // 5MP
+  { 2048, 1536}, // 3MP QXGA
+  { 1920, 1088}, //HD1080
+  { 1600, 1200}, // 2MP UXGA
+  { 1280, 768}, //WXGA
+  { 1280, 720}, //HD720
+  { 1024, 768}, // 1MP XGA
+  { 800, 600}, //SVGA
+  { 800, 480}, // WVGA
+  { 640, 480}, // VGA
+  { 352, 288}, //CIF
+  { 320, 240}, // QVGA
+  { 176, 144} // QCIF
+};
+
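+/* ISO speed values; these appear to be indexed in the same order as the
+ * iso[] string map below (auto, HJR/deblur, then explicit speeds). */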
+static int iso_speed_values[] = {
+    0, 1, 100, 200, 400, 800, 1600
+};
+
+extern int HAL_numOfCameras;
+extern camera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+extern mm_camera_t * HAL_camerahandle[MSM_MAX_CAMERA_SENSORS];
+
+namespace android {
+
+static uint32_t  HFR_SIZE_COUNT=2;
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+
+/********************************************************************/
+static const str_map effects[] = {
+    { QCameraParameters::EFFECT_NONE,       CAMERA_EFFECT_OFF },
+    { QCameraParameters::EFFECT_MONO,       CAMERA_EFFECT_MONO },
+    { QCameraParameters::EFFECT_NEGATIVE,   CAMERA_EFFECT_NEGATIVE },
+    { QCameraParameters::EFFECT_SOLARIZE,   CAMERA_EFFECT_SOLARIZE },
+    { QCameraParameters::EFFECT_SEPIA,      CAMERA_EFFECT_SEPIA },
+    { QCameraParameters::EFFECT_POSTERIZE,  CAMERA_EFFECT_POSTERIZE },
+    { QCameraParameters::EFFECT_WHITEBOARD, CAMERA_EFFECT_WHITEBOARD },
+    { QCameraParameters::EFFECT_BLACKBOARD, CAMERA_EFFECT_BLACKBOARD },
+    { QCameraParameters::EFFECT_AQUA,       CAMERA_EFFECT_AQUA },
+    { QCameraParameters::EFFECT_EMBOSS,     CAMERA_EFFECT_EMBOSS },
+    { QCameraParameters::EFFECT_SKETCH,     CAMERA_EFFECT_SKETCH },
+    { QCameraParameters::EFFECT_NEON,       CAMERA_EFFECT_NEON }
+};
+
+static const str_map iso[] = {
+    { QCameraParameters::ISO_AUTO,  CAMERA_ISO_AUTO},
+    { QCameraParameters::ISO_HJR,   CAMERA_ISO_DEBLUR},
+    { QCameraParameters::ISO_100,   CAMERA_ISO_100},
+    { QCameraParameters::ISO_200,   CAMERA_ISO_200},
+    { QCameraParameters::ISO_400,   CAMERA_ISO_400},
+    { QCameraParameters::ISO_800,   CAMERA_ISO_800 },
+    { QCameraParameters::ISO_1600,  CAMERA_ISO_1600 }
+};
+
+static const str_map scenemode[] = {
+    { QCameraParameters::SCENE_MODE_AUTO,           CAMERA_BESTSHOT_OFF },
+    { QCameraParameters::SCENE_MODE_ASD,            CAMERA_BESTSHOT_AUTO },
+    { QCameraParameters::SCENE_MODE_ACTION,         CAMERA_BESTSHOT_ACTION },
+    { QCameraParameters::SCENE_MODE_PORTRAIT,       CAMERA_BESTSHOT_PORTRAIT },
+    { QCameraParameters::SCENE_MODE_LANDSCAPE,      CAMERA_BESTSHOT_LANDSCAPE },
+    { QCameraParameters::SCENE_MODE_NIGHT,          CAMERA_BESTSHOT_NIGHT },
+    { QCameraParameters::SCENE_MODE_NIGHT_PORTRAIT, CAMERA_BESTSHOT_NIGHT_PORTRAIT },
+    { QCameraParameters::SCENE_MODE_THEATRE,        CAMERA_BESTSHOT_THEATRE },
+    { QCameraParameters::SCENE_MODE_BEACH,          CAMERA_BESTSHOT_BEACH },
+    { QCameraParameters::SCENE_MODE_SNOW,           CAMERA_BESTSHOT_SNOW },
+    { QCameraParameters::SCENE_MODE_SUNSET,         CAMERA_BESTSHOT_SUNSET },
+    { QCameraParameters::SCENE_MODE_STEADYPHOTO,    CAMERA_BESTSHOT_ANTISHAKE },
+    { QCameraParameters::SCENE_MODE_FIREWORKS ,     CAMERA_BESTSHOT_FIREWORKS },
+    { QCameraParameters::SCENE_MODE_SPORTS ,        CAMERA_BESTSHOT_SPORTS },
+    { QCameraParameters::SCENE_MODE_PARTY,          CAMERA_BESTSHOT_PARTY },
+    { QCameraParameters::SCENE_MODE_CANDLELIGHT,    CAMERA_BESTSHOT_CANDLELIGHT },
+    { QCameraParameters::SCENE_MODE_BACKLIGHT,      CAMERA_BESTSHOT_BACKLIGHT },
+    { QCameraParameters::SCENE_MODE_FLOWERS,        CAMERA_BESTSHOT_FLOWERS },
+    { QCameraParameters::SCENE_MODE_AR,             CAMERA_BESTSHOT_AR },
+};
+
+static const str_map scenedetect[] = {
+    { QCameraParameters::SCENE_DETECT_OFF, FALSE  },
+    { QCameraParameters::SCENE_DETECT_ON, TRUE },
+};
+
+#define DONT_CARE AF_MODE_MAX
+static const str_map focus_modes[] = {
+    { QCameraParameters::FOCUS_MODE_AUTO,     AF_MODE_AUTO},
+    { QCameraParameters::FOCUS_MODE_INFINITY, AF_MODE_INFINITY },
+    { QCameraParameters::FOCUS_MODE_NORMAL,   AF_MODE_NORMAL },
+    { QCameraParameters::FOCUS_MODE_MACRO,    AF_MODE_MACRO },
+    { QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, AF_MODE_CAF},
+    { QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, AF_MODE_CAF }
+};
+
+static const str_map selectable_zone_af[] = {
+    { QCameraParameters::SELECTABLE_ZONE_AF_AUTO,  AUTO },
+    { QCameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING, SPOT },
+    { QCameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED, CENTER_WEIGHTED },
+    { QCameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE, AVERAGE }
+};
+
+// from qcamera/common/camera.h
+static const str_map autoexposure[] = {
+    { QCameraParameters::AUTO_EXPOSURE_FRAME_AVG,  CAMERA_AEC_FRAME_AVERAGE },
+    { QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED, CAMERA_AEC_CENTER_WEIGHTED },
+    { QCameraParameters::AUTO_EXPOSURE_SPOT_METERING, CAMERA_AEC_SPOT_METERING }
+};
+
+// from aeecamera.h
+static const str_map whitebalance[] = {
+    { QCameraParameters::WHITE_BALANCE_AUTO,            CAMERA_WB_AUTO },
+    { QCameraParameters::WHITE_BALANCE_INCANDESCENT,    CAMERA_WB_INCANDESCENT },
+    { QCameraParameters::WHITE_BALANCE_FLUORESCENT,     CAMERA_WB_FLUORESCENT },
+    { QCameraParameters::WHITE_BALANCE_DAYLIGHT,        CAMERA_WB_DAYLIGHT },
+    { QCameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, CAMERA_WB_CLOUDY_DAYLIGHT }
+};
+
+static const str_map antibanding[] = {
+    { QCameraParameters::ANTIBANDING_OFF,  CAMERA_ANTIBANDING_OFF },
+    { QCameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+    { QCameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ },
+    { QCameraParameters::ANTIBANDING_AUTO, CAMERA_ANTIBANDING_AUTO }
+};
+
+static const str_map frame_rate_modes[] = {
+        {QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE, FPS_MODE_AUTO},
+        {QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE, FPS_MODE_FIXED}
+};
+
+static const str_map touchafaec[] = {
+    { QCameraParameters::TOUCH_AF_AEC_OFF, FALSE },
+    { QCameraParameters::TOUCH_AF_AEC_ON, TRUE }
+};
+
+static const str_map hfr[] = {
+    { QCameraParameters::VIDEO_HFR_OFF, CAMERA_HFR_MODE_OFF },
+    { QCameraParameters::VIDEO_HFR_2X, CAMERA_HFR_MODE_60FPS },
+    { QCameraParameters::VIDEO_HFR_3X, CAMERA_HFR_MODE_90FPS },
+    { QCameraParameters::VIDEO_HFR_4X, CAMERA_HFR_MODE_120FPS },
+};
+static const int HFR_VALUES_COUNT = (sizeof(hfr)/sizeof(str_map));
+
+static const str_map flash[] = {
+    { QCameraParameters::FLASH_MODE_OFF,  LED_MODE_OFF },
+    { QCameraParameters::FLASH_MODE_AUTO, LED_MODE_AUTO },
+    { QCameraParameters::FLASH_MODE_ON, LED_MODE_ON },
+    { QCameraParameters::FLASH_MODE_TORCH, LED_MODE_TORCH}
+};
+
+static const str_map lensshade[] = {
+    { QCameraParameters::LENSSHADE_ENABLE, TRUE },
+    { QCameraParameters::LENSSHADE_DISABLE, FALSE }
+};
+
+static const str_map mce[] = {
+    { QCameraParameters::MCE_ENABLE, TRUE },
+    { QCameraParameters::MCE_DISABLE, FALSE }
+};
+
+static const str_map histogram[] = {
+    { QCameraParameters::HISTOGRAM_ENABLE, TRUE },
+    { QCameraParameters::HISTOGRAM_DISABLE, FALSE }
+};
+
+static const str_map skinToneEnhancement[] = {
+    { QCameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE, TRUE },
+    { QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE, FALSE }
+};
+
+static const str_map denoise[] = {
+    { QCameraParameters::DENOISE_OFF, FALSE },
+    { QCameraParameters::DENOISE_ON, TRUE }
+};
+
+static const str_map facedetection[] = {
+    { QCameraParameters::FACE_DETECTION_OFF, FALSE },
+    { QCameraParameters::FACE_DETECTION_ON, TRUE }
+};
+
+static const str_map redeye_reduction[] = {
+    { QCameraParameters::REDEYE_REDUCTION_ENABLE, TRUE },
+    { QCameraParameters::REDEYE_REDUCTION_DISABLE, FALSE }
+};
+
+static const str_map picture_formats[] = {
+        {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG},
+        {QCameraParameters::PIXEL_FORMAT_RAW, PICTURE_FORMAT_RAW}
+};
+
+static const str_map recording_Hints[] = {
+        {"false", FALSE},
+        {"true",  TRUE}
+};
+
+static const str_map preview_formats[] = {
+        {QCameraParameters::PIXEL_FORMAT_YUV420SP,   HAL_PIXEL_FORMAT_YCrCb_420_SP},
+        {QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO},
+        {QCameraParameters::PIXEL_FORMAT_YV12, HAL_PIXEL_FORMAT_YV12},
+        {QCameraParameters::PIXEL_FORMAT_YUV420P,HAL_PIXEL_FORMAT_YV12},
+        {QCameraParameters::PIXEL_FORMAT_NV12, HAL_PIXEL_FORMAT_YCbCr_420_SP}
+};
+
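+/* Each entry pairs a gralloc HAL pixel format with the corresponding
+ * mm-camera format and padding requirement; the last field appears to be the
+ * plane count (3 for planar YV12, 2 for the semi-planar formats). */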
+static const preview_format_info_t preview_format_info_list[] = {
+  {HAL_PIXEL_FORMAT_YCrCb_420_SP, CAMERA_YUV_420_NV21, CAMERA_PAD_TO_WORD, 2},
+  {HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO, CAMERA_YUV_420_NV21, CAMERA_PAD_TO_4K, 2},
+  {HAL_PIXEL_FORMAT_YCbCr_420_SP, CAMERA_YUV_420_NV12, CAMERA_PAD_TO_WORD, 2},
+  {HAL_PIXEL_FORMAT_YV12,         CAMERA_YUV_420_YV12, CAMERA_PAD_TO_WORD, 3}
+};
+
+static const str_map zsl_modes[] = {
+    { QCameraParameters::ZSL_OFF, FALSE },
+    { QCameraParameters::ZSL_ON, TRUE },
+};
+
+
+static const str_map hdr_bracket[] = {
+    { QCameraParameters::AE_BRACKET_HDR_OFF,HDR_BRACKETING_OFF},
+    { QCameraParameters::AE_BRACKET_HDR,HDR_MODE },
+    { QCameraParameters::AE_BRACKET,EXP_BRACKETING_MODE }
+};
+
+typedef enum {
+    NORMAL_POWER,
+    LOW_POWER
+} power_mode;
+
+static const str_map power_modes[] = {
+    { QCameraParameters::NORMAL_POWER,NORMAL_POWER },
+    { QCameraParameters::LOW_POWER,LOW_POWER }
+};
+
+/**************************************************************************/
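+/* Maps a parameter string to its driver enum value by linear search over a
+ * str_map table; returns NOT_FOUND if the name is absent. Illustrative call:
+ * attr_lookup(effects, sizeof(effects) / sizeof(str_map),
+ *             QCameraParameters::EFFECT_MONO) -> CAMERA_EFFECT_MONO. */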
+static int attr_lookup(const str_map arr[], int len, const char *name)
+{
+    if (name) {
+        for (int i = 0; i < len; i++) {
+            if (!strcmp(arr[i].desc, name))
+                return arr[i].val;
+        }
+    }
+    return NOT_FOUND;
+}
+
+bool QCameraHardwareInterface::native_set_parms(
+    mm_camera_parm_type_t type, uint16_t length, void *value)
+{
+    ALOGE("%s : type : %d Value : %d",__func__,type,*((int *)value));
+    if(MM_CAMERA_OK != cam_config_set_parm(mCameraId, type,value )) {
+        ALOGE("native_set_parms failed: type %d length %d error %s",
+            type, length, strerror(errno));
+        return false;
+    }
+
+    return true;
+
+}
+
+bool QCameraHardwareInterface::native_set_parms(
+    mm_camera_parm_type_t type, uint16_t length, void *value, int *result)
+{
+    *result= cam_config_set_parm(mCameraId, type,value );
+    if(MM_CAMERA_OK == *result) {
+        ALOGE("native_set_parms: succeeded : %d", *result);
+        return true;
+    }
+
+    ALOGE("native_set_parms failed: type %d length %d error str %s error# %d",
+        type, length, strerror(errno), errno);
+    return false;
+}
+
+//Filter Picture sizes based on max width and height
+/* TBD: do we still need this - except for ZSL? */
+void QCameraHardwareInterface::filterPictureSizes(){
+    unsigned int i;
+    if(mPictureSizeCount <= 0)
+        return;
+    maxSnapshotWidth = mPictureSizes[0].width;
+    maxSnapshotHeight = mPictureSizes[0].height;
+    // Iterate through all the widths and heights to find the max values
+    for(i =0; i<mPictureSizeCount;i++){
+        if(((maxSnapshotWidth < mPictureSizes[i].width) &&
+            (maxSnapshotHeight <= mPictureSizes[i].height))){
+            maxSnapshotWidth = mPictureSizes[i].width;
+            maxSnapshotHeight = mPictureSizes[i].height;
+        }
+    }
+    if(myMode & CAMERA_ZSL_MODE){
+        // due to lack of PMEM we restrict to lower resolution
+        mPictureSizesPtr = zsl_picture_sizes;
+        mSupportedPictureSizesCount = 7;
+    }else{
+        mPictureSizesPtr = mPictureSizes;
+        mSupportedPictureSizesCount = mPictureSizeCount;
+    }
+}
+
+static String8 create_sizes_str(const camera_size_type *sizes, int len) {
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "%dx%d", sizes[0].width, sizes[0].height);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",%dx%d", sizes[i].width, sizes[i].height);
+        str.append(buffer);
+    }
+    return str;
+}
+
+String8 QCameraHardwareInterface::create_values_str(const str_map *values, int len) {
+    String8 str;
+
+    if (len > 0) {
+        str.append(values[0].desc);
+    }
+    for (int i = 1; i < len; i++) {
+        str.append(",");
+        str.append(values[i].desc);
+    }
+    return str;
+}
+
+static String8 create_fps_str(const android:: FPSRange* fps, int len) {
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "(%d,%d)", fps[0].minFPS, fps[0].maxFPS);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",(%d,%d)", fps[i].minFPS, fps[i].maxFPS);
+        str.append(buffer);
+    }
+    return str;
+}
+
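+/* Builds a comma-separated list of every integer in [min, max]; for example
+ * create_values_range_str(0, 3) returns "0,1,2,3". */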
+static String8 create_values_range_str(int min, int max){
+    String8 str;
+    char buffer[32];
+
+    if(min <= max){
+        snprintf(buffer, sizeof(buffer), "%d", min);
+        str.append(buffer);
+
+        for (int i = min + 1; i <= max; i++) {
+            snprintf(buffer, sizeof(buffer), ",%d", i);
+            str.append(buffer);
+        }
+    }
+    return str;
+}
+
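+/* Parses a "WIDTHxHEIGHT" string, e.g. parse_size("640x480", w, h) sets
+ * w=640, h=480 and returns 0; returns -1 if 'x'/'X' does not immediately
+ * follow the width. */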
+static int parse_size(const char *str, int &width, int &height)
+{
+    // Find the width.
+    char *end;
+    int w = (int)strtol(str, &end, 10);
+    // If an 'x' or 'X' does not immediately follow, give up.
+    if ( (*end != 'x') && (*end != 'X') )
+        return -1;
+
+    // Find the height, immediately after the 'x'.
+    int h = (int)strtol(end+1, 0, 10);
+
+    width = w;
+    height = h;
+
+    return 0;
+}
+
+bool QCameraHardwareInterface::isValidDimension(int width, int height) {
+    bool retVal = FALSE;
+    /* This function checks if a given resolution is valid or not.
+     * A particular resolution is considered valid if it satisfies
+     * the following conditions:
+     * 1. width & height should be multiple of 16.
+     * 2. width & height should be less than/equal to the dimensions
+     *    supported by the camera sensor.
+     * 3. the aspect ratio is a valid aspect ratio and is among the
+     *    commonly used aspect ratio as determined by the thumbnail_sizes
+     *    data structure.
+     */
+
+    if( (width == CEILING16(width)) && (height == CEILING16(height))
+     && (width <= maxSnapshotWidth)
+    && (height <= maxSnapshotHeight) )
+    {
+        uint32_t pictureAspectRatio = (uint32_t)((width * Q12)/height);
+        for(uint32_t i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ) {
+            if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio) {
+                retVal = TRUE;
+                break;
+            }
+        }
+    }
+    return retVal;
+}
+
+void QCameraHardwareInterface::hasAutoFocusSupport(){
+
+    ALOGV("%s",__func__);
+
+    if(isZSLMode()){
+        mHasAutoFocusSupport = false;
+        return;
+    }
+
+    if(cam_ops_is_op_supported (mCameraId, MM_CAMERA_OPS_FOCUS )) {
+        mHasAutoFocusSupport = true;
+    }
+    else {
+        ALOGE("AutoFocus is not supported");
+        mHasAutoFocusSupport = false;
+    }
+
+    ALOGV("%s:rc= %d",__func__, mHasAutoFocusSupport);
+
+}
+
+bool QCameraHardwareInterface::supportsSceneDetection() {
+   bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_ASD_ENABLE);
+   return rc;
+}
+
+bool QCameraHardwareInterface::supportsFaceDetection() {
+    bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_FD);
+    return rc;
+}
+
+bool QCameraHardwareInterface::supportsSelectableZoneAf() {
+   bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_FOCUS_RECT);
+   return rc;
+}
+
+bool QCameraHardwareInterface::supportsRedEyeReduction() {
+   bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_REDEYE_REDUCTION);
+   return rc;
+}
+
+static String8 create_str(int16_t *arr, int length){
+    String8 str;
+    char buffer[32] = {0};
+
+    if(length > 0){
+        snprintf(buffer, sizeof(buffer), "%d", arr[0]);
+        str.append(buffer);
+    }
+
+    for (int i =1;i<length;i++){
+        snprintf(buffer, sizeof(buffer), ",%d",arr[i]);
+        str.append(buffer);
+    }
+    return str;
+}
+
+bool QCameraHardwareInterface::getMaxPictureDimension(mm_camera_dimension_t *maxDim)
+{
+    bool ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PICTURE_SIZE, &dim);
+    if (ret != NO_ERROR)
+        return ret;
+
+    /* Find the first dimension in the mPictureSizes
+     * array which is smaller than the max dimension.
+     * This will be the valid max picture resolution */
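+    /* (Assumes mPictureSizes is sorted largest-first, as default_picture_sizes
+        above is, so the first entry that fits is the largest usable size.) */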
+    for (unsigned int i = 0; i < mPictureSizeCount; i++) {
+        if ((mPictureSizes[i].width <= dim.width) &&
+            (mPictureSizes[i].height <= dim.height)) {
+            maxDim->height = mPictureSizes[i].height;
+            maxDim->width  = mPictureSizes[i].width;
+            break;
+        }
+    }
+    ALOGD("%s: Found Max Picture dimension: %d x %d", __func__,
+          maxDim->width, maxDim->height);
+    return ret;
+}
+void QCameraHardwareInterface::loadTables()
+{
+
+    bool ret = NO_ERROR;
+    ALOGE("%s: E", __func__);
+
+    ret = cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_PREVIEW_SIZES_CNT, &preview_sizes_count);
+
+    default_sizes_tbl_t preview_sizes_tbl;
+    preview_sizes_tbl.tbl_size=preview_sizes_count;
+    preview_sizes_tbl.sizes_tbl=&default_preview_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+                            MM_CAMERA_PARM_DEF_PREVIEW_SIZES, &preview_sizes_tbl)){
+        ALOGE("%s:Failed to get default preview sizes",__func__);
+    }
+    ret = cam_config_get_parm(mCameraId,
+                MM_CAMERA_PARM_VIDEO_SIZES_CNT, &video_sizes_count);
+
+    default_sizes_tbl_t video_sizes_tbl;
+    video_sizes_tbl.tbl_size=video_sizes_count;
+    video_sizes_tbl.sizes_tbl=&default_video_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+                            MM_CAMERA_PARM_DEF_VIDEO_SIZES, &video_sizes_tbl)){
+        ALOGE("%s:Failed to get default video sizes",__func__);
+    }
+
+    ret = cam_config_get_parm(mCameraId,
+                MM_CAMERA_PARM_THUMB_SIZES_CNT, &thumbnail_sizes_count);
+
+    default_sizes_tbl_t thumbnail_sizes_tbl;
+    thumbnail_sizes_tbl.tbl_size=thumbnail_sizes_count;
+    thumbnail_sizes_tbl.sizes_tbl=&default_thumbnail_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+                            MM_CAMERA_PARM_DEF_THUMB_SIZES, &thumbnail_sizes_tbl)){
+        ALOGE("%s:Failed to get default thumbnail sizes",__func__);
+    }
+
+    ret = cam_config_get_parm(mCameraId,
+                MM_CAMERA_PARM_HFR_SIZES_CNT, &hfr_sizes_count);
+
+    default_sizes_tbl_t hfr_sizes_tbl;
+    hfr_sizes_tbl.tbl_size=hfr_sizes_count;
+    hfr_sizes_tbl.sizes_tbl=&default_hfr_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+                            MM_CAMERA_PARM_DEF_HFR_SIZES, &hfr_sizes_tbl)){
+        ALOGE("%s:Failed to get default HFR  sizes",__func__);
+    }
+    ALOGE("%s: X", __func__);
+}
+void QCameraHardwareInterface::initDefaultParameters()
+{
+    bool ret;
+    char prop[PROPERTY_VALUE_MAX];
+    mm_camera_dimension_t maxDim;
+    int rc = MM_CAMERA_OK;
+    ALOGI("%s: E", __func__);
+
+    memset(&maxDim, 0, sizeof(mm_camera_dimension_t));
+    ret = getMaxPictureDimension(&maxDim);
+
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Cannot get Max picture size supported", __func__);
+        return;
+    }
+    if (!maxDim.width || !maxDim.height) {
+        maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+        maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+    }
+
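+    /* persist.camera.snap.format selects the snapshot main-image format
+       applied below: 0 -> YUV 4:2:0 (NV21), 1 -> YUV 4:2:2 (NV61). */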
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snap.format", prop, "0");
+    mSnapshotFormat = atoi(prop);
+    ALOGV("%s: prop =(%s), snap_format=%d", __func__, prop, mSnapshotFormat);
+
+    //cam_ctrl_dimension_t dim;
+    mHFRLevel = 0;
+    memset(&mDimension, 0, sizeof(cam_ctrl_dimension_t));
+    memset(&mPreviewFormatInfo, 0, sizeof(preview_format_info_t));
+    mDimension.video_width     = DEFAULT_VIDEO_WIDTH;
+    mDimension.video_height    = DEFAULT_VIDEO_HEIGHT;
+    // mzhu mDimension.picture_width   = DEFAULT_STREAM_WIDTH;
+    // mzhu mDimension.picture_height  = DEFAULT_STREAM_HEIGHT;
+    mDimension.picture_width   = maxDim.width;
+    mDimension.picture_height  = maxDim.height;
+    mDimension.display_width   = DEFAULT_STREAM_WIDTH;
+    mDimension.display_height  = DEFAULT_STREAM_HEIGHT;
+    mDimension.orig_picture_dx = mDimension.picture_width;
+    mDimension.orig_picture_dy = mDimension.picture_height;
+    mDimension.ui_thumbnail_width = DEFAULT_STREAM_WIDTH;
+    mDimension.ui_thumbnail_height = DEFAULT_STREAM_HEIGHT;
+    mDimension.orig_video_width = DEFAULT_STREAM_WIDTH;
+    mDimension.orig_video_height = DEFAULT_STREAM_HEIGHT;
+
+    mDimension.prev_format     = CAMERA_YUV_420_NV21;
+    mDimension.enc_format      = CAMERA_YUV_420_NV12;
+    if (mSnapshotFormat == 1) {
+      mDimension.main_img_format = CAMERA_YUV_422_NV61;
+    } else {
+      mDimension.main_img_format = CAMERA_YUV_420_NV21;
+    }
+    mDimension.thumb_format    = CAMERA_YUV_420_NV21;
+    ALOGV("%s: main_img_format =%d, thumb_format=%d", __func__,
+         mDimension.main_img_format, mDimension.thumb_format);
+    mDimension.prev_padding_format = CAMERA_PAD_TO_WORD;
+
+    ret = native_set_parms(MM_CAMERA_PARM_DIMENSION,
+                              sizeof(cam_ctrl_dimension_t), (void *) &mDimension);
+    if(!ret) {
+      ALOGE("MM_CAMERA_PARM_DIMENSION Failed.");
+      return;
+    }
+
+    hasAutoFocusSupport();
+
+    // Initialize constant parameter strings. This will happen only once in the
+    // lifetime of the mediaserver process.
+    if (true/*!mParamStringInitialized*/) {
+        //filter picture sizes
+        filterPictureSizes();
+        mPictureSizeValues = create_sizes_str(
+                mPictureSizesPtr, mSupportedPictureSizesCount);
+        mPreviewSizeValues = create_sizes_str(
+                mPreviewSizes,  mPreviewSizeCount);
+        mVideoSizeValues = create_sizes_str(
+                mVideoSizes,  mVideoSizeCount);
+
+        //Query for max HFR value
+        camera_hfr_mode_t maxHFR;
+        cam_config_get_parm(mCameraId, MM_CAMERA_PARM_MAX_HFR_MODE, (void *)&maxHFR);
+        //Filter HFR values and build parameter string
+        String8 str;
+        for(int i=0; i<HFR_VALUES_COUNT; i++){
+            if(hfr[i].val <= maxHFR){
+                if(i>0) str.append(",");
+                str.append(hfr[i].desc);
+            }
+        }
+        mHfrValues = str;
+        mHfrSizeValues = create_sizes_str(
+                default_hfr_sizes, hfr_sizes_count);
+        mFpsRangesSupportedValues = create_fps_str(
+            FpsRangesSupported,FPS_RANGES_SUPPORTED_COUNT );
+        mParameters.set(
+            QCameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+            mFpsRangesSupportedValues);
+        mParameters.setPreviewFpsRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000);
+        mFlashValues = create_values_str(
+            flash, sizeof(flash) / sizeof(str_map));
+        mLensShadeValues = create_values_str(
+            lensshade,sizeof(lensshade)/sizeof(str_map));
+        mMceValues = create_values_str(
+            mce,sizeof(mce)/sizeof(str_map));
+        mEffectValues = create_values_str(effects, sizeof(effects) / sizeof(str_map));
+        mAntibandingValues = create_values_str(
+            antibanding, sizeof(antibanding) / sizeof(str_map));
+        mIsoValues = create_values_str(iso,sizeof(iso)/sizeof(str_map));
+        mAutoExposureValues = create_values_str(
+            autoexposure, sizeof(autoexposure) / sizeof(str_map));
+        mWhitebalanceValues = create_values_str(
+            whitebalance, sizeof(whitebalance) / sizeof(str_map));
+
+        if(mHasAutoFocusSupport){
+            mFocusModeValues = create_values_str(
+                    focus_modes, sizeof(focus_modes) / sizeof(str_map));
+        }
+
+        mSceneModeValues = create_values_str(scenemode, sizeof(scenemode) / sizeof(str_map));
+
+        if(mHasAutoFocusSupport){
+            mTouchAfAecValues = create_values_str(
+                touchafaec,sizeof(touchafaec)/sizeof(str_map));
+        }
+        //Currently Enabling Histogram for 8x60
+        mHistogramValues = create_values_str(
+            histogram,sizeof(histogram)/sizeof(str_map));
+
+        mSkinToneEnhancementValues = create_values_str(
+            skinToneEnhancement,sizeof(skinToneEnhancement)/sizeof(str_map));
+
+        mPictureFormatValues = create_values_str(
+            picture_formats, sizeof(picture_formats)/sizeof(str_map));
+
+        mZoomSupported=false;
+        mMaxZoom=0;
+        mm_camera_zoom_tbl_t zmt;
+        if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+                             MM_CAMERA_PARM_MAXZOOM, &mMaxZoom)){
+            ALOGE("%s:Failed to get max zoom",__func__);
+        }else{
+
+            ALOGE("Max Zoom:%d",mMaxZoom);
+            /* Kernel driver limits the max amount of data that can be retrieved through a control
+            command to 260 bytes hence we conservatively limit to 110 zoom ratios */
+            if(mMaxZoom>MAX_ZOOM_RATIOS) {
+                ALOGE("%s:max zoom is larger than sizeof zoomRatios table",__func__);
+                mMaxZoom=MAX_ZOOM_RATIOS-1;
+            }
+            zmt.size=mMaxZoom;
+            zmt.zoom_ratio_tbl=&zoomRatios[0];
+            if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+                                 MM_CAMERA_PARM_ZOOM_RATIO, &zmt)){
+                ALOGE("%s:Failed to get max zoom ratios",__func__);
+            }else{
+                mZoomSupported=true;
+                mZoomRatioValues =  create_str(zoomRatios, mMaxZoom);
+            }
+        }
+
+        ALOGE("Zoom supported:%d",mZoomSupported);
+
+        denoise_value = create_values_str(
+            denoise, sizeof(denoise) / sizeof(str_map));
+
+       if(supportsFaceDetection()) {
+            mFaceDetectionValues = create_values_str(
+                facedetection, sizeof(facedetection) / sizeof(str_map));
+        }
+
+        if(mHasAutoFocusSupport){
+            mSelectableZoneAfValues = create_values_str(
+                selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map));
+        }
+
+        mSceneDetectValues = create_values_str(scenedetect, sizeof(scenedetect) / sizeof(str_map));
+
+        mRedeyeReductionValues = create_values_str(
+            redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map));
+
+        mZslValues = create_values_str(
+            zsl_modes,sizeof(zsl_modes)/sizeof(str_map));
+
+        mParamStringInitialized = true;
+    }
+
+    //set supported video sizes
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_SIZES, mVideoSizeValues.string());
+
+    //set default video size to first one in supported table
+    String8 vSize = create_sizes_str(&mVideoSizes[0], 1);
+    mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, vSize.string());
+
+    //Set Preview size
+    int default_preview_width, default_preview_height;
+    cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+            &default_preview_width);
+    cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+            &default_preview_height);
+    mParameters.setPreviewSize(default_preview_width, default_preview_height);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+                    mPreviewSizeValues.string());
+    mDimension.display_width = default_preview_width;
+    mDimension.display_height = default_preview_height;
+
+    //Set Preview Frame Rate
+    if(mFps >= MINIMUM_FPS && mFps <= MAXIMUM_FPS) {
+        mPreviewFrameRateValues = create_values_range_str(
+        MINIMUM_FPS, mFps);
+    }else{
+        mPreviewFrameRateValues = create_values_range_str(
+        MINIMUM_FPS, MAXIMUM_FPS);
+    }
+
+
+    if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS)) {
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+                        mPreviewFrameRateValues.string());
+     } else {
+        mParameters.set(
+            QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+            DEFAULT_FIXED_FPS);
+    }
+
+    //Set Preview Frame Rate Modes
+    mParameters.setPreviewFrameRateMode("frame-rate-auto");
+    mFrameRateModeValues = create_values_str(
+            frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map));
+      if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS_MODE)){
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES,
+                    mFrameRateModeValues.string());
+    }
+
+    //Set Preview Format
+    //mParameters.setPreviewFormat("yuv420sp"); // informative
+    mParameters.setPreviewFormat(QCameraParameters::PIXEL_FORMAT_YUV420SP);
+
+    mPreviewFormatValues = create_values_str(
+        preview_formats, sizeof(preview_formats) / sizeof(str_map));
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+            mPreviewFormatValues.string());
+
+    //Set Overlay Format
+    mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+    mParameters.set("max-num-detected-faces-hw", "2");
+
+    //Set Picture Size
+    mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH, DEFAULT_PICTURE_HEIGHT);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+                    mPictureSizeValues.string());
+
+    //Set Preview Frame Rate
+    if(mFps >= MINIMUM_FPS && mFps <= MAXIMUM_FPS) {
+        mParameters.setPreviewFrameRate(mFps);
+    }else{
+        mParameters.setPreviewFrameRate(DEFAULT_FPS);
+    }
+
+    //Set Picture Format
+    mParameters.setPictureFormat("jpeg"); // informative
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
+                    mPictureFormatValues);
+
+    mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, "85"); // max quality
+    mJpegQuality = 85;
+    //Set Video Format
+    mParameters.set(QCameraParameters::KEY_VIDEO_FRAME_FORMAT, "yuv420sp");
+
+    //Set Thumbnail parameters
+    mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+                    THUMBNAIL_WIDTH_STR); // informative
+    mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+                    THUMBNAIL_HEIGHT_STR); // informative
+    mDimension.ui_thumbnail_width =
+            thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+    mDimension.ui_thumbnail_height =
+            thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+    mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "90");
+    String8 valuesStr = create_sizes_str(default_thumbnail_sizes, thumbnail_sizes_count);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
+                valuesStr.string());
+    // Define CAMERA_SMOOTH_ZOOM in Android.mk file , to enable smoothzoom
+#ifdef CAMERA_SMOOTH_ZOOM
+    mParameters.set(QCameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
+#endif
+    if(mZoomSupported){
+        mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "true");
+        ALOGE("max zoom is %d", mMaxZoom-1);
+        /* mMaxZoom value that the query interface returns is the size
+        ALOGV("max zoom is %d", mMaxZoom-1);
+        * mMaxZoom value that the query interface returns is the size
+         * of zoom table. So the actual max zoom value will be one
+         * less than that value.          */
+
+        mParameters.set("max-zoom",mMaxZoom-1);
+        mParameters.set(QCameraParameters::KEY_ZOOM_RATIOS,
+                            mZoomRatioValues);
+    } else {
+        mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "false");
+    }
+
+    /* Enable zoom support for video application if VPE enabled */
+    if(mZoomSupported) {
+        mParameters.set("video-zoom-support", "true");
+    } else {
+        mParameters.set("video-zoom-support", "false");
+    }
+
+    //8960 supports Power modes : Low power, Normal Power.
+    mParameters.set("power-mode-supported", "true");
+    //Set Live shot support
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LIVESHOT_MAIN);
+    if(!rc) {
+        ALOGE("%s:LIVESHOT is  not supported", __func__);
+        mParameters.set("video-snapshot-supported", "false");
+    } else {
+        mParameters.set("video-snapshot-supported", "true");
+    }
+
+    //Set default power mode
+    mParameters.set(QCameraParameters::KEY_POWER_MODE,"Normal_Power");
+
+    //Set Camera Mode
+    mParameters.set(QCameraParameters::KEY_CAMERA_MODE,0);
+    mParameters.set(QCameraParameters::KEY_AE_BRACKET_HDR,"Off");
+
+    //Set Antibanding
+    mParameters.set(QCameraParameters::KEY_ANTIBANDING,
+                    QCameraParameters::ANTIBANDING_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_ANTIBANDING,
+                    mAntibandingValues);
+
+    //Set Effect
+    mParameters.set(QCameraParameters::KEY_EFFECT,
+                    QCameraParameters::EFFECT_NONE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_EFFECTS, mEffectValues);
+
+    //Set Auto Exposure
+    mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE,
+                    QCameraParameters::AUTO_EXPOSURE_FRAME_AVG);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE, mAutoExposureValues);
+
+    //Set WhiteBalance
+    mParameters.set(QCameraParameters::KEY_WHITE_BALANCE,
+                    QCameraParameters::WHITE_BALANCE_AUTO);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_WHITE_BALANCE,mWhitebalanceValues);
+
+    //Set AEC_LOCK
+    mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK, "false");
+    if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_AEC_LOCK)){
+        mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true");
+    } else {
+        mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false");
+    }
+    //Set AWB_LOCK
+    mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, "false");
+    if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_AWB_LOCK))
+        mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true");
+    else
+        mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false");
+
+    //Set Focus Mode
+    if(mHasAutoFocusSupport){
+       mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                          QCameraParameters::FOCUS_MODE_AUTO);
+       mFocusMode = AF_MODE_AUTO;
+       mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                          mFocusModeValues);
+       mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+       mParameters.set(QCameraParameters::KEY_MAX_NUM_METERING_AREAS, "1");
+   } else {
+       mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+       QCameraParameters::FOCUS_MODE_INFINITY);
+       mFocusMode = DONT_CARE;
+       mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+       QCameraParameters::FOCUS_MODE_INFINITY);
+       mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "0");
+       mParameters.set(QCameraParameters::KEY_MAX_NUM_METERING_AREAS, "0");
+   }
+
+    mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, DEFAULT_CAMERA_AREA);
+    mParameters.set(QCameraParameters::KEY_METERING_AREAS, DEFAULT_CAMERA_AREA);
+
+    //Set Flash
+    if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LED_MODE)) {
+        mParameters.set(QCameraParameters::KEY_FLASH_MODE,
+                        QCameraParameters::FLASH_MODE_OFF);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_FLASH_MODES,
+                        mFlashValues);
+    }
+
+    //Set Sharpness
+    mParameters.set(QCameraParameters::KEY_MAX_SHARPNESS,
+            CAMERA_MAX_SHARPNESS);
+    mParameters.set(QCameraParameters::KEY_SHARPNESS,
+                    CAMERA_DEF_SHARPNESS);
+
+    //Set Contrast
+    mParameters.set(QCameraParameters::KEY_MAX_CONTRAST,
+            CAMERA_MAX_CONTRAST);
+    mParameters.set(QCameraParameters::KEY_CONTRAST,
+                    CAMERA_DEF_CONTRAST);
+
+    //Set Saturation
+    mParameters.set(QCameraParameters::KEY_MAX_SATURATION,
+            CAMERA_MAX_SATURATION);
+    mParameters.set(QCameraParameters::KEY_SATURATION,
+                    CAMERA_DEF_SATURATION);
+
+    //Set Brightness/luma-adaptaion
+    mParameters.set("luma-adaptation", "3");
+
+    mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT,
+                    QCameraParameters::PIXEL_FORMAT_JPEG);
+
+    //Set Lensshading
+    mParameters.set(QCameraParameters::KEY_LENSSHADE,
+                    QCameraParameters::LENSSHADE_ENABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_LENSSHADE_MODES,
+                    mLensShadeValues);
+
+    //Set ISO Mode
+    mParameters.set(QCameraParameters::KEY_ISO_MODE,
+                    QCameraParameters::ISO_AUTO);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_ISO_MODES,
+                    mIsoValues);
+
+    //Set MCE
+    mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT,
+                    QCameraParameters::MCE_ENABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES,
+                    mMceValues);
+    //Set HFR
+    if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_HFR)) {
+        mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE,
+                    QCameraParameters::VIDEO_HFR_OFF);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,
+                    mHfrSizeValues.string());
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES,
+                    mHfrValues);
+    } else{
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,"");
+    }
+
+    //Set Histogram
+    mParameters.set(QCameraParameters::KEY_HISTOGRAM,
+                    QCameraParameters::HISTOGRAM_DISABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES,
+                    mHistogramValues);
+
+    //Set SkinTone Enhancement
+    mParameters.set(QCameraParameters::KEY_SKIN_TONE_ENHANCEMENT,
+                    QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE);
+    mParameters.set("skinToneEnhancement", "0");
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES,
+                    mSkinToneEnhancementValues);
+
+    //Set Scene Mode
+    mParameters.set(QCameraParameters::KEY_SCENE_MODE,
+                    QCameraParameters::SCENE_MODE_AUTO);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_MODES,
+                    mSceneModeValues);
+
+    //Set Streaming Textures
+    mParameters.set("strtextures", "OFF");
+
+    //Set Denoise
+    mParameters.set(QCameraParameters::KEY_DENOISE,
+                    QCameraParameters::DENOISE_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_DENOISE,
+                        denoise_value);
+    //Set Touch AF/AEC
+    mParameters.set(QCameraParameters::KEY_TOUCH_AF_AEC,
+                    QCameraParameters::TOUCH_AF_AEC_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC,
+                    mTouchAfAecValues);
+    mParameters.set("touchAfAec-dx","100");
+    mParameters.set("touchAfAec-dy","100");
+
+    //Set Scene Detection
+    mParameters.set(QCameraParameters::KEY_SCENE_DETECT,
+                   QCameraParameters::SCENE_DETECT_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_DETECT,
+                    mSceneDetectValues);
+
+    //Set Selectable Zone AF
+    mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF,
+                    QCameraParameters::SELECTABLE_ZONE_AF_AUTO);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF,
+                    mSelectableZoneAfValues);
+
+    //Set Face Detection
+    mParameters.set(QCameraParameters::KEY_FACE_DETECTION,
+                    QCameraParameters::FACE_DETECTION_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_FACE_DETECTION,
+                    mFaceDetectionValues);
+
+    //Set Red Eye Reduction
+    mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION,
+                    QCameraParameters::REDEYE_REDUCTION_DISABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION,
+                    mRedeyeReductionValues);
+
+    //Set ZSL
+    mParameters.set(QCameraParameters::KEY_ZSL,
+                    QCameraParameters::ZSL_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_ZSL_MODES,
+                    mZslValues);
+
+    //Set Focal length, horizontal and vertical view angles
+    float focalLength = 0.0f;
+    float horizontalViewAngle = 0.0f;
+    float verticalViewAngle = 0.0f;
+    cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCAL_LENGTH,
+            (void *)&focalLength);
+    mParameters.setFloat(QCameraParameters::KEY_FOCAL_LENGTH,
+                    focalLength);
+    cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+            (void *)&horizontalViewAngle);
+    mParameters.setFloat(QCameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,
+                    horizontalViewAngle);
+    cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE,
+            (void *)&verticalViewAngle);
+    mParameters.setFloat(QCameraParameters::KEY_VERTICAL_VIEW_ANGLE,
+                    verticalViewAngle);
+
+    //Set Exposure Compensation
+    mParameters.set(
+            QCameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,
+            EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR);
+    mParameters.set(
+            QCameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
+            EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR);
+    mParameters.set(
+            QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+            EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR);
+    mParameters.setFloat(
+            QCameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,
+            EXPOSURE_COMPENSATION_STEP);
+
+    mParameters.set("num-snaps-per-shutter", 1);
+
+    mParameters.set("capture-burst-captures-values", getZSLQueueDepth());
+    mParameters.set("capture-burst-interval-supported", "true");
+    mParameters.set("capture-burst-interval-max", BURST_INTREVAL_MAX); /*skip frames*/
+    mParameters.set("capture-burst-interval-min", BURST_INTREVAL_MIN); /*skip frames*/
+    mParameters.set("capture-burst-interval", BURST_INTREVAL_DEFAULT); /*skip frames*/
+    mParameters.set("capture-burst-retroactive", 0);
+    mParameters.set("capture-burst-retroactive-max", getZSLQueueDepth());
+    mParameters.set("capture-burst-exposures", "");
+    mParameters.set("capture-burst-exposures-values",
+      "-12,-11,-10,-9,-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10,11,12");
+    {
+      String8 CamModeStr;
+      char buffer[32];
+      int flag = 0;
+
+      for (int i = 0; i < HAL_CAM_MODE_MAX; i++) {
+        if ( 0 ) { /*exclude some conflicting case*/
+        } else {
+          if (flag == 0) { /*first item*/
+            snprintf(buffer, sizeof(buffer), "%d", i);
+          } else {
+            snprintf(buffer, sizeof(buffer), ",%d", i);
+          }
+          flag = 1;
+          CamModeStr.append(buffer);
+        }
+      }
+      mParameters.set("camera-mode-values", CamModeStr);
+    }
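+    /* Illustrative note: since the 'if ( 0 )' placeholder above excludes
+     * nothing, the resulting "camera-mode-values" string simply enumerates
+     * every mode index, e.g. "0,1,2" when HAL_CAM_MODE_MAX is 3. */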
+
+    mParameters.set("ae-bracket-hdr-values",
+      create_values_str(hdr_bracket, sizeof(hdr_bracket)/sizeof(str_map) ));
+
+// if(mIs3DModeOn)
+//     mParameters.set("3d-frame-format", "left-right");
+    mParameters.set("no-display-mode", 0);
+    //mUseOverlay = useOverlay();
+    mParameters.set("zoom", 0);
+
+    int mNumberOfVFEOutputs = 0;
+    rc = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, &mNumberOfVFEOutputs);
+    if(rc != MM_CAMERA_OK) {
+        ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+    }
+    if(mNumberOfVFEOutputs == 1) {
+       mParameters.set(QCameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED, "true");
+    } else {
+       mParameters.set(QCameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED, "false");
+    }
+
+    if (setParameters(mParameters) != NO_ERROR) {
+        ALOGE("Failed to set default parameters?!");
+    }
+    mInitialized = true;
+    strTexturesOn = false;
+
+    ALOGI("%s: X", __func__);
+    return;
+}
+
+/**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported.
+ */
+
+int QCameraHardwareInterface::setParameters(const char *parms)
+{
+    QCameraParameters param;
+    String8 str = String8(parms);
+    param.unflatten(str);
+    status_t ret = setParameters(param);
+    if (ret == NO_ERROR)
+        return 0;
+    else
+        return -1;
+}
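+/* Illustrative sketch, not part of the code above: the flattened string handed
+ * in here uses the usual CameraParameters encoding of semicolon-separated
+ * key=value pairs, so a hypothetical caller (hwi being a
+ * QCameraHardwareInterface pointer) might do:
+ *
+ *   hwi->setParameters("preview-size=1280x720;jpeg-quality=85;zoom=2");
+ *
+ * which is unflattened into a QCameraParameters object and then validated by
+ * setParameters(const QCameraParameters&) below. */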
+
+/**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported. */
+status_t QCameraHardwareInterface::setParameters(const QCameraParameters& params)
+{
+
+    ALOGI("%s: E", __func__);
+//    Mutex::Autolock l(&mLock);
+    status_t rc, final_rc = NO_ERROR;
+
+    if ((rc = setCameraMode(params)))                   final_rc = rc;
+    if ((rc = setPowerMode(params)))                    final_rc = rc;
+    if ((rc = setPreviewSize(params)))                  final_rc = rc;
+    if ((rc = setVideoSize(params)))                    final_rc = rc;
+    if ((rc = setPictureSize(params)))                  final_rc = rc;
+    if ((rc = setJpegThumbnailSize(params)))            final_rc = rc;
+    if ((rc = setJpegQuality(params)))                  final_rc = rc;
+    if ((rc = setEffect(params)))                       final_rc = rc;
+    if ((rc = setGpsLocation(params)))                  final_rc = rc;
+    if ((rc = setRotation(params)))                     final_rc = rc;
+    if ((rc = setZoom(params)))                         final_rc = rc;
+    if ((rc = setOrientation(params)))                  final_rc = rc;
+    if ((rc = setLensshadeValue(params)))               final_rc = rc;
+    if ((rc = setMCEValue(params)))                     final_rc = rc;
+    if ((rc = setPictureFormat(params)))                final_rc = rc;
+    if ((rc = setSharpness(params)))                    final_rc = rc;
+    if ((rc = setSaturation(params)))                   final_rc = rc;
+    if ((rc = setSceneMode(params)))                    final_rc = rc;
+    if ((rc = setContrast(params)))                     final_rc = rc;
+    if ((rc = setFaceDetect(params)))                   final_rc = rc;
+    if ((rc = setStrTextures(params)))                  final_rc = rc;
+    if ((rc = setPreviewFormat(params)))                final_rc = rc;
+    if ((rc = setSkinToneEnhancement(params)))          final_rc = rc;
+    if ((rc = setWaveletDenoise(params)))               final_rc = rc;
+    if ((rc = setAntibanding(params)))                  final_rc = rc;
+    //    if ((rc = setOverlayFormats(params)))         final_rc = rc;
+    if ((rc = setRedeyeReduction(params)))              final_rc = rc;
+    if ((rc = setCaptureBurstExp()))                    final_rc = rc;
+
+    const char *str_val = params.get("capture-burst-exposures");
+    if ( str_val == NULL || strlen(str_val)==0 ) {
+        char burst_exp[PROPERTY_VALUE_MAX];
+        memset(burst_exp, 0, sizeof(burst_exp));
+        property_get("persist.capture.burst.exposures", burst_exp, "");
+        if ( strlen(burst_exp)>0 ) {
+            mParameters.set("capture-burst-exposures", burst_exp);
+        }
+    } else {
+      mParameters.set("capture-burst-exposures", str_val);
+    }
+    mParameters.set("num-snaps-per-shutter", params.get("num-snaps-per-shutter"));
+
+    if ((rc = setAEBracket(params)))              final_rc = rc;
+    //    if ((rc = setDenoise(params)))                final_rc = rc;
+    if ((rc = setPreviewFpsRange(params)))              final_rc = rc;
+    if((rc = setRecordingHint(params)))                 final_rc = rc;
+    if ((rc = setNumOfSnapshot(params)))                final_rc = rc;
+    if ((rc = setAecAwbLock(params)))                   final_rc = rc;
+
+    const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+    int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+    if((value != NOT_FOUND) && (value == CAMERA_BESTSHOT_OFF )) {
+        //if ((rc = setPreviewFrameRateMode(params)))     final_rc = rc;
+        if ((rc = setPreviewFrameRate(params)))         final_rc = rc;
+        if ((rc = setAutoExposure(params)))             final_rc = rc;
+        if ((rc = setExposureCompensation(params)))     final_rc = rc;
+        if ((rc = setWhiteBalance(params)))             final_rc = rc;
+        if ((rc = setFlash(params)))                    final_rc = rc;
+        if ((rc = setFocusMode(params)))                final_rc = rc;
+        if ((rc = setBrightness(params)))               final_rc = rc;
+        if ((rc = setISOValue(params)))                 final_rc = rc;
+        if ((rc = setFocusAreas(params)))               final_rc = rc;
+        if ((rc = setMeteringAreas(params)))            final_rc = rc;
+    }
+    //selectableZoneAF needs to be invoked after continuous AF
+    if ((rc = setSelectableZoneAf(params)))             final_rc = rc;
+    // setHighFrameRate needs to be done at end, as there can
+    // be a preview restart, and need to use the updated parameters
+    if ((rc = setHighFrameRate(params)))  final_rc = rc;
+    if ((rc = setZSLBurstLookBack(params))) final_rc = rc;
+    if ((rc = setZSLBurstInterval(params))) final_rc = rc;
+    if ((rc = setNoDisplayMode(params))) final_rc = rc;
+
+    //Update Exiftag values.
+    setExifTags();
+
+   ALOGI("%s: X", __func__);
+   return final_rc;
+}
+
+/** Retrieve the camera parameters.  The buffer returned by the camera HAL
+ *  must be returned back to it with put_parameters, if put_parameters
+ *  is not NULL.
+ */
+int QCameraHardwareInterface::getParameters(char **parms)
+{
+    char* rc = NULL;
+    String8 str;
+    QCameraParameters param = getParameters();
+    //param.dump();
+    str = param.flatten( );
+    rc = (char *)malloc(sizeof(char)*(str.length()+1));
+    if(rc != NULL){
+        memset(rc, 0, sizeof(char)*(str.length()+1));
+        strncpy(rc, str.string(), str.length());
+        rc[str.length()] = '\0';
+        *parms = rc;
+    }
+    return 0;
+}
+
+/** The camera HAL uses its own memory to pass us the parameters when we
+ *  call get_parameters.  Use this function to return the memory back to
+ *  the camera HAL, if put_parameters is not NULL.  If put_parameters
+ *  is NULL, then you have to use free() to release the memory.
+ */
+void QCameraHardwareInterface::putParameters(char *rc)
+{
+    free(rc);
+    rc = NULL;
+}
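+/* Illustrative usage of the get/put contract above (hypothetical caller):
+ *
+ *   char *parms = NULL;
+ *   hwi->getParameters(&parms);   // HAL malloc()s the flattened string
+ *   // ... parse or log parms ...
+ *   hwi->putParameters(parms);    // hand the buffer back so the HAL free()s it
+ */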
+
+QCameraParameters& QCameraHardwareInterface::getParameters()
+{
+    Mutex::Autolock lock(mLock);
+    mParameters.set(QCameraParameters::KEY_FOCUS_DISTANCES, mFocusDistance.string());
+    return mParameters;
+}
+
+status_t QCameraHardwareInterface::runFaceDetection()
+{
+    bool ret = true;
+
+    const char *str = mParameters.get(QCameraParameters::KEY_FACE_DETECTION);
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                sizeof(facedetection) / sizeof(str_map), str);
+#if 0
+        mMetaDataWaitLock.lock();
+        if (value == true) {
+            if(mMetaDataHeap != NULL)
+                mMetaDataHeap.clear();
+
+            mMetaDataHeap =
+                new AshmemPool((sizeof(int)*(MAX_ROI*4+1)),
+                        1,
+                        (sizeof(int)*(MAX_ROI*4+1)),
+                        "metadata");
+            if (!mMetaDataHeap->initialized()) {
+                ALOGE("Meta Data Heap allocation failed ");
+                mMetaDataHeap.clear();
+                ALOGE("runFaceDetection X: error initializing mMetaDataHeap");
+                mMetaDataWaitLock.unlock();
+                return UNKNOWN_ERROR;
+            }
+            mSendMetaData = true;
+        } else {
+            if(mMetaDataHeap != NULL)
+                mMetaDataHeap.clear();
+        }
+        mMetaDataWaitLock.unlock();
+#endif
+        cam_ctrl_dimension_t dim;
+        cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+        preview_parm_config (&dim, mParameters);
+        ALOGE("%s: why set_dimension everytime?", __func__);
+        ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+        ret = native_set_parms(MM_CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setSharpness(const QCameraParameters& params)
+{
+    bool ret = false;
+    int rc = MM_CAMERA_OK;
+    ALOGE("%s",__func__);
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SHARPNESS);
+    if(!rc) {
+        ALOGE("%s:CONTRAST not supported", __func__);
+        return NO_ERROR;
+    }
+    int sharpness = params.getInt(QCameraParameters::KEY_SHARPNESS);
+    if((sharpness < CAMERA_MIN_SHARPNESS
+            || sharpness > CAMERA_MAX_SHARPNESS))
+        return UNKNOWN_ERROR;
+
+    ALOGV("setting sharpness %d", sharpness);
+    mParameters.set(QCameraParameters::KEY_SHARPNESS, sharpness);
+    ret = native_set_parms(MM_CAMERA_PARM_SHARPNESS, sizeof(sharpness),
+                               (void *)&sharpness);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+status_t QCameraHardwareInterface::setSaturation(const QCameraParameters& params)
+{
+    bool ret = false;
+    int rc = MM_CAMERA_OK;
+    ALOGE("%s",__func__);
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SATURATION);
+    if(!rc) {
+        ALOGE("%s:MM_CAMERA_PARM_SATURATION not supported", __func__);
+        return NO_ERROR;
+    }
+    int result;
+    int saturation = params.getInt(QCameraParameters::KEY_SATURATION);
+
+    if((saturation < CAMERA_MIN_SATURATION)
+        || (saturation > CAMERA_MAX_SATURATION))
+        return UNKNOWN_ERROR;
+
+    ALOGV("Setting saturation %d", saturation);
+    mParameters.set(QCameraParameters::KEY_SATURATION, saturation);
+    ret = native_set_parms(MM_CAMERA_PARM_SATURATION, sizeof(saturation),
+        (void *)&saturation, (int *)&result);
+    if(result != MM_CAMERA_OK)
+        ALOGI("Saturation Value: %d is not set as the selected value is not supported", saturation);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+status_t QCameraHardwareInterface::setContrast(const QCameraParameters& params)
+{
+   ALOGE("%s E", __func__ );
+   int rc = MM_CAMERA_OK;
+   rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_CONTRAST);
+   if(!rc) {
+        ALOGE("%s:CONTRAST not supported", __func__);
+        return NO_ERROR;
+    }
+   const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+   ALOGE("Scene mode (needed to gate contrast): %s", str);
+   int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+   if(value == CAMERA_BESTSHOT_OFF) {
+        int contrast = params.getInt(QCameraParameters::KEY_CONTRAST);
+        if((contrast < CAMERA_MIN_CONTRAST)
+                || (contrast > CAMERA_MAX_CONTRAST))
+        {
+            ALOGE("Contrast Value not matching");
+            return UNKNOWN_ERROR;
+        }
+        ALOGV("setting contrast %d", contrast);
+        mParameters.set(QCameraParameters::KEY_CONTRAST, contrast);
+        ALOGE("Calling Contrast set on Lower layer");
+        bool ret = native_set_parms(MM_CAMERA_PARM_CONTRAST, sizeof(contrast),
+                                   (void *)&contrast);
+        ALOGE("Lower layer returned %d", ret);
+        int bestshot_reconfigure;
+        cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+                            &bestshot_reconfigure);
+        if(bestshot_reconfigure) {
+             if (mContrast != contrast) {
+                  mContrast = contrast;
+                 if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+                      mRestartPreview = 1;
+                      pausePreviewForZSL();
+                  }
+             }
+        }
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    } else {
+          ALOGI(" Contrast value will not be set " \
+          "when the scenemode selected is %s", str);
+          return NO_ERROR;
+    }
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setSceneDetect(const QCameraParameters& params)
+{
+    ALOGE("%s",__func__);
+    bool retParm;
+    int rc = MM_CAMERA_OK;
+
+    rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_ASD_ENABLE);
+    if(!rc) {
+        ALOGE("%s:MM_CAMERA_PARM_ASD_ENABLE not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_SCENE_DETECT);
+    ALOGE("Scene Detect string : %s",str);
+    if (str != NULL) {
+        int32_t value = attr_lookup(scenedetect, sizeof(scenedetect) / sizeof(str_map), str);
+        ALOGE("Scenedetect Value : %d",value);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_SCENE_DETECT, str);
+
+            retParm = native_set_parms(MM_CAMERA_PARM_ASD_ENABLE, sizeof(value),
+                                       (void *)&value);
+
+            return retParm ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+   return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setZoom(const QCameraParameters& params)
+{
+    status_t rc = NO_ERROR;
+
+    ALOGE("%s: E",__func__);
+
+
+    if( !( cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ZOOM))) {
+        ALOGE("%s:MM_CAMERA_PARM_ZOOM not supported", __func__);
+        return NO_ERROR;
+    }
+    // No matter how many different zoom values the driver can provide, HAL
+    // provides applications the same number of zoom levels. The maximum driver
+    // zoom value depends on sensor output (VFE input) and preview size (VFE
+    // output) because VFE can only crop and cannot upscale. If the preview size
+    // is bigger, the maximum zoom ratio is smaller. However, we want the
+    // zoom ratio of each zoom level to always be the same whatever the preview
+    // size is. Ex: zoom level 1 is always 1.2x, zoom level 2 is 1.44x, etc. So,
+    // we need to have a fixed maximum zoom value and read it from the
+    // driver.
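+    // Worked example of the progression mentioned above: level 0 is 1.0x,
+    // level 1 is 1.2x, level 2 is 1.2^2 = 1.44x, and so on. A requested
+    // zoom_level N is forwarded below as zoom_value = ZOOM_STEP * N, and the
+    // zoomRatios[] table queried in initDefaultParameters() presumably holds
+    // the exact per-level ratio reported to applications.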
+    static const int ZOOM_STEP = 1;
+    int32_t zoom_level = params.getInt("zoom");
+    if(zoom_level >= 0 && zoom_level <= mMaxZoom-1) {
+        mParameters.set("zoom", zoom_level);
+        int32_t zoom_value = ZOOM_STEP * zoom_level;
+        bool ret = native_set_parms(MM_CAMERA_PARM_ZOOM,
+            sizeof(zoom_value), (void *)&zoom_value);
+        if(ret) {
+            mCurrentZoom=zoom_level;
+        }
+        rc = ret ? NO_ERROR : UNKNOWN_ERROR;
+    } else {
+        rc = BAD_VALUE;
+    }
+    ALOGE("%s X",__func__);
+    return rc;
+
+}
+
+status_t  QCameraHardwareInterface::setISOValue(const QCameraParameters& params) {
+
+    status_t rc = NO_ERROR;
+    ALOGE("%s",__func__);
+
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ISO);
+    if(!rc) {
+        ALOGE("%s:MM_CAMERA_PARM_ISO not supported", __func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_ISO_MODE);
+    ALOGE("ISO string : %s",str);
+    int8_t temp_hjr;
+    if (str != NULL) {
+        int value = (camera_iso_mode_type)attr_lookup(
+          iso, sizeof(iso) / sizeof(str_map), str);
+        ALOGE("ISO Value : %d",value);
+        if (value != NOT_FOUND) {
+            camera_iso_mode_type temp = (camera_iso_mode_type) value;
+            if (value == CAMERA_ISO_DEBLUR) {
+               temp_hjr = true;
+               native_set_parms(MM_CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+               mHJR = value;
+            }
+            else {
+               if (mHJR == CAMERA_ISO_DEBLUR) {
+                   temp_hjr = false;
+                   native_set_parms(MM_CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+                   mHJR = value;
+               }
+            }
+
+            mParameters.set(QCameraParameters::KEY_ISO_MODE, str);
+            native_set_parms(MM_CAMERA_PARM_ISO, sizeof(camera_iso_mode_type), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::updateFocusDistances()
+{
+    ALOGV("%s: IN", __FUNCTION__);
+    focus_distances_info_t focusDistances;
+    if(cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCUS_DISTANCES,
+      &focusDistances) == MM_CAMERA_OK) {
+        String8 str;
+        char buffer[32] = {0};
+        //set all distances to infinity if focus mode is infinity
+        if(mFocusMode == AF_MODE_INFINITY) {
+            snprintf(buffer, sizeof(buffer), "Infinity,");
+            str.append(buffer);
+            snprintf(buffer, sizeof(buffer), "Infinity,");
+            str.append(buffer);
+            snprintf(buffer, sizeof(buffer), "Infinity");
+            str.append(buffer);
+        } else {
+            snprintf(buffer, sizeof(buffer), "%f", focusDistances.focus_distance[0]);
+            str.append(buffer);
+            snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[1]);
+            str.append(buffer);
+            snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[2]);
+            str.append(buffer);
+        }
+        ALOGE("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+        mFocusDistance = str;
+        return NO_ERROR;
+    }
+    ALOGE("%s: get CAMERA_PARM_FOCUS_DISTANCES failed!!!", __FUNCTION__);
+    return BAD_VALUE;
+}
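+/* Illustrative result: the string built above is a comma-separated triple,
+ * presumably the near, optimal and far focus distances in meters per the
+ * Android KEY_FOCUS_DISTANCES convention, e.g. "0.10,0.15,0.20", or
+ * "Infinity,Infinity,Infinity" when the focus mode is infinity. */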
+
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+static int parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if(num == NULL) {
+        ALOGE("Invalid output array (num == NULL)");
+        return -1;
+    }
+    //check if string starts and ends with parentheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        ALOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+        return -1;
+    }
+    start = (char*) str;
+    start++;
+    for(int i=0; i<N; i++) {
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            ALOGE("Cannot find delimeter '%c' in string \"%s\". end = %c", delim, str, *end);
+            return -1;
+        }
+        start = end+1;
+    }
+    return 0;
+}
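+/* Illustrative use: parseNDimVector("(100,200,300,400,1)", values, 5) fills
+ * values[] with {100, 200, 300, 400, 1} and returns 0, while a string missing
+ * the surrounding parentheses or containing too few comma-separated entries
+ * returns -1. */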
+
+// parse string like "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+static int parseCameraAreaString(const char* str, int max_num_areas,
+                                 camera_area_t *pAreas, int *num_areas_found)
+{
+    char area_str[64];
+    const char *start, *end, *p;
+    start = str; end = NULL;
+    int values[5], index=0;
+    *num_areas_found = 0;
+
+    while(start != NULL) {
+       if(*start != '(') {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return -1;
+       }
+       end = strchr(start, ')');
+       if(end == NULL) {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return -1;
+       }
+       int i;
+       for (i=0, p=start; p<=end && i<(int)sizeof(area_str)-1; p++, i++) {
+           area_str[i] = *p;
+       }
+       area_str[i] = '\0';
+       if(parseNDimVector(area_str, values, 5) < 0){
+            ALOGE("%s: error: Failed to parse the area string: %s", __func__, area_str);
+            return -1;
+       }
+       // no more areas than max_num_areas are accepted.
+       if(index >= max_num_areas) {
+            ALOGE("%s: error: too many areas specified %s", __func__, str);
+            return -1;
+       }
+       pAreas[index].x1 = values[0];
+       pAreas[index].y1 = values[1];
+       pAreas[index].x2 = values[2];
+       pAreas[index].y2 = values[3];
+       pAreas[index].weight = values[4];
+
+       index++;
+       start = strchr(end, '('); // search for next '('
+    }
+    (*num_areas_found) = index;
+    return 0;
+}
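+/* Illustrative use: with str = "(-500,-500,500,500,1000),(0,0,0,0,0)" and
+ * max_num_areas = 2, two camera_area_t entries are filled in and
+ * *num_areas_found is set to 2; a third "(...)" group in the same string would
+ * make the call return -1 because it exceeds max_num_areas. */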
+static bool validateCameraAreas(camera_area_t *areas, int num_areas)
+{
+    for(int i=0; i<num_areas; i++) {
+
+        // handle special case (0, 0, 0, 0, 0)
+        if((areas[i].x1 == 0) && (areas[i].y1 == 0)
+            && (areas[i].x2 == 0) && (areas[i].y2 == 0) && (areas[i].weight == 0)) {
+            continue;
+        }
+        if(areas[i].x1 < -1000) return false;               // left should be >= -1000
+        if(areas[i].y1 < -1000) return false;               // top  should be >= -1000
+        if(areas[i].x2 > 1000) return false;                // right  should be <= 1000
+        if(areas[i].y2 > 1000) return false;                // bottom should be <= 1000
+        if(areas[i].weight <= 0 || areas[i].weight > 1000)  // weight should be in [1, 1000]
+            return false;
+        if(areas[i].x1 >= areas[i].x2) {                    // left should be < right
+            return false;
+        }
+        if(areas[i].y1 >= areas[i].y2)                      // top should be < bottom
+            return false;
+    }
+    return true;
+}
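+/* Illustrative examples against the checks above: (-1000,-1000,1000,1000,1)
+ * and the special reset tuple (0,0,0,0,0) pass, while (100,100,100,200,1)
+ * fails because left must be strictly less than right. */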
+
+status_t QCameraHardwareInterface::setFocusAreas(const QCameraParameters& params)
+{
+    ALOGE("%s: E", __func__);
+    status_t rc;
+    int max_num_af_areas = mParameters.getInt(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+    if(max_num_af_areas == 0) {
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_FOCUS_AREAS);
+    if (str == NULL) {
+        ALOGE("%s: Parameter string is null", __func__);
+        rc = NO_ERROR;
+    } else {
+        camera_area_t *areas = new camera_area_t[max_num_af_areas];
+        int num_areas_found=0;
+        if(parseCameraAreaString(str, max_num_af_areas, areas, &num_areas_found) < 0) {
+            ALOGE("%s: Failed to parse the string: %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        for(int i=0; i<num_areas_found; i++) {
+            ALOGD("FocusArea[%d] = (%d, %d, %d, %d, %d)", i, (areas[i].x1), (areas[i].y1),
+                        (areas[i].x2), (areas[i].y2), (areas[i].weight));
+        }
+        if(validateCameraAreas(areas, num_areas_found) == false) {
+            ALOGE("%s: invalid areas specified : %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, str);
+        num_areas_found = 1; //temp; need to change after the multi-roi is enabled
+
+        //if the native_set_parms is called when preview is not started, it
+        //crashes in lower layer, so return if preview is not started
+        if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED) {
+            delete [] areas;
+            return NO_ERROR;
+        }
+
+        //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+        //so no action is taken by the lower layer
+        if(num_areas_found == 1 && (areas[0].x1 == 0) && (areas[0].y1 == 0)
+            && (areas[0].x2 == 0) && (areas[0].y2 == 0) && (areas[0].weight == 0)) {
+            num_areas_found = 0;
+        }
+#if 1 //temp solution
+
+        roi_info_t af_roi_value;
+        memset(&af_roi_value, 0, sizeof(roi_info_t));
+        uint16_t x1, x2, y1, y2, dx, dy;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+        //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+        x1 = (uint16_t)((areas[0].x1 + 1000.0f)*(previewWidth/2000.0f));
+        y1 = (uint16_t)((areas[0].y1 + 1000.0f)*(previewHeight/2000.0f));
+        x2 = (uint16_t)((areas[0].x2 + 1000.0f)*(previewWidth/2000.0f));
+        y2 = (uint16_t)((areas[0].y2 + 1000.0f)*(previewHeight/2000.0f));
+        dx = x2 - x1;
+        dy = y2 - y1;
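+        /* Worked example of the mapping above: with previewWidth = 1280, an
+         * API x-coordinate of -1000 maps to 0, 0 maps to 640 and +1000 maps
+         * to 1280, i.e. x_pix = (x + 1000) * previewWidth / 2000. */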
+
+        af_roi_value.num_roi = num_areas_found;
+        af_roi_value.roi[0].x = x1;
+        af_roi_value.roi[0].y = y1;
+        af_roi_value.roi[0].dx = dx;
+        af_roi_value.roi[0].dy = dy;
+        af_roi_value.is_multiwindow = 0;
+        if (native_set_parms(MM_CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+        delete [] areas;
+#endif
+#if 0   //better solution with multi-roi, to be enabled later
+        af_mtr_area_t afArea;
+        afArea.num_area = num_areas_found;
+
+        uint16_t x1, x2, y1, y2, dx, dy;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+
+        for(int i=0; i<num_areas_found; i++) {
+            //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+            x1 = (uint16_t)((areas[i].x1 + 1000.0f)*(previewWidth/2000.0f));
+            y1 = (uint16_t)((areas[i].y1 + 1000.0f)*(previewHeight/2000.0f));
+            x2 = (uint16_t)((areas[i].x2 + 1000.0f)*(previewWidth/2000.0f));
+            y2 = (uint16_t)((areas[i].y2 + 1000.0f)*(previewHeight/2000.0f));
+            dx = x2 - x1;
+            dy = y2 - y1;
+            afArea.mtr_area[i].x = x1;
+            afArea.mtr_area[i].y = y1;
+            afArea.mtr_area[i].dx = dx;
+            afArea.mtr_area[i].dy = dy;
+            afArea.weight[i] = areas[i].weight;
+        }
+
+        if(native_set_parms(MM_CAMERA_PARM_AF_MTR_AREA, sizeof(af_mtr_area_t), (void*)&afArea))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+#endif
+    }
+    ALOGE("%s: X", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setMeteringAreas(const QCameraParameters& params)
+{
+    ALOGE("%s: E", __func__);
+    status_t rc;
+    int max_num_mtr_areas = mParameters.getInt(QCameraParameters::KEY_MAX_NUM_METERING_AREAS);
+    if(max_num_mtr_areas == 0) {
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_METERING_AREAS);
+    if (str == NULL) {
+        ALOGE("%s: Parameter string is null", __func__);
+        rc = NO_ERROR;
+    } else {
+        camera_area_t *areas = new camera_area_t[max_num_mtr_areas];
+        int num_areas_found=0;
+        if(parseCameraAreaString(str, max_num_mtr_areas, areas, &num_areas_found) < 0) {
+            ALOGE("%s: Failed to parse the string: %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        for(int i=0; i<num_areas_found; i++) {
+            ALOGD("MeteringArea[%d] = (%d, %d, %d, %d, %d)", i, (areas[i].x1), (areas[i].y1),
+                        (areas[i].x2), (areas[i].y2), (areas[i].weight));
+        }
+        if(validateCameraAreas(areas, num_areas_found) == false) {
+            ALOGE("%s: invalid areas specified : %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        mParameters.set(QCameraParameters::KEY_METERING_AREAS, str);
+
+        //if the native_set_parms is called when preview is not started, it
+        //crashes in lower layer, so return if preview is not started
+        if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED) {
+            delete [] areas;
+            return NO_ERROR;
+        }
+
+        num_areas_found = 1; //temp; need to change after the multi-roi is enabled
+
+        //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+        //so no action is taken by the lower layer
+        if(num_areas_found == 1 && (areas[0].x1 == 0) && (areas[0].y1 == 0)
+             && (areas[0].x2 == 0) && (areas[0].y2 == 0) && (areas[0].weight == 0)) {
+            num_areas_found = 0;
+        }
+#if 1
+        cam_set_aec_roi_t aec_roi_value;
+        uint16_t x1, x2, y1, y2;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+        //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+        x1 = (uint16_t)((areas[0].x1 + 1000.0f)*(previewWidth/2000.0f));
+        y1 = (uint16_t)((areas[0].y1 + 1000.0f)*(previewHeight/2000.0f));
+        x2 = (uint16_t)((areas[0].x2 + 1000.0f)*(previewWidth/2000.0f));
+        y2 = (uint16_t)((areas[0].y2 + 1000.0f)*(previewHeight/2000.0f));
+        delete [] areas;
+
+        if(num_areas_found == 1) {
+            aec_roi_value.aec_roi_enable = AEC_ROI_ON;
+            aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+            aec_roi_value.aec_roi_position.coordinate.x = (x1+x2)/2;
+            aec_roi_value.aec_roi_position.coordinate.y = (y1+y2)/2;
+        } else {
+            aec_roi_value.aec_roi_enable = AEC_ROI_OFF;
+            aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+            aec_roi_value.aec_roi_position.coordinate.x = DONT_CARE_COORDINATE;
+            aec_roi_value.aec_roi_position.coordinate.y = DONT_CARE_COORDINATE;
+        }
+
+        if(native_set_parms(MM_CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+#endif
+#if 0   //solution including multi-roi, to be enabled later
+        aec_mtr_area_t aecArea;
+        aecArea.num_area = num_areas_found;
+
+        uint16_t x1, x2, y1, y2, dx, dy;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+
+        for(int i=0; i<num_areas_found; i++) {
+            //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+            x1 = (uint16_t)((areas[i].x1 + 1000.0f)*(previewWidth/2000.0f));
+            y1 = (uint16_t)((areas[i].y1 + 1000.0f)*(previewHeight/2000.0f));
+            x2 = (uint16_t)((areas[i].x2 + 1000.0f)*(previewWidth/2000.0f));
+            y2 = (uint16_t)((areas[i].y2 + 1000.0f)*(previewHeight/2000.0f));
+            dx = x2 - x1;
+            dy = y2 - y1;
+            aecArea.mtr_area[i].x = x1;
+            aecArea.mtr_area[i].y = y1;
+            aecArea.mtr_area[i].dx = dx;
+            aecArea.mtr_area[i].dy = dy;
+            aecArea.weight[i] = areas[i].weight;
+        }
+        delete [] areas;
+
+        if(native_set_parms(MM_CAMERA_PARM_AEC_MTR_AREA, sizeof(aec_mtr_area_t), (void*)&aecArea))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+#endif
+    }
+    ALOGE("%s: X", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setFocusMode(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_FOCUS_MODE);
+    const char *prev_str = mParameters.get(QCameraParameters::KEY_FOCUS_MODE);
+    ALOGE("%s",__func__);
+    if (str != NULL) {
+        ALOGE("Focus mode %s",str);
+        int32_t value = attr_lookup(focus_modes,
+                                    sizeof(focus_modes) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_FOCUS_MODE, str);
+            mFocusMode = value;
+
+            if(updateFocusDistances() != NO_ERROR) {
+               ALOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, str);
+               return UNKNOWN_ERROR;
+            }
+            mParameters.set(QCameraParameters::KEY_FOCUS_DISTANCES, mFocusDistance.string());
+            if(mHasAutoFocusSupport){
+                bool ret = native_set_parms(MM_CAMERA_PARM_FOCUS_MODE,
+                                      sizeof(value),
+                                      (void *)&value);
+
+                int cafSupport = FALSE;
+                if(!strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ||
+                   !strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)){
+                    cafSupport = TRUE;
+                }
+                ALOGE("Continuous Auto Focus %d", cafSupport);
+                ret = native_set_parms(MM_CAMERA_PARM_CONTINUOUS_AF, sizeof(cafSupport),
+                                       (void *)&cafSupport);
+            }
+
+            return NO_ERROR;
+        }
+        ALOGE("%s:Could not look up str value",__func__);
+    }
+    ALOGE("Invalid focus mode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setSceneMode(const QCameraParameters& params)
+{
+    status_t rc = NO_ERROR;
+    ALOGE("%s",__func__);
+
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_BESTSHOT_MODE);
+    if(!rc) {
+        ALOGE("%s:Parameter Scenemode is not supported for this sensor", __func__);
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+    ALOGE("Scene Mode string : %s",str);
+
+    if (str != NULL) {
+        int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+        ALOGE("Setting Scenemode value = %d",value );
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_SCENE_MODE, str);
+            bool ret = native_set_parms(MM_CAMERA_PARM_BESTSHOT_MODE, sizeof(value),
+                                       (void *)&value);
+            int bestshot_reconfigure;
+            cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+                                &bestshot_reconfigure);
+            if(bestshot_reconfigure) {
+                if (mBestShotMode != value) {
+                    mBestShotMode = value;
+                    if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+                        mRestartPreview = 1;
+                        pausePreviewForZSL();
+                    }
+                }
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid scenemode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setSelectableZoneAf(const QCameraParameters& params)
+{
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    if(mHasAutoFocusSupport) {
+        const char *str = params.get(QCameraParameters::KEY_SELECTABLE_ZONE_AF);
+        if (str != NULL) {
+            int32_t value = attr_lookup(selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map), str);
+            if (value != NOT_FOUND) {
+                 rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FOCUS_RECT);
+                 if(!rc) {
+                    ALOGE("SelectableZoneAF  is not supported for this sensor");
+                    return NO_ERROR;
+                 }else {
+                    mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF, str);
+                    bool ret = native_set_parms(MM_CAMERA_PARM_FOCUS_RECT, sizeof(value),
+                            (void *)&value);
+                    return ret ? NO_ERROR : UNKNOWN_ERROR;
+                 }
+            }
+        }
+        ALOGE("Invalid selectable zone af value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setEffect(const QCameraParameters& params)
+{
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    const char *str = params.get(QCameraParameters::KEY_EFFECT);
+    int result;
+    if (str != NULL) {
+        ALOGE("Setting effect %s",str);
+        int32_t value = attr_lookup(effects, sizeof(effects) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+           rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EFFECT);
+           if(!rc) {
+               ALOGE("Camera Effect - %s mode is not supported for this sensor",str);
+               return NO_ERROR;
+           }else {
+               mParameters.set(QCameraParameters::KEY_EFFECT, str);
+               ALOGE("Setting effect to lower HAL : %d",value);
+               bool ret = native_set_parms(MM_CAMERA_PARM_EFFECT, sizeof(value),
+                                           (void *)&value,(int *)&result);
+               if(result != MM_CAMERA_OK) {
+                   ALOGI("Camera Effect: %s is not set as the selected value is not supported ", str);
+               }
+               int bestshot_reconfigure;
+               cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+                                   &bestshot_reconfigure);
+               if(bestshot_reconfigure) {
+                   if (mEffects != value) {
+                       mEffects = value;
+                       if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+                           mRestartPreview = 1;
+                           pausePreviewForZSL();
+                       }
+                   }
+               }
+               return ret ? NO_ERROR : UNKNOWN_ERROR;
+           }
+        }
+    }
+    ALOGE("Invalid effect value: %s", (str == NULL) ? "NULL" : str);
+    ALOGE("setEffect X");
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setBrightness(const QCameraParameters& params) {
+
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_BRIGHTNESS);
+   if(!rc) {
+       ALOGE("MM_CAMERA_PARM_BRIGHTNESS mode is not supported for this sensor");
+       return NO_ERROR;
+   }
+   int brightness = params.getInt("luma-adaptation");
+   if (mBrightness !=  brightness) {
+       ALOGV(" new brightness value : %d ", brightness);
+       mBrightness =  brightness;
+       mParameters.set("luma-adaptation", brightness);
+       bool ret = native_set_parms(MM_CAMERA_PARM_BRIGHTNESS, sizeof(mBrightness),
+                                   (void *)&mBrightness);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+   }
+
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setAutoExposure(const QCameraParameters& params)
+{
+
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EXPOSURE);
+   if(!rc) {
+       ALOGE("MM_CAMERA_PARM_EXPOSURE mode is not supported for this sensor");
+       return NO_ERROR;
+   }
+   const char *str = params.get(QCameraParameters::KEY_AUTO_EXPOSURE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(autoexposure, sizeof(autoexposure) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE, str);
+            bool ret = native_set_parms(MM_CAMERA_PARM_EXPOSURE, sizeof(value),
+                                       (void *)&value);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid auto exposure value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setExposureCompensation(
+        const QCameraParameters & params){
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+    if(!rc) {
+       ALOGE("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+       return NO_ERROR;
+    }
+    int numerator = params.getInt(QCameraParameters::KEY_EXPOSURE_COMPENSATION);
+    if(EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR <= numerator &&
+            numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+        int16_t  numerator16 = (int16_t)(numerator & 0x0000ffff);
+        uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+        uint32_t  value = 0;
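+        /* Pack the EV step for the lower layer: numerator in the upper 16
+         * bits, denominator in the lower 16 bits of the 32-bit value passed
+         * to MM_CAMERA_PARM_EXPOSURE_COMPENSATION below. */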
+        value = numerator16 << 16 | denominator16;
+
+        mParameters.set(QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+                            numerator);
+       bool ret = native_set_parms(MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+                                    sizeof(value), (void *)&value);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGE("Invalid Exposure Compensation");
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setWhiteBalance(const QCameraParameters& params)
+{
+
+     ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_WHITE_BALANCE);
+    if(!rc) {
+       ALOGE("MM_CAMERA_PARM_WHITE_BALANCE mode is not supported for this sensor");
+       return NO_ERROR;
+    }
+     int result;
+
+    const char *str = params.get(QCameraParameters::KEY_WHITE_BALANCE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(whitebalance, sizeof(whitebalance) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_WHITE_BALANCE, str);
+            bool ret = native_set_parms(MM_CAMERA_PARM_WHITE_BALANCE, sizeof(value),
+                                       (void *)&value, (int *)&result);
+            if(result != MM_CAMERA_OK) {
+                ALOGI("WhiteBalance Value: %s is not set as the selected value is not supported ", str);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid whitebalance value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setAntibanding(const QCameraParameters& params)
+{
+    int result;
+
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ANTIBANDING);
+    if(!rc) {
+       ALOGE("ANTIBANDING mode is not supported for this sensor");
+       return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_ANTIBANDING);
+    if (str != NULL) {
+        int value = (camera_antibanding_type)attr_lookup(
+          antibanding, sizeof(antibanding) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            camera_antibanding_type temp = (camera_antibanding_type) value;
+            ALOGE("Antibanding Value : %d",value);
+            mParameters.set(QCameraParameters::KEY_ANTIBANDING, str);
+            bool ret = native_set_parms(MM_CAMERA_PARM_ANTIBANDING,
+                       sizeof(camera_antibanding_type), (void *)&temp ,(int *)&result);
+            if(result != MM_CAMERA_OK) {
+                ALOGI("AntiBanding Value: %s is not supported for the given BestShot Mode", str);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid antibanding value: %s", (str == NULL) ? "NULL" : str);
+
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setPreviewFrameRate(const QCameraParameters& params)
+{
+    ALOGE("%s: E",__func__);
+    status_t rc = NO_ERROR;
+    uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+    ALOGV("%s: requested preview frame rate  is %d", __func__, fps);
+
+    mParameters.setPreviewFrameRate(fps);
+    ALOGE("%s: X",__func__);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setPreviewFrameRateMode(const QCameraParameters& params) {
+
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS);
+    if(!rc) {
+       ALOGE(" CAMERA FPS mode is not supported for this sensor");
+       return NO_ERROR;
+    }
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS_MODE);
+    if(!rc) {
+       ALOGE("CAMERA FPS MODE mode is not supported for this sensor");
+       return NO_ERROR;
+    }
+
+    const char *previousMode = mParameters.getPreviewFrameRateMode();
+    const char *str = params.getPreviewFrameRateMode();
+    if (NULL == previousMode) {
+        ALOGE("Previous Preview Frame Rate Mode is NULL\n");
+        return NO_ERROR;
+    }
+    if (NULL == str) {
+        ALOGE("Requested Preview Frame Rate Mode is NULL\n");
+        return NO_ERROR;
+    }
+    if( mInitialized && !strcmp(previousMode, str)) {
+        ALOGE("frame rate mode same as previous mode %s", previousMode);
+        return NO_ERROR;
+    }
+    int32_t frameRateMode = attr_lookup(frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map),str);
+    if(frameRateMode != NOT_FOUND) {
+        ALOGV("setPreviewFrameRateMode: %s ", str);
+        mParameters.setPreviewFrameRateMode(str);
+        bool ret = native_set_parms(MM_CAMERA_PARM_FPS_MODE, sizeof(frameRateMode), (void *)&frameRateMode);
+        if(!ret) return UNKNOWN_ERROR;
+        //set the fps value when changing modes
+        uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+        if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+            mParameters.setPreviewFrameRate(fps);
+            ret = native_set_parms(MM_CAMERA_PARM_FPS,
+                                        sizeof(fps), (void *)&fps);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        ALOGE("Invalid preview frame rate value: %d", fps);
+        return BAD_VALUE;
+    }
+    ALOGE("Invalid preview frame rate mode value: %s", (str == NULL) ? "NULL" : str);
+
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setSkinToneEnhancement(const QCameraParameters& params) {
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SCE_FACTOR);
+    if(!rc) {
+       ALOGE("SkinToneEnhancement is not supported for this sensor");
+       return NO_ERROR;
+    }
+     int skinToneValue = params.getInt("skinToneEnhancement");
+     if (mSkinToneEnhancement != skinToneValue) {
+          ALOGV(" new skinTone correction value : %d ", skinToneValue);
+          mSkinToneEnhancement = skinToneValue;
+          mParameters.set("skinToneEnhancement", skinToneValue);
+          bool ret = native_set_parms(MM_CAMERA_PARM_SCE_FACTOR, sizeof(mSkinToneEnhancement),
+                        (void *)&mSkinToneEnhancement);
+          return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setWaveletDenoise(const QCameraParameters& params) {
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_WAVELET_DENOISE);
+    if(rc != MM_CAMERA_PARM_SUPPORT_SET) {
+        ALOGE("Wavelet Denoise is not supported for this sensor");
+        /* TO DO */
+//        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_DENOISE);
+    if (str != NULL) {
+        int value = attr_lookup(denoise,
+                sizeof(denoise) / sizeof(str_map), str);
+        if ((value != NOT_FOUND) &&  (mDenoiseValue != value)) {
+            mDenoiseValue =  value;
+            mParameters.set(QCameraParameters::KEY_DENOISE, str);
+
+            char prop[PROPERTY_VALUE_MAX];
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.denoise.process.plates", prop, "0");
+
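+            /* denoise_param_t carries the enable flag plus the plate
+             * processing option read from persist.denoise.process.plates. */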
+            denoise_param_t temp;
+            memset(&temp, 0, sizeof(denoise_param_t));
+            temp.denoise_enable = value;
+            temp.process_plates = atoi(prop);
+            ALOGE("Denoise enable=%d, plates=%d", temp.denoise_enable, temp.process_plates);
+            bool ret = native_set_parms(MM_CAMERA_PARM_WAVELET_DENOISE, sizeof(temp),
+                    (void *)&temp);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        return NO_ERROR;
+    }
+    ALOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setVideoSize(const QCameraParameters& params)
+{
+    const char *str= NULL;
+    const char *str_t= NULL;
+    int old_vid_w = 0, old_vid_h = 0;
+    ALOGE("%s: E", __func__);
+    str = params.get(QCameraParameters::KEY_VIDEO_SIZE);
+    str_t = mParameters.get(CameraParameters::KEY_VIDEO_SIZE);
+    if(!str) {
+        mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+        //If application didn't set this parameter string, use the values from
+        //getPreviewSize() as video dimensions.
+        ALOGE("No Record Size requested, use the preview dimensions");
+        videoWidth = mPreviewWidth;
+        videoHeight = mPreviewHeight;
+    } else {
+        //Extract the record width and height that the application requested.
+        ALOGI("%s: requested record size %s", __func__, str);
+        if(!parse_size(str, videoWidth, videoHeight)) {
+            parse_size(str_t, old_vid_w, old_vid_h);
+            if(old_vid_w != videoWidth || old_vid_h != videoHeight) {
+                mRestartPreview = true;
+                ALOGE("%s: Video size changed to %s, restarting preview", __func__, str);
+            }
+            mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, str);
+            //VFE output1 shouldn't be greater than VFE output2.
+            if( (mPreviewWidth > videoWidth) || (mPreviewHeight > videoHeight)) {
+                //Set preview sizes as record sizes.
+                ALOGE("Preview size %dx%d is greater than record size %dx%d,\
+                        resetting preview size to record size",mPreviewWidth,
+                        mPreviewHeight, videoWidth, videoHeight);
+                mPreviewWidth = videoWidth;
+                mPreviewHeight = videoHeight;
+                mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight);
+            }
+
+            if(mIs3DModeOn == true) {
+                /* As preview and video frames are same in 3D mode,
+                 * preview size should be same as video size. This
+                 * change is needed to take care of video resolutions
+                 * like 720P and 1080p where the application can
+                 * request different preview sizes like 768x432
+                 */
+                ALOGE("3D mod is on");
+                mPreviewWidth = videoWidth;
+                mPreviewHeight = videoHeight;
+                mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight);
+            }
+        } else {
+            mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+            ALOGE("%s: error :failed to parse parameter record-size (%s)", __func__, str);
+            return BAD_VALUE;
+        }
+    }
+    ALOGE("%s: preview dimensions: %dx%d", __func__, mPreviewWidth, mPreviewHeight);
+    ALOGE("%s: video dimensions: %dx%d", __func__, videoWidth, videoHeight);
+    mDimension.display_width = mPreviewWidth;
+    mDimension.display_height= mPreviewHeight;
+    mDimension.orig_video_width = videoWidth;
+    mDimension.orig_video_height = videoHeight;
+    mDimension.video_width = videoWidth;
+    mDimension.video_height = videoHeight;
+
+    ALOGE("%s: X", __func__);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setCameraMode(const QCameraParameters& params) {
+    int32_t value = params.getInt(QCameraParameters::KEY_CAMERA_MODE);
+    mParameters.set(QCameraParameters::KEY_CAMERA_MODE,value);
+
+    ALOGI("ZSL is enabled  %d", value);
+    if (value == 1) {
+        myMode = (camera_mode_t)(myMode | CAMERA_ZSL_MODE);
+    } else {
+        myMode = (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setPowerMode(const QCameraParameters& params) {
+    uint32_t value = NORMAL_POWER;
+    const char *powermode = NULL;
+
+    powermode = params.get(QCameraParameters::KEY_POWER_MODE);
+    if (powermode != NULL) {
+        value = attr_lookup(power_modes,
+                sizeof(power_modes) / sizeof(str_map), powermode);
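+        /* Force low power mode whenever HFR is active (mHFRLevel > 1),
+         * regardless of the requested power mode. */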
+        if((value == LOW_POWER) || mHFRLevel > 1) {
+            ALOGI("Enable Low Power Mode");
+            value = LOW_POWER;
+            mPowerMode = value;
+            mParameters.set(QCameraParameters::KEY_POWER_MODE,"Low_Power");
+        } else {
+            ALOGE("Enable Normal Power Mode");
+            mPowerMode = value;
+            mParameters.set(QCameraParameters::KEY_POWER_MODE,"Normal_Power");
+        }
+    }
+
+    ALOGI("%s Low power mode %s value = %d", __func__,
+          value ? "Enabled" : "Disabled", value);
+    native_set_parms(MM_CAMERA_PARM_LOW_POWER_MODE, sizeof(value),
+                                               (void *)&value);
+    return NO_ERROR;
+}
+
+
+status_t QCameraHardwareInterface::setPreviewSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPreviewSize(&width, &height);
+    ALOGE("################requested preview size %d x %d", width, height);
+
+    // Validate the preview size
+    for (size_t i = 0; i <  mPreviewSizeCount; ++i) {
+        if (width ==  mPreviewSizes[i].width
+           && height ==  mPreviewSizes[i].height) {
+            mParameters.setPreviewSize(width, height);
+            ALOGE("setPreviewSize:  width: %d   heigh: %d", width, height);
+            mPreviewWidth = width;
+            mPreviewHeight = height;
+            mDimension.display_width = width;
+            mDimension.display_height = height;
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid preview size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setPreviewFpsRange(const QCameraParameters& params)
+{
+    ALOGV("%s: E", __func__);
+    int minFps,maxFps;
+    int prevMinFps, prevMaxFps;
+    int rc = NO_ERROR;
+    bool found = false;
+
+    mParameters.getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+    ALOGE("%s: Existing FpsRange Values:(%d, %d)", __func__, prevMinFps, prevMaxFps);
+    params.getPreviewFpsRange(&minFps,&maxFps);
+    ALOGE("%s: Requested FpsRange Values:(%d, %d)", __func__, minFps, maxFps);
+
+    if(mInitialized && (minFps == prevMinFps && maxFps == prevMaxFps)) {
+        ALOGE("%s: No change in FpsRange", __func__);
+        rc = NO_ERROR;
+        goto end;
+    }
+    for(size_t i=0; i<FPS_RANGES_SUPPORTED_COUNT; i++) {
+        // if the value is in the supported list
+        if(minFps==FpsRangesSupported[i].minFPS && maxFps == FpsRangesSupported[i].maxFPS){
+            found = true;
+            ALOGE("FPS: i=%d : minFps = %d, maxFps = %d ",i,FpsRangesSupported[i].minFPS,FpsRangesSupported[i].maxFPS );
+            mParameters.setPreviewFpsRange(minFps,maxFps);
+            // validate the values
+            bool valid = true;
+            // FPS can not be negative
+            if(minFps < 0 || maxFps < 0) valid = false;
+            // minFps must be <= maxFps
+            if(minFps > maxFps) valid = false;
+
+            if(valid) {
+                //Set the FPS mode
+                const char *str = (minFps == maxFps) ?
+                    QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE:
+                    QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE;
+                ALOGE("%s FPS_MODE = %s", __func__, str);
+                int32_t frameRateMode = attr_lookup(frame_rate_modes,
+                        sizeof(frame_rate_modes) / sizeof(str_map),str);
+                bool ret;
+                ret = native_set_parms(MM_CAMERA_PARM_FPS_MODE, sizeof(int32_t),
+                            (void *)&frameRateMode);
+
+                //set FPS values
+                uint32_t fps;  //lower 2 bytes specify maxFps and higher 2 bytes specify minFps
+                fps = ((uint32_t)(minFps/1000) << 16) + ((uint16_t)(maxFps/1000));
+                ret = native_set_parms(MM_CAMERA_PARM_FPS, sizeof(uint32_t), (void *)&fps);
+                mParameters.setPreviewFpsRange(minFps, maxFps);
+                if(ret)
+                    rc = NO_ERROR;
+                else {
+                    rc = BAD_VALUE;
+                    ALOGE("%s: error: native_set_params failed", __func__);
+                }
+            } else {
+                ALOGE("%s: error: invalid FPS range value", __func__);
+                rc = BAD_VALUE;
+            }
+        }
+    }
+    if(found == false){
+            ALOGE("%s: error: FPS range value not supported", __func__);
+            rc = BAD_VALUE;
+    }
+end:
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setJpegThumbnailSize(const QCameraParameters& params){
+    int width = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    int height = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    ALOGE("requested jpeg thumbnail size %d x %d", width, height);
+
+    // Validate the thumbnail size
+    for (unsigned int i = 0; i < thumbnail_sizes_count; ++i) {
+       if (width == default_thumbnail_sizes[i].width
+         && height == default_thumbnail_sizes[i].height) {
+           thumbnailWidth = width;
+           thumbnailHeight = height;
+           mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+           mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+           return NO_ERROR;
+       }
+    }
+    ALOGE("error: setting jpeg thumbnail size");
+    return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setPictureSize(const QCameraParameters& params)
+{
+    int width, height;
+    ALOGE("QualcommCameraHardware::setPictureSize E");
+    params.getPictureSize(&width, &height);
+    ALOGE("requested picture size %d x %d", width, height);
+
+    // Validate the picture size
+    for (int i = 0; i < mSupportedPictureSizesCount; ++i) {
+        if (width == mPictureSizesPtr[i].width
+          && height == mPictureSizesPtr[i].height) {
+            int old_width, old_height;
+            mParameters.getPictureSize(&old_width,&old_height);
+            if(width != old_width || height != old_height) {
+                mRestartPreview = true;
+            }
+            mParameters.setPictureSize(width, height);
+            mDimension.picture_width = width;
+            mDimension.picture_height = height;
+            return NO_ERROR;
+        }
+    }
+    /* Dimension not among the ones in the list. Check if
+     * its a valid dimension, if it is, then configure the
+     * camera accordingly. else reject it.
+     */
+    if( isValidDimension(width, height) ) {
+        mParameters.setPictureSize(width, height);
+        mDimension.picture_width = width;
+        mDimension.picture_height = height;
+        return NO_ERROR;
+    } else
+        ALOGE("Invalid picture size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setJpegRotation(int isZsl) {
+    return mm_jpeg_encoder_setRotation(mRotation, isZsl);
+}
+
+int QCameraHardwareInterface::getJpegRotation(void) {
+    return mRotation;
+}
+
+int QCameraHardwareInterface::getISOSpeedValue()
+{
+    const char *iso_str = mParameters.get(QCameraParameters::KEY_ISO_MODE);
+    int iso_index = attr_lookup(iso, sizeof(iso) / sizeof(str_map), iso_str);
+    if (iso_index == NOT_FOUND) {
+        /* Unknown or unset ISO mode: fall back to the first table entry. */
+        iso_index = 0;
+    }
+    int iso_value = iso_speed_values[iso_index];
+    return iso_value;
+}
+
+
+status_t QCameraHardwareInterface::setJpegQuality(const QCameraParameters& params) {
+    status_t rc = NO_ERROR;
+    int quality = params.getInt(QCameraParameters::KEY_JPEG_QUALITY);
+    ALOGE("setJpegQuality E");
+    if (quality >= 0 && quality <= 100) {
+        mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, quality);
+        mJpegQuality = quality;
+    } else {
+        ALOGE("Invalid jpeg quality=%d", quality);
+        rc = BAD_VALUE;
+    }
+
+    quality = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, quality);
+    } else {
+        ALOGE("Invalid jpeg thumbnail quality=%d", quality);
+        rc = BAD_VALUE;
+    }
+    ALOGE("setJpegQuality X");
+    return rc;
+}
+
+status_t QCameraHardwareInterface::
+setNumOfSnapshot(const QCameraParameters& params) {
+    status_t rc = NO_ERROR;
+
+    int num_of_snapshot = getNumOfSnapshots(params);
+
+    if (num_of_snapshot <= 0) {
+        num_of_snapshot = 1;
+    }
+    ALOGI("number of snapshots = %d", num_of_snapshot);
+    mParameters.set("num-snaps-per-shutter", num_of_snapshot);
+
+    bool result = native_set_parms(MM_CAMERA_PARM_SNAPSHOT_BURST_NUM,
+                                   sizeof(int),
+                                   (void *)&num_of_snapshot);
+    if(!result)
+        ALOGI("%s:Failure setting number of snapshots!!!", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setPreviewFormat(const QCameraParameters& params) {
+    const char *str = params.getPreviewFormat();
+    int32_t previewFormat = attr_lookup(preview_formats, sizeof(preview_formats) / sizeof(str_map), str);
+    if(previewFormat != NOT_FOUND) {
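+        /* Map the HAL preview format to the matching mm-camera format and
+         * padding entry; fall back to NV21 with word padding when no entry
+         * matches. */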
+        int num = sizeof(preview_format_info_list)/sizeof(preview_format_info_t);
+        int i;
+
+        for (i = 0; i < num; i++) {
+          if (preview_format_info_list[i].Hal_format == previewFormat) {
+            mPreviewFormatInfo = preview_format_info_list[i];
+            break;
+          }
+        }
+
+        if (i == num) {
+          mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+          mPreviewFormatInfo.padding = CAMERA_PAD_TO_WORD;
+          return BAD_VALUE;
+        }
+        bool ret = native_set_parms(MM_CAMERA_PARM_PREVIEW_FORMAT, sizeof(cam_format_t),
+                                   (void *)&mPreviewFormatInfo.mm_cam_format);
+        mParameters.set(QCameraParameters::KEY_PREVIEW_FORMAT, str);
+        mPreviewFormat = mPreviewFormatInfo.mm_cam_format;
+        ALOGI("Setting preview format to %d, i =%d, num=%d, hal_format=%d",
+             mPreviewFormat, i, num, mPreviewFormatInfo.Hal_format);
+        return NO_ERROR;
+    } else if ( strTexturesOn ) {
+      mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+      mPreviewFormatInfo.padding = CAMERA_PAD_TO_4K;
+    } else {
+      mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+      mPreviewFormatInfo.padding = CAMERA_PAD_TO_WORD;
+    }
+    ALOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setStrTextures(const QCameraParameters& params) {
+    const char *str = params.get("strtextures");
+    const char *prev_str = mParameters.get("strtextures");
+
+    if(str != NULL) {
+        if((prev_str != NULL) && !strcmp(str, prev_str)) {
+            return NO_ERROR;
+        }
+        int str_size = strlen(str);
+        mParameters.set("strtextures", str);
+        if(str_size == 2) {
+            if(!strncmp(str, "on", str_size) || !strncmp(str, "ON", str_size)){
+                ALOGI("Resetting mUseOverlay to false");
+                strTexturesOn = true;
+                mUseOverlay = false;
+            }
+        }else if(str_size == 3){
+            if (!strncmp(str, "off", str_size) || !strncmp(str, "OFF", str_size)) {
+                strTexturesOn = false;
+                mUseOverlay = true;
+            }
+        }
+
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setFlash(const QCameraParameters& params)
+{
+    ALOGI("%s: E",__func__);
+    int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LED_MODE);
+    if(!rc) {
+        ALOGE("%s:LED FLASH not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_FLASH_MODE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(flash, sizeof(flash) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_FLASH_MODE, str);
+            bool ret = native_set_parms(MM_CAMERA_PARM_LED_MODE,
+                                       sizeof(value), (void *)&value);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid flash mode value: %s", (str == NULL) ? "NULL" : str);
+
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setAecAwbLock(const QCameraParameters & params)
+{
+    ALOGD("%s : E", __func__);
+    status_t rc = NO_ERROR;
+    int32_t value;
+    const char* str;
+
+    //for AEC lock
+    str = params.get(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+    if (str != NULL) {
+        value = (strcmp(str, "true") == 0)? 1 : 0;
+        mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK, str);
+        rc = (native_set_parms(MM_CAMERA_PARM_AEC_LOCK, sizeof(int32_t), (void *)(&value))) ?
+                                NO_ERROR : UNKNOWN_ERROR;
+    }
+
+    //for AWB lock
+    str = params.get(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+    if (str != NULL) {
+        value = (strcmp(str, "true") == 0)? 1 : 0;
+        mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, str);
+        rc = (native_set_parms(MM_CAMERA_PARM_AWB_LOCK, sizeof(int32_t), (void *)(&value))) ?
+                            NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGD("%s : X", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setOverlayFormats(const QCameraParameters& params)
+{
+    mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+    if(mIs3DModeOn == true) {
+       int ovFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP|HAL_3D_IN_SIDE_BY_SIDE_L_R|HAL_3D_OUT_SIDE_BY_SIDE;
+        mParameters.set("overlay-format", ovFormat);
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setMCEValue(const QCameraParameters& params)
+{
+    ALOGE("%s",__func__);
+    status_t rc = NO_ERROR;
+    rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_MCE);
+   if(!rc) {
+       ALOGE("MM_CAMERA_PARM_MCE mode is not supported for this sensor");
+       return NO_ERROR;
+   }
+   const char *str = params.get(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT);
+    if (str != NULL) {
+        int value = attr_lookup(mce, sizeof(mce) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            ALOGI("%s: setting MCE value of %s", __FUNCTION__, str);
+            mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT, str);
+
+            native_set_parms(MM_CAMERA_PARM_MCE, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid MCE value: %s", (str == NULL) ? "NULL" : str);
+
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setHighFrameRate(const QCameraParameters& params)
+{
+
+    bool mCameraRunning;
+
+    int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_HFR);
+    if(!rc) {
+        ALOGE("%s: MM_CAMERA_PARM_HFR not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if (str != NULL) {
+        int value = attr_lookup(hfr, sizeof(hfr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mHFRLevel = (int32_t)value;
+            //Check for change in HFR value
+            const char *oldHfr = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+            if((oldHfr == NULL) || strcmp(oldHfr, str)){
+                mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE, str);
+//              mHFRMode = true;
+                mCameraRunning = isPreviewRunning();
+                if(mCameraRunning == true) {
+//                    mHFRThreadWaitLock.lock();
+//                    pthread_attr_t pattr;
+//                    pthread_attr_init(&pattr);
+//                    pthread_attr_setdetachstate(&pattr, PTHREAD_CREATE_DETACHED);
+//                    mHFRThreadRunning = !pthread_create(&mHFRThread,
+//                                      &pattr,
+//                                      hfr_thread,
+//                                      (void*)NULL);
+//                    mHFRThreadWaitLock.unlock();
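+                    /* A new HFR level requires stream reconfiguration: stop
+                     * the preview, apply the HFR parameter, then restart. */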
+                    stopPreviewInternal();
+                    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+                    native_set_parms(MM_CAMERA_PARM_HFR, sizeof(int32_t), (void *)&mHFRLevel);
+                    mPreviewState = QCAMERA_HAL_PREVIEW_START;
+                    if (startPreview2() == NO_ERROR)
+                        mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+                    return NO_ERROR;
+                }
+            }
+            native_set_parms(MM_CAMERA_PARM_HFR, sizeof(int32_t), (void *)&mHFRLevel);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid HFR value: %s", (str == NULL) ? "NULL" : str);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setLensshadeValue(const QCameraParameters& params)
+{
+
+    int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ROLLOFF);
+    if(!rc) {
+        ALOGE("%s:LENS SHADING not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_LENSSHADE);
+    if (str != NULL) {
+        int value = attr_lookup(lensshade,
+                                    sizeof(lensshade) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            mParameters.set(QCameraParameters::KEY_LENSSHADE, str);
+            native_set_parms(MM_CAMERA_PARM_ROLLOFF, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid lensShade value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setFaceDetect(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_FACE_DETECTION);
+    ALOGE("setFaceDetect: %s", str);
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                sizeof(facedetection) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mFaceDetectOn = value;
+            ALOGE("%s Face detection value = %d",__func__, value);
+            cam_ctrl_dimension_t dim;
+//            cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+//            preview_parm_config (&dim, mParameters);
+//            cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+            native_set_parms(MM_CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+            mParameters.set(QCameraParameters::KEY_FACE_DETECTION, str);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setFaceDetection(const char *str)
+{
+    if(supportsFaceDetection() == false){
+        ALOGE("Face detection is not enabled");
+        return NO_ERROR;
+    }
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                                    sizeof(facedetection) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mMetaDataWaitLock.lock();
+            mFaceDetectOn = value;
+            mMetaDataWaitLock.unlock();
+            mParameters.set(QCameraParameters::KEY_FACE_DETECTION, str);
+            native_set_parms(MM_CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setAEBracket(const QCameraParameters& params)
+{
+    if(!cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_HDR) || (myMode & CAMERA_ZSL_MODE)) {
+        ALOGI("Parameter HDR is not supported for this sensor/ ZSL mode");
+
+        if (myMode & CAMERA_ZSL_MODE) {
+            ALOGE("In ZSL mode, reset AEBBracket to HDR_OFF mode");
+            exp_bracketing_t temp;
+            memset(&temp, 0, sizeof(temp));
+            mHdrMode = HDR_BRACKETING_OFF;
+            temp.hdr_enable= FALSE;
+            temp.mode = HDR_BRACKETING_OFF;
+            native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+        }
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_AE_BRACKET_HDR);
+
+    if (str != NULL) {
+        int value = attr_lookup(hdr_bracket,
+                                    sizeof(hdr_bracket) / sizeof(str_map), str);
+        exp_bracketing_t temp;
+        memset(&temp, 0, sizeof(temp));
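+        /* HDR_MODE enables HDR bracketing, EXP_BRACKETING_MODE uses the
+         * app-supplied "capture-burst-exposures" list, and anything else
+         * falls back to bracketing off. */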
+        switch (value) {
+            case HDR_MODE:
+                {
+                    mHdrMode = HDR_MODE;
+                    temp.hdr_enable= TRUE;
+                    temp.mode = HDR_MODE;
+                    temp.total_frames = 3;
+                    temp.total_hal_frames = getNumOfSnapshots();
+                    ALOGI("%s: setting HDR frames (%d)", __FUNCTION__, temp.total_hal_frames);
+                    native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                }
+                break;
+            case EXP_BRACKETING_MODE:
+                {
+                    int numFrames = getNumOfSnapshots();
+                    const char *str_val = params.get("capture-burst-exposures");
+                    if ((str_val != NULL) && (strlen(str_val)>0)) {
+                        ALOGI("%s: capture-burst-exposures %s", __FUNCTION__, str_val);
+
+                        mHdrMode = EXP_BRACKETING_MODE;
+                        temp.hdr_enable = FALSE;
+                        temp.mode = EXP_BRACKETING_MODE;
+                        temp.total_frames = (numFrames >  MAX_SNAPSHOT_BUFFERS -2) ? MAX_SNAPSHOT_BUFFERS -2 : numFrames;
+                        temp.total_hal_frames = temp.total_frames;
+                        strlcpy(temp.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+                        ALOGI("%s: setting Exposure Bracketing value of %s, frame (%d)", __FUNCTION__, temp.values, temp.total_hal_frames);
+                        native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                    }
+                    else {
+                        /* Apps not set capture-burst-exposures, error case fall into bracketing off mode */
+                        ALOGI("%s: capture-burst-exposures not set, back to HDR OFF mode", __FUNCTION__);
+                        mHdrMode = HDR_BRACKETING_OFF;
+                        temp.hdr_enable= FALSE;
+                        temp.mode = HDR_BRACKETING_OFF;
+                        native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                    }
+                }
+                break;
+            case HDR_BRACKETING_OFF:
+            default:
+                {
+                    mHdrMode = HDR_BRACKETING_OFF;
+                    temp.hdr_enable= FALSE;
+                    temp.mode = HDR_BRACKETING_OFF;
+                    native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                }
+                break;
+        }
+
+        /* save the value*/
+        mParameters.set(QCameraParameters::KEY_AE_BRACKET_HDR, str);
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setCaptureBurstExp()
+{
+    char burst_exp[PROPERTY_VALUE_MAX];
+    memset(burst_exp, 0, sizeof(burst_exp));
+    property_get("persist.capture.burst.exposures", burst_exp, "");
+    if (strlen(burst_exp) > 0)
+      mParameters.set("capture-burst-exposures", burst_exp);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setRedeyeReduction(const QCameraParameters& params)
+{
+    if(supportsRedEyeReduction() == false) {
+        ALOGE("Parameter Redeye Reduction is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_REDEYE_REDUCTION);
+    if (str != NULL) {
+        int value = attr_lookup(redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            ALOGI("%s: setting Redeye Reduction value of %s", __FUNCTION__, str);
+            mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION, str);
+
+            native_set_parms(MM_CAMERA_PARM_REDEYE_REDUCTION, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Redeye Reduction value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setGpsLocation(const QCameraParameters& params)
+{
+    const char *method = params.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+    if (method) {
+        mParameters.set(QCameraParameters::KEY_GPS_PROCESSING_METHOD, method);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+    }
+
+    const char *latitude = params.get(QCameraParameters::KEY_GPS_LATITUDE);
+    if (latitude) {
+        ALOGE("latitude %s",latitude);
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE, latitude);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LATITUDE);
+    }
+
+    const char *latitudeRef = params.get(QCameraParameters::KEY_GPS_LATITUDE_REF);
+    if (latitudeRef) {
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF, latitudeRef);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LATITUDE_REF);
+    }
+
+    const char *longitude = params.get(QCameraParameters::KEY_GPS_LONGITUDE);
+    if (longitude) {
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE, longitude);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LONGITUDE);
+    }
+
+    const char *longitudeRef = params.get(QCameraParameters::KEY_GPS_LONGITUDE_REF);
+    if (longitudeRef) {
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, longitudeRef);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LONGITUDE_REF);
+    }
+
+    const char *altitudeRef = params.get(QCameraParameters::KEY_GPS_ALTITUDE_REF);
+    if (altitudeRef) {
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, altitudeRef);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_ALTITUDE_REF);
+    }
+
+    const char *altitude = params.get(QCameraParameters::KEY_GPS_ALTITUDE);
+    if (altitude) {
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE, altitude);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_ALTITUDE);
+    }
+
+    const char *status = params.get(QCameraParameters::KEY_GPS_STATUS);
+    if (status) {
+        mParameters.set(QCameraParameters::KEY_GPS_STATUS, status);
+    }
+
+    const char *dateTime = params.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if (dateTime) {
+        mParameters.set(QCameraParameters::KEY_EXIF_DATETIME, dateTime);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_EXIF_DATETIME);
+    }
+
+    const char *timestamp = params.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+    if (timestamp) {
+        mParameters.set(QCameraParameters::KEY_GPS_TIMESTAMP, timestamp);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_TIMESTAMP);
+    }
+    ALOGE("setGpsLocation X");
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setRotation(const QCameraParameters& params)
+{
+    status_t rc = NO_ERROR;
+    int rotation = params.getInt(QCameraParameters::KEY_ROTATION);
+    if (rotation != NOT_FOUND) {
+        if (rotation == 0 || rotation == 90 || rotation == 180
+            || rotation == 270) {
+          mParameters.set(QCameraParameters::KEY_ROTATION, rotation);
+          mRotation = rotation;
+        } else {
+            ALOGE("Invalid rotation value: %d", rotation);
+            rc = BAD_VALUE;
+        }
+    }
+    ALOGE("setRotation");
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setDenoise(const QCameraParameters& params)
+{
+#if 0
+    if(!mCfgControl.mm_camera_is_supported(MM_CAMERA_PARM_WAVELET_DENOISE)) {
+        ALOGE("Wavelet Denoise is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_DENOISE);
+    if (str != NULL) {
+        int value = attr_lookup(denoise,
+        sizeof(denoise) / sizeof(str_map), str);
+        if ((value != NOT_FOUND) &&  (mDenoiseValue != value)) {
+        mDenoiseValue =  value;
+        mParameters.set(QCameraParameters::KEY_DENOISE, str);
+        bool ret = native_set_parms(MM_CAMERA_PARM_WAVELET_DENOISE, sizeof(value),
+                                               (void *)&value);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        return NO_ERROR;
+    }
+    ALOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+#endif
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setOrientation(const QCameraParameters& params)
+{
+    const char *str = params.get("orientation");
+
+    if (str != NULL) {
+        if (strcmp(str, "portrait") == 0 || strcmp(str, "landscape") == 0) {
+            // Camera service needs this to decide if the preview frames and raw
+            // pictures should be rotated.
+            mParameters.set("orientation", str);
+        } else {
+            ALOGE("Invalid orientation value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setPictureFormat(const QCameraParameters& params)
+{
+    const char * str = params.get(QCameraParameters::KEY_PICTURE_FORMAT);
+
+    if(str != NULL){
+        int32_t value = attr_lookup(picture_formats,
+                                    sizeof(picture_formats) / sizeof(str_map), str);
+        if(value != NOT_FOUND){
+            mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT, str);
+        } else {
+            ALOGE("Invalid Picture Format value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setRecordingHintValue(const int32_t value)
+{
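+    /* Besides passing the hint down, a set hint also enables CAF; DIS and
+     * full-size liveshot are refreshed in either case. */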
+    native_set_parms(MM_CAMERA_PARM_RECORDING_HINT, sizeof(value),
+                                           (void *)&value);
+    if (value == TRUE){
+        native_set_parms(MM_CAMERA_PARM_CAF_ENABLE, sizeof(value),
+                                           (void *)&value);
+    }
+    setDISMode();
+    setFullLiveshot();
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setRecordingHint(const QCameraParameters& params)
+{
+
+  const char * str = params.get(QCameraParameters::KEY_RECORDING_HINT);
+
+  if(str != NULL){
+      int32_t value = attr_lookup(recording_Hints,
+                                  sizeof(recording_Hints) / sizeof(str_map), str);
+      if(value != NOT_FOUND){
+          mRecordingHint = value;
+          setRecordingHintValue(mRecordingHint);
+          mParameters.set(QCameraParameters::KEY_RECORDING_HINT, str);
+          return NO_ERROR;
+      } else {
+          ALOGE("Invalid Picture Format value: %s", str);
+          setDISMode();
+          setFullLiveshot();
+          return BAD_VALUE;
+      }
+  }
+  setDISMode();
+  setFullLiveshot();
+  return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setDISMode() {
+  /* Enable DIS only if
+   * - Camcorder mode AND
+   * - DIS property is set AND
+   * - Not in Low power mode. */
+  uint32_t value = mRecordingHint && mDisEnabled
+                   && !isLowPowerCamcorder();
+
+  ALOGI("%s DIS is %s value = %d", __func__,
+          value ? "Enabled" : "Disabled", value);
+  native_set_parms(MM_CAMERA_PARM_DIS_ENABLE, sizeof(value),
+                                               (void *)&value);
+  return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setFullLiveshot()
+{
+  /* Enable full size liveshot only if
+   * - Camcorder mode AND
+   * - Full size liveshot is enabled. */
+  uint32_t value = mRecordingHint && mFullLiveshotEnabled
+                   && !isLowPowerCamcorder();
+
+  if (((mDimension.picture_width == mDimension.video_width) &&
+      (mDimension.picture_height == mDimension.video_height))) {
+    /* If video size matches the live snapshot size
+     * turn off full size liveshot to get higher fps. */
+    value = 0;
+  }
+
+  ALOGI("%s Full size liveshot %s value = %d", __func__,
+          value ? "Enabled" : "Disabled", value);
+  native_set_parms(MM_CAMERA_PARM_FULL_LIVESHOT, sizeof(value),
+                                               (void *)&value);
+  return NO_ERROR;
+}
+
+
+isp3a_af_mode_t QCameraHardwareInterface::getAutoFocusMode(
+  const QCameraParameters& params)
+{
+  isp3a_af_mode_t afMode = AF_MODE_MAX;
+  afMode = (isp3a_af_mode_t)mFocusMode;
+  return afMode;
+}
+
+void QCameraHardwareInterface::getPictureSize(int *picture_width,
+                                              int *picture_height) const
+{
+    mParameters.getPictureSize(picture_width, picture_height);
+}
+
+void QCameraHardwareInterface::getPreviewSize(int *preview_width,
+                                              int *preview_height) const
+{
+    mParameters.getPreviewSize(preview_width, preview_height);
+}
+
+cam_format_t QCameraHardwareInterface::getPreviewFormat() const
+{
+    cam_format_t format = CAMERA_YUV_420_NV21;
+    const char *str = mParameters.getPreviewFormat();
+    int32_t value = attr_lookup(preview_formats,
+                                sizeof(preview_formats)/sizeof(str_map),
+                                str);
+
+    if(value != NOT_FOUND) {
+        int num = sizeof(preview_format_info_list)/sizeof(preview_format_info_t);
+        int i;
+        for (i = 0; i < num; i++) {
+          if (preview_format_info_list[i].Hal_format == value) {
+            format = preview_format_info_list[i].mm_cam_format;
+            break;
+          }
+        }
+    }
+
+    return format;
+}
+
+cam_pad_format_t QCameraHardwareInterface::getPreviewPadding() const
+{
+  return mPreviewFormatInfo.padding;
+}
+
+int QCameraHardwareInterface::getJpegQuality() const
+{
+    return mJpegQuality;
+}
+
+int QCameraHardwareInterface::getNumOfSnapshots(void) const
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    ALOGI("%s: prop enable/disable = %d", __func__, atoi(prop));
+    if (atoi(prop)) {
+        ALOGE("%s: Reading maximum no of snapshots = %d"
+             "from properties", __func__, atoi(prop));
+        return atoi(prop);
+    } else {
+        return mParameters.getInt("num-snaps-per-shutter");
+    }
+}
+
+int QCameraHardwareInterface::getNumOfSnapshots(const QCameraParameters& params)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    ALOGI("%s: prop enable/disable = %d", __func__, atoi(prop));
+    if (atoi(prop)) {
+        ALOGI("%s: Reading maximum no of snapshots = %d"
+             "from properties", __func__, atoi(prop));
+        return atoi(prop);
+    } else {
+        return params.getInt("num-snaps-per-shutter");
+    }
+
+}
+
+int QCameraHardwareInterface::
+getThumbSizesFromAspectRatio(uint32_t aspect_ratio,
+                             int *picture_width,
+                             int *picture_height)
+{
+    for(unsigned int i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ){
+        if(thumbnail_sizes[i].aspect_ratio == aspect_ratio)
+        {
+            *picture_width = thumbnail_sizes[i].width;
+            *picture_height = thumbnail_sizes[i].height;
+            return NO_ERROR;
+        }
+    }
+
+    return BAD_VALUE;
+}
+
+bool QCameraHardwareInterface::isRawSnapshot()
+{
+  const char *format = mParameters.getPictureFormat();
+    if( format!= NULL &&
+       !strcmp(format, QCameraParameters::PIXEL_FORMAT_RAW)){
+        return true;
+    }
+    else{
+        return false;
+    }
+}
+
+status_t QCameraHardwareInterface::setPreviewSizeTable(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* preview_size_table;
+    int preview_table_size;
+    int i = 0;
+    char str[16] = {0};
+
+    /* Initialize table with default values */
+    preview_size_table = default_preview_sizes;
+    preview_table_size = preview_sizes_count;
+
+
+    /* Get maximum preview size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PREVIEW_SIZE, &dim);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Failure getting Max Preview Size supported by camera",
+             __func__);
+        goto end;
+    }
+
+    ALOGD("%s: Max Preview Sizes Supported: %d X %d", __func__,
+         dim.width, dim.height);
+
+    for (i = 0; i < preview_table_size; i++) {
+        if ((preview_size_table->width <= dim.width) &&
+            (preview_size_table->height <= dim.height)) {
+            ALOGD("%s: Camera Preview Size Table "
+                 "Max width: %d height %d table_size: %d",
+                 __func__, preview_size_table->width,
+                 preview_size_table->height, preview_table_size - i);
+            break;
+        }
+        preview_size_table++;
+    }
+    //set preferred preview size to maximum preview size
+    sprintf(str, "%dx%d", preview_size_table->width, preview_size_table->height);
+    mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, str);
+    ALOGD("KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO = %s", str);
+
+end:
+    /* Save the table in global member*/
+    mPreviewSizes = preview_size_table;
+    mPreviewSizeCount = preview_table_size - i;
+
+    return ret;
+}
+
+status_t QCameraHardwareInterface::setPictureSizeTable(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* picture_size_table;
+    int picture_table_size;
+    int i = 0, count = 0;
+
+    /* Initialize table with default values */
+    picture_table_size = sizeof(default_picture_sizes)/
+        sizeof(default_picture_sizes[0]);
+    picture_size_table = default_picture_sizes;
+    mPictureSizes =
+        ( struct camera_size_type *)malloc(picture_table_size *
+                                           sizeof(struct camera_size_type));
+    if (mPictureSizes == NULL) {
+        ALOGE("%s: Failure allocating memory to store picture size table",__func__);
+        ret = NO_MEMORY;
+        goto end;
+    }
+
+    /* Get maximum picture size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PICTURE_SIZE, &dim);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Failure getting Max Picture Size supported by camera",
+             __func__);
+        free(mPictureSizes);
+        mPictureSizes = NULL;
+        goto end;
+    }
+
+    ALOGD("%s: Max Picture Sizes Supported: %d X %d", __func__,
+         dim.width, dim.height);
+
+    for (i = 0; i < picture_table_size; i++) {
+        /* We'll store those dimensions whose width AND height
+           are less than or equal to maximum supported */
+        if ((picture_size_table->width <= dim.width) &&
+            (picture_size_table->height <= dim.height)) {
+            ALOGD("%s: Camera Picture Size Table "
+                 "Max width: %d height %d table_size: %d",
+                 __func__, picture_size_table->width,
+                 picture_size_table->height, count+1);
+            mPictureSizes[count].height = picture_size_table->height;
+            mPictureSizes[count].width = picture_size_table->width;
+            count++;
+        }
+        picture_size_table++;
+    }
+    mPictureSizeCount = count;
+
+end:
+     /* In case of error, we use default picture sizes */
+     if (ret != NO_ERROR) {
+        mPictureSizes = default_picture_sizes;
+        mPictureSizeCount = picture_table_size;
+    }
+    return ret;
+}
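+/*
+ * Unlike setPreviewSizeTable(), which only advances a pointer into the default
+ * array, setPictureSizeTable() copies every entry that fits within the sensor
+ * maximum into a freshly malloc'ed mPictureSizes array; on any failure it
+ * falls back to the full default table so the advertised list is never empty.
+ */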
+
+status_t QCameraHardwareInterface::setVideoSizeTable(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* video_size_table;
+    int video_table_size;
+    int i = 0, count = 0;
+    ALOGE("%s: E", __func__);
+
+    /* Initialize table with default values */
+    video_table_size = video_sizes_count;
+    video_size_table = default_video_sizes;
+    mVideoSizes =
+        (struct camera_size_type *)malloc(video_table_size *
+                                           sizeof(struct camera_size_type));
+    if(mVideoSizes == NULL) {
+        ALOGE("%s: error allocating memory to store video size table",__func__);
+        ret = NO_MEMORY;
+        goto end;
+    }
+
+    /* Get maximum video size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_VIDEO_SIZE, &dim);
+    if(ret != NO_ERROR) {
+        ALOGE("%s: error getting Max Video Size supported by camera",
+             __func__);
+        free(mVideoSizes);
+        mVideoSizes = NULL;
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    ALOGD("%s: Max Video Size Supported: %d X %d", __func__,
+         dim.width, dim.height);
+
+    for(i=0; i < video_table_size; i++) {
+        /* We'll store those dimensions whose width AND height
+           are less than or equal to maximum supported */
+        if((video_size_table->width <= dim.width) &&
+            (video_size_table->height <= dim.height)) {
+            ALOGD("%s: Supported Video Size [%d] = %dx%d", __func__, count, video_size_table->width,
+                                    video_size_table->height);
+            mVideoSizes[count].height = video_size_table->height;
+            mVideoSizes[count].width = video_size_table->width;
+            count++;
+        }
+        video_size_table++;
+    }
+    mVideoSizeCount = count;
+
+end:
+    ALOGE("%s: X", __func__);
+    return ret;
+}
+
+void QCameraHardwareInterface::freeVideoSizeTable(void)
+{
+    if(mVideoSizes != NULL)
+    {
+        free(mVideoSizes);
+        mVideoSizes = NULL;
+    }
+    mVideoSizeCount = 0;
+}
+
+
+void QCameraHardwareInterface::freePictureTable(void)
+{
+    /* If we couldn't allocate memory to store picture table
+       we use the picture table pointer to point to default
+       picture table array. In that case we cannot free it.*/
+    if ((mPictureSizes != default_picture_sizes) && mPictureSizes) {
+        free(mPictureSizes);
+        mPictureSizes = NULL;
+    }
+}
+
+status_t QCameraHardwareInterface::setHistogram(int histogram_en)
+{
+    ALOGE("setHistogram: E");
+    if(mStatsOn == histogram_en) {
+        return NO_ERROR;
+    }
+
+    mSendData = histogram_en;
+    mStatsOn = histogram_en;
+    mCurrentHisto = -1;
+    mStatSize = sizeof(uint32_t)* HISTOGRAM_STATS_SIZE;
+
+    if (histogram_en == QCAMERA_PARM_ENABLE) {
+        /* Currently Ashmem multiplies the buffer size by the total number of
+         * buffers and page-aligns the result. This causes a crash in JNI, as
+         * each buffer is individually expected to be page aligned. */
+        int page_size_minus_1 = getpagesize() - 1;
+        int statSize = sizeof (camera_preview_histogram_info );
+        int32_t mAlignedStatSize = ((statSize + page_size_minus_1) & (~page_size_minus_1));
+#if 0
+        mStatHeap =
+        new AshmemPool(mAlignedStatSize, 3, statSize, "stat");
+        if (!mStatHeap->initialized()) {
+            ALOGE("Stat Heap X failed ");
+            mStatHeap.clear();
+            mStatHeap = NULL;
+            return UNKNOWN_ERROR;
+        }
+#endif
+        for(int cnt = 0; cnt<3; cnt++) {
+                mStatsMapped[cnt]=mGetMemory(-1, mStatSize, 1, mCallbackCookie);
+                if(mStatsMapped[cnt] == NULL) {
+                    ALOGE("Failed to get camera memory for stats heap index: %d", cnt);
+                    return(-1);
+                } else {
+                   ALOGE("Received following info for stats mapped data:%p,handle:%p, size:%d,release:%p",
+                   mStatsMapped[cnt]->data ,mStatsMapped[cnt]->handle, mStatsMapped[cnt]->size, mStatsMapped[cnt]->release);
+                }
+                mHistServer.size = sizeof(camera_preview_histogram_info);
+#ifdef USE_ION
+                if(allocate_ion_memory(&mHistServer, cnt, ION_CP_MM_HEAP_ID) < 0) {
+                  ALOGE("%s ION alloc failed\n", __func__);
+                  return -1;
+                }
+#else
+                mHistServer.fd[cnt] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+                if(mHistServer.fd[cnt] <= 0) {
+                    ALOGE("%s: no pmem for frame %d", __func__, cnt);
+                    return -1;
+                }
+#endif
+                mHistServer.camera_memory[cnt]=mGetMemory(mHistServer.fd[cnt],mHistServer.size, 1, mCallbackCookie);
+                if(mHistServer.camera_memory[cnt] == NULL) {
+                    ALOGE("Failed to get camera memory for server side histogram index: %d", cnt);
+                    return(-1);
+                } else {
+                   ALOGE("Received following info for server side histogram data:%p,handle:%p, size:%d,release:%p",
+                   mHistServer.camera_memory[cnt]->data ,mHistServer.camera_memory[cnt]->handle,
+                        mHistServer.camera_memory[cnt]->size, mHistServer.camera_memory[cnt]->release);
+                }
+                /*Register buffer at back-end*/
+                if (NO_ERROR != sendMappingBuf(0, cnt, mHistServer.fd[cnt],
+                                                   mHistServer.size, mCameraId,
+                                               CAM_SOCK_MSG_TYPE_HIST_MAPPING)) {
+                    ALOGE("%s could not send buffer to back-end\n", __func__);
+                }
+        }
+    }
+    ALOGV("Setting histogram = %d", histogram_en);
+    native_set_parms(MM_CAMERA_PARM_HISTOGRAM, sizeof(int), &histogram_en);
+    if(histogram_en == QCAMERA_PARM_DISABLE)
+    {
+        //release memory
+        for(int i=0; i<3; i++){
+            if(mStatsMapped[i] != NULL) {
+                mStatsMapped[i]->release(mStatsMapped[i]);
+            }
+            /*Unregister buffer at back-end */
+            if (NO_ERROR != sendUnMappingBuf(0, i, mCameraId, CAM_SOCK_MSG_TYPE_HIST_UNMAPPING)) {
+              ALOGE("%s could not unregister buffer from back-end\n", __func__);
+            }
+            if(mHistServer.camera_memory[i] != NULL) {
+                mHistServer.camera_memory[i]->release(mHistServer.camera_memory[i]);
+            }
+            close(mHistServer.fd[i]);
+#ifdef USE_ION
+            deallocate_ion_memory(&mHistServer, i);
+#endif
+        }
+    }
+    return NO_ERROR;
+}
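+/*
+ * Histogram buffer lifecycle (as implemented above): enabling allocates three
+ * app-side camera_memory_t heaps via mGetMemory plus three server-side ION (or
+ * pmem) buffers that are registered with the backend over the camera socket
+ * (CAM_SOCK_MSG_TYPE_HIST_MAPPING); disabling unregisters and releases them.
+ * Note that the early "return -1" paths above do not roll back buffers that
+ * were already allocated in earlier loop iterations.
+ */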
+
+status_t QCameraHardwareInterface::setZSLBurstLookBack(const QCameraParameters& params)
+{
+  const char *v = params.get("capture-burst-retroactive");
+  if (v) {
+    int look_back = atoi(v);
+    ALOGI("%s: look_back =%d", __func__, look_back);
+    mParameters.set("capture-burst-retroactive", look_back);
+  }
+  return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setZSLBurstInterval(const QCameraParameters& params)
+{
+  mZslInterval = BURST_INTREVAL_DEFAULT;
+  const char *v = params.get("capture-burst-interval");
+  if (v) {
+    int interval = atoi(v);
+    ALOGI("%s: Interval =%d", __func__, interval);
+    if(interval < BURST_INTREVAL_MIN ||interval > BURST_INTREVAL_MAX ) {
+      return BAD_VALUE;
+    }
+    mZslInterval =  interval;
+  }
+  return NO_ERROR;
+}
+
+int QCameraHardwareInterface::getZSLBurstInterval( void )
+{
+  int val;
+
+  if (mZslInterval == BURST_INTREVAL_DEFAULT) {
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.interval", prop, "1");
+    val = atoi(prop);
+    ALOGD("%s: prop interval = %d", __func__, val);
+  } else {
+    val = mZslInterval;
+  }
+  return val;
+}
+
+
+int QCameraHardwareInterface::getZSLQueueDepth(void) const
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.queuedepth", prop, "2");
+    ALOGI("%s: prop = %d", __func__, atoi(prop));
+    return atoi(prop);
+}
+
+int QCameraHardwareInterface::getZSLBackLookCount(void) const
+{
+    int look_back;
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.backlookcnt", prop, "0");
+    ALOGI("%s: prop = %d", __func__, atoi(prop));
+    look_back = atoi(prop);
+    if (look_back == 0 ) {
+      look_back = mParameters.getInt("capture-burst-retroactive");
+      ALOGE("%s: look_back = %d", __func__, look_back);
+    }
+    return look_back;
+}
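+/*
+ * The ZSL tunables above can be overridden at runtime through persist
+ * properties, e.g. (illustrative adb usage):
+ *   adb shell setprop persist.camera.zsl.interval 2
+ *   adb shell setprop persist.camera.zsl.queuedepth 4
+ *   adb shell setprop persist.camera.zsl.backlookcnt 1
+ * When persist.camera.zsl.backlookcnt is left at its default of "0", the
+ * app-supplied "capture-burst-retroactive" parameter is used instead.
+ */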
+
+//EXIF functions
+void QCameraHardwareInterface::deinitExifData()
+{
+    ALOGD("Clearing EXIF data");
+    for(int i=0; i<MAX_EXIF_TABLE_ENTRIES; i++)
+    {
+        //clear all data
+        memset(&mExifData[i], 0x00, sizeof(exif_tags_info_t));
+    }
+    mExifTableNumEntries = 0;
+}
+
+void QCameraHardwareInterface::addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+                        uint32_t count, uint8_t copy, void *data) {
+
+    if(mExifTableNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return;
+    }
+    int index = mExifTableNumEntries;
+    mExifData[index].tag_id = tagid;
+    mExifData[index].tag_entry.type = type;
+    mExifData[index].tag_entry.count = count;
+    mExifData[index].tag_entry.copy = copy;
+    if((type == EXIF_RATIONAL) && (count > 1))
+        mExifData[index].tag_entry.data._rats = (rat_t *)data;
+    else if((type == EXIF_RATIONAL) && (count == 1))
+        mExifData[index].tag_entry.data._rat = *(rat_t *)data;
+    else if(type == EXIF_ASCII)
+        mExifData[index].tag_entry.data._ascii = (char *)data;
+    else if(type == EXIF_BYTE)
+        mExifData[index].tag_entry.data._byte = *(uint8_t *)data;
+    else if((type == EXIF_SHORT) && (count > 1))
+        mExifData[index].tag_entry.data._shorts = (uint16_t *)data;
+    else if((type == EXIF_SHORT) && (count == 1))
+        mExifData[index].tag_entry.data._short = *(uint16_t *)data;
+    // Increase number of entries
+    mExifTableNumEntries++;
+}
+
+rat_t getRational(int num, int denom)
+{
+    rat_t temp = {num, denom};
+    return temp;
+}
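+/*
+ * getRational() packs a value into the numerator/denominator pair EXIF
+ * expects. For example, an altitude of 12.345m is stored below as
+ * getRational(value*1000, 1000), i.e. {12345, 1000}, and a GPS seconds value
+ * of 29.64" as getRational(296400, 10000).
+ */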
+
+void QCameraHardwareInterface::initExifData(){
+    if(mExifValues.dateTime) {
+        addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                  20, 1, (void *)mExifValues.dateTime);
+    }
+    addExifTag(EXIFTAGID_FOCAL_LENGTH, EXIF_RATIONAL, 1, 1, (void *)&(mExifValues.focalLength));
+    addExifTag(EXIFTAGID_ISO_SPEED_RATING,EXIF_SHORT,1,1,(void *)&(mExifValues.isoSpeed));
+
+    if(mExifValues.mGpsProcess) {
+        addExifTag(EXIFTAGID_GPS_PROCESSINGMETHOD, EXIF_ASCII,
+           EXIF_ASCII_PREFIX_SIZE + strlen(mExifValues.gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE) + 1,
+           1, (void *)mExifValues.gpsProcessingMethod);
+    }
+
+    if(mExifValues.mLatitude) {
+        addExifTag(EXIFTAGID_GPS_LATITUDE, EXIF_RATIONAL, 3, 1, (void *)mExifValues.latitude);
+
+        if(mExifValues.latRef) {
+            addExifTag(EXIFTAGID_GPS_LATITUDE_REF, EXIF_ASCII, 2,
+                                    1, (void *)mExifValues.latRef);
+        }
+    }
+
+    if(mExifValues.mLongitude) {
+        addExifTag(EXIFTAGID_GPS_LONGITUDE, EXIF_RATIONAL, 3, 1, (void *)mExifValues.longitude);
+
+        if(mExifValues.lonRef) {
+            addExifTag(EXIFTAGID_GPS_LONGITUDE_REF, EXIF_ASCII, 2,
+                                1, (void *)mExifValues.lonRef);
+        }
+    }
+
+    if(mExifValues.mAltitude) {
+        addExifTag(EXIFTAGID_GPS_ALTITUDE, EXIF_RATIONAL, 1,
+                    1, (void *)&(mExifValues.altitude));
+
+        addExifTag(EXIFTAGID_GPS_ALTITUDE_REF, EXIF_BYTE, 1, 1, (void *)&mExifValues.mAltitude_ref);
+    }
+
+    if(mExifValues.mTimeStamp) {
+        time_t unixTime;
+        struct tm *UTCTimestamp;
+
+        unixTime = (time_t)mExifValues.mGPSTimestamp;
+        UTCTimestamp = gmtime(&unixTime);
+
+        strftime(mExifValues.gpsDateStamp, sizeof(mExifValues.gpsDateStamp), "%Y:%m:%d", UTCTimestamp);
+        addExifTag(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
+                          strlen(mExifValues.gpsDateStamp)+1 , 1, (void *)mExifValues.gpsDateStamp);
+
+        mExifValues.gpsTimeStamp[0] = getRational(UTCTimestamp->tm_hour, 1);
+        mExifValues.gpsTimeStamp[1] = getRational(UTCTimestamp->tm_min, 1);
+        mExifValues.gpsTimeStamp[2] = getRational(UTCTimestamp->tm_sec, 1);
+
+        addExifTag(EXIFTAGID_GPS_TIMESTAMP, EXIF_RATIONAL,
+                  3, 1, (void *)mExifValues.gpsTimeStamp);
+        ALOGE("EXIFTAGID_GPS_TIMESTAMP set");
+    }
+
+}
+
+//Add all exif tags in this function
+void QCameraHardwareInterface::setExifTags()
+{
+    const char *str;
+
+    //set TimeStamp
+    str = mParameters.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if(str != NULL) {
+      strncpy(mExifValues.dateTime, str, 19);
+      mExifValues.dateTime[19] = '\0';
+    }
+
+    //Set focal length
+    int focalLengthValue = (int) (mParameters.getFloat(
+                QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISION);
+
+    mExifValues.focalLength = getRational(focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
+
+    //Set ISO Speed
+    mExifValues.isoSpeed = getISOSpeedValue();
+
+    //set gps tags
+    setExifTagsGPS();
+}
+
+void QCameraHardwareInterface::setExifTagsGPS()
+{
+    const char *str = NULL;
+
+    //Set GPS processing method
+    str = mParameters.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+    if(str != NULL) {
+       memcpy(mExifValues.gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+       strncpy(mExifValues.gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str,
+           GPS_PROCESSING_METHOD_SIZE - 1);
+       mExifValues.gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE-1] = '\0';
+       ALOGE("EXIFTAGID_GPS_PROCESSINGMETHOD = %s %s", mExifValues.gpsProcessingMethod,
+                                                    mExifValues.gpsProcessingMethod+8);
+       mExifValues.mGpsProcess  = true;
+    }else{
+        mExifValues.mGpsProcess = false;
+    }
+    str = NULL;
+
+    //Set Latitude
+    str = mParameters.get(QCameraParameters::KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, mExifValues.latitude);
+        ALOGE("EXIFTAGID_GPS_LATITUDE = %s", str);
+
+        //set Latitude Ref
+        float latitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LATITUDE);
+        if(latitudeValue < 0.0f) {
+            mExifValues.latRef[0] = 'S';
+        } else {
+            mExifValues.latRef[0] = 'N';
+        }
+        mExifValues.latRef[1] = '\0';
+        mExifValues.mLatitude = true;
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF,mExifValues.latRef);
+        ALOGE("EXIFTAGID_GPS_LATITUDE_REF = %s", mExifValues.latRef);
+    }else{
+        mExifValues.mLatitude = false;
+    }
+
+    //set Longitude
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, mExifValues.longitude);
+        ALOGE("EXIFTAGID_GPS_LONGITUDE = %s", str);
+
+        //set Longitude Ref
+        float longitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LONGITUDE);
+        if(longitudeValue < 0.0f) {
+            mExifValues.lonRef[0] = 'W';
+        } else {
+            mExifValues.lonRef[0] = 'E';
+        }
+        mExifValues.lonRef[1] = '\0';
+        mExifValues.mLongitude = true;
+        ALOGE("EXIFTAGID_GPS_LONGITUDE_REF = %s", mExifValues.lonRef);
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, mExifValues.lonRef);
+    }else{
+        mExifValues.mLongitude = false;
+    }
+
+    //set Altitude
+    str = mParameters.get(QCameraParameters::KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        mExifValues.mAltitude_ref = 0;
+        if(value < 0){
+            mExifValues.mAltitude_ref = 1;
+            value = -value;
+        }
+        mExifValues.altitude = getRational(value*1000, 1000);
+        mExifValues.mAltitude = true;
+        //set AltitudeRef
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, mExifValues.mAltitude_ref);
+        ALOGE("EXIFTAGID_GPS_ALTITUDE = %f", value);
+    }else{
+        mExifValues.mAltitude = false;
+    }
+
+    //set Gps TimeStamp
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+      mExifValues.mTimeStamp = true;
+      mExifValues.mGPSTimestamp = atol(str);
+    }else{
+         mExifValues.mTimeStamp = false;
+    }
+}
+
+//latlonString is string formatted coordinate
+//coord is rat_t[3]
+void QCameraHardwareInterface::parseGPSCoordinate(const char *latlonString, rat_t* coord)
+{
+    if(coord == NULL) {
+        ALOGE("%s: error, invalid argument coord == NULL", __func__);
+        return;
+    }
+    float degF = fabs(atof(latlonString));
+    float minF = (degF- (int) degF) * 60;
+    float secF = (minF - (int) minF) * 60;
+
+    coord[0] = getRational((int) degF, 1);
+    coord[1] = getRational((int) minF, 1);
+    coord[2] = getRational((int) (secF * 10000), 10000);
+}
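+/*
+ * Worked example for parseGPSCoordinate(): an input of "37.774900" gives
+ * degF = 37.7749, minF = 0.7749 * 60 = 46.494, secF = 0.494 * 60 = 29.64,
+ * so coord becomes {37/1, 46/1, 296400/10000}, i.e. 37 deg 46' 29.64".
+ * The sign is dropped by fabs() here; hemisphere is carried separately by the
+ * latitude/longitude reference tags set in setExifTagsGPS().
+ */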
+
+bool QCameraHardwareInterface::isLowPowerCamcorder() {
+
+    if (mPowerMode == LOW_POWER)
+        return true;
+
+    if(mHFRLevel > 1) /* hard code the value for now. Need to move tgtcommon to camera.h */
+        return true;
+
+    return false;
+}
+
+status_t QCameraHardwareInterface::setNoDisplayMode(const QCameraParameters& params)
+{
+  char prop[PROPERTY_VALUE_MAX];
+  memset(prop, 0, sizeof(prop));
+  property_get("persist.camera.nodisplay", prop, "0");
+  int prop_val = atoi(prop);
+
+  if (prop_val == 0) {
+    const char *str_val  = params.get("no-display-mode");
+    if(str_val && strlen(str_val) > 0) {
+      mNoDisplayMode = atoi(str_val);
+    } else {
+      mNoDisplayMode = 0;
+    }
+    ALOGD("Param mNoDisplayMode =%d", mNoDisplayMode);
+  } else {
+    mNoDisplayMode = prop_val;
+    ALOGD("prop mNoDisplayMode =%d", mNoDisplayMode);
+  }
+  return NO_ERROR;
+}
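+/*
+ * No-display mode resolution (as coded above): a non-zero
+ * persist.camera.nodisplay property wins, otherwise the app-supplied
+ * "no-display-mode" parameter is used. Illustrative override:
+ *   adb shell setprop persist.camera.nodisplay 1
+ * which is expected to select the heap-backed preview path (see
+ * initPreviewOnlyBuffers / getBufferNoDisplay below) instead of gralloc
+ * buffers dequeued from the preview window.
+ */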
+
+}; /*namespace android */
diff --git a/camera/QCameraHWI_Preview.cpp b/camera/QCameraHWI_Preview.cpp
new file mode 100644
index 0000000..ffe8e1a
--- /dev/null
+++ b/camera/QCameraHWI_Preview.cpp
@@ -0,0 +1,1436 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_TAG "QCameraHWI_Preview"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include <genlock.h>
+#include <gralloc_priv.h>
+
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraHWI_Preview class implementation goes here. */
+/* The following code implements the preview mode's image capture & display logic of this class. */
+
+namespace android {
+
+// ---------------------------------------------------------------------------
+// Preview Callback
+// ---------------------------------------------------------------------------
+static void preview_notify_cb(mm_camera_ch_data_buf_t *frame,
+                                void *user_data)
+{
+  QCameraStream_preview *pme = (QCameraStream_preview *)user_data;
+  mm_camera_ch_data_buf_t *bufs_used = 0;
+  ALOGV("%s: E", __func__);
+  /* for preview data, there is no queue, so use it directly */
+  if(pme==NULL) {
+    ALOGE("%s: X : Incorrect cookie",__func__);
+    /*Call buf done*/
+    return;
+  }
+
+  pme->processPreviewFrame(frame);
+  ALOGV("%s: X", __func__);
+}
+
+status_t QCameraStream_preview::setPreviewWindow(preview_stream_ops_t* window)
+{
+    status_t retVal = NO_ERROR;
+    ALOGE(" %s: E ", __FUNCTION__);
+    if( window == NULL) {
+        ALOGW(" Setting NULL preview window ");
+        /* TODO: Current preview window will be invalidated.
+         * Release all the buffers back */
+       // relinquishBuffers();
+    }
+    Mutex::Autolock lock(mStopCallbackLock);
+    mPreviewWindow = window;
+    ALOGV(" %s : X ", __FUNCTION__ );
+    return retVal;
+}
+
+status_t QCameraStream_preview::getBufferFromSurface() {
+    int err = 0;
+    int numMinUndequeuedBufs = 0;
+    int format = 0;
+    status_t ret = NO_ERROR;
+    int gralloc_usage;
+
+    ALOGI(" %s : E ", __FUNCTION__);
+
+    if( mPreviewWindow == NULL) {
+        ALOGE("%s: mPreviewWindow = NULL", __func__);
+        return INVALID_OPERATION;
+    }
+    cam_ctrl_dimension_t dim;
+
+  //mDisplayLock.lock();
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+
+    format = mHalCamCtrl->getPreviewFormatInfo().Hal_format;
+    if(ret != NO_ERROR) {
+        ALOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+        /* Return directly: mPreviewMemoryLock has not been taken yet, so the
+         * unlock at end: would operate on a mutex that is not held. */
+        return ret;
+    }
+    numMinUndequeuedBufs = 0;
+    if(mPreviewWindow->get_min_undequeued_buffer_count) {
+        err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow, &numMinUndequeuedBufs);
+        if (err != 0) {
+            ALOGE("get_min_undequeued_buffer_count failed: %s (%d)",
+                 strerror(-err), -err);
+            return UNKNOWN_ERROR;
+        }
+    }
+    mHalCamCtrl->mPreviewMemoryLock.lock();
+    mHalCamCtrl->mPreviewMemory.buffer_count = kPreviewBufferCount + numMinUndequeuedBufs;
+    if(mHalCamCtrl->isZSLMode()) {
+      if(mHalCamCtrl->getZSLQueueDepth() > numMinUndequeuedBufs)
+        mHalCamCtrl->mPreviewMemory.buffer_count +=
+            mHalCamCtrl->getZSLQueueDepth() - numMinUndequeuedBufs;
+    }
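+    /* Buffer budget: kPreviewBufferCount HAL buffers plus however many the
+     * compositor keeps dequeued; in ZSL mode the count grows further so up to
+     * getZSLQueueDepth() frames can be held back. Illustrative (hypothetical)
+     * numbers: kPreviewBufferCount = 4, numMinUndequeuedBufs = 2, ZSL queue
+     * depth = 4  ->  4 + 2 + (4 - 2) = 8 buffers requested from the window. */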
+    err = mPreviewWindow->set_buffer_count(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_count );
+    if (err != 0) {
+         ALOGE("set_buffer_count failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+     goto end;
+    }
+    err = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+                dim.display_width, dim.display_height, format);
+    if (err != 0) {
+         ALOGE("set_buffers_geometry failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+     goto end;
+    }
+
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, &mVFEOutputs);
+    if(ret != MM_CAMERA_OK) {
+        ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE  failed");
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    //As a software encoder is used to encode 720p, cached pmem is used here
+    //to enhance the performance.
+    if(mVFEOutputs == 1 && dim.display_height == 720)
+        gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID;
+    else
+        gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID |
+                    CAMERA_GRALLOC_CACHING_ID;
+    err = mPreviewWindow->set_usage(mPreviewWindow, gralloc_usage);
+    if(err != 0) {
+    /* set_usage error out */
+        ALOGE("%s: set_usage rc = %d", __func__, err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HFR_FRAME_SKIP, &mHFRFrameSkip);
+    if(ret != MM_CAMERA_OK) {
+        ALOGE("get parm MM_CAMERA_PARM_HFR_FRAME_SKIP  failed");
+        ret = BAD_VALUE;
+        goto end;
+    }
+	for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+		int stride;
+		err = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+										&mHalCamCtrl->mPreviewMemory.buffer_handle[cnt],
+										&mHalCamCtrl->mPreviewMemory.stride[cnt]);
+		if(!err) {
+          ALOGE("%s: dequeue buf hdl =%p", __func__, *mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+                    err = mPreviewWindow->lock_buffer(this->mPreviewWindow,
+                                       mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+                    // lock the buffer using genlock
+                    ALOGE("%s: camera call genlock_lock, hdl=%p", __FUNCTION__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]));
+                    if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]),
+                                                      GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+                       ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+                       mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+	                //mHalCamCtrl->mPreviewMemoryLock.unlock();
+                       //return -EINVAL;
+                   } else {
+                     ALOGE("%s: genlock_lock_buffer hdl =%p", __FUNCTION__, *mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+                     mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_LOCKED;
+                   }
+		} else {
+          mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_NOT_OWNED;
+          ALOGE("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+        }
+
+		ALOGE("%s: dequeue buf: %p\n", __func__, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+		if(err != 0) {
+            ALOGE("%s: dequeue_buffer failed: %s (%d)", __func__,
+                    strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+			for(int i = 0; i < cnt; i++) {
+                if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[i]) {
+                      ALOGE("%s: camera call genlock_unlock", __FUNCTION__);
+                     if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+                                                  (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])))) {
+                        ALOGE("%s: genlock_unlock_buffer failed: hdl =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])) );
+                         //mHalCamCtrl->mPreviewMemoryLock.unlock();
+                        //return -EINVAL;
+                     } else {
+                       mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+                     }
+                }
+                if( mHalCamCtrl->mPreviewMemory.local_flag[i] != BUFFER_NOT_OWNED) {
+                  err = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                          mHalCamCtrl->mPreviewMemory.buffer_handle[i]);
+                }
+                mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_NOT_OWNED;
+                ALOGE("%s: cancel_buffer: hdl =%p", __func__,  (*mHalCamCtrl->mPreviewMemory.buffer_handle[i]));
+				mHalCamCtrl->mPreviewMemory.buffer_handle[i] = NULL;
+			}
+            memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+			goto end;
+		}
+
+		mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt] =
+		    (struct private_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+#ifdef USE_ION
+        mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+        if (mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt] < 0) {
+            ALOGE("%s: failed: could not open ion device\n", __func__);
+        } else {
+            mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt].fd =
+                mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd;
+            if (ioctl(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt],
+              ION_IOC_IMPORT, &mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt]) < 0)
+              ALOGE("ION import failed\n");
+        }
+#endif
+		mHalCamCtrl->mPreviewMemory.camera_memory[cnt] =
+		    mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+			mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size, 1, (void *)this);
+		ALOGE("%s: idx = %d, fd = %d, size = %d, offset = %d", __func__,
+            cnt, mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+      mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size,
+      mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->offset);
+  }
+
+
+  memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+  memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+    ALOGI(" %s : X ",__FUNCTION__);
+end:
+  //mDisplayLock.unlock();
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+    return ret;
+}
+
+status_t QCameraStream_preview::putBufferToSurface() {
+    int err = 0;
+    status_t ret = NO_ERROR;
+
+    ALOGI(" %s : E ", __FUNCTION__);
+
+    mHalCamCtrl->mPreviewMemoryLock.lock();
+	for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+        if (cnt < mHalCamCtrl->mPreviewMemory.buffer_count) {
+            if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW, cnt, mCameraId,
+                                                          CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+                ALOGE("%s: sending data Msg Failed", __func__);
+            }
+        }
+
+        mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->release(mHalCamCtrl->mPreviewMemory.camera_memory[cnt]);
+#ifdef USE_ION
+        struct ion_handle_data ion_handle;
+        ion_handle.handle = mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt].handle;
+        if (ioctl(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt], ION_IOC_FREE, &ion_handle)
+            < 0)
+            ALOGE("%s: ion free failed\n", __func__);
+        close(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt]);
+#endif
+            if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[cnt]) {
+                ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+	        if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+                                                    (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])))) {
+                    ALOGE("%s: genlock_unlock_buffer failed, handle =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])));
+                    continue;
+	                //mHalCamCtrl->mPreviewMemoryLock.unlock();
+                    //return -EINVAL;
+                } else {
+
+                    ALOGD("%s: genlock_unlock_buffer, handle =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])));
+                    mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+                }
+            }
+             if( mHalCamCtrl->mPreviewMemory.local_flag[cnt] != BUFFER_NOT_OWNED) {
+               err = mPreviewWindow->cancel_buffer(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+               ALOGD("%s: cancel_buffer: hdl =%p", __func__,  (*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]));
+             }
+             mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_NOT_OWNED;
+
+		ALOGD(" put buffer %d successfully", cnt);
+	}
+
+    if (mDisplayBuf.preview.buf.mp != NULL) {
+        delete[] mDisplayBuf.preview.buf.mp;
+        mDisplayBuf.preview.buf.mp = NULL;
+    }
+
+    mHalCamCtrl->mPreviewMemoryLock.unlock();
+	memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+    ALOGI(" %s : X ",__FUNCTION__);
+    return NO_ERROR;
+}
+
+
+status_t  QCameraStream_preview::getBufferNoDisplay( )
+{
+  int err = 0;
+  status_t ret = NO_ERROR;
+  int i, num_planes, frame_len, y_off, cbcr_off;
+  cam_ctrl_dimension_t dim;
+  uint32_t planes[VIDEO_MAX_PLANES];
+
+  ALOGI("%s : E ", __FUNCTION__);
+
+
+  ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+  if(ret != NO_ERROR) {
+      ALOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+      /* Return directly; mPreviewMemoryLock has not been taken yet, so the
+       * unlock at end: would operate on a mutex that is not held. */
+      return ret;
+  }
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  mHalCamCtrl->mNoDispPreviewMemory.buffer_count = kPreviewBufferCount;
+  if(mHalCamCtrl->isZSLMode()) {
+    if(mHalCamCtrl->getZSLQueueDepth() > kPreviewBufferCount - 3)
+      mHalCamCtrl->mNoDispPreviewMemory.buffer_count =
+      mHalCamCtrl->getZSLQueueDepth() + 3;
+  }
+
+  num_planes = dim.display_frame_offset.num_planes;
+  for ( i = 0; i < num_planes; i++) {
+    planes[i] = dim.display_frame_offset.mp[i].len;
+  }
+
+  frame_len = dim.picture_frame_offset.frame_len;
+  y_off = dim.picture_frame_offset.mp[0].offset;
+  cbcr_off = dim.picture_frame_offset.mp[1].offset;
+  ALOGE("%s: main image: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+       __func__, dim.rotation, y_off, cbcr_off, frame_len,
+       dim.display_width, dim.display_height);
+  if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mNoDispPreviewMemory,
+     mHalCamCtrl->mNoDispPreviewMemory.buffer_count,
+     frame_len, y_off, cbcr_off, MSM_PMEM_MAINIMG,
+     NULL,NULL, num_planes, planes) < 0) {
+              ret = NO_MEMORY;
+              goto end;
+  };
+
+  memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+  memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+  ALOGI(" %s : X ",__FUNCTION__);
+end:
+  //mDisplayLock.unlock();
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+  return ret;
+}
+
+status_t   QCameraStream_preview::freeBufferNoDisplay()
+{
+  int err = 0;
+  status_t ret = NO_ERROR;
+
+  ALOGI(" %s : E ", __FUNCTION__);
+
+  //mDisplayLock.lock();
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  for (int cnt = 0; cnt < mHalCamCtrl->mNoDispPreviewMemory.buffer_count; cnt++) {
+      if (cnt < mHalCamCtrl->mNoDispPreviewMemory.buffer_count) {
+          if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+                       cnt, mCameraId, CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+              ALOGE("%s: sending data Msg Failed", __func__);
+          }
+      }
+  }
+  mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mNoDispPreviewMemory);
+  memset(&mHalCamCtrl->mNoDispPreviewMemory, 0, sizeof(mHalCamCtrl->mNoDispPreviewMemory));
+  if (mDisplayBuf.preview.buf.mp != NULL) {
+      delete[] mDisplayBuf.preview.buf.mp;
+      mDisplayBuf.preview.buf.mp = NULL;
+  }
+
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  ALOGI(" %s : X ",__FUNCTION__);
+  return NO_ERROR;
+}
+
+void QCameraStream_preview::notifyROIEvent(fd_roi_t roi)
+{
+    switch (roi.type) {
+    case FD_ROI_TYPE_HEADER:
+        {
+            mDisplayLock.lock();
+            mNumFDRcvd = 0;
+            memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+            mHalCamCtrl->mMetadata.faces = mHalCamCtrl->mFace;
+            mHalCamCtrl->mMetadata.number_of_faces = roi.d.hdr.num_face_detected;
+            if(mHalCamCtrl->mMetadata.number_of_faces > MAX_ROI)
+              mHalCamCtrl->mMetadata.number_of_faces = MAX_ROI;
+            mDisplayLock.unlock();
+
+            if (mHalCamCtrl->mMetadata.number_of_faces == 0) {
+                // Clear previous faces
+                mHalCamCtrl->mCallbackLock.lock();
+                camera_data_callback pcb = mHalCamCtrl->mDataCb;
+                mHalCamCtrl->mCallbackLock.unlock();
+
+                if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+                    ALOGE("%s: Face detection RIO callback", __func__);
+                    pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+                }
+            }
+        }
+        break;
+    case FD_ROI_TYPE_DATA:
+        {
+        #if 0
+            mDisplayLock.lock();
+            int idx = roi.d.data.idx;
+            if (idx >= mHalCamCtrl->mMetadata.number_of_faces) {
+                mDisplayLock.unlock();
+                ALOGE("%s: idx %d out of boundary %d", __func__, idx, mHalCamCtrl->mMetadata.number_of_faces);
+                break;
+            }
+
+            mHalCamCtrl->mFace[idx].id = roi.d.data.face.id;
+            mHalCamCtrl->mFace[idx].score = roi.d.data.face.score;
+
+            // top
+            mHalCamCtrl->mFace[idx].rect[0] =
+               roi.d.data.face.face_boundary.x*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            //right
+            mHalCamCtrl->mFace[idx].rect[1] =
+               roi.d.data.face.face_boundary.y*2000/mHalCamCtrl->mDimension.display_height - 1000;
+            //bottom
+            mHalCamCtrl->mFace[idx].rect[2] =  mHalCamCtrl->mFace[idx].rect[0] +
+               roi.d.data.face.face_boundary.dx*2000/mHalCamCtrl->mDimension.display_width;
+            //left
+            mHalCamCtrl->mFace[idx].rect[3] = mHalCamCtrl->mFace[idx].rect[1] +
+               roi.d.data.face.face_boundary.dy*2000/mHalCamCtrl->mDimension.display_height;
+
+            // Center of left eye
+            mHalCamCtrl->mFace[idx].left_eye[0] =
+              roi.d.data.face.left_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            mHalCamCtrl->mFace[idx].left_eye[1] =
+              roi.d.data.face.left_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+            // Center of right eye
+            mHalCamCtrl->mFace[idx].right_eye[0] =
+              roi.d.data.face.right_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            mHalCamCtrl->mFace[idx].right_eye[1] =
+              roi.d.data.face.right_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+            // Center of mouth
+            mHalCamCtrl->mFace[idx].mouth[0] =
+              roi.d.data.face.mouth_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            mHalCamCtrl->mFace[idx].mouth[1] =
+              roi.d.data.face.mouth_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+            mHalCamCtrl->mFace[idx].smile_degree = roi.d.data.face.smile_degree;
+            mHalCamCtrl->mFace[idx].smile_score = roi.d.data.face.smile_confidence;
+            mHalCamCtrl->mFace[idx].blink_detected = roi.d.data.face.blink_detected;
+            mHalCamCtrl->mFace[idx].face_recognised = roi.d.data.face.is_face_recognised;
+            mHalCamCtrl->mFace[idx].gaze_angle = roi.d.data.face.gaze_angle;
+            /* newly added */
+            mHalCamCtrl->mFace[idx].updown_dir = roi.d.data.face.updown_dir;
+            mHalCamCtrl->mFace[idx].leftright_dir = roi.d.data.face.leftright_dir;
+            mHalCamCtrl->mFace[idx].roll_dir = roi.d.data.face.roll_dir;
+            mHalCamCtrl->mFace[idx].leye_blink = roi.d.data.face.left_blink;
+            mHalCamCtrl->mFace[idx].reye_blink = roi.d.data.face.right_blink;
+            mHalCamCtrl->mFace[idx].left_right_gaze = roi.d.data.face.left_right_gaze;
+            mHalCamCtrl->mFace[idx].top_bottom_gaze = roi.d.data.face.top_bottom_gaze;
+            ALOGE("%s: Face(%d, %d, %d, %d), leftEye(%d, %d), rightEye(%d, %d), mouth(%d, %d), smile(%d, %d), face_recg(%d)", __func__,
+               mHalCamCtrl->mFace[idx].rect[0],  mHalCamCtrl->mFace[idx].rect[1],
+               mHalCamCtrl->mFace[idx].rect[2],  mHalCamCtrl->mFace[idx].rect[3],
+               mHalCamCtrl->mFace[idx].left_eye[0], mHalCamCtrl->mFace[idx].left_eye[1],
+               mHalCamCtrl->mFace[idx].right_eye[0], mHalCamCtrl->mFace[idx].right_eye[1],
+               mHalCamCtrl->mFace[idx].mouth[0], mHalCamCtrl->mFace[idx].mouth[1],
+               mHalCamCtrl->mFace[idx].smile_degree, mHalCamCtrl->mFace[idx].smile_score,
+               mHalCamCtrl->mFace[idx].face_recognised);
+            ALOGE("%s: gaze(%d, %d, %d), updown(%d), leftright(%d), roll(%d), blink(%d, %d, %d)", __func__,
+               mHalCamCtrl->mFace[idx].gaze_angle,  mHalCamCtrl->mFace[idx].left_right_gaze,
+               mHalCamCtrl->mFace[idx].top_bottom_gaze,  mHalCamCtrl->mFace[idx].updown_dir,
+               mHalCamCtrl->mFace[idx].leftright_dir, mHalCamCtrl->mFace[idx].roll_dir,
+               mHalCamCtrl->mFace[idx].blink_detected,
+               mHalCamCtrl->mFace[idx].leye_blink, mHalCamCtrl->mFace[idx].reye_blink);
+
+             mNumFDRcvd++;
+             mDisplayLock.unlock();
+
+             if (mNumFDRcvd == mHalCamCtrl->mMetadata.number_of_faces) {
+                 mHalCamCtrl->mCallbackLock.lock();
+                 camera_data_callback pcb = mHalCamCtrl->mDataCb;
+                 mHalCamCtrl->mCallbackLock.unlock();
+
+                 if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+                     ALOGE("%s: Face detection RIO callback with %d faces detected (score=%d)", __func__, mNumFDRcvd, mHalCamCtrl->mFace[idx].score);
+                     pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+                 }
+             }
+        #endif
+        }
+        break;
+    }
+}
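+/*
+ * Face coordinates in the (currently compiled-out) FD_ROI_TYPE_DATA path are
+ * mapped from pixel positions into the [-1000, 1000] range used by the Android
+ * face metadata convention via x*2000/display_width - 1000 (and likewise for y
+ * with display_height). For example, a point at pixel (320, 240) on a 640x480
+ * preview maps to (0, 0), the centre of the normalized space.
+ */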
+
+status_t QCameraStream_preview::initDisplayBuffers()
+{
+  status_t ret = NO_ERROR;
+  int width = 0;  /* width of channel  */
+  int height = 0; /* height of channel */
+  uint32_t frame_len = 0; /* frame planner length */
+  int buffer_num = 4; /* number of buffers for display */
+  const char *pmem_region;
+  uint8_t num_planes = 0;
+  uint32_t planes[VIDEO_MAX_PLANES];
+  void *vaddr = NULL;
+  cam_ctrl_dimension_t dim;
+
+  ALOGE("%s:BEGIN",__func__);
+  memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+  /* get preview size by querying mm_camera */
+  memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+
+  memset(&(this->mDisplayStreamBuf),0, sizeof(this->mDisplayStreamBuf));
+
+  ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE("%s: error - can't get camera dimension!", __func__);
+    ALOGE("%s: X", __func__);
+    return BAD_VALUE;
+  }else {
+    width =  dim.display_width,
+    height = dim.display_height;
+  }
+
+  ret = getBufferFromSurface();
+  if(ret != NO_ERROR) {
+    ALOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+    return ret;
+  }
+
+  /* set 4 buffers for display */
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+  this->mDisplayStreamBuf.num = mHalCamCtrl->mPreviewMemory.buffer_count;
+  this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+  num_planes = 2;
+  planes[0] = dim.display_frame_offset.mp[0].len;
+  planes[1] = dim.display_frame_offset.mp[1].len;
+  this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+  memset(&mDisplayBuf, 0, sizeof(mDisplayBuf));
+  mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+  if (!mDisplayBuf.preview.buf.mp) {
+    ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+    ret = NO_MEMORY;
+    goto error;
+  }
+  memset(mDisplayBuf.preview.buf.mp, 0,
+    mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+  /*allocate memory for the buffers*/
+  for(int i = 0; i < mDisplayStreamBuf.num; i++){
+      if (mHalCamCtrl->mPreviewMemory.private_buffer_handle[i] == NULL)
+          continue;
+      mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->fd;
+      mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+      mDisplayStreamBuf.frame[i].y_off = 0;
+      mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+      mHalCamCtrl->mPreviewMemory.addr_offset[i] =
+          mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->offset;
+      mDisplayStreamBuf.frame[i].buffer =
+          (long unsigned int)mHalCamCtrl->mPreviewMemory.camera_memory[i]->data;
+      mDisplayStreamBuf.frame[i].ion_alloc.len = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size;
+      mDisplayStreamBuf.frame[i].ion_dev_fd = mHalCamCtrl->mPreviewMemory.main_ion_fd[i];
+      mDisplayStreamBuf.frame[i].fd_data = mHalCamCtrl->mPreviewMemory.ion_info_fd[i];
+
+    ALOGE("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, "
+      "offset = %d, vaddr = 0x%x", __func__, i, mDisplayStreamBuf.frame[i].fd,
+      mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+      mDisplayStreamBuf.frame[i].cbcr_off, mDisplayStreamBuf.frame[i].y_off,
+      mHalCamCtrl->mPreviewMemory.addr_offset[i],
+      (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+    ret = mHalCamCtrl->sendMappingBuf(
+                        MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+                        i,
+                        mDisplayStreamBuf.frame[i].fd,
+                        mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+                        mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING);
+    if (NO_ERROR != ret) {
+      ALOGE("%s: sending mapping data Msg Failed", __func__);
+      goto error;
+    }
+
+    mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+    mDisplayBuf.preview.buf.mp[i].frame_offset = mHalCamCtrl->mPreviewMemory.addr_offset[i];
+    mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+    /* Plane 0 needs to be set separately. Set other planes
+     * in a loop. */
+    mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+    mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+    mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+    mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+      mDisplayBuf.preview.buf.mp[i].frame_offset;
+    for (int j = 1; j < num_planes; j++) {
+      mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+      mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+        mDisplayStreamBuf.frame[i].fd;
+      mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+      mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+        mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+        mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+    }
+
+    for (int j = 0; j < num_planes; j++)
+      ALOGE("Planes: %d length: %d userptr: %lu offset: %d\n", j,
+        mDisplayBuf.preview.buf.mp[i].planes[j].length,
+        mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+        mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+  }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+  mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+  mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  ALOGE("%s:END",__func__);
+  return NO_ERROR;
+
+error:
+    mHalCamCtrl->mPreviewMemoryLock.unlock();
+    putBufferToSurface();
+
+    ALOGV("%s: X", __func__);
+    return ret;
+}
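+/*
+ * Plane layout note for initDisplayBuffers() above (and initPreviewOnlyBuffers()
+ * below): planes[] carries the per-plane lengths reported through
+ * MM_CAMERA_PARM_DIMENSION, and reserved[0] of each plane is the running byte
+ * offset of that plane within the shared buffer fd. For a two-plane YUV 4:2:0
+ * frame this typically works out to the Y plane at offset 0 (~width*height
+ * bytes) followed by the interleaved CbCr plane (~width*height/2 bytes), but
+ * the exact lengths, including any padding, come from display_frame_offset
+ * rather than being recomputed here.
+ */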
+
+status_t QCameraStream_preview::initPreviewOnlyBuffers()
+{
+  status_t ret = NO_ERROR;
+  int width = 0;  /* width of channel  */
+  int height = 0; /* height of channel */
+  uint32_t frame_len = 0; /* frame planner length */
+  int buffer_num = 4; /* number of buffers for display */
+  const char *pmem_region;
+  uint8_t num_planes = 0;
+  uint32_t planes[VIDEO_MAX_PLANES];
+
+  cam_ctrl_dimension_t dim;
+
+  ALOGE("%s:BEGIN",__func__);
+  memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  memset(&mHalCamCtrl->mNoDispPreviewMemory, 0, sizeof(mHalCamCtrl->mNoDispPreviewMemory));
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+  /* get preview size by querying mm_camera */
+  memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+  ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE("%s: error - can't get camera dimension!", __func__);
+    ALOGE("%s: X", __func__);
+    return BAD_VALUE;
+  }else {
+    width =  dim.display_width;
+    height = dim.display_height;
+  }
+
+  ret = getBufferNoDisplay( );
+  if(ret != NO_ERROR) {
+    ALOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+    return ret;
+  }
+
+  /* set 4 buffers for display */
+  memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  this->mDisplayStreamBuf.num = mHalCamCtrl->mNoDispPreviewMemory.buffer_count;
+  this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+  num_planes = dim.display_frame_offset.num_planes;
+  for (int i = 0; i < num_planes; i++) {
+    planes[i] = dim.display_frame_offset.mp[i].len;
+  }
+  this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+  memset(&mDisplayBuf, 0, sizeof(mDisplayBuf));
+  mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+  if (!mDisplayBuf.preview.buf.mp) {
+    ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+    mHalCamCtrl->mPreviewMemoryLock.unlock();
+    return NO_MEMORY;
+  }
+  memset(mDisplayBuf.preview.buf.mp, 0,
+    mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+  /*allocate memory for the buffers*/
+  void *vaddr = NULL;
+  for(int i = 0; i < mDisplayStreamBuf.num; i++){
+      if (mHalCamCtrl->mNoDispPreviewMemory.camera_memory[i] == NULL)
+          continue;
+      mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mNoDispPreviewMemory.fd[i];
+      mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+      mDisplayStreamBuf.frame[i].y_off = 0;
+      mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+      mDisplayStreamBuf.frame[i].buffer =
+          (long unsigned int)mHalCamCtrl->mNoDispPreviewMemory.camera_memory[i]->data;
+      mDisplayStreamBuf.frame[i].ion_dev_fd = mHalCamCtrl->mNoDispPreviewMemory.main_ion_fd[i];
+      mDisplayStreamBuf.frame[i].fd_data = mHalCamCtrl->mNoDispPreviewMemory.ion_info_fd[i];
+
+    ALOGE("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, "
+      "vaddr = 0x%x", __func__, i, mDisplayStreamBuf.frame[i].fd,
+      frame_len,
+      mDisplayStreamBuf.frame[i].cbcr_off, mDisplayStreamBuf.frame[i].y_off,
+      (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+    if (NO_ERROR != mHalCamCtrl->sendMappingBuf(
+                        MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+                        i,
+                        mDisplayStreamBuf.frame[i].fd,
+                        mHalCamCtrl->mNoDispPreviewMemory.size,
+                        mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+      ALOGE("%s: sending mapping data Msg Failed", __func__);
+    }
+
+    mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+    mDisplayBuf.preview.buf.mp[i].frame_offset = mDisplayStreamBuf.frame[i].y_off;
+    mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+    /* Plane 0 needs to be set separately. Set other planes
+     * in a loop. */
+    mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+    mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+    mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+    mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+      mDisplayBuf.preview.buf.mp[i].frame_offset;
+    for (int j = 1; j < num_planes; j++) {
+      mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+      mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+        mDisplayStreamBuf.frame[i].fd;
+      mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+      mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+        mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+        mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+    }
+
+    for (int j = 0; j < num_planes; j++)
+      ALOGE("Planes: %d length: %d userptr: %lu offset: %d\n", j,
+        mDisplayBuf.preview.buf.mp[i].planes[j].length,
+        mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+        mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+  }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+  mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+  mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  ALOGE("%s:END",__func__);
+  return NO_ERROR;
+
+end:
+  if (MM_CAMERA_OK == ret ) {
+    ALOGV("%s: X - NO_ERROR ", __func__);
+    return NO_ERROR;
+  }
+
+    ALOGV("%s: out of memory clean up", __func__);
+  /* release the allocated memory */
+
+  ALOGV("%s: X - BAD_VALUE ", __func__);
+  return BAD_VALUE;
+}
+
+
+void QCameraStream_preview::dumpFrameToFile(struct msm_frame* newFrame)
+{
+  int32_t enabled = 0;
+  int frm_num;
+  uint32_t  skip_mode;
+  char value[PROPERTY_VALUE_MAX];
+  char buf[32];
+  int w, h;
+  static int count = 0;
+  cam_ctrl_dimension_t dim;
+  int file_fd;
+  int rc = 0;
+  int len;
+  unsigned long addr;
+  unsigned long * tmp = (unsigned long *)newFrame->buffer;
+  addr = *tmp;
+  status_t ret = cam_config_get_parm(mHalCamCtrl->mCameraId,
+                 MM_CAMERA_PARM_DIMENSION, &dim);
+
+  w = dim.display_width;
+  h = dim.display_height;
+  len = (w * h)*3/2;
+  count++;
+  if(count < 100) {
+    snprintf(buf, sizeof(buf), "/data/mzhu%d.yuv", count);
+    file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+    if (file_fd < 0) {
+      ALOGE("%s: cannot open dump file %s", __func__, buf);
+      return;
+    }
+    rc = write(file_fd, (const void *)addr, len);
+    ALOGE("%s: file='%s', vaddr_old=0x%x, addr_map = 0x%p, len = %d, rc = %d",
+          __func__, buf, (uint32_t)newFrame->buffer, (void *)addr, len, rc);
+    close(file_fd);
+    ALOGE("%s: dump %s, rc = %d, len = %d", __func__, buf, rc, len);
+  }
+}
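+/*
+ * dumpFrameToFile() above is a debug-only helper: it writes the first 100
+ * frames it is handed verbatim to /data/mzhu<N>.yuv, sized width*height*3/2
+ * bytes (YUV 4:2:0). The only call in this section of the file is the
+ * commented-out one in processPreviewFrameWithDisplay() below.
+ */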
+
+status_t QCameraStream_preview::processPreviewFrameWithDisplay(
+  mm_camera_ch_data_buf_t *frame)
+{
+  ALOGV("%s",__func__);
+  int err = 0;
+  int msgType = 0;
+  int i;
+  camera_memory_t *data = NULL;
+  camera_frame_metadata_t *metadata = NULL;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    ALOGE("Preview Stopped. Returning callback");
+    return NO_ERROR;
+  }
+
+  if(mHalCamCtrl==NULL) {
+    ALOGE("%s: X: HAL control object not set",__func__);
+    /*Call buf done*/
+    return BAD_VALUE;
+  }
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+  void *rdata = mHalCamCtrl->mCallbackCookie;
+  mHalCamCtrl->mCallbackLock.unlock();
+  nsecs_t timeStamp = seconds_to_nanoseconds(frame->def.frame->ts.tv_sec) ;
+  timeStamp += frame->def.frame->ts.tv_nsec;
+
+  if(mFirstFrameRcvd == false) {
+    mm_camera_util_profile("HAL: First preview frame received");
+    mFirstFrameRcvd = true;
+  }
+
+  if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+      mHalCamCtrl->debugShowPreviewFPS();
+  }
+  //dumpFrameToFile(frame->def.frame);
+  mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  mNotifyBuffer[frame->def.idx] = *frame;
+
+  ALOGI("Enqueue buf handle %p\n",
+  mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+    if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx]) {
+      ALOGD("%s: genlock_unlock_buffer hdl =%p", __FUNCTION__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]));
+        if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)
+	            (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]))) {
+            ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+	        //mHalCamCtrl->mPreviewMemoryLock.unlock();
+            //return -EINVAL;
+        } else {
+            mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_UNLOCKED;
+        }
+    } else {
+        ALOGE("%s: buffer to be enqueued is not locked", __FUNCTION__);
+	    //mHalCamCtrl->mPreviewMemoryLock.unlock();
+        //return -EINVAL;
+    }
+
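+  /* Clean (write back) the CPU cache for this ION buffer so the display consumes coherent data. */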
+#ifdef USE_ION
+  struct ion_flush_data cache_inv_data;
+  int ion_fd;
+  ion_fd = frame->def.frame->ion_dev_fd;
+  cache_inv_data.vaddr = (void *)frame->def.frame->buffer;
+  cache_inv_data.fd = frame->def.frame->fd;
+  cache_inv_data.handle = frame->def.frame->fd_data.handle;
+  cache_inv_data.length = frame->def.frame->ion_alloc.len;
+
+  if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+    ALOGE("%s: Cache clean for Preview buffer %p fd = %d failed", __func__,
+      cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
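+  /* For HFR (high frame rate) recording, only every 2nd/3rd/4th preview frame is displayed;
+     the skipped frames are cancelled back to the preview window. */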
+  if(mHFRFrameSkip == 1)
+  {
+      const char *str = mHalCamCtrl->mParameters.get(
+                          QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+      if(str != NULL) {
+          int is_hfr_off = 0;
+          mHFRFrameCnt++;
+          if(!strcmp(str, QCameraParameters::VIDEO_HFR_OFF)) {
+              is_hfr_off = 1;
+              err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+                (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+          } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_2X)) {
+              mHFRFrameCnt %= 2;
+          } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_3X)) {
+              mHFRFrameCnt %= 3;
+          } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_4X)) {
+              mHFRFrameCnt %= 4;
+          }
+          if(mHFRFrameCnt == 0)
+              err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+                (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+          else if(!is_hfr_off)
+              err = this->mPreviewWindow->cancel_buffer(this->mPreviewWindow,
+                (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+      } else
+          err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+            (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  } else {
+      err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+          (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  }
+  if(err != 0) {
+    ALOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+  } else {
+   ALOGD("%s: enqueue_buffer hdl=%p", __func__, *mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+    mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_NOT_OWNED;
+  }
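+  /* Dequeue the next free buffer from the preview window, genlock-lock it for write,
+     and return it to mm-camera via cam_evt_buf_done so it can be filled again. */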
+  buffer_handle_t *buffer_handle = NULL;
+  int tmp_stride = 0;
+  err = this->mPreviewWindow->dequeue_buffer(this->mPreviewWindow,
+              &buffer_handle, &tmp_stride);
+  if (err == NO_ERROR && buffer_handle != NULL) {
+
+    ALOGD("%s: dequed buf hdl =%p", __func__, *buffer_handle);
+    for(i = 0; i < mHalCamCtrl->mPreviewMemory.buffer_count; i++) {
+        if(mHalCamCtrl->mPreviewMemory.buffer_handle[i] == buffer_handle) {
+          mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+          break;
+        }
+    }
+     if (i < mHalCamCtrl->mPreviewMemory.buffer_count ) {
+      err = this->mPreviewWindow->lock_buffer(this->mPreviewWindow, buffer_handle);
+      ALOGD("%s: camera call genlock_lock: hdl =%p", __FUNCTION__, *buffer_handle);
+      if (GENLOCK_FAILURE == genlock_lock_buffer((native_handle_t*)(*buffer_handle), GENLOCK_WRITE_LOCK,
+                                                 GENLOCK_MAX_TIMEOUT)) {
+            ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+	    //mHalCamCtrl->mPreviewMemoryLock.unlock();
+           // return -EINVAL;
+      } else  {
+        mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_LOCKED;
+
+        if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[i])) {
+            ALOGE("BUF DONE FAILED");
+        }
+      }
+     }
+  } else
+      ALOGE("%s: error in dequeue_buffer, enqueue_buffer idx = %d, no free buffer now", __func__, frame->def.idx);
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_callback pcb = mHalCamCtrl->mDataCb;
+  mHalCamCtrl->mCallbackLock.unlock();
+  ALOGD("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+  camera_memory_t *previewMem = NULL;
+
+  if (pcb != NULL) {
+       ALOGD("%s: mMsgEnabled =0x%x, preview format =%d", __func__,
+            mHalCamCtrl->mMsgEnabled, mHalCamCtrl->mPreviewFormat);
+      //Sending preview callback if corresponding Msgs are enabled
+      if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+          ALOGE("Q%s: PCB callback enabled", __func__);
+          msgType |=  CAMERA_MSG_PREVIEW_FRAME;
+          int previewBufSize;
+          /* The preview buffer size sent back in the callback should be (width * height * bytes_per_pixel).
+           * All preview formats we currently support use 12 bits per pixel, so buffer size = previewWidth * previewHeight * 3/2.
+           * A check is needed if other formats are supported in the future. (punits) */
+          if((mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV21) || (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV12) ||
+                    (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_YV12))
+          {
+              previewBufSize = mHalCamCtrl->mPreviewWidth * mHalCamCtrl->mPreviewHeight * 3/2;
+              if(previewBufSize != mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->size) {
+                  previewMem = mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->fd,
+                  previewBufSize, 1, mHalCamCtrl->mCallbackCookie);
+                  if (!previewMem || !previewMem->data) {
+                      ALOGE("%s: mGetMemory failed.\n", __func__);
+                  } else {
+                      data = previewMem;
+                  }
+              } else
+                    data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];
+          } else {
+                data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];
+                ALOGE("Invalid preview format, buffer size in preview callback may be wrong.");
+          }
+      } else {
+          data = NULL;
+      }
+      if(msgType) {
+          mStopCallbackLock.unlock();
+          if(mActive)
+            pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+          if (previewMem)
+              previewMem->release(previewMem);
+      }
+	  ALOGD("end of cb");
+  } else {
+    ALOGD("%s PCB is not enabled", __func__);
+  }
+  if(rcb != NULL && mVFEOutputs == 1)
+  {
+      int flagwait = 1;
+      if(mHalCamCtrl->mStartRecording == true &&
+              (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+      {
+          if (mHalCamCtrl->mStoreMetaDataInFrame)
+          {
+              if(mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx])
+              {
+                  flagwait = 1;
+                  mStopCallbackLock.unlock();
+                  rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+                      mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx],
+                      0, mHalCamCtrl->mCallbackCookie);
+              } else
+                  flagwait = 0;
+          }
+          else
+          {
+              mStopCallbackLock.unlock();
+              rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+                  mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx],
+                  0, mHalCamCtrl->mCallbackCookie);
+          }
+
+          if(flagwait){
+              Mutex::Autolock rLock(&mHalCamCtrl->mRecordFrameLock);
+              if (mHalCamCtrl->mReleasedRecordingFrame != true) {
+                  mHalCamCtrl->mRecordWait.wait(mHalCamCtrl->mRecordFrameLock);
+              }
+              mHalCamCtrl->mReleasedRecordingFrame = false;
+          }
+      }
+  }
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  //mLastQueuedFrame = frame->def.frame;
+/*
+  if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, frame))
+  {
+      ALOGE("BUF DONE FAILED");
+      return BAD_VALUE;
+  }
+*/
+  return NO_ERROR;
+}
+
+
+status_t QCameraStream_preview::processPreviewFrameWithOutDisplay(
+  mm_camera_ch_data_buf_t *frame)
+{
+  ALOGV("%s",__func__);
+  int err = 0;
+  int msgType = 0;
+  int i;
+  camera_memory_t *data = NULL;
+  camera_frame_metadata_t *metadata = NULL;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    ALOGE("Preview Stopped. Returning callback");
+    return NO_ERROR;
+  }
+  if(mHalCamCtrl==NULL) {
+    ALOGE("%s: X: HAL control object not set",__func__);
+    /*Call buf done*/
+    return BAD_VALUE;
+  }
+
+  if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+      mHalCamCtrl->debugShowPreviewFPS();
+  }
+  //dumpFrameToFile(frame->def.frame);
+  mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  mNotifyBuffer[frame->def.idx] = *frame;
+
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_callback pcb = mHalCamCtrl->mDataCb;
+  mHalCamCtrl->mCallbackLock.unlock();
+  ALOGD("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+  camera_memory_t *previewMem = NULL;
+  int previewWidth, previewHeight;
+  mHalCamCtrl->mParameters.getPreviewSize(&previewWidth, &previewHeight);
+
+#ifdef USE_ION
+  struct ion_flush_data cache_inv_data;
+  int ion_fd;
+  ion_fd = frame->def.frame->ion_dev_fd;
+  cache_inv_data.vaddr = (void *)frame->def.frame->buffer;
+  cache_inv_data.fd = frame->def.frame->fd;
+  cache_inv_data.handle = frame->def.frame->fd_data.handle;
+  cache_inv_data.length = frame->def.frame->ion_alloc.len;
+
+  if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+    ALOGE("%s: Cache clean for Preview buffer %p fd = %d failed", __func__,
+      cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+  if (pcb != NULL) {
+      //Sending preview callback if corresponding Msgs are enabled
+      if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+          msgType |=  CAMERA_MSG_PREVIEW_FRAME;
+          int previewBufSize;
+          /* For CTS : Force the preview memory buffer length to be
+             'previewWidth * previewHeight * 3/2'.
+             Needed when gralloc allocated extra memory. */
+          //Can add this check for other formats as well.
+          if( mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV21) {
+              previewBufSize = previewWidth * previewHeight * 3/2;
+              if(previewBufSize != mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->size) {
+                  previewMem = mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->fd,
+                  previewBufSize, 1, mHalCamCtrl->mCallbackCookie);
+                  if (!previewMem || !previewMem->data) {
+                      ALOGE("%s: mGetMemory failed.\n", __func__);
+                  } else {
+                      data = previewMem;
+                  }
+              } else
+                    data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+          } else
+                data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+      } else {
+          data = NULL;
+      }
+
+      if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA){
+          msgType  |= CAMERA_MSG_PREVIEW_METADATA;
+          metadata = &mHalCamCtrl->mMetadata;
+      } else {
+          metadata = NULL;
+      }
+      if(msgType) {
+          mStopCallbackLock.unlock();
+          if(mActive)
+            pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+          if (previewMem)
+              previewMem->release(previewMem);
+      }
+
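+      /* No display in this mode, so the buffer is returned to mm-camera right after the callback. */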
+      if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[frame->def.idx])) {
+          ALOGE("BUF DONE FAILED");
+      }
+
+      ALOGD("end of cb");
+  }
+
+  return NO_ERROR;
+}
+
+status_t QCameraStream_preview::processPreviewFrame (
+  mm_camera_ch_data_buf_t *frame)
+{
+  if (mHalCamCtrl->isNoDisplayMode()) {
+    return processPreviewFrameWithOutDisplay(frame);
+  } else {
+    return processPreviewFrameWithDisplay(frame);
+  }
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+QCameraStream_preview::
+QCameraStream_preview(int cameraId, camera_mode_t mode)
+  : QCameraStream(cameraId,mode),
+    mLastQueuedFrame(NULL),
+    mNumFDRcvd(0),
+    mFirstFrameRcvd(false)
+  {
+    mHalCamCtrl = NULL;
+    ALOGE("%s: E", __func__);
+    ALOGE("%s: X", __func__);
+  }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+QCameraStream_preview::~QCameraStream_preview() {
+    ALOGV("%s: E", __func__);
+	if(mActive) {
+		stop();
+	}
+	if(mInit) {
+		release();
+	}
+	mInit = false;
+	mActive = false;
+    ALOGV("%s: X", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_preview::init() {
+
+  status_t ret = NO_ERROR;
+  ALOGV("%s: E", __func__);
+
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_PREVIEW_MASK);
+  if (NO_ERROR!=ret) {
+    ALOGE("%s E: can't init native cammera preview ch\n",__func__);
+    return ret;
+  }
+
+  ALOGE("Debug : %s : initChannel",__func__);
+  /* register a notify callback with the mm_camera_t object*/
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                     preview_notify_cb,
+                                     MM_CAMERA_REG_BUF_CB_INFINITE,
+                                     0,this);
+  ALOGE("Debug : %s : cam_evt_register_buf_notify",__func__);
+  buffer_handle_t *buffer_handle = NULL;
+  int tmp_stride = 0;
+  mInit = true;
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_preview::start()
+{
+    ALOGV("%s: E", __func__);
+    status_t ret = NO_ERROR;
+
+    Mutex::Autolock lock(mStopCallbackLock);
+
+    /* call start() in parent class to start the monitor thread*/
+    //QCameraStream::start ();
+    setFormat(MM_CAMERA_CH_PREVIEW_MASK);
+
+    if (mHalCamCtrl->isNoDisplayMode()) {
+        if(NO_ERROR!=initPreviewOnlyBuffers()){
+            return BAD_VALUE;
+        }
+    } else {
+        if(NO_ERROR!=initDisplayBuffers()){
+            return BAD_VALUE;
+        }
+    }
+    ALOGE("Debug : %s : initDisplayBuffers",__func__);
+
+    ret = cam_config_prepare_buf(mCameraId, &mDisplayBuf);
+    ALOGE("Debug : %s : cam_config_prepare_buf",__func__);
+    if(ret != MM_CAMERA_OK) {
+        ALOGV("%s:reg preview buf err=%d\n", __func__, ret);
+        ret = BAD_VALUE;
+        goto error;
+    }else {
+        ret = NO_ERROR;
+    }
+
+    /* For preview, the OP_MODE we set depends on whether we are starting
+       the camera or the camcorder. For snapshot, preview is disabled anyway.
+       However, for ZSL we need to set OP_MODE to OP_MODE_ZSL and not
+       OP_MODE_VIDEO. For now that is set in CamCtrl, so in the ZSL case
+       we skip setting the mode here. */
+
+    if (!(myMode & CAMERA_ZSL_MODE)) {
+        ALOGE("Setting OP MODE to MM_CAMERA_OP_MODE_VIDEO");
+        mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+        ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                                        &op_mode);
+        ALOGE("OP Mode Set");
+
+        if(MM_CAMERA_OK != ret) {
+          ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_VIDEO err=%d\n", __func__, ret);
+          ret = BAD_VALUE;
+          goto error;
+        }
+    }else {
+        ALOGE("Setting OP MODE to MM_CAMERA_OP_MODE_ZSL");
+        mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_ZSL;
+        ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                                        &op_mode);
+        if(MM_CAMERA_OK != ret) {
+          ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_ZSL err=%d\n", __func__, ret);
+          ret = BAD_VALUE;
+          goto error;
+        }
+     }
+
+    /* call mm_camera action start(...)  */
+    ALOGE("Starting Preview/Video Stream. ");
+    mFirstFrameRcvd = false;
+    ret = cam_ops_action(mCameraId, TRUE, MM_CAMERA_OPS_PREVIEW, 0);
+
+    if (MM_CAMERA_OK != ret) {
+      ALOGE ("%s: preview streaming start err=%d\n", __func__, ret);
+      ret = BAD_VALUE;
+      goto error;
+    }
+
+    ALOGE("Debug : %s : Preview streaming Started",__func__);
+    ret = NO_ERROR;
+
+    mActive =  true;
+    goto end;
+
+error:
+    putBufferToSurface();
+end:
+    ALOGE("%s: X", __func__);
+    return ret;
+  }
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+  void QCameraStream_preview::stop() {
+    ALOGE("%s: E", __func__);
+    int ret=MM_CAMERA_OK;
+
+    if(!mActive) {
+      return;
+    }
+    Mutex::Autolock lock(mStopCallbackLock);
+    mActive =  false;
+    /* unregister the notify fn from the mm_camera_t object*/
+
+    ALOGI("%s: Stop the thread \n", __func__);
+    /* call stop() in parent class to stop the monitor thread*/
+    ret = cam_ops_action(mCameraId, FALSE, MM_CAMERA_OPS_PREVIEW, 0);
+    if(MM_CAMERA_OK != ret) {
+      ALOGE ("%s: camera preview stop err=%d\n", __func__, ret);
+    }
+    ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_PREVIEW);
+    if(ret != MM_CAMERA_OK) {
+      ALOGE("%s:Unreg preview buf err=%d\n", __func__, ret);
+      //ret = BAD_VALUE;
+    }
+
+    /* In case of a clean stop, we need to clean all buffers*/
+    ALOGE("Debug : %s : Buffer Unprepared",__func__);
+    /*free camera_memory handles and return buffer back to surface*/
+    if (! mHalCamCtrl->isNoDisplayMode() ) {
+      putBufferToSurface();
+    } else {
+      freeBufferNoDisplay( );
+    }
+
+    ALOGE("%s: X", __func__);
+
+  }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+  void QCameraStream_preview::release() {
+
+    ALOGE("%s : BEGIN",__func__);
+    int ret=MM_CAMERA_OK,i;
+
+    if(!mInit)
+    {
+      ALOGE("%s : Stream not Initalized",__func__);
+      return;
+    }
+
+    if(mActive) {
+      this->stop();
+    }
+
+    ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_PREVIEW);
+    ALOGE("Debug : %s : De init Channel",__func__);
+    if(ret != MM_CAMERA_OK) {
+      ALOGE("%s:Deinit preview channel failed=%d\n", __func__, ret);
+      //ret = BAD_VALUE;
+    }
+
+    (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                      NULL,
+                                      (mm_camera_register_buf_cb_type_t)NULL,
+                                      NULL,
+                                      NULL);
+    mInit = false;
+    ALOGE("%s: END", __func__);
+
+  }
+
+QCameraStream*
+QCameraStream_preview::createInstance(int cameraId,
+                                      camera_mode_t mode)
+{
+  QCameraStream* pme = new QCameraStream_preview(cameraId, mode);
+  return pme;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+void QCameraStream_preview::deleteInstance(QCameraStream *p)
+{
+  if (p){
+    ALOGV("%s: BEGIN", __func__);
+    p->release();
+    delete p;
+    p = NULL;
+    ALOGV("%s: END", __func__);
+  }
+}
+
+
+/* Temp helper function */
+void *QCameraStream_preview::getLastQueuedFrame(void)
+{
+    return mLastQueuedFrame;
+}
+
+// ---------------------------------------------------------------------------
+// No code beyond this line
+// ---------------------------------------------------------------------------
+}; // namespace android
diff --git a/camera/QCameraHWI_Preview_7x27A.cpp b/camera/QCameraHWI_Preview_7x27A.cpp
new file mode 100644
index 0000000..41a1693
--- /dev/null
+++ b/camera/QCameraHWI_Preview_7x27A.cpp
@@ -0,0 +1,888 @@
+/*
+** Copyright (c) 2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Preview"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include <gralloc_priv.h>
+#include <genlock.h>
+
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraHWI_Preview class implementation goes here*/
+/* The following code implements the preview mode's image capture & display logic of this class. */
+
+namespace android {
+
+// ---------------------------------------------------------------------------
+// Preview Callback
+// ---------------------------------------------------------------------------
+static void preview_notify_cb(mm_camera_ch_data_buf_t *frame,
+                                void *user_data)
+{
+  QCameraStream_preview *pme = (QCameraStream_preview *)user_data;
+  mm_camera_ch_data_buf_t *bufs_used = 0;
+  ALOGV("%s: E", __func__);
+  /* for preview data there is no queue, so use the frame directly */
+  if(pme==NULL) {
+    ALOGE("%s: X : Incorrect cookie",__func__);
+    /*Call buf done*/
+    return;
+  }
+
+  pme->processPreviewFrame(frame);
+  ALOGV("%s: X", __func__);
+}
+
+status_t QCameraStream_preview::setPreviewWindow(preview_stream_ops_t* window)
+{
+    status_t retVal = NO_ERROR;
+    ALOGE(" %s: E ", __FUNCTION__);
+    if( window == NULL) {
+        ALOGW(" Setting NULL preview window ");
+        /* TODO: Current preview window will be invalidated.
+         * Release all the buffers back */
+       // relinquishBuffers();
+    }
+    mDisplayLock.lock();
+    mPreviewWindow = window;
+    mDisplayLock.unlock();
+    ALOGV(" %s : X ", __FUNCTION__ );
+    return retVal;
+}
+
+status_t QCameraStream_preview::getBufferFromSurface() {
+    int err = 0;
+    int numMinUndequeuedBufs = 0;
+	int format = 0;
+	status_t ret = NO_ERROR;
+
+    ALOGI(" %s : E ", __FUNCTION__);
+
+    if( mPreviewWindow == NULL) {
+		ALOGE("%s: mPreviewWindow = NULL", __func__);
+        return INVALID_OPERATION;
+	}
+    cam_ctrl_dimension_t dim;
+
+	//mDisplayLock.lock();
+    cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+	format = mHalCamCtrl->getPreviewFormatInfo().Hal_format;
+	if(ret != NO_ERROR) {
+        ALOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+		goto end;
+	}
+	numMinUndequeuedBufs = 0;
+	if(mPreviewWindow->get_min_undequeued_buffer_count) {
+    err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow, &numMinUndequeuedBufs);
+		if (err != 0) {
+			 ALOGE("get_min_undequeued_buffer_count  failed: %s (%d)",
+						strerror(-err), -err);
+			 ret = UNKNOWN_ERROR;
+			 goto end;
+		}
+	}
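+    /* Total preview buffers = HAL-owned buffers plus the minimum the native window keeps un-dequeued. */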
+    mHalCamCtrl->mPreviewMemoryLock.lock();
+    mHalCamCtrl->mPreviewMemory.buffer_count = kPreviewBufferCount + numMinUndequeuedBufs;
+    err = mPreviewWindow->set_buffer_count(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_count );
+    if (err != 0) {
+         ALOGE("set_buffer_count failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+		 goto end;
+    }
+    err = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+                dim.display_width, dim.display_height, format);
+    if (err != 0) {
+         ALOGE("set_buffers_geometry failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+		 goto end;
+    }
+    err = mPreviewWindow->set_usage(mPreviewWindow,
+                GRALLOC_USAGE_PRIVATE_ADSP_HEAP |
+                GRALLOC_USAGE_PRIVATE_UNCACHED);
+	if(err != 0) {
+        /* set_usage error out */
+		ALOGE("%s: set_usage rc = %d", __func__, err);
+		ret = UNKNOWN_ERROR;
+		goto end;
+	}
+	for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+		int stride;
+		err = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+										&mHalCamCtrl->mPreviewMemory.buffer_handle[cnt],
+										&mHalCamCtrl->mPreviewMemory.stride[cnt]);
+		if(!err) {
+                    err = mPreviewWindow->lock_buffer(this->mPreviewWindow,
+                                       mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+                    // lock the buffer using genlock
+                    ALOGD("%s: camera call genlock_lock", __FUNCTION__);
+                    if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]),
+                                                      GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+                       ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+                       mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+	               mHalCamCtrl->mPreviewMemoryLock.unlock();
+                       return -EINVAL;
+                   }
+		   mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_LOCKED;
+		} else
+			ALOGE("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+
+		ALOGE("%s: dequeue buf: %u\n", __func__, (unsigned int)mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+		if(err != 0) {
+            ALOGE("%s: dequeue_buffer failed: %s (%d)", __func__,
+                    strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+			for(int i = 0; i < cnt; i++) {
+                        ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+                        if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[i]) {
+                             if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+                                                          (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])))) {
+                                ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+	                        mHalCamCtrl->mPreviewMemoryLock.unlock();
+                                return -EINVAL;
+                             }
+                        }
+		        err = mPreviewWindow->cancel_buffer(mPreviewWindow,
+										mHalCamCtrl->mPreviewMemory.buffer_handle[i]);
+				mHalCamCtrl->mPreviewMemory.buffer_handle[i] = NULL;
+				mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+			}
+			goto end;
+		}
+		mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt] =
+		    (struct private_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+		mHalCamCtrl->mPreviewMemory.camera_memory[cnt] =
+		    mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+			mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size, 1, (void *)this);
+		ALOGE("%s: idx = %d, fd = %d, size = %d, offset = %d", __func__,
+            cnt, mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+			mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size,
+			mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->offset);
+	}
+
+
+	memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+	memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+    ALOGI(" %s : X ",__FUNCTION__);
+end:
+	//mDisplayLock.unlock();
+	mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+    return ret;
+}
+
+status_t QCameraStream_preview::putBufferToSurface() {
+    int err = 0;
+	status_t ret = NO_ERROR;
+
+    ALOGI(" %s : E ", __FUNCTION__);
+
+    //mDisplayLock.lock();
+    mHalCamCtrl->mPreviewMemoryLock.lock();
+	for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+        mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->release(mHalCamCtrl->mPreviewMemory.camera_memory[cnt]);
+            if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[cnt]) {
+                ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+	        if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+                                                    (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])))) {
+                    ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+	            mHalCamCtrl->mPreviewMemoryLock.unlock();
+                    return -EINVAL;
+                } else {
+                    mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+                }
+            }
+            err = mPreviewWindow->cancel_buffer(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+		ALOGE(" put buffer %d successfully", cnt);
+	}
+	memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+	mHalCamCtrl->mPreviewMemoryLock.unlock();
+	//mDisplayLock.unlock();
+    ALOGI(" %s : X ",__FUNCTION__);
+    return NO_ERROR;
+}
+
+void QCameraStream_preview::notifyROIEvent(fd_roi_t roi)
+{
+    switch (roi.type) {
+    case FD_ROI_TYPE_HEADER:
+        {
+            mDisplayLock.lock();
+            mNumFDRcvd = 0;
+            memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+            mHalCamCtrl->mMetadata.faces = mHalCamCtrl->mFace;
+            mHalCamCtrl->mMetadata.number_of_faces = roi.d.hdr.num_face_detected;
+            if(mHalCamCtrl->mMetadata.number_of_faces > MAX_ROI)
+              mHalCamCtrl->mMetadata.number_of_faces = MAX_ROI;
+            mDisplayLock.unlock();
+
+            if (mHalCamCtrl->mMetadata.number_of_faces == 0) {
+                // Clear previous faces
+                mHalCamCtrl->mCallbackLock.lock();
+                camera_data_callback pcb = mHalCamCtrl->mDataCb;
+                mHalCamCtrl->mCallbackLock.unlock();
+
+                if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+                    ALOGE("%s: Face detection RIO callback", __func__);
+                    pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+                }
+            }
+        }
+        break;
+    case FD_ROI_TYPE_DATA:
+        {
+            mDisplayLock.lock();
+            int idx = roi.d.data.idx;
+            if (idx >= mHalCamCtrl->mMetadata.number_of_faces) {
+                mDisplayLock.unlock();
+                ALOGE("%s: idx %d out of boundary %d", __func__, idx, mHalCamCtrl->mMetadata.number_of_faces);
+                break;
+            }
+
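+            /* Map face coordinates from pixel space into the Android face detection
+               coordinate space of [-1000, 1000]. */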
+            mHalCamCtrl->mFace[idx].id = roi.d.data.face.id;
+            mHalCamCtrl->mFace[idx].score = roi.d.data.face.score / 10; // keep within range 0~100
+
+            // left
+            mHalCamCtrl->mFace[idx].rect[0] =
+               roi.d.data.face.face_boundary.x*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            // top
+            mHalCamCtrl->mFace[idx].rect[1] =
+               roi.d.data.face.face_boundary.y*2000/mHalCamCtrl->mDimension.display_height - 1000;
+            // right
+            mHalCamCtrl->mFace[idx].rect[2] =  mHalCamCtrl->mFace[idx].rect[0] +
+               roi.d.data.face.face_boundary.dx*2000/mHalCamCtrl->mDimension.display_width;
+            // bottom
+            mHalCamCtrl->mFace[idx].rect[3] = mHalCamCtrl->mFace[idx].rect[1] +
+               roi.d.data.face.face_boundary.dy*2000/mHalCamCtrl->mDimension.display_height;
+
+            // Center of left eye
+            mHalCamCtrl->mFace[idx].left_eye[0] =
+              roi.d.data.face.left_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            mHalCamCtrl->mFace[idx].left_eye[1] =
+              roi.d.data.face.left_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+            // Center of right eye
+            mHalCamCtrl->mFace[idx].right_eye[0] =
+              roi.d.data.face.right_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            mHalCamCtrl->mFace[idx].right_eye[1] =
+              roi.d.data.face.right_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+            // Center of mouth
+            mHalCamCtrl->mFace[idx].mouth[0] =
+              roi.d.data.face.mouth_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+            mHalCamCtrl->mFace[idx].mouth[1] =
+              roi.d.data.face.mouth_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+            mHalCamCtrl->mFace[idx].smile_degree = roi.d.data.face.smile_degree;
+            mHalCamCtrl->mFace[idx].smile_score = roi.d.data.face.smile_confidence / 10; //Keep within range 1~100
+            mHalCamCtrl->mFace[idx].blink_detected = roi.d.data.face.blink_detected;
+            mHalCamCtrl->mFace[idx].face_recognised = roi.d.data.face.is_face_recognised;
+
+            ALOGE("%s: Face(%d, %d, %d, %d), leftEye(%d, %d), rightEye(%d, %d), mouth(%d, %d), smile(%d, %d), blinked(%d)", __func__,
+               mHalCamCtrl->mFace[idx].rect[0],  mHalCamCtrl->mFace[idx].rect[1],
+               mHalCamCtrl->mFace[idx].rect[2],  mHalCamCtrl->mFace[idx].rect[3],
+               mHalCamCtrl->mFace[idx].left_eye[0], mHalCamCtrl->mFace[idx].left_eye[1],
+               mHalCamCtrl->mFace[idx].right_eye[0], mHalCamCtrl->mFace[idx].right_eye[1],
+               mHalCamCtrl->mFace[idx].mouth[0], mHalCamCtrl->mFace[idx].mouth[1],
+               roi.d.data.face.smile_degree, roi.d.data.face.smile_confidence, roi.d.data.face.blink_detected);
+
+             mNumFDRcvd++;
+             mDisplayLock.unlock();
+
+             if (mNumFDRcvd == mHalCamCtrl->mMetadata.number_of_faces) {
+                 mHalCamCtrl->mCallbackLock.lock();
+                 camera_data_callback pcb = mHalCamCtrl->mDataCb;
+                 mHalCamCtrl->mCallbackLock.unlock();
+
+                 if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+                     ALOGE("%s: Face detection RIO callback with %d faces detected (score=%d)", __func__, mNumFDRcvd, mHalCamCtrl->mFace[idx].score);
+                     pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+                 }
+             }
+        }
+        break;
+    }
+}
+
+status_t QCameraStream_preview::initDisplayBuffers()
+{
+    status_t ret = NO_ERROR;
+    int width = 0;  /* width of channel  */
+    int height = 0; /* height of channel */
+    uint32_t frame_len = 0; /* frame planner length */
+    int buffer_num = 4; /* number of buffers for display */
+    const char *pmem_region;
+    uint8_t num_planes = 0;
+    uint32_t planes[VIDEO_MAX_PLANES];
+
+    cam_ctrl_dimension_t dim;
+
+    ALOGE("%s:BEGIN",__func__);
+	memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+	mHalCamCtrl->mPreviewMemoryLock.lock();
+	memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+	mHalCamCtrl->mPreviewMemoryLock.unlock();
+	memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+  /* get preview size by querying mm_camera */
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+
+    memset(&(this->mDisplayStreamBuf),0, sizeof(this->mDisplayStreamBuf));
+
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (MM_CAMERA_OK != ret) {
+      ALOGE("%s: error - can't get camera dimension!", __func__);
+      ALOGE("%s: X", __func__);
+      return BAD_VALUE;
+    }else {
+      width =  dim.display_width,
+      height = dim.display_height;
+    }
+
+	ret = getBufferFromSurface();
+	if(ret != NO_ERROR) {
+	 ALOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+	 return ret;
+	}
+
+  /* set 4 buffers for display */
+  memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  this->mDisplayStreamBuf.num = mHalCamCtrl->mPreviewMemory.buffer_count;
+  this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
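+  /* Semi-planar YUV preview uses two planes: plane 0 is Y, plane 1 is interleaved chroma;
+     plane lengths come from the dimension query above. */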
+  num_planes = 2;
+  planes[0] = dim.display_frame_offset.mp[0].len;
+  planes[1] = dim.display_frame_offset.mp[1].len;
+  this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+  mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+  if (!mDisplayBuf.preview.buf.mp) {
+      ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+      mHalCamCtrl->mPreviewMemoryLock.unlock();
+      return NO_MEMORY;
+  }
+  memset(mDisplayBuf.preview.buf.mp, 0,
+    mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+  /*allocate memory for the buffers*/
+  void *vaddr = NULL;
+  for(int i = 0; i < mDisplayStreamBuf.num; i++){
+	  if (mHalCamCtrl->mPreviewMemory.private_buffer_handle[i] == NULL)
+		  continue;
+      mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->fd;
+      mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+      mDisplayStreamBuf.frame[i].y_off = 0;
+      mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+	  mHalCamCtrl->mPreviewMemory.addr_offset[i] =
+	      mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->offset;
+      mDisplayStreamBuf.frame[i].buffer =
+          (long unsigned int)mHalCamCtrl->mPreviewMemory.camera_memory[i]->data;
+
+	  ALOGE("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, offset = %d, vaddr = 0x%x",
+		  __func__, i,
+		  mDisplayStreamBuf.frame[i].fd,
+		  mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+		  mDisplayStreamBuf.frame[i].cbcr_off,
+		  mDisplayStreamBuf.frame[i].y_off,
+		  mHalCamCtrl->mPreviewMemory.addr_offset[i],
+		  (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+
+        mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+        mDisplayBuf.preview.buf.mp[i].frame_offset = mHalCamCtrl->mPreviewMemory.addr_offset[i];
+        mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+        /* Plane 0 needs to be set separately. Set other planes
+         * in a loop. */
+        mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+        mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+        mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+        mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+          mDisplayBuf.preview.buf.mp[i].frame_offset;
+        for (int j = 1; j < num_planes; j++) {
+          mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+          mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+            mDisplayStreamBuf.frame[i].fd;
+		  mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+          mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+            mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+            mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+        }
+
+		for (int j = 0; j < num_planes; j++) {
+			ALOGE("Planes: %d length: %d userptr: %lu offset: %d\n",
+				 j, mDisplayBuf.preview.buf.mp[i].planes[j].length,
+				 mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+				 mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+		}
+
+  }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+  mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+  mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  ALOGE("%s:END",__func__);
+  return NO_ERROR;
+
+end:
+  if (MM_CAMERA_OK == ret ) {
+    ALOGV("%s: X - NO_ERROR ", __func__);
+    return NO_ERROR;
+  }
+
+    ALOGV("%s: out of memory clean up", __func__);
+  /* release the allocated memory */
+
+  ALOGV("%s: X - BAD_VALUE ", __func__);
+  return BAD_VALUE;
+}
+
+void QCameraStream_preview::dumpFrameToFile(struct msm_frame* newFrame)
+{
+  int32_t enabled = 0;
+  int frm_num;
+  uint32_t  skip_mode;
+  char value[PROPERTY_VALUE_MAX];
+  char buf[32];
+  int w, h;
+  static int count = 0;
+  cam_ctrl_dimension_t dim;
+  int file_fd;
+  int rc = 0;
+  int len;
+  unsigned long addr;
+  unsigned long * tmp = (unsigned long *)newFrame->buffer;
+  addr = *tmp;
+  status_t ret = cam_config_get_parm(mHalCamCtrl->mCameraId,
+                 MM_CAMERA_PARM_DIMENSION, &dim);
+
+  w = dim.display_width;
+  h = dim.display_height;
+  len = (w * h)*3/2;
+  count++;
+  if(count < 100) {
+    snprintf(buf, sizeof(buf), "/data/mzhu%d.yuv", count);
+    file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+    if (file_fd < 0) {
+      ALOGE("%s: failed to open dump file %s", __func__, buf);
+      return;
+    }
+
+    rc = write(file_fd, (const void *)addr, len);
+    ALOGE("%s: file='%s', vaddr_old=0x%x, addr_map = 0x%p, len = %d, rc = %d",
+          __func__, buf, (uint32_t)newFrame->buffer, (void *)addr, len, rc);
+    close(file_fd);
+    ALOGE("%s: dump %s, rc = %d, len = %d", __func__, buf, rc, len);
+  }
+}
+
+status_t QCameraStream_preview::processPreviewFrame(mm_camera_ch_data_buf_t *frame)
+{
+  ALOGV("%s",__func__);
+  int err = 0;
+  int msgType = 0;
+  camera_memory_t *data = NULL;
+  camera_frame_metadata_t *metadata = NULL;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    ALOGE("Preview Stopped. Returning callback");
+    return NO_ERROR;
+  }
+  if(mHalCamCtrl==NULL) {
+    ALOGE("%s: X: HAL control object not set",__func__);
+    /*Call buf done*/
+    return BAD_VALUE;
+  }
+
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+  void *rdata = mHalCamCtrl->mCallbackCookie;
+  mHalCamCtrl->mCallbackLock.unlock();
+
+  if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+      mHalCamCtrl->debugShowPreviewFPS();
+  }
+  //dumpFrameToFile(frame->def.frame);
+  mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+  nsecs_t timeStamp = systemTime();
+
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  mNotifyBuffer[frame->def.idx] = *frame;
+  // mzhu fix me, need to check meta data also.
+
+  ALOGI("Enqueue buf handle %p\n",
+  mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+    if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx]) {
+        if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)
+	            (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]))) {
+            ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+	    mHalCamCtrl->mPreviewMemoryLock.unlock();
+            return -EINVAL;
+        } else {
+            mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_UNLOCKED;
+        }
+    } else {
+        ALOGE("%s: buffer to be enqueued is not locked", __FUNCTION__);
+	mHalCamCtrl->mPreviewMemoryLock.unlock();
+        return -EINVAL;
+    }
+  err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+    (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  if(err != 0) {
+    ALOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+  }
+  buffer_handle_t *buffer_handle = NULL;
+  int tmp_stride = 0;
+  err = this->mPreviewWindow->dequeue_buffer(this->mPreviewWindow,
+              &buffer_handle, &tmp_stride);
+  if (err == NO_ERROR && buffer_handle != NULL) {
+      err = this->mPreviewWindow->lock_buffer(this->mPreviewWindow, buffer_handle);
+      ALOGD("%s: camera call genlock_lock", __FUNCTION__);
+      if (GENLOCK_FAILURE == genlock_lock_buffer((native_handle_t*)(*buffer_handle), GENLOCK_WRITE_LOCK,
+                                                 GENLOCK_MAX_TIMEOUT)) {
+            ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+	    mHalCamCtrl->mPreviewMemoryLock.unlock();
+            return -EINVAL;
+      }
+      for(int i = 0; i < mHalCamCtrl->mPreviewMemory.buffer_count; i++) {
+		  ALOGD("h1: %p h2: %p\n", mHalCamCtrl->mPreviewMemory.buffer_handle[i], buffer_handle);
+		  if(mHalCamCtrl->mPreviewMemory.buffer_handle[i] == buffer_handle) {
+	          mm_camera_ch_data_buf_t tmp_frame;
+                  mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_LOCKED;
+              if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[i])) {
+                  ALOGD("BUF DONE FAILED");
+                  mHalCamCtrl->mPreviewMemoryLock.unlock();
+                  return BAD_VALUE;
+              }
+			  break;
+		  }
+	  }
+  } else
+      ALOGE("%s: error in dequeue_buffer, enqueue_buffer idx = %d, no free buffer now", __func__, frame->def.idx);
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_callback pcb = mHalCamCtrl->mDataCb;
+  mHalCamCtrl->mCallbackLock.unlock();
+  ALOGD("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+  if (pcb != NULL) {
+      //Sending preview callback if corresponding Msgs are enabled
+      if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+          msgType |=  CAMERA_MSG_PREVIEW_FRAME;
+          data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+      } else {
+          data = NULL;
+      }
+      if(msgType) {
+          mStopCallbackLock.unlock();
+          pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+      }
+	  ALOGD("end of cb");
+  }
+  if(rcb != NULL)
+  {
+    if (mHalCamCtrl->mStoreMetaDataInFrame)
+    {
+        mStopCallbackLock.unlock();
+        if(mHalCamCtrl->mStartRecording == true &&
+           (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+            rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+                mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx],
+                0, mHalCamCtrl->mCallbackCookie);
+        }
+    }
+    else
+    {
+        if(mHalCamCtrl->mStartRecording == true &&
+           (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+        {
+            mStopCallbackLock.unlock();
+            rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+                mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx],
+                0, mHalCamCtrl->mCallbackCookie);
+        }
+    }
+  }
+
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  //mLastQueuedFrame = frame->def.frame;
+/*
+  if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, frame))
+  {
+      ALOGE("BUF DONE FAILED");
+      return BAD_VALUE;
+  }
+*/
+  return NO_ERROR;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+QCameraStream_preview::
+QCameraStream_preview(int cameraId, camera_mode_t mode)
+  : QCameraStream(cameraId,mode),
+    mLastQueuedFrame(NULL),
+    mNumFDRcvd(0)
+  {
+    mHalCamCtrl = NULL;
+    ALOGE("%s: E", __func__);
+    ALOGE("%s: X", __func__);
+  }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+QCameraStream_preview::~QCameraStream_preview() {
+    ALOGV("%s: E", __func__);
+	if(mActive) {
+		stop();
+	}
+	if(mInit) {
+		release();
+	}
+	mInit = false;
+	mActive = false;
+    ALOGV("%s: X", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_preview::init() {
+
+  status_t ret = NO_ERROR;
+  ALOGV("%s: E", __func__);
+
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_PREVIEW_MASK);
+  if (NO_ERROR!=ret) {
+    ALOGE("%s E: can't init native cammera preview ch\n",__func__);
+    return ret;
+  }
+
+  ALOGE("Debug : %s : initChannel",__func__);
+  /* register a notify callback with the mm_camera_t object*/
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                     preview_notify_cb, MM_CAMERA_REG_BUF_CB_INFINITE, 0, this);
+  ALOGE("Debug : %s : cam_evt_register_buf_notify",__func__);
+  buffer_handle_t *buffer_handle = NULL;
+  int tmp_stride = 0;
+  mInit = true;
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_preview::start()
+{
+    ALOGV("%s: E", __func__);
+    status_t ret = NO_ERROR;
+    mm_camera_reg_buf_t *reg_buf=&mDisplayBuf;
+
+    Mutex::Autolock lock(mStopCallbackLock);
+
+    /* call start() in parent class to start the monitor thread*/
+    //QCameraStream::start ();
+    setFormat(MM_CAMERA_CH_PREVIEW_MASK);
+
+    if(NO_ERROR!=initDisplayBuffers()){
+        return BAD_VALUE;
+    }
+    ALOGE("Debug : %s : initDisplayBuffers",__func__);
+    ret = cam_config_prepare_buf(mCameraId, reg_buf);
+    ALOGE("Debug : %s : cam_config_prepare_buf",__func__);
+    if(ret != MM_CAMERA_OK) {
+        ALOGV("%s:reg preview buf err=%d\n", __func__, ret);
+        ret = BAD_VALUE;
+    }else
+        ret = NO_ERROR;
+
+    /* For preview, the OP_MODE we set depends on whether we are starting
+       the camera or the camcorder. For snapshot, preview is disabled anyway.
+       However, for ZSL we need to set OP_MODE to OP_MODE_ZSL and not
+       OP_MODE_VIDEO. For now that is set in CamCtrl, so in the ZSL case
+       we skip setting the mode here. */
+
+    if (!(myMode & CAMERA_ZSL_MODE)) {
+        ALOGE("Setting OP MODE to MM_CAMERA_OP_MODE_VIDEO");
+        mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+        ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                                        &op_mode);
+        ALOGE("OP Mode Set");
+
+        if(MM_CAMERA_OK != ret) {
+          ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_VIDEO err=%d\n", __func__, ret);
+          return BAD_VALUE;
+        }
+    }else {
+        ALOGE("Setting OP MODE to MM_CAMERA_OP_MODE_ZSL");
+        mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_ZSL;
+        ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                                        &op_mode);
+        if(MM_CAMERA_OK != ret) {
+          ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_ZSL err=%d\n", __func__, ret);
+          return BAD_VALUE;
+        }
+     }
+
+    /* call mm_camera action start(...)  */
+    ALOGE("Starting Preview/Video Stream. ");
+    ret = cam_ops_action(mCameraId, TRUE, MM_CAMERA_OPS_PREVIEW, 0);
+
+    if (MM_CAMERA_OK != ret) {
+      ALOGE ("%s: preview streaming start err=%d\n", __func__, ret);
+      return BAD_VALUE;
+    }
+
+    ALOGE("Debug : %s : Preview streaming Started",__func__);
+    ret = NO_ERROR;
+
+    mActive =  true;
+    ALOGE("%s: X", __func__);
+    return NO_ERROR;
+  }
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+  void QCameraStream_preview::stop() {
+    ALOGE("%s: E", __func__);
+    int ret=MM_CAMERA_OK;
+
+    if(!mActive) {
+      return;
+    }
+    mActive =  false;
+    Mutex::Autolock lock(mStopCallbackLock);
+    /* unregister the notify fn from the mm_camera_t object*/
+
+    /* call stop() in parent class to stop the monitor thread*/
+    ret = cam_ops_action(mCameraId, FALSE, MM_CAMERA_OPS_PREVIEW, 0);
+    if(MM_CAMERA_OK != ret) {
+      ALOGE ("%s: camera preview stop err=%d\n", __func__, ret);
+    }
+    ALOGE("Debug : %s : Preview streaming Stopped",__func__);
+    ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_PREVIEW);
+    if(ret != MM_CAMERA_OK) {
+      ALOGE("%s:Unreg preview buf err=%d\n", __func__, ret);
+      //ret = BAD_VALUE;
+    }
+
+    ALOGE("Debug : %s : Buffer Unprepared",__func__);
+    if (mDisplayBuf.preview.buf.mp != NULL) {
+        delete[] mDisplayBuf.preview.buf.mp;
+    }
+	/*free camera_memory handles and return buffer back to surface*/
+    putBufferToSurface();
+
+    ALOGE("%s: X", __func__);
+
+  }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+  void QCameraStream_preview::release() {
+
+    ALOGE("%s : BEGIN",__func__);
+    int ret=MM_CAMERA_OK,i;
+
+    if(!mInit)
+    {
+      ALOGE("%s : Stream not Initalized",__func__);
+      return;
+    }
+
+    if(mActive) {
+      this->stop();
+    }
+
+    ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_PREVIEW);
+    ALOGE("Debug : %s : De init Channel",__func__);
+    if(ret != MM_CAMERA_OK) {
+      ALOGE("%s:Deinit preview channel failed=%d\n", __func__, ret);
+      //ret = BAD_VALUE;
+    }
+
+    (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                      NULL,
+					(mm_camera_register_buf_cb_type_t)NULL,
+					NULL,
+					NULL);
+	mInit = false;
+    ALOGE("%s: END", __func__);
+
+  }
+
+QCameraStream*
+QCameraStream_preview::createInstance(int cameraId,
+                                      camera_mode_t mode)
+{
+  QCameraStream* pme = new QCameraStream_preview(cameraId, mode);
+  return pme;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+void QCameraStream_preview::deleteInstance(QCameraStream *p)
+{
+  if (p){
+    ALOGV("%s: BEGIN", __func__);
+    p->release();
+    delete p;
+    p = NULL;
+    ALOGV("%s: END", __func__);
+  }
+}
+
+
+/* Temp helper function */
+void *QCameraStream_preview::getLastQueuedFrame(void)
+{
+    return mLastQueuedFrame;
+}
+
+status_t QCameraStream_preview::initPreviewOnlyBuffers()
+{
+  /* 1. For 7x27A this shall not be called.
+     2. This file shall be removed ASAP, so a dummy implementation
+        is provided just to keep the build compiling. */
+  return INVALID_OPERATION;
+}
+
+// ---------------------------------------------------------------------------
+// No code beyond this line
+// ---------------------------------------------------------------------------
+}; // namespace android
diff --git a/camera/QCameraHWI_Record.cpp b/camera/QCameraHWI_Record.cpp
new file mode 100644
index 0000000..6319b57
--- /dev/null
+++ b/camera/QCameraHWI_Record.cpp
@@ -0,0 +1,582 @@
+/*
+** Copyright (c) 2011 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Record"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraStream.h"
+
+
+#define LIKELY(exp)   __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraStream_record class implementation goes here*/
+/* The following code implements the video streaming capture & encoding logic of this class */
+// ---------------------------------------------------------------------------
+// QCameraStream_record createInstance()
+// ---------------------------------------------------------------------------
+namespace android {
+
+
+QCameraStream* QCameraStream_record::createInstance(int cameraId,
+                                      camera_mode_t mode)
+{
+  ALOGV("%s: BEGIN", __func__);
+  QCameraStream* pme = new QCameraStream_record(cameraId, mode);
+  ALOGV("%s: END", __func__);
+  return pme;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record deleteInstance()
+// ---------------------------------------------------------------------------
+void QCameraStream_record::deleteInstance(QCameraStream *ptr)
+{
+  ALOGV("%s: BEGIN", __func__);
+  if (ptr){
+    ptr->release();
+    delete ptr;
+    ptr = NULL;
+  }
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Constructor
+// ---------------------------------------------------------------------------
+QCameraStream_record::QCameraStream_record(int cameraId,
+                                           camera_mode_t mode)
+  :QCameraStream(cameraId,mode),
+  mDebugFps(false)
+{
+  mHalCamCtrl = NULL;
+  char value[PROPERTY_VALUE_MAX];
+  ALOGV("%s: BEGIN", __func__);
+
+  property_get("persist.debug.sf.showfps", value, "0");
+  mDebugFps = atoi(value);
+
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Destructor
+// ---------------------------------------------------------------------------
+QCameraStream_record::~QCameraStream_record() {
+  ALOGV("%s: BEGIN", __func__);
+  if(mActive) {
+    stop();
+  }
+  if(mInit) {
+    release();
+  }
+  mInit = false;
+  mActive = false;
+  ALOGV("%s: END", __func__);
+
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Callback from mm_camera
+// ---------------------------------------------------------------------------
+static void record_notify_cb(mm_camera_ch_data_buf_t *bufs_new,
+                              void *user_data)
+{
+  QCameraStream_record *pme = (QCameraStream_record *)user_data;
+  mm_camera_ch_data_buf_t *bufs_used = 0;
+  ALOGV("%s: BEGIN", __func__);
+
+  /*
+  * Call Function Process Video Data
+  */
+  pme->processRecordFrame(bufs_new);
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+status_t QCameraStream_record::init()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  /*
+  *  Acquiring Video Channel
+  */
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_VIDEO_MASK);
+  if (NO_ERROR!=ret) {
+    ALOGE("%s ERROR: Can't init native cammera preview ch\n",__func__);
+    return ret;
+  }
+
+  /*
+  * Register the Callback with camera
+  */
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                            record_notify_cb,
+                                            MM_CAMERA_REG_BUF_CB_INFINITE,
+                                            0,
+                                            this);
+
+  mInit = true;
+  ALOGV("%s: END", __func__);
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_record::start()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mInit) {
+    ALOGE("%s ERROR: Record buffer not registered",__func__);
+    return BAD_VALUE;
+  }
+
+  setFormat(MM_CAMERA_CH_VIDEO_MASK);
+  //mRecordFreeQueueLock.lock();
+  //mRecordFreeQueue.clear();
+  //mRecordFreeQueueLock.unlock();
+  /*
+  *  Allocating Encoder Frame Buffers
+  */
+  ret = initEncodeBuffers();
+  if (NO_ERROR!=ret) {
+    ALOGE("%s ERROR: Buffer Allocation Failed\n",__func__);
+    goto error;
+  }
+
+  ret = cam_config_prepare_buf(mCameraId, &mRecordBuf);
+  if(ret != MM_CAMERA_OK) {
+    ALOGV("%s ERROR: Reg Record buf err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else{
+    ret = NO_ERROR;
+  }
+
+  /*
+  * Start Video Streaming
+  */
+  ret = cam_ops_action(mCameraId, TRUE, MM_CAMERA_OPS_VIDEO, 0);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE ("%s ERROR: Video streaming start err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else{
+    ALOGE("%s : Video streaming Started",__func__);
+    ret = NO_ERROR;
+  }
+  mActive = true;
+  ALOGV("%s: END", __func__);
+  return ret;
+
+error:
+  releaseEncodeBuffer();
+  ALOGV("%s: END", __func__);
+  return ret;
+}
+
+void QCameraStream_record::releaseEncodeBuffer() {
+  for(int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+    if (NO_ERROR !=
+      mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_VIDEO, cnt,
+      mCameraId, CAM_SOCK_MSG_TYPE_FD_UNMAPPING))
+      ALOGE("%s: Unmapping Video Data Failed", __func__);
+
+    if (mHalCamCtrl->mStoreMetaDataInFrame) {
+      struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type  *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+      native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+
+    }
+    mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->release(
+      mHalCamCtrl->mRecordingMemory.camera_memory[cnt]);
+    close(mHalCamCtrl->mRecordingMemory.fd[cnt]);
+    mHalCamCtrl->mRecordingMemory.fd[cnt] = -1;
+
+#ifdef USE_ION
+    mHalCamCtrl->deallocate_ion_memory(&mHalCamCtrl->mRecordingMemory, cnt);
+#endif
+  }
+  memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+  //mNumRecordFrames = 0;
+  if (recordframes) {
+    delete[] recordframes;
+    recordframes = NULL;
+  }
+  if (mRecordBuf.video.video.buf.mp) {
+    delete[] mRecordBuf.video.video.buf.mp;
+    mRecordBuf.video.video.buf.mp = NULL;
+  }
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+void QCameraStream_record::stop()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  if(!mActive) {
+    ALOGE("%s : Record stream not started",__func__);
+    return;
+  }
+  mActive =  false;
+  Mutex::Autolock lock(mStopCallbackLock);
+#if 0 //mzhu: when stopping recording, all frames will be dirty; no need to queue frames back to the kernel any more
+  mRecordFreeQueueLock.lock();
+  while(!mRecordFreeQueue.isEmpty()) {
+    ALOGV("%s : Pre-releasing of Encoder buffers!\n", __FUNCTION__);
+    mm_camera_ch_data_buf_t releasedBuf = mRecordFreeQueue.itemAt(0);
+    mRecordFreeQueue.removeAt(0);
+    mRecordFreeQueueLock.unlock();
+    ALOGV("%s (%d): releasedBuf.idx = %d\n", __FUNCTION__, __LINE__,
+                                              releasedBuf.video.video.idx);
+    if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId,&releasedBuf))
+        ALOGE("%s : Buf Done Failed",__func__);
+  }
+  mRecordFreeQueueLock.unlock();
+#if 0
+  while (!mRecordFreeQueue.isEmpty()) {
+        ALOGE("%s : Waiting for Encoder to release all buffer!\n", __FUNCTION__);
+  }
+#endif
+#endif // mzhu
+  /* unregister the notify fn from the mm_camera_t object;
+   * call stop() in the parent class to stop the monitor thread */
+
+  ret = cam_ops_action(mCameraId, FALSE, MM_CAMERA_OPS_VIDEO, 0);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE ("%s ERROR: Video streaming Stop err=%d\n", __func__, ret);
+  }
+
+  ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_VIDEO);
+  if(ret != MM_CAMERA_OK){
+    ALOGE("%s ERROR: Ureg video buf \n", __func__);
+  }
+
+  releaseEncodeBuffer();
+
+  mActive = false;
+  ALOGV("%s: END", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+void QCameraStream_record::release()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  if(mActive) {
+    stop();
+  }
+  if(!mInit) {
+    ALOGE("%s : Record stream not initialized",__func__);
+    return;
+  }
+
+  ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_VIDEO);
+  if(ret != MM_CAMERA_OK) {
+    ALOGE("%s:Deinit Video channel failed=%d\n", __func__, ret);
+  }
+  (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                            NULL,
+                                            (mm_camera_register_buf_cb_type_t)NULL,
+                                            NULL,
+                                            NULL);
+  mInit = false;
+  ALOGV("%s: END", __func__);
+}
+
+status_t QCameraStream_record::processRecordFrame(void *data)
+{
+    ALOGV("%s : BEGIN",__func__);
+    mm_camera_ch_data_buf_t* frame = (mm_camera_ch_data_buf_t*) data;
+
+    Mutex::Autolock lock(mStopCallbackLock);
+    if(!mActive) {
+      ALOGE("Recording Stopped. Returning callback");
+      return NO_ERROR;
+    }
+
+    if (UNLIKELY(mDebugFps)) {
+        debugShowVideoFPS();
+    }
+
+    mHalCamCtrl->dumpFrameToFile(frame->video.video.frame, HAL_DUMP_FRM_VIDEO);
+    mHalCamCtrl->mCallbackLock.lock();
+    camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+    void *rdata = mHalCamCtrl->mCallbackCookie;
+    mHalCamCtrl->mCallbackLock.unlock();
+
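+    /* Compose the frame timestamp in nanoseconds from the driver-provided
+     * tv_sec/tv_nsec pair; this value is passed to the timestamped data
+     * callback below. */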
+    nsecs_t timeStamp = nsecs_t(frame->video.video.frame->ts.tv_sec)*1000000000LL +
+                        frame->video.video.frame->ts.tv_nsec;
+
+  ALOGE("Send Video frame to services/encoder TimeStamp : %lld",timeStamp);
+  mRecordedFrames[frame->video.video.idx] = *frame;
+
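+  /* For ION-backed buffers, clean (write back) the CPU cache so the consumer
+   * of this frame reads up-to-date pixel data. */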
+#ifdef USE_ION
+  struct ion_flush_data cache_inv_data;
+  int ion_fd;
+  ion_fd = frame->video.video.frame->ion_dev_fd;
+  cache_inv_data.vaddr = (void *)frame->video.video.frame->buffer;
+  cache_inv_data.fd = frame->video.video.frame->fd;
+  cache_inv_data.handle = frame->video.video.frame->fd_data.handle;
+  cache_inv_data.length = frame->video.video.frame->ion_alloc.len;
+
+  if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+    ALOGE("%s: Cache clean for Video buffer %p fd = %d failed", __func__,
+      cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
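+  /* Drop mStopCallbackLock before invoking the timestamp data callback; the
+   * callback may re-enter the HAL (e.g. releaseRecordingFrame) and must not
+   * deadlock on this lock. */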
+  if (mHalCamCtrl->mStoreMetaDataInFrame) {
+    mStopCallbackLock.unlock();
+    if(mActive && (rcb != NULL) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+      rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+              mHalCamCtrl->mRecordingMemory.metadata_memory[frame->video.video.idx],
+              0, mHalCamCtrl->mCallbackCookie);
+    }
+  } else {
+    mStopCallbackLock.unlock();
+    if(mActive && (rcb != NULL) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+      rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+              mHalCamCtrl->mRecordingMemory.camera_memory[frame->video.video.idx],
+              0, mHalCamCtrl->mCallbackCookie);
+    }
+  }
+
+  ALOGV("%s : END",__func__);
+  return NO_ERROR;
+}
+
+//Record Related Functions
+status_t QCameraStream_record::initEncodeBuffers()
+{
+  ALOGE("%s : BEGIN",__func__);
+  status_t ret = NO_ERROR;
+  const char *pmem_region;
+  uint32_t frame_len;
+  uint8_t num_planes;
+  uint32_t planes[VIDEO_MAX_PLANES];
+  //cam_ctrl_dimension_t dim;
+  int width = 0;  /* width of channel  */
+  int height = 0; /* height of channel */
+  int buf_cnt;
+  pmem_region = "/dev/pmem_adsp";
+
+
+  memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+  memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+  ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE("%s: ERROR - can't get camera dimension!", __func__);
+    return BAD_VALUE;
+  }
+  else {
+    width =  dim.video_width;
+    height = dim.video_height;
+  }
+  num_planes = 2;
+  planes[0] = dim.video_frame_offset.mp[0].len;
+  planes[1] = dim.video_frame_offset.mp[1].len;
+  frame_len = dim.video_frame_offset.frame_len;
+
+  buf_cnt = VIDEO_BUFFER_COUNT;
+  if(mHalCamCtrl->isLowPowerCamcorder()) {
+    ALOGE("%s: lower power camcorder selected", __func__);
+    buf_cnt = VIDEO_BUFFER_COUNT_LOW_POWER_CAMCORDER;
+  }
+    recordframes = new msm_frame[buf_cnt];
+    memset(recordframes,0,sizeof(struct msm_frame) * buf_cnt);
+
+    mRecordBuf.video.video.buf.mp = new mm_camera_mp_buf_t[buf_cnt];
+    if (!mRecordBuf.video.video.buf.mp) {
+      ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+      return BAD_VALUE;
+    }
+    memset(mRecordBuf.video.video.buf.mp, 0,
+           buf_cnt * sizeof(mm_camera_mp_buf_t));
+
+    memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+    for (int i=0; i<MM_CAMERA_MAX_NUM_FRAMES;i++) {
+        mHalCamCtrl->mRecordingMemory.main_ion_fd[i] = -1;
+        mHalCamCtrl->mRecordingMemory.fd[i] = -1;
+    }
+
+    mHalCamCtrl->mRecordingMemory.buffer_count = buf_cnt;
+
+    mHalCamCtrl->mRecordingMemory.size = frame_len;
+    mHalCamCtrl->mRecordingMemory.cbcr_offset = planes[0];
+
+    for (int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+#ifdef USE_ION
+      if(mHalCamCtrl->allocate_ion_memory(&mHalCamCtrl->mRecordingMemory, cnt,
+        ((0x1 << CAMERA_ION_HEAP_ID) | (0x1 << CAMERA_ION_FALLBACK_HEAP_ID))) < 0) {
+        ALOGE("%s ION alloc failed\n", __func__);
+        return UNKNOWN_ERROR;
+      }
+#else
+      mHalCamCtrl->mRecordingMemory.fd[cnt] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+      if(mHalCamCtrl->mRecordingMemory.fd[cnt] <= 0) {
+        ALOGE("%s: no pmem for frame %d", __func__, cnt);
+        return UNKNOWN_ERROR;
+      }
+#endif
+      mHalCamCtrl->mRecordingMemory.camera_memory[cnt] =
+        mHalCamCtrl->mGetMemory(mHalCamCtrl->mRecordingMemory.fd[cnt],
+        mHalCamCtrl->mRecordingMemory.size, 1, (void *)this);
+
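+      /* When metadata-in-buffers is enabled, the encoder is handed a small
+       * encoder_media_buffer_type packet whose native handle carries the
+       * frame's fd, offset and size rather than the YUV data itself. */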
+      if (mHalCamCtrl->mStoreMetaDataInFrame) {
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] =
+          mHalCamCtrl->mGetMemory(-1,
+          sizeof(struct encoder_media_buffer_type), 1, (void *)this);
+        struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type  *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+        packet->meta_handle = native_handle_create(1, 2); //1 fd, 1 offset and 1 size
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+        native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+        nh->data[0] = mHalCamCtrl->mRecordingMemory.fd[cnt];
+        nh->data[1] = 0;
+        nh->data[2] = mHalCamCtrl->mRecordingMemory.size;
+      }
+      recordframes[cnt].fd = mHalCamCtrl->mRecordingMemory.fd[cnt];
+      recordframes[cnt].buffer = (uint32_t)mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->data;
+      recordframes[cnt].y_off = 0;
+      recordframes[cnt].cbcr_off = mHalCamCtrl->mRecordingMemory.cbcr_offset;
+      recordframes[cnt].path = OUTPUT_TYPE_V;
+      recordframes[cnt].fd_data = mHalCamCtrl->mRecordingMemory.ion_info_fd[cnt];
+      recordframes[cnt].ion_alloc = mHalCamCtrl->mRecordingMemory.alloc[cnt];
+      recordframes[cnt].ion_dev_fd = mHalCamCtrl->mRecordingMemory.main_ion_fd[cnt];
+
+      if (NO_ERROR !=
+        mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_VIDEO, cnt,
+        recordframes[cnt].fd, mHalCamCtrl->mRecordingMemory.size, mCameraId,
+        CAM_SOCK_MSG_TYPE_FD_MAPPING))
+        ALOGE("%s: sending mapping data Msg Failed", __func__);
+
+      ALOGE ("initRecord :  record heap , video buffers  buffer=%lu fd=%d y_off=%d cbcr_off=%d\n",
+		    (unsigned long)recordframes[cnt].buffer, recordframes[cnt].fd, recordframes[cnt].y_off,
+		    recordframes[cnt].cbcr_off);
+	    //mNumRecordFrames++;
+
+			mRecordBuf.video.video.buf.mp[cnt].frame = recordframes[cnt];
+      mRecordBuf.video.video.buf.mp[cnt].frame_offset = 0;
+      mRecordBuf.video.video.buf.mp[cnt].num_planes = num_planes;
+      /* Plane 0 needs to be set separately. Set other planes
+       * in a loop. */
+      mRecordBuf.video.video.buf.mp[cnt].planes[0].reserved[0] =
+        mRecordBuf.video.video.buf.mp[cnt].frame_offset;
+      mRecordBuf.video.video.buf.mp[cnt].planes[0].length = planes[0];
+      mRecordBuf.video.video.buf.mp[cnt].planes[0].m.userptr =
+        recordframes[cnt].fd;
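+      /* Remaining planes are laid out back-to-back: each plane's offset
+       * (carried in reserved[0]) is the previous plane's offset plus its
+       * length. */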
+      for (int j = 1; j < num_planes; j++) {
+        mRecordBuf.video.video.buf.mp[cnt].planes[j].length = planes[j];
+        mRecordBuf.video.video.buf.mp[cnt].planes[j].m.userptr =
+          recordframes[cnt].fd;
+        mRecordBuf.video.video.buf.mp[cnt].planes[j].reserved[0] =
+          mRecordBuf.video.video.buf.mp[cnt].planes[j-1].reserved[0] +
+          mRecordBuf.video.video.buf.mp[cnt].planes[j-1].length;
+      }
+    }
+
+    //memset(&mRecordBuf, 0, sizeof(mRecordBuf));
+    mRecordBuf.ch_type = MM_CAMERA_CH_VIDEO;
+    mRecordBuf.video.video.num = mHalCamCtrl->mRecordingMemory.buffer_count;//kRecordBufferCount;
+    //mRecordBuf.video.video.frame_offset = &record_offset[0];
+    //mRecordBuf.video.video.frame = &recordframes[0];
+    ALOGE("%s : END",__func__);
+    return NO_ERROR;
+}
+
+void QCameraStream_record::releaseRecordingFrame(const void *opaque)
+{
+    ALOGV("%s : BEGIN, opaque = 0x%p",__func__, opaque);
+    if(!mActive)
+    {
+        ALOGE("%s : Recording already stopped!!! Leak???",__func__);
+        return;
+    }
+    for(int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+      if (mHalCamCtrl->mStoreMetaDataInFrame) {
+        if(mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] &&
+                mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data == opaque) {
+            /* found the match */
+            if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mRecordedFrames[cnt]))
+                ALOGE("%s : Buf Done Failed",__func__);
+            ALOGV("%s : END",__func__);
+            return;
+        }
+      } else {
+        if(mHalCamCtrl->mRecordingMemory.camera_memory[cnt] &&
+                mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->data == opaque) {
+            /* found the match */
+            if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mRecordedFrames[cnt]))
+                ALOGE("%s : Buf Done Failed",__func__);
+            ALOGV("%s : END",__func__);
+            return;
+        }
+      }
+    }
+    ALOGE("%s: cannot find the matched frame with opaque = 0x%p", __func__, opaque);
+}
+
+void QCameraStream_record::debugShowVideoFPS() const
+{
+  static int mFrameCount;
+  static int mLastFrameCount = 0;
+  static nsecs_t mLastFpsTime = 0;
+  static float mFps = 0;
+  mFrameCount++;
+  nsecs_t now = systemTime();
+  nsecs_t diff = now - mLastFpsTime;
+  if (diff > ms2ns(250)) {
+    mFps =  ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+    ALOGI("Video Frames Per Second: %.4f", mFps);
+    mLastFpsTime = now;
+    mLastFrameCount = mFrameCount;
+  }
+}
+
+#if 0
+sp<IMemoryHeap> QCameraStream_record::getHeap() const
+{
+  return mRecordHeap != NULL ? mRecordHeap->mHeap : NULL;
+}
+
+#endif
+status_t  QCameraStream_record::takeLiveSnapshot(){
+	return true;
+}
+
+}//namespace android
+
diff --git a/camera/QCameraHWI_Record_7x27A.cpp b/camera/QCameraHWI_Record_7x27A.cpp
new file mode 100644
index 0000000..c634cc1
--- /dev/null
+++ b/camera/QCameraHWI_Record_7x27A.cpp
@@ -0,0 +1,254 @@
+/*
+** Copyright (c) 2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QCameraHWI_Record"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraStream.h"
+
+
+#define LIKELY(exp)   __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraStream_record class implementation goes here*/
+/* The following code implements the video streaming capture & encoding logic of this class */
+// ---------------------------------------------------------------------------
+// QCameraStream_record createInstance()
+// ---------------------------------------------------------------------------
+namespace android {
+
+
+QCameraStream* QCameraStream_record::createInstance(int cameraId,
+                                      camera_mode_t mode)
+{
+  ALOGV("%s: BEGIN", __func__);
+  QCameraStream* pme = new QCameraStream_record(cameraId, mode);
+  ALOGV("%s: END", __func__);
+  return pme;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record deleteInstance()
+// ---------------------------------------------------------------------------
+void QCameraStream_record::deleteInstance(QCameraStream *ptr)
+{
+  ALOGV("%s: BEGIN", __func__);
+  if (ptr){
+    ptr->release();
+    delete ptr;
+    ptr = NULL;
+  }
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Constructor
+// ---------------------------------------------------------------------------
+QCameraStream_record::QCameraStream_record(int cameraId,
+                                           camera_mode_t mode)
+  :QCameraStream(cameraId,mode),
+   mDebugFps(false)
+{
+  mHalCamCtrl = NULL;
+  char value[PROPERTY_VALUE_MAX];
+  ALOGV("%s: BEGIN", __func__);
+
+  property_get("persist.debug.sf.showfps", value, "0");
+  mDebugFps = atoi(value);
+
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Destructor
+// ---------------------------------------------------------------------------
+QCameraStream_record::~QCameraStream_record() {
+  ALOGV("%s: BEGIN", __func__);
+  if(mActive) {
+    stop();
+  }
+  if(mInit) {
+    release();
+  }
+  mInit = false;
+  mActive = false;
+  ALOGV("%s: END", __func__);
+
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Callback from mm_camera
+// ---------------------------------------------------------------------------
+static void record_notify_cb(mm_camera_ch_data_buf_t *bufs_new,
+                              void *user_data)
+{
+  QCameraStream_record *pme = (QCameraStream_record *)user_data;
+  mm_camera_ch_data_buf_t *bufs_used = 0;
+  ALOGV("%s: BEGIN", __func__);
+
+  /*
+  * Call Function Process Video Data
+  */
+  pme->processRecordFrame(bufs_new);
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+status_t QCameraStream_record::init()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+  mInit = true;
+  ALOGV("%s: END", __func__);
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_record::start()
+{
+  status_t ret = NO_ERROR;
+  ALOGE("%s: BEGIN", __func__);
+
+  ret = initEncodeBuffers();
+  if (NO_ERROR!=ret) {
+    ALOGE("%s ERROR: Buffer Allocation Failed\n",__func__);
+    return ret;
+  }
+  Mutex::Autolock l(&mHalCamCtrl->mRecordLock);
+  mHalCamCtrl->mReleasedRecordingFrame = false;
+
+  mHalCamCtrl->mStartRecording  = true;
+
+  ALOGV("%s: END", __func__);
+  return ret;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+void QCameraStream_record::stop()
+{
+  status_t ret = NO_ERROR;
+  ALOGE("%s: BEGIN", __func__);
+  mHalCamCtrl->mStartRecording  = false;
+  Mutex::Autolock l(&mHalCamCtrl->mRecordLock);
+  {
+    mHalCamCtrl->mRecordFrameLock.lock();
+    mHalCamCtrl->mReleasedRecordingFrame = true;
+    mHalCamCtrl->mRecordWait.signal();
+    mHalCamCtrl->mRecordFrameLock.unlock();
+  }
+
+  for(int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+    if (mHalCamCtrl->mStoreMetaDataInFrame) {
+      struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type  *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+      native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+		    mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+    }
+  }
+  ALOGV("%s: END", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+void QCameraStream_record::release()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+  ALOGV("%s: END", __func__);
+}
+
+status_t QCameraStream_record::processRecordFrame(void *data)
+{
+  ALOGE("%s : BEGIN",__func__);
+  ALOGE("%s : END",__func__);
+  return NO_ERROR;
+}
+
+//Record Related Functions
+status_t QCameraStream_record::initEncodeBuffers()
+{
+  ALOGE("%s : BEGIN",__func__);
+  status_t ret = NO_ERROR;
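+  /* On this target the recording path reuses the preview buffers, so only the
+   * encoder metadata handles (fd, offset, size, data pointer) are created
+   * here. */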
+    for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+      if (mHalCamCtrl->mStoreMetaDataInFrame) {
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] =
+          mHalCamCtrl->mGetMemory(-1,
+          sizeof(struct encoder_media_buffer_type), 1, (void *)this);
+        struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type  *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+        packet->meta_handle = native_handle_create(1, 3); //1 fd, 1 offset,1 size and 1 data
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+        native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+        nh->data[0] = mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd;
+        nh->data[1] = 0;
+        nh->data[2] = mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size;
+        nh->data[3] = (uint32_t)mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->data;
+      }
+    }
+    ALOGE("%s : END",__func__);
+    return NO_ERROR;
+}
+
+void QCameraStream_record::releaseEncodeBuffer() {
+  for(int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+    if (mHalCamCtrl->mStoreMetaDataInFrame) {
+      struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type  *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+      native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+
+    }
+  }
+}
+
+void QCameraStream_record::releaseRecordingFrame(const void *opaque)
+{
+    Mutex::Autolock rLock(&mHalCamCtrl->mRecordFrameLock);
+    mHalCamCtrl->mReleasedRecordingFrame = true;
+    mHalCamCtrl->mRecordWait.signal();
+    ALOGE("%s, Signaling from-",__func__);
+}
+
+void QCameraStream_record::debugShowVideoFPS() const
+{
+
+}
+
+status_t  QCameraStream_record::takeLiveSnapshot(){
+	return true;
+}
+
+}//namespace android
diff --git a/camera/QCameraHWI_Still.cpp b/camera/QCameraHWI_Still.cpp
new file mode 100644
index 0000000..e4d0b7d
--- /dev/null
+++ b/camera/QCameraHWI_Still.cpp
@@ -0,0 +1,2560 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NDDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QCameraHWI_Still"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <media/mediarecorder.h>
+#include <math.h>
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+
+#define THUMBNAIL_DEFAULT_WIDTH 512
+#define THUMBNAIL_DEFAULT_HEIGHT 384
+
+/* The following code implements the still image capture & encoding logic of this class */
+namespace android {
+
+typedef enum {
+    SNAPSHOT_STATE_ERROR,
+    SNAPSHOT_STATE_UNINIT,
+    SNAPSHOT_STATE_CH_ACQUIRED,
+    SNAPSHOT_STATE_BUF_NOTIF_REGD,
+    SNAPSHOT_STATE_BUF_INITIALIZED,
+    SNAPSHOT_STATE_INITIALIZED,
+    SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD,
+    SNAPSHOT_STATE_YUV_RECVD,
+    SNAPSHOT_STATE_JPEG_ENCODING,
+    SNAPSHOT_STATE_JPEG_ENCODE_DONE,
+    SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE,
+
+    /*Add any new state above*/
+    SNAPSHOT_STATE_MAX
+} snapshot_state_type_t;
+
+
+//-----------------------------------------------------------------------
+// Constants
+//----------------------------------------------------------------------
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+static const int POSTVIEW_SMALL_HEIGHT = 144;
+
+// ---------------------------------------------------------------------------
+/* static functions*/
+// ---------------------------------------------------------------------------
+
+
+
+/* TBD: Temp: to be removed*/
+static pthread_mutex_t g_s_mutex;
+static int g_status = 0;
+static pthread_cond_t g_s_cond_v;
+
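+/* Simple handshake used while waiting for a snapshot frame:
+ * mm_app_snapshot_done() signals the condition, mm_app_snapshot_wait()
+ * blocks until it is signalled. */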
+static void mm_app_snapshot_done()
+{
+  pthread_mutex_lock(&g_s_mutex);
+  g_status = TRUE;
+  pthread_cond_signal(&g_s_cond_v);
+  pthread_mutex_unlock(&g_s_mutex);
+}
+
+static void mm_app_snapshot_wait()
+{
+    pthread_mutex_lock(&g_s_mutex);
+    if(FALSE == g_status) pthread_cond_wait(&g_s_cond_v, &g_s_mutex);
+    pthread_mutex_unlock(&g_s_mutex);
+    g_status = FALSE;
+}
+
+static int mm_app_dump_snapshot_frame(char *filename,
+                                      const void *buffer,
+                                      uint32_t len)
+{
+    char bufp[128];
+    int file_fdp;
+    int rc = 0;
+
+    file_fdp = open(filename, O_RDWR | O_CREAT, 0777);
+
+    if (file_fdp < 0) {
+        rc = -1;
+        goto end;
+    }
+    write(file_fdp,
+        (const void *)buffer, len);
+    close(file_fdp);
+end:
+    return rc;
+}
+
+/* Callback received when a frame is available after snapshot*/
+static void snapshot_notify_cb(mm_camera_ch_data_buf_t *recvd_frame,
+                               void *user_data)
+{
+    QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+
+    ALOGD("%s: E", __func__);
+
+    if (pme != NULL) {
+        pme->receiveRawPicture(recvd_frame);
+    }
+    else{
+        ALOGW("%s: Snapshot obj NULL in callback", __func__);
+    }
+
+    ALOGD("%s: X", __func__);
+
+}
+
+/* Once we give a frame for encoding, we get the encoded JPEG image
+   fragment by fragment. We need to store the fragments in a buffer
+   to form the complete JPEG image. */
+static void snapshot_jpeg_fragment_cb(uint8_t *ptr,
+                                      uint32_t size,
+                                      void *user_data)
+{
+    QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+
+    ALOGE("%s: E",__func__);
+    if (pme != NULL) {
+        pme->receiveJpegFragment(ptr,size);
+    }
+    else
+        ALOGW("%s: Receive jpeg fragment cb obj Null", __func__);
+
+    ALOGD("%s: X",__func__);
+}
+
+/* This callback is received once the complete JPEG encoding is done */
+static void snapshot_jpeg_cb(jpeg_event_t event, void *user_data)
+{
+    QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+    ALOGE("%s: E ",__func__);
+
+    switch(event) {
+    case JPEG_EVENT_DONE:
+        break;
+    case JPEG_EVENT_THUMBNAIL_DROPPED:
+        ALOGE("%s: Error in thumbnail encoding (event: %d) : X !!!",
+            __func__, event);
+        return;
+    case JPEG_EVENT_ERROR:
+    case JPEG_EVENT_ABORTED:
+        if (NULL != pme) {
+            pme->jpegErrorHandler(event);
+            if (!(pme->isZSLMode())) {
+                pme->stop();
+            }
+        }
+        ALOGE("Error event handled from JPEG \n");
+        return;
+    default:
+        ALOGE("Unsupported JPEG event %d \n", event);
+        break;
+    }
+
+    if (event != JPEG_EVENT_DONE) {
+        if (event == JPEG_EVENT_THUMBNAIL_DROPPED) {
+            ALOGE("%s: Error in thumbnail encoding (event: %d)!!!",
+                 __func__, event);
+            ALOGD("%s: X",__func__);
+            return;
+        }
+        else {
+            ALOGE("%s: Error (event: %d) while jpeg encoding!!!",
+                 __func__, event);
+        }
+    }
+
+    if (pme != NULL) {
+       pme->receiveCompleteJpegPicture(event);
+       ALOGE(" Completed issuing JPEG callback");
+       /* deinit only if we are done taking requested number of snapshots */
+       if (pme->getSnapshotState() == SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE) {
+           ALOGE(" About to issue deinit callback");
+       /* If it's ZSL Mode, we don't deinit now. We'll stop the polling thread and
+          deinit the channel/buffers only when we change the mode from zsl to
+          non-zsl. */
+           if (!(pme->isZSLMode())) {
+               pme->stop();
+           }
+        }
+    }
+    else
+        ALOGW("%s: Receive jpeg cb Obj Null", __func__);
+
+
+    ALOGD("%s: X",__func__);
+
+}
+
+// ---------------------------------------------------------------------------
+/* private functions*/
+// ---------------------------------------------------------------------------
+
+void QCameraStream_Snapshot::
+receiveJpegFragment(uint8_t *ptr, uint32_t size)
+{
+    ALOGE("%s: E", __func__);
+#if 0
+    if (mJpegHeap != NULL) {
+        ALOGE("%s: Copy jpeg...", __func__);
+        memcpy((uint8_t *)mJpegHeap->mHeap->base()+ mJpegOffset, ptr, size);
+        mJpegOffset += size;
+    }
+    else {
+        ALOGE("%s: mJpegHeap is NULL!", __func__);
+    }
+    #else
+    if(mHalCamCtrl->mJpegMemory.camera_memory[0] != NULL && ptr != NULL && size > 0) {
+        memcpy((uint8_t *)((uint32_t)mHalCamCtrl->mJpegMemory.camera_memory[0]->data + mJpegOffset), ptr, size);
+        mJpegOffset += size;
+
+    } else {
+        ALOGE("%s: mJpegHeap is NULL!", __func__);
+    }
+
+
+    #endif
+
+    ALOGD("%s: X", __func__);
+}
+
+void QCameraStream_Snapshot::jpegErrorHandler(jpeg_event_t event)
+{
+    ALOGV("%s: E", __func__);
+    mStopCallbackLock.lock( );
+    if(mCurrentFrameEncoded) {
+        free(mCurrentFrameEncoded);
+        mCurrentFrameEncoded = NULL;
+    }
+    setSnapshotState(SNAPSHOT_STATE_ERROR);
+    if (!mSnapshotQueue.isEmpty()) {
+        ALOGI("%s: JPEG Queue not empty. flush the queue in "
+             "error case.", __func__);
+        mSnapshotQueue.flush();
+    }
+    mStopCallbackLock.unlock( );
+    if (NULL != mHalCamCtrl->mDataCb)
+        mHalCamCtrl->mDataCb (CAMERA_MSG_COMPRESSED_IMAGE,
+                              NULL, 0, NULL,mHalCamCtrl->mCallbackCookie);
+    ALOGV("%s: X", __func__);
+}
+
+void QCameraStream_Snapshot::
+receiveCompleteJpegPicture(jpeg_event_t event)
+{
+    int msg_type = CAMERA_MSG_COMPRESSED_IMAGE;
+    ALOGE("%s: E", __func__);
+    camera_memory_t *encodedMem = NULL;
+    camera_data_callback jpg_data_cb = NULL;
+    bool fail_cb_flag = false;
+
+    //Mutex::Autolock l(&snapshotLock);
+    mStopCallbackLock.lock( );
+    if(!mActive && !isLiveSnapshot()) {
+        ALOGE("%s : Cancel Picture",__func__);
+        fail_cb_flag = true;
+        goto end;
+    }
+
+    if(mCurrentFrameEncoded!=NULL /*&& !isLiveSnapshot()*/){
+        ALOGV("<DEBUG>: Calling buf done for snapshot buffer");
+        cam_evt_buf_done(mCameraId, mCurrentFrameEncoded);
+    }
+    mHalCamCtrl->dumpFrameToFile(mHalCamCtrl->mJpegMemory.camera_memory[0]->data, mJpegOffset, (char *)"debug", (char *)"jpg", 0);
+
+end:
+    msg_type = CAMERA_MSG_COMPRESSED_IMAGE;
+    if (mHalCamCtrl->mDataCb && (mHalCamCtrl->mMsgEnabled & msg_type)) {
+        jpg_data_cb = mHalCamCtrl->mDataCb;
+    }else{
+        ALOGE("%s: JPEG callback was cancelled--not delivering image.", __func__);
+    }
+    setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODE_DONE);
+    mNumOfRecievedJPEG++;
+    mHalCamCtrl->deinitExifData();
+
+    /* free the resource we allocated to maintain the structure */
+    //mm_camera_do_munmap(main_fd, (void *)main_buffer_addr, mSnapshotStreamBuf.frame_len);
+    if(mCurrentFrameEncoded) {
+        free(mCurrentFrameEncoded);
+        mCurrentFrameEncoded = NULL;
+    }
+
+    /* Before leaving check the jpeg queue. If it's not empty give the available
+       frame for encoding*/
+    if (!mSnapshotQueue.isEmpty()) {
+        ALOGI("%s: JPEG Queue not empty. Dequeue and encode.", __func__);
+        mm_camera_ch_data_buf_t* buf =
+            (mm_camera_ch_data_buf_t *)mSnapshotQueue.dequeue();
+        //encodeDisplayAndSave(buf, 1);
+        if ( NO_ERROR != encodeDisplayAndSave(buf, 1)){
+          fail_cb_flag = true;
+        }
+    }  else if (mNumOfSnapshot == mNumOfRecievedJPEG )  { /* finished */
+      ALOGD("%s: Before omxJpegFinish", __func__);
+      omxJpegFinish();
+      ALOGD("%s: After omxJpegFinish", __func__);
+        /* getRemainingSnapshots call will give us number of snapshots still
+           remaining after flushing current zsl buffer once*/
+      ALOGD("%s: Complete JPEG Encoding Done!", __func__);
+      setSnapshotState(SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE);
+      mBurstModeFlag = false;
+      mSnapshotQueue.flush();
+      mNumOfRecievedJPEG = 0;
+      /* in case of zsl, we need to reset some of the zsl attributes */
+      if (isZSLMode()){
+          ALOGD("%s: Resetting the ZSL attributes", __func__);
+          setZSLChannelAttribute();
+      }
+      if (!isZSLMode() && !isLiveSnapshot()){
+         //Stop polling before calling the data callback if not in ZSL mode
+         stopPolling();
+      }
+
+    } else {
+        ALOGD("%s: mNumOfRecievedJPEG(%d), mNumOfSnapshot(%d)", __func__, mNumOfRecievedJPEG, mNumOfSnapshot);
+    }
+    if(fail_cb_flag && mHalCamCtrl->mDataCb &&
+        (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+        /* get picture failed. Give jpeg callback with NULL data
+         * to the application to restore to preview mode
+         */
+        jpg_data_cb  = mHalCamCtrl->mDataCb;
+    }
+    if(!fail_cb_flag) {
+        camera_memory_t *encodedMem = mHalCamCtrl->mGetMemory(
+            mHalCamCtrl->mJpegMemory.fd[0], mJpegOffset, 1, mHalCamCtrl);
+        if (!encodedMem || !encodedMem->data) {
+            /* mGetMemory failed: report the failure with a NULL jpeg callback
+             * instead of dereferencing a NULL buffer. */
+            ALOGE("%s: mGetMemory failed.\n", __func__);
+            mStopCallbackLock.unlock( );
+            if ((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+                jpg_data_cb (CAMERA_MSG_COMPRESSED_IMAGE, NULL, 0, NULL,
+                             mHalCamCtrl->mCallbackCookie);
+            }
+        } else {
+            memcpy(encodedMem->data, mHalCamCtrl->mJpegMemory.camera_memory[0]->data, mJpegOffset );
+            mStopCallbackLock.unlock( );
+            if ((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+                ALOGV("%s: Calling upperlayer callback to store JPEG image", __func__);
+                jpg_data_cb (msg_type,encodedMem, 0, NULL,mHalCamCtrl->mCallbackCookie);
+            }
+            encodedMem->release( encodedMem );
+        }
+        jpg_data_cb = NULL;
+    }else{
+        ALOGV("Image Encoding Failed... Notify Upper layer");
+        mStopCallbackLock.unlock( );
+        if((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+            jpg_data_cb (CAMERA_MSG_COMPRESSED_IMAGE,NULL, 0, NULL,
+                         mHalCamCtrl->mCallbackCookie);
+        }
+    }
+    //reset jpeg_offset
+    mJpegOffset = 0;
+    ALOGD("%s: X", __func__);
+}
+
+status_t QCameraStream_Snapshot::
+configSnapshotDimension(cam_ctrl_dimension_t* dim)
+{
+    bool matching = true;
+    cam_format_t img_format;
+    status_t ret = NO_ERROR;
+    ALOGD("%s: E", __func__);
+
+    ALOGI("%s:Passed picture size: %d X %d", __func__,
+         dim->picture_width, dim->picture_height);
+    ALOGI("%s:Passed postview size: %d X %d", __func__,
+         dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+    /* First check if the picture resolution is the same, if not, change it*/
+    mHalCamCtrl->getPictureSize(&mPictureWidth, &mPictureHeight);
+    ALOGD("%s: Picture size received: %d x %d", __func__,
+         mPictureWidth, mPictureHeight);
+    /*Current VFE software design requires picture size >= display size for ZSL*/
+    if (isZSLMode()){
+      mPostviewWidth = dim->display_width;
+      mPostviewHeight = dim->display_height;
+    } else {
+      mPostviewWidth = mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+      mPostviewHeight =  mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+    }
+    /* If the application requested a thumbnail size of (0,0),
+       then configure the second output to a default size.
+       The JPEG encoder will drop the thumbnail as reflected in encodeParams.
+    */
+    mDropThumbnail = false;
+    if (mPostviewWidth == 0 && mPostviewHeight == 0) {
+         mPostviewWidth = THUMBNAIL_DEFAULT_WIDTH;
+         mPostviewHeight = THUMBNAIL_DEFAULT_HEIGHT;
+         mDropThumbnail = true;
+    }
+
+    ALOGD("%s: Postview size received: %d x %d", __func__,
+         mPostviewWidth, mPostviewHeight);
+
+    matching = (mPictureWidth == dim->picture_width) &&
+        (mPictureHeight == dim->picture_height);
+    matching &= (dim->ui_thumbnail_width == mPostviewWidth) &&
+        (dim->ui_thumbnail_height == mPostviewHeight);
+
+    /* picture size currently set do not match with the one wanted
+       by user.*/
+    if (!matching) {
+        if (mPictureWidth < mPostviewWidth || mPictureHeight < mPostviewHeight) {
+            //Handle VFE limitation: picture size must be >= postview size, so capture at postview resolution and downscale during JPEG encoding.
+            mActualPictureWidth = mPictureWidth;
+            mActualPictureHeight = mPictureHeight;
+            mPictureWidth = mPostviewWidth;
+            mPictureHeight = mPostviewHeight;
+            mJpegDownscaling = TRUE;
+        }else{
+            mJpegDownscaling = FALSE;
+        }
+        dim->picture_width  = mPictureWidth;
+        dim->picture_height = mPictureHeight;
+        dim->ui_thumbnail_height = mThumbnailHeight = mPostviewHeight;
+        dim->ui_thumbnail_width = mThumbnailWidth = mPostviewWidth;
+    }
+    #if 0
+    img_format = mHalCamCtrl->getPreviewFormat();
+    if (img_format) {
+        matching &= (img_format == dim->main_img_format);
+        if (!matching) {
+            dim->main_img_format = img_format;
+            dim->thumb_format = img_format;
+        }
+    }
+    #endif
+    if (!matching) {
+         ALOGD("%s: Image Sizes before set parm call: main: %dx%d thumbnail: %dx%d",
+              __func__,
+              dim->picture_width, dim->picture_height,
+              dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+        ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,dim);
+        if (NO_ERROR != ret) {
+            ALOGE("%s: error - can't config snapshot parms!", __func__);
+            ret = FAILED_TRANSACTION;
+            goto end;
+        }
+    }
+    /* set_parm will return corrected dimension based on aspect ratio and
+       ceiling size */
+    mPictureWidth = dim->picture_width;
+    mPictureHeight = dim->picture_height;
+    mPostviewHeight = mThumbnailHeight = dim->ui_thumbnail_height;
+    mPostviewWidth = mThumbnailWidth = dim->ui_thumbnail_width;
+    mPictureFormat= dim->main_img_format;
+    mThumbnailFormat = dim->thumb_format;
+
+    ALOGD("%s: Image Format: %d", __func__, dim->main_img_format);
+    ALOGI("%s: Image Sizes: main: %dx%d thumbnail: %dx%d", __func__,
+         dim->picture_width, dim->picture_height,
+         dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::
+initRawSnapshotChannel(cam_ctrl_dimension_t *dim,
+                       int num_of_snapshots)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_ch_image_fmt_parm_t fmt;
+    mm_camera_channel_attr_t ch_attr;
+
+    mm_camera_raw_streaming_type_t raw_stream_type =
+        MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+
+    ALOGD("%s: E", __func__);
+
+    /* Initialize stream - set format, acquire channel */
+    /*TBD: Currently we only support single raw capture*/
+    ALOGE("num_of_snapshots = %d",num_of_snapshots);
+    if (num_of_snapshots == 1) {
+        raw_stream_type = MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+    }
+
+    /* Set channel attribute */
+    ALOGD("%s: Set Raw Snapshot Channel attribute", __func__);
+    memset(&ch_attr, 0, sizeof(ch_attr));
+    ch_attr.type = MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE;
+    ch_attr.raw_streaming_mode = raw_stream_type;
+
+    if( NO_ERROR !=
+        cam_ops_ch_set_attr(mCameraId, MM_CAMERA_CH_RAW, &ch_attr)) {
+        ALOGD("%s: Failure setting Raw channel attribute.", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+    fmt.ch_type = MM_CAMERA_CH_RAW;
+    fmt.def.fmt = CAMERA_BAYER_SBGGR10;
+    fmt.def.dim.width = dim->raw_picture_width;
+    fmt.def.dim.height = dim->raw_picture_height;
+
+
+    ALOGV("%s: Raw snapshot channel fmt: %d", __func__,
+         fmt.def.fmt);
+    ALOGV("%s: Raw snapshot resolution: %dX%d", __func__,
+         dim->raw_picture_width, dim->raw_picture_height);
+
+    ALOGD("%s: Set Raw Snapshot channel image format", __func__);
+    ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: Set Raw Snapshot Channel format err=%d\n", __func__, ret);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+end:
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    ALOGE("%s: X", __func__);
+    return ret;
+
+}
+
+status_t QCameraStream_Snapshot::
+setZSLChannelAttribute(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_channel_attr_t ch_attr;
+    ALOGD("%s: E", __func__);
+
+    memset(&ch_attr, 0, sizeof(mm_camera_channel_attr_t));
+    ch_attr.type = MM_CAMERA_CH_ATTR_BUFFERING_FRAME;
+    ch_attr.buffering_frame.look_back = mHalCamCtrl->getZSLBackLookCount();
+    ch_attr.buffering_frame.water_mark = mHalCamCtrl->getZSLQueueDepth();
+    ch_attr.buffering_frame.interval = mHalCamCtrl->getZSLBurstInterval( );
+    ALOGE("%s: ZSL queue_depth = %d, back_look_count = %d", __func__,
+         ch_attr.buffering_frame.water_mark,
+         ch_attr.buffering_frame.look_back);
+    if( NO_ERROR !=
+        cam_ops_ch_set_attr(mCameraId, MM_CAMERA_CH_SNAPSHOT, &ch_attr)) {
+        ALOGD("%s: Failure setting ZSL channel attribute.", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::
+initSnapshotFormat(cam_ctrl_dimension_t *dim)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_ch_image_fmt_parm_t fmt;
+
+    ALOGD("%s: E", __func__);
+
+    /* For ZSL mode we'll need to set channel attribute */
+    if (isZSLMode()) {
+        ret = setZSLChannelAttribute();
+        if (ret != NO_ERROR) {
+            goto end;
+        }
+    }
+
+    memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+    fmt.ch_type = MM_CAMERA_CH_SNAPSHOT;
+    fmt.snapshot.main.fmt = dim->main_img_format;
+    fmt.snapshot.main.dim.width = dim->picture_width;
+    fmt.snapshot.main.dim.height = dim->picture_height;
+
+    fmt.snapshot.thumbnail.fmt = dim->thumb_format;
+    fmt.snapshot.thumbnail.dim.width = dim->ui_thumbnail_width;
+    fmt.snapshot.thumbnail.dim.height = dim->ui_thumbnail_height;
+
+    ALOGV("%s: Snapshot channel fmt = main: %d thumbnail: %d", __func__,
+         dim->main_img_format, dim->thumb_format);
+    ALOGV("%s: Snapshot channel resolution = main: %dX%d  thumbnail: %dX%d",
+         __func__, dim->picture_width, dim->picture_height,
+         dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+    ALOGD("%s: Set Snapshot channel image format", __func__);
+    ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: Set Snapshot Channel format err=%d\n", __func__, ret);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+end:
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    ALOGE("%s: X", __func__);
+    return ret;
+
+}
+
+void QCameraStream_Snapshot::
+deinitSnapshotChannel(mm_camera_channel_type_t ch_type)
+{
+    ALOGD("%s: E", __func__);
+
+    /* unreg buf notify*/
+    if (getSnapshotState() >= SNAPSHOT_STATE_BUF_NOTIF_REGD){
+        if (NO_ERROR != cam_evt_register_buf_notify(mCameraId,
+                        ch_type, NULL,(mm_camera_register_buf_cb_type_t)NULL,NULL, this)) {
+            ALOGE("%s: Failure to unregister buf notification", __func__);
+        }
+    }
+
+    if (getSnapshotState() >= SNAPSHOT_STATE_CH_ACQUIRED) {
+        ALOGD("%s: Release snapshot channel", __func__);
+        cam_ops_ch_release(mCameraId, ch_type);
+    }
+
+    ALOGD("%s: X",__func__);
+}
+
+status_t QCameraStream_Snapshot::
+initRawSnapshotBuffers(cam_ctrl_dimension_t *dim, int num_of_buf)
+{
+    status_t ret = NO_ERROR;
+    struct msm_frame *frame;
+    uint32_t frame_len;
+    uint8_t num_planes;
+    uint32_t planes[VIDEO_MAX_PLANES];
+    mm_camera_reg_buf_t reg_buf;
+
+    ALOGD("%s: E", __func__);
+    memset(&reg_buf,  0,  sizeof(mm_camera_reg_buf_t));
+    memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+
+    if ((num_of_buf == 0) || (num_of_buf > MM_CAMERA_MAX_NUM_FRAMES)) {
+        ALOGE("%s: Invalid number of buffers (=%d) requested!", __func__, num_of_buf);
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    reg_buf.def.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+    if (!reg_buf.def.buf.mp) {
+      ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+      ret = NO_MEMORY;
+      goto end;
+    }
+    memset(reg_buf.def.buf.mp, 0, num_of_buf * sizeof(mm_camera_mp_buf_t));
+
+    /* Get a frame len for buffer to be allocated*/
+    frame_len = mm_camera_get_msm_frame_len(CAMERA_BAYER_SBGGR10,
+                                            myMode,
+                                            dim->raw_picture_width,
+                                            dim->raw_picture_height,
+                                            OUTPUT_TYPE_S,
+                                            &num_planes, planes);
+
+    if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mRawMemory, num_of_buf,
+                                        frame_len, 0, planes[0], MSM_PMEM_RAW_MAINIMG,
+                                        &mSnapshotStreamBuf, &reg_buf.def,
+                                        num_planes, planes) < 0) {
+        ret = NO_MEMORY;
+        goto end;
+    }
+
+    /* register the streaming buffers for the channel*/
+    reg_buf.ch_type = MM_CAMERA_CH_RAW;
+    reg_buf.def.num = mSnapshotStreamBuf.num;
+
+    ret = cam_config_prepare_buf(mCameraId, &reg_buf);
+    if(ret != NO_ERROR) {
+        ALOGV("%s:reg snapshot buf err=%d\n", __func__, ret);
+        ret = FAILED_TRANSACTION;
+        mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mRawMemory);
+        goto end;
+    }
+
+    /* If we have reached here successfully, we have allocated buffer.
+       Set state machine.*/
+    setSnapshotState(SNAPSHOT_STATE_BUF_INITIALIZED);
+
+end:
+    /* If it's error, we'll need to do some needful */
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    if (reg_buf.def.buf.mp)
+      delete []reg_buf.def.buf.mp;
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::deinitRawSnapshotBuffers(void)
+{
+    int ret = NO_ERROR;
+    ALOGD("%s: E", __func__);
+
+    int err = getSnapshotState();
+
+    /* deinit buffers only if we have already allocated */
+    if (err >= SNAPSHOT_STATE_BUF_INITIALIZED || err == SNAPSHOT_STATE_ERROR){
+        ALOGD("%s: Unpreparing Snapshot Buffer", __func__);
+        ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_RAW);
+        if(ret != NO_ERROR) {
+            ALOGE("%s:Unreg Raw snapshot buf err=%d\n", __func__, ret);
+            ret = FAILED_TRANSACTION;
+            goto end;
+        }
+        mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mRawMemory);
+    }
+
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::
+initSnapshotBuffers(cam_ctrl_dimension_t *dim, int num_of_buf)
+{
+    status_t ret = NO_ERROR;
+    struct msm_frame *frame;
+    uint32_t frame_len, y_off, cbcr_off;
+    uint8_t num_planes;
+    uint32_t planes[VIDEO_MAX_PLANES];
+    mm_camera_reg_buf_t reg_buf;
+    int rotation = 0;
+
+    ALOGD("%s: E", __func__);
+    memset(&reg_buf,  0,  sizeof(mm_camera_reg_buf_t));
+    memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+
+    if ((num_of_buf == 0) || (num_of_buf > MM_CAMERA_MAX_NUM_FRAMES)) {
+        ALOGE("%s: Invalid number of buffers (=%d) requested!",
+             __func__, num_of_buf);
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    ALOGD("%s: Mode: %d Num_of_buf: %d ImageSizes: main: %dx%d thumb: %dx%d",
+         __func__, myMode, num_of_buf,
+         dim->picture_width, dim->picture_height,
+         dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+    reg_buf.snapshot.main.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+    if (!reg_buf.snapshot.main.buf.mp) {
+          ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+          ret = NO_MEMORY;
+          goto end;
+    }
+    memset(reg_buf.snapshot.main.buf.mp, 0,
+      num_of_buf * sizeof(mm_camera_mp_buf_t));
+    if (!isFullSizeLiveshot()) {
+      reg_buf.snapshot.thumbnail.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+      if (!reg_buf.snapshot.thumbnail.buf.mp) {
+        ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+        ret = NO_MEMORY;
+        goto end;
+      }
+      memset(reg_buf.snapshot.thumbnail.buf.mp, 0,
+        num_of_buf * sizeof(mm_camera_mp_buf_t));
+    }
+    /* Number of buffers to be set*/
+    /* Set the JPEG Rotation here since get_buffer_offset needs
+     * the value of rotation.*/
+    mHalCamCtrl->setJpegRotation(isZSLMode());
+    if(!isZSLMode())
+        rotation = mHalCamCtrl->getJpegRotation();
+    else
+        rotation = 0;
+    if(rotation != dim->rotation) {
+        dim->rotation = rotation;
+        ret = cam_config_set_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, dim);
+    }
+
+    if(isLiveSnapshot()) {
+        ret = cam_config_set_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, dim);
+    }
+    num_planes = 2;
+    planes[0] = dim->picture_frame_offset.mp[0].len;
+    planes[1] = dim->picture_frame_offset.mp[1].len;
+    frame_len = dim->picture_frame_offset.frame_len;
+    y_off = dim->picture_frame_offset.mp[0].offset;
+    cbcr_off = dim->picture_frame_offset.mp[1].offset;
+    ALOGE("%s: main image: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+         __func__, dim->rotation, y_off, cbcr_off, frame_len, dim->picture_width, dim->picture_height);
+    if (mHalCamCtrl->initHeapMem (&mHalCamCtrl->mJpegMemory, 1, frame_len, 0, cbcr_off,
+                                  MSM_PMEM_MAX, NULL, NULL, num_planes, planes) < 0) {
+        ALOGE("%s: Error allocating JPEG memory", __func__);
+        ret = NO_MEMORY;
+        goto end;
+    }
+    if(!isLiveSnapshot()) {
+        if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mSnapshotMemory, num_of_buf,
+                frame_len, y_off, cbcr_off, MSM_PMEM_MAINIMG, &mSnapshotStreamBuf,
+                &reg_buf.snapshot.main, num_planes, planes) < 0) {
+            ret = NO_MEMORY;
+            mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+            goto end;
+        }
+        num_planes = 2;
+        planes[0] = dim->thumb_frame_offset.mp[0].len;
+        planes[1] = dim->thumb_frame_offset.mp[1].len;
+        frame_len = planes[0] + planes[1];
+        if (!isFullSizeLiveshot()) {
+            y_off = dim->thumb_frame_offset.mp[0].offset;
+            cbcr_off = dim->thumb_frame_offset.mp[1].offset;
+            ALOGE("%s: thumbnail: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+                __func__, dim->rotation, y_off, cbcr_off, frame_len,
+                dim->thumbnail_width, dim->thumbnail_height);
+
+            if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mThumbnailMemory, num_of_buf,
+                    frame_len, y_off, cbcr_off, MSM_PMEM_THUMBNAIL, &mPostviewStreamBuf,
+                    &reg_buf.snapshot.thumbnail, num_planes, planes) < 0) {
+                ret = NO_MEMORY;
+                mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+                mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+                goto end;
+            }
+        }
+        /* register the streaming buffers for the channel*/
+        reg_buf.ch_type = MM_CAMERA_CH_SNAPSHOT;
+        reg_buf.snapshot.main.num = mSnapshotStreamBuf.num;
+
+        if (!isFullSizeLiveshot())
+            reg_buf.snapshot.thumbnail.num = mPostviewStreamBuf.num;
+        else
+            reg_buf.snapshot.thumbnail.num = 0;
+
+        ret = cam_config_prepare_buf(mCameraId, &reg_buf);
+        if(ret != NO_ERROR) {
+            ALOGV("%s:reg snapshot buf err=%d\n", __func__, ret);
+            ret = FAILED_TRANSACTION;
+            if (!isFullSizeLiveshot()){
+                mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mThumbnailMemory);
+            }
+            mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+            mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+            goto end;
+        }
+    }
+
+    /* If we have reached here successfully, we have allocated buffer.
+       Set state machine.*/
+    setSnapshotState(SNAPSHOT_STATE_BUF_INITIALIZED);
+end:
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    if (reg_buf.snapshot.main.buf.mp)
+      delete []reg_buf.snapshot.main.buf.mp;
+    if (reg_buf.snapshot.thumbnail.buf.mp)
+      delete []reg_buf.snapshot.thumbnail.buf.mp;
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::
+deinitSnapshotBuffers(void)
+{
+    int ret = NO_ERROR;
+    ALOGD("%s: E", __func__);
+
+    int err = getSnapshotState();
+    /* Deinit only if we have already initialized*/
+    if (err >= SNAPSHOT_STATE_BUF_INITIALIZED || err == SNAPSHOT_STATE_ERROR){
+
+        if(!isLiveSnapshot()) {
+            ALOGD("%s: Unpreparing Snapshot Buffer", __func__);
+            ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+            if(ret != NO_ERROR) {
+                ALOGE("%s:unreg snapshot buf err=%d\n", __func__, ret);
+                ret = FAILED_TRANSACTION;
+                goto end;
+            }
+        }
+
+        /* Clear main and thumbnail heap*/
+        if(!isLiveSnapshot()) {
+            mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+            if (!isFullSizeLiveshot())
+              mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mThumbnailMemory);
+        }
+        mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+    }
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+void QCameraStream_Snapshot::deInitBuffer(void)
+{
+    mm_camera_channel_type_t ch_type;
+
+    ALOGI("%s: E", __func__);
+
+    if( getSnapshotState() == SNAPSHOT_STATE_UNINIT) {
+        ALOGD("%s: Already deinit'd!", __func__);
+        return;
+    }
+
+    if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+      /* deinit buffer */
+      deinitRawSnapshotBuffers();
+    }
+    else
+    {
+      if (!isZSLMode() &&
+      ((mHalCamCtrl->getHDRMode() == HDR_MODE) || (mHalCamCtrl->isWDenoiseEnabled()))) {
+        /*unregister main and thumbnail buffers from back-end frameproc*/
+        for (int i = 0; i < mHalCamCtrl->mSnapshotMemory.buffer_count; i++) {
+          if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i, mCameraId,
+                                                        CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+            ALOGE("%s: sending unmapping data Msg Failed", __func__);
+          }
+          if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, i, mCameraId,
+                                                        CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+            ALOGE("%s: sending unmapping data Msg Failed", __func__);
+          }
+        }
+      }
+
+      deinitSnapshotBuffers();
+    }
+
+
+    /* deinit jpeg buffer if allocated */
+    if(mJpegHeap != NULL) mJpegHeap.clear();
+    mJpegHeap = NULL;
+
+    /* memset some global structure */
+    memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+    memset(&mPostviewStreamBuf, 0, sizeof(mPostviewStreamBuf));
+    mSnapshotQueue.flush();
+    mWDNQueue.flush();
+
+    setSnapshotState(SNAPSHOT_STATE_UNINIT);
+
+    ALOGD("%s: X", __func__);
+}
+
+/*Temp: to be removed once event handling is enabled in mm-camera.
+  We need an event - one event for
+  stream-off to disable OPS_SNAPSHOT*/
+void QCameraStream_Snapshot::runSnapshotThread(void *data)
+{
+    ALOGD("%s: E", __func__);
+
+    if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+       /* TBD: Temp: Needs to be removed once event handling is enabled.
+          We cannot call mm-camera interface to stop snapshot from callback
+          function as it causes deadlock. Hence handling it here temporarily
+          in this thread. Later mm-camera intf will give us event in separate
+          thread context */
+        mm_app_snapshot_wait();
+        /* Send command to stop snapshot polling thread*/
+        stop();
+    }
+    ALOGD("%s: X", __func__);
+}
+
+/*Temp: to be removed once event handling is enabled in mm-camera*/
+static void *snapshot_thread(void *obj)
+{
+    QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)obj;
+    ALOGD("%s: E", __func__);
+    if (pme != 0) {
+        pme->runSnapshotThread(obj);
+    }
+    else ALOGW("not starting snapshot thread: the object went away!");
+    ALOGD("%s: X", __func__);
+    return NULL;
+}
+
+/*Temp: to be removed later*/
+static pthread_t mSnapshotThread;
+
+status_t QCameraStream_Snapshot::initJPEGSnapshot(int num_of_snapshots)
+{
+    status_t ret = NO_ERROR;
+    cam_ctrl_dimension_t dim;
+    mm_camera_op_mode_type_t op_mode;
+
+    ALOGV("%s: E", __func__);
+
+    if (isFullSizeLiveshot())
+      goto end;
+
+    ALOGD("%s: Get current dimension", __func__);
+    /* Query mm_camera to get current dimension */
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_DIMENSION, &dim);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't get preview dimension!", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    /* Set camera op mode to MM_CAMERA_OP_MODE_CAPTURE */
+    ALOGD("Setting OP_MODE_CAPTURE");
+    op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+    if( NO_ERROR != cam_config_set_parm(mCameraId,
+            MM_CAMERA_PARM_OP_MODE, &op_mode)) {
+        ALOGE("%s: MM_CAMERA_OP_MODE_CAPTURE failed", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    /* Configure the parameters and see if we need to re-init the stream */
+    ALOGI("%s: Configure Snapshot Dimension", __func__);
+    ret = configSnapshotDimension(&dim);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Setting snapshot dimension failed", __func__);
+        goto end;
+    }
+
+    /* Initialize stream - set format, acquire channel */
+    ret = initSnapshotFormat(&dim);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't init nonZSL stream!", __func__);
+        goto end;
+    }
+
+    ret = initSnapshotBuffers(&dim, num_of_snapshots);
+    if ( NO_ERROR != ret ){
+        ALOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+        goto end;
+    }
+
+    if (!isZSLMode() &&
+    ((mHalCamCtrl->getHDRMode() == HDR_MODE) || (mHalCamCtrl->isWDenoiseEnabled()))) {
+      /*register main and thumbnail buffers at back-end for frameproc*/
+        for (int i = 0; i < num_of_snapshots; i++) {
+          if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i,
+          mSnapshotStreamBuf.frame[i].fd, mHalCamCtrl->mSnapshotMemory.size, mCameraId,
+                                                      CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+            ALOGE("%s: sending mapping data Msg Failed", __func__);
+          }
+          if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, i,
+          mPostviewStreamBuf.frame[i].fd, mHalCamCtrl->mThumbnailMemory.size, mCameraId,
+                                                      CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+            ALOGE("%s: sending mapping data Msg Failed", __func__);
+          }
+        }
+    }
+
+end:
+    /* Based on what state we are in, we'll need to handle error -
+       like deallocating memory if we have already allocated */
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    ALOGV("%s: X", __func__);
+    return ret;
+
+}
+
+status_t QCameraStream_Snapshot::initRawSnapshot(int num_of_snapshots)
+{
+    status_t ret = NO_ERROR;
+    cam_ctrl_dimension_t dim;
+    bool initSnapshot = false;
+    mm_camera_op_mode_type_t op_mode;
+
+    ALOGV("%s: E", __func__);
+
+    /* Set camera op mode to MM_CAMERA_OP_MODE_CAPTURE */
+    ALOGD("%s: Setting OP_MODE_CAPTURE", __func__);
+    op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+    if( NO_ERROR != cam_config_set_parm(mCameraId,
+            MM_CAMERA_PARM_OP_MODE, &op_mode)) {
+        ALOGE("%s: MM_CAMERA_OP_MODE_CAPTURE failed", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    /* For raw snapshot, we do not know the dimension in advance as it
+       varies from sensor to sensor. We call getDimension, which will
+       give us the raw width and height. */
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (MM_CAMERA_OK != ret) {
+      ALOGE("%s: error - can't get dimension!", __func__);
+      ALOGE("%s: X", __func__);
+      goto end;
+    }
+    ALOGD("%s: Raw Snapshot dimension: %dx%d", __func__,
+         dim.raw_picture_width,
+         dim.raw_picture_height);
+
+
+    ret = initRawSnapshotChannel(&dim, num_of_snapshots);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't init nonZSL stream!", __func__);
+        goto end;
+    }
+
+    ret = initRawSnapshotBuffers(&dim, num_of_snapshots);
+    if ( NO_ERROR != ret ){
+        ALOGE("%s: Failure allocating memory for Raw Snapshot buffers",
+             __func__);
+        goto end;
+    }
+    setSnapshotState(SNAPSHOT_STATE_INITIALIZED);
+
+end:
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    ALOGV("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::initFullLiveshot(void)
+{
+    status_t ret = NO_ERROR;
+    cam_ctrl_dimension_t dim;
+    bool matching = true;
+
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (MM_CAMERA_OK != ret) {
+      ALOGE("%s: error - can't get dimension!", __func__);
+      return ret;
+    }
+#if 1
+    /* First check if the picture resolution is the same, if not, change it*/
+    mHalCamCtrl->getPictureSize(&mPictureWidth, &mPictureHeight);
+    ALOGD("%s: Picture size received: %d x %d", __func__,
+         mPictureWidth, mPictureHeight);
+
+    //Use main image as input to encoder to generate thumbnail
+    mThumbnailWidth = dim.picture_width;
+    mThumbnailHeight = dim.picture_height;
+    matching = (mPictureWidth == dim.picture_width) &&
+        (mPictureHeight == dim.picture_height);
+
+    //Actual thumbnail size requested
+    mPostviewWidth = mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    mPostviewHeight =  mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    mDropThumbnail = false;
+    if (mPostviewWidth == 0 && mPostviewHeight == 0) {
+         mPostviewWidth = THUMBNAIL_DEFAULT_WIDTH;
+         mPostviewHeight = THUMBNAIL_DEFAULT_HEIGHT;
+         mDropThumbnail = true;
+    }
+
+    if (!matching) {
+        dim.picture_width  = mPictureWidth;
+        dim.picture_height = mPictureHeight;
+        dim.ui_thumbnail_height = mThumbnailHeight;
+        dim.ui_thumbnail_width = mThumbnailWidth;
+    }
+    ALOGD("%s: Picture size to set: %d x %d", __func__,
+         dim.picture_width, dim.picture_height);
+    ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+#endif
+    /* Initialize stream - set format, acquire channel */
+    ret = initSnapshotFormat(&dim);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't init nonZSL stream!", __func__);
+        return ret;
+    }
+    ret = initSnapshotBuffers(&dim, 1);
+    if ( NO_ERROR != ret ){
+        ALOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+        return ret;
+    }
+
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::initZSLSnapshot(void)
+{
+    status_t ret = NO_ERROR;
+    cam_ctrl_dimension_t dim;
+    mm_camera_op_mode_type_t op_mode;
+
+    ALOGV("%s: E", __func__);
+
+    ALOGD("%s: Get current dimension", __func__);
+    /* Query mm_camera to get current dimension */
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_DIMENSION, &dim);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't get preview dimension!", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    /* Configure the parameters and see if we need to re-init the stream */
+    ALOGD("%s: Configure Snapshot Dimension", __func__);
+    ret = configSnapshotDimension(&dim);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Setting snapshot dimension failed", __func__);
+        goto end;
+    }
+
+    /* Initialize stream - set format, acquire channel */
+    ret = initSnapshotFormat(&dim);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't init nonZSL stream!", __func__);
+        goto end;
+    }
+
+    /* For ZSL we'll have to allocate buffers for internal queue
+       maintained by mm-camera lib plus around 3 buffers used for
+       data handling by lower layer.*/
+
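+    /* Illustrative only: with a ZSL queue depth of, say, 5, this would
+       allocate 5 + 3 = 8 snapshot buffers. */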
+    ret = initSnapshotBuffers(&dim, mHalCamCtrl->getZSLQueueDepth() + 3);
+    if ( NO_ERROR != ret ){
+        ALOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+        goto end;
+    }
+
+end:
+    /* Based on what state we are in, we'll need to handle error -
+       like deallocating memory if we have already allocated */
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    ALOGV("%s: X", __func__);
+    return ret;
+
+}
+
+status_t QCameraStream_Snapshot::
+takePictureJPEG(void)
+{
+    status_t ret = NO_ERROR;
+
+    ALOGD("%s: E", __func__);
+
+    /* Take snapshot */
+    ALOGD("%s: Call MM_CAMERA_OPS_SNAPSHOT", __func__);
+    if (NO_ERROR != cam_ops_action(mCameraId,
+                                              TRUE,
+                                              MM_CAMERA_OPS_SNAPSHOT,
+                                              this)) {
+           ALOGE("%s: Failure taking snapshot", __func__);
+           ret = FAILED_TRANSACTION;
+           goto end;
+    }
+
+    /* TBD: Temp: to be removed once event callback
+       is implemented in mm-camera lib  */
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+    pthread_create(&mSnapshotThread,&attr,
+                   snapshot_thread, (void *)this);
+
+end:
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+
+    ALOGD("%s: X", __func__);
+    return ret;
+
+}
+
+status_t QCameraStream_Snapshot::
+takePictureRaw(void)
+{
+    status_t ret = NO_ERROR;
+
+    ALOGD("%s: E", __func__);
+
+    /* Take snapshot */
+    ALOGD("%s: Call MM_CAMERA_OPS_SNAPSHOT", __func__);
+    if (NO_ERROR != cam_ops_action(mCameraId,
+                                  TRUE,
+                                  MM_CAMERA_OPS_RAW,
+                                  this)) {
+           ALOGE("%s: Failure taking snapshot", __func__);
+           ret = FAILED_TRANSACTION;
+           goto end;
+    }
+
+    /* TBD: Temp: to be removed once event callback
+       is implemented in mm-camera lib  */
+    /* Wait for snapshot frame callback to return*/
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+    pthread_create(&mSnapshotThread,&attr,
+                   snapshot_thread, (void *)this);
+
+end:
+    if (ret != NO_ERROR) {
+        handleError();
+    }
+    ALOGD("%s: X", __func__);
+    return ret;
+
+}
+
+/* This is called from the video stream object */
+status_t QCameraStream_Snapshot::
+takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame,
+                    cam_ctrl_dimension_t *dim,
+                    int frame_len)
+{
+    status_t ret = NO_ERROR;
+    common_crop_t crop_info;
+    //common_crop_t crop;
+    uint32_t aspect_ratio;
+    camera_notify_callback notifyCb;
+    camera_data_callback dataCb;
+
+    ALOGI("%s: E", __func__);
+
+    /* set flag to indicate we are doing livesnapshot */
+    resetSnapshotCounters( );
+    setModeLiveSnapshot(true);
+
+    if(!mHalCamCtrl->mShutterSoundPlayed) {
+        notifyShutter(&crop_info, TRUE);
+    }
+    notifyShutter(&crop_info, FALSE);
+    mHalCamCtrl->mShutterSoundPlayed = FALSE;
+
+    // send upperlayer callback for raw image (data or notify, not both)
+    if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+      dataCb = mHalCamCtrl->mDataCb;
+    } else {
+      dataCb = NULL;
+    }
+    if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+      notifyCb = mHalCamCtrl->mNotifyCb;
+    } else {
+      notifyCb = NULL;
+    }
+
+    ALOGI("%s:Passed picture size: %d X %d", __func__,
+         dim->picture_width, dim->picture_height);
+    ALOGI("%s:Passed thumbnail size: %d X %d", __func__,
+         dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+    mPictureWidth = dim->picture_width;
+    mPictureHeight = dim->picture_height;
+    mThumbnailWidth = dim->ui_thumbnail_width;
+    mThumbnailHeight = dim->ui_thumbnail_height;
+    mPictureFormat = dim->main_img_format;
+    mThumbnailFormat = dim->thumb_format;
+
+    memset(&crop_info, 0, sizeof(common_crop_t));
+    crop_info.in1_w = mPictureWidth;
+    crop_info.in1_h = mPictureHeight;
+    /* For low power live snapshot the thumbnail output size is set to the default size.
+       In case of live snapshot, video buffer = thumbnail buffer. For higher resolutions
+       the thumbnail will be dropped if it is larger than 64KB. To avoid dropping it,
+       set the thumbnail to the size configured by the application, which is smaller
+       than the video size. */
+    mDropThumbnail = false;
+    if(mHalCamCtrl->thumbnailWidth == 0 &&  mHalCamCtrl->thumbnailHeight == 0) {
+        ALOGE("Live Snapshot thumbnail will be dropped as indicated by application");
+        mDropThumbnail = true;
+    }
+    crop_info.out1_w = mHalCamCtrl->thumbnailWidth;
+    crop_info.out1_h =  mHalCamCtrl->thumbnailHeight;
+    ret = encodeData(recvd_frame, &crop_info, frame_len, 0);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Failure configuring JPEG encoder", __func__);
+
+        /* Failure encoding this frame. Just notify upper layer
+           about it.*/
+        #if 0
+        if(mHalCamCtrl->mDataCb &&
+            (mHalCamCtrl->mMsgEnabled & MEDIA_RECORDER_MSG_COMPRESSED_IMAGE)) {
+            /* get picture failed. Give jpeg callback with NULL data
+             * to the application to restore to preview mode
+             */
+        }
+        #endif
+        setModeLiveSnapshot(false);
+        goto end;
+    }
+
+    if (dataCb) {
+      dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+                           1, NULL, mHalCamCtrl->mCallbackCookie);
+    }
+    if (notifyCb) {
+      notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+    }
+
+end:
+    ALOGI("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::
+takePictureZSL(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_ops_parm_get_buffered_frame_t param;
+
+    ALOGE("%s: E", __func__);
+
+    memset(&param, 0, sizeof(param));
+    param.ch_type = MM_CAMERA_CH_SNAPSHOT;
+
+    /* Take snapshot */
+    ALOGE("%s: Call MM_CAMERA_OPS_GET_BUFFERED_FRAME", __func__);
+
+    mNumOfSnapshot = mHalCamCtrl->getNumOfSnapshots();
+    if (NO_ERROR != cam_ops_action(mCameraId,
+                                          TRUE,
+                                          MM_CAMERA_OPS_GET_BUFFERED_FRAME,
+                                          &param)) {
+           ALOGE("%s: Failure getting zsl frame(s)", __func__);
+           ret = FAILED_TRANSACTION;
+           goto end;
+    }
+
+    /* TBD: Temp: to be removed once event callback
+       is implemented in mm-camera lib  */
+/*    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+    pthread_create(&mSnapshotThread,&attr,
+                   snapshot_thread, (void *)this);
+*/
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::
+startStreamZSL(void)
+{
+    status_t ret = NO_ERROR;
+
+    ALOGD("%s: E", __func__);
+
+    /* Start ZSL - it'll start queuing the frames */
+    ALOGD("%s: Call MM_CAMERA_OPS_ZSL", __func__);
+    if (NO_ERROR != cam_ops_action(mCameraId,
+                                          TRUE,
+                                          MM_CAMERA_OPS_ZSL,
+                                          this)) {
+           ALOGE("%s: Failure starting ZSL stream", __func__);
+           ret = FAILED_TRANSACTION;
+           goto end;
+    }
+
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+
+}
+
+status_t  QCameraStream_Snapshot::
+encodeData(mm_camera_ch_data_buf_t* recvd_frame,
+           common_crop_t *crop_info,
+           int frame_len,
+           bool enqueued)
+{
+    status_t ret = NO_ERROR;
+    cam_ctrl_dimension_t dimension;
+    struct msm_frame *postviewframe;
+    struct msm_frame *mainframe;
+    common_crop_t crop;
+    cam_point_t main_crop_offset;
+    cam_point_t thumb_crop_offset;
+    int width, height;
+    uint8_t *thumbnail_buf;
+    uint32_t thumbnail_fd;
+
+    omx_jpeg_encode_params encode_params;
+
+    /* If it's the only frame, we pass it directly to the encoder.
+       Otherwise we queue it and pick it up once the current JPEG finishes.
+       Also, if the queue isn't empty then this frame must be queued as well
+       until its turn comes (unless it has already been queued). */
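+    /* Dispatch summary (derived from the checks below): (1) encoder busy, or
+       queue non-empty and this is a new frame -> enqueue it; (2) continuation
+       of a multi-shot burst -> omxJpegEncodeNext(); (3) encoder idle and this
+       is the first frame -> full setup via omxJpegStart()/omxJpegEncode(). */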
+    ALOGD("%s: getSnapshotState()=%d, enqueued =%d, Q empty=%d", __func__, getSnapshotState(), enqueued, mSnapshotQueue.isEmpty());
+    ALOGD("%s: mNumOfRecievedJPEG=%d, mNumOfSnapshot =%d", __func__, mNumOfRecievedJPEG, mNumOfSnapshot);
+    if((getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) ||
+       (!mSnapshotQueue.isEmpty() && !enqueued)){ /*busy and new buffer*/
+        /* encoding is going on. Just queue the frame for now.*/
+        ALOGD("%s: JPEG encoding in progress."
+             "Enqueuing frame id(%d) for later processing.", __func__,
+             recvd_frame->snapshot.main.idx);
+        mSnapshotQueue.enqueue((void *)recvd_frame);
+    } else if (enqueued ||
+       (mNumOfRecievedJPEG != mNumOfSnapshot  && mNumOfRecievedJPEG != 0)) { /*not busy, not first*/
+      ALOGD("%s: JPG not busy, not first frame.", __func__);
+
+      // For full-size live shot, use the main image to generate the thumbnail
+      if (isFullSizeLiveshot()) {
+          postviewframe = recvd_frame->snapshot.main.frame;
+      } else {
+          postviewframe = recvd_frame->snapshot.thumbnail.frame;
+      }
+      mainframe = recvd_frame->snapshot.main.frame;
+      cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dimension);
+      ALOGD("%s: main_fmt =%d, tb_fmt =%d", __func__, dimension.main_img_format, dimension.thumb_format);
+      /* Since this is a continuation job, we only need to update the input buffers. */
+      encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+      encode_params.thumbnail_fd = postviewframe->fd;
+      encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+      encode_params.snapshot_fd = mainframe->fd;
+      encode_params.dimension = &dimension;
+      /*update exif parameters in HAL*/
+      mHalCamCtrl->setExifTags();
+
+      encode_params.exif_data = mHalCamCtrl->getExifData();
+      encode_params.exif_numEntries = mHalCamCtrl->getExifTableNumEntries();
+      if (!omxJpegEncodeNext(&encode_params)){
+          ALOGE("%s: Failure! JPEG encoder returned error.", __func__);
+          ret = FAILED_TRANSACTION;
+          goto end;
+      }
+      /* Save the pointer to the frame sent for encoding. we'll need it to
+         tell kernel that we are done with the frame.*/
+      mCurrentFrameEncoded = recvd_frame;
+      setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODING);
+    } else {  /*not busy and new buffer (first job)*/
+
+      ALOGD("%s: JPG Idle and  first frame.", __func__);
+
+        // For full-size live shot, use the main image to generate the thumbnail
+        if (isFullSizeLiveshot()){
+            postviewframe = recvd_frame->snapshot.main.frame;
+        } else {
+            postviewframe = recvd_frame->snapshot.thumbnail.frame;
+        }
+        mainframe = recvd_frame->snapshot.main.frame;
+        cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dimension);
+        ALOGD("%s: main_fmt =%d, tb_fmt =%d", __func__, dimension.main_img_format, dimension.thumb_format);
+
+        dimension.orig_picture_dx = mPictureWidth;
+        dimension.orig_picture_dy = mPictureHeight;
+
+        if(!mDropThumbnail) {
+            if(isZSLMode()) {
+                ALOGI("Setting input thumbnail size to previewWidth= %d   previewheight= %d in ZSL mode",
+                     mHalCamCtrl->mPreviewWidth, mHalCamCtrl->mPreviewHeight);
+                dimension.thumbnail_width = width = mHalCamCtrl->mPreviewWidth;
+                dimension.thumbnail_height = height = mHalCamCtrl->mPreviewHeight;
+            } else {
+                dimension.thumbnail_width = width = mThumbnailWidth;
+                dimension.thumbnail_height = height = mThumbnailHeight;
+            }
+        } else {
+            dimension.thumbnail_width = width = 0;
+            dimension.thumbnail_height = height = 0;
+        }
+        dimension.main_img_format = mPictureFormat;
+        dimension.thumb_format = mThumbnailFormat;
+
+        /*TBD: Move JPEG handling to the mm-camera library */
+        ALOGD("Setting callbacks, initializing encoder and start encoding.");
+        ALOGD(" Passing my obj: %x", (unsigned int) this);
+        set_callbacks(snapshot_jpeg_fragment_cb, snapshot_jpeg_cb, this,
+             mHalCamCtrl->mJpegMemory.camera_memory[0]->data, &mJpegOffset);
+
+        if(omxJpegStart() != NO_ERROR){
+            ALOGE("Error In omxJpegStart!!! Return");
+            ret = FAILED_TRANSACTION;
+            goto end;
+        }
+
+        if (mHalCamCtrl->getJpegQuality())
+            mm_jpeg_encoder_setMainImageQuality(mHalCamCtrl->getJpegQuality());
+        else
+            mm_jpeg_encoder_setMainImageQuality(85);
+
+        ALOGE("%s: Dimension to encode: main: %dx%d thumbnail: %dx%d", __func__,
+             dimension.orig_picture_dx, dimension.orig_picture_dy,
+             dimension.thumbnail_width, dimension.thumbnail_height);
+
+        /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+          cropinfo. It'll be changed later.*/
+        memset(&crop,0,sizeof(common_crop_t));
+        memset(&main_crop_offset,0,sizeof(cam_point_t));
+        memset(&thumb_crop_offset,0,sizeof(cam_point_t));
+
+        /* Setting crop info */
+
+        /*Main image*/
+        crop.in2_w=mCrop.snapshot.main_crop.width;// dimension.picture_width
+        crop.in2_h=mCrop.snapshot.main_crop.height;// dimension.picture_height;
+        if (!mJpegDownscaling) {
+            crop.out2_w = mPictureWidth;
+            crop.out2_h = mPictureHeight;
+        } else {
+            crop.out2_w = mActualPictureWidth;
+            crop.out2_h = mActualPictureHeight;
+            if (!crop.in2_w || !crop.in2_h) {
+                crop.in2_w = mPictureWidth;
+                crop.in2_h = mPictureHeight;
+            }
+        }
+        main_crop_offset.x=mCrop.snapshot.main_crop.left;
+        main_crop_offset.y=mCrop.snapshot.main_crop.top;
+        /*Thumbnail image*/
+        crop.in1_w=mCrop.snapshot.thumbnail_crop.width; //dimension.thumbnail_width;
+        crop.in1_h=mCrop.snapshot.thumbnail_crop.height; // dimension.thumbnail_height;
+        if(isLiveSnapshot() || isFullSizeLiveshot()) {
+            crop.out1_w= mHalCamCtrl->thumbnailWidth;
+            crop.out1_h=  mHalCamCtrl->thumbnailHeight;
+            ALOGD("Thumbnail width= %d  height= %d for livesnapshot", crop.out1_w, crop.out1_h);
+        } else {
+            crop.out1_w = width;
+            crop.out1_h = height;
+        }
+        thumb_crop_offset.x=mCrop.snapshot.thumbnail_crop.left;
+        thumb_crop_offset.y=mCrop.snapshot.thumbnail_crop.top;
+
+        //update exif parameters in HAL
+        mHalCamCtrl->initExifData();
+
+        /*Fill in the encode parameters*/
+        encode_params.dimension = (const cam_ctrl_dimension_t *)&dimension;
+        //if (!isFullSizeLiveshot()) {
+            encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+            encode_params.thumbnail_fd = postviewframe->fd;
+            encode_params.thumbnail_offset = postviewframe->phy_offset;
+            encode_params.thumb_crop_offset = &thumb_crop_offset;
+        //}
+        encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+        encode_params.snapshot_fd = mainframe->fd;
+        encode_params.snapshot_offset = mainframe->phy_offset;
+        encode_params.scaling_params = &crop;
+        encode_params.exif_data = mHalCamCtrl->getExifData();
+        encode_params.exif_numEntries = mHalCamCtrl->getExifTableNumEntries();
+
+        if (isLiveSnapshot() && !isFullSizeLiveshot())
+            encode_params.a_cbcroffset = mainframe->cbcr_off;
+        else
+            encode_params.a_cbcroffset = -1;
+        encode_params.main_crop_offset = &main_crop_offset;
+
+        if (mDropThumbnail)
+            encode_params.hasThumbnail = 0;
+        else
+            encode_params.hasThumbnail = 1;
+        encode_params.thumb_crop_offset = &thumb_crop_offset;
+        encode_params.main_format = dimension.main_img_format;
+        encode_params.thumbnail_format = dimension.thumb_format;
+
+        if (!omxJpegEncode(&encode_params)){
+            ALOGE("%s: Failure! JPEG encoder returned error.", __func__);
+            ret = FAILED_TRANSACTION;
+            goto end;
+        }
+
+        /* Save the pointer to the frame sent for encoding. we'll need it to
+           tell kernel that we are done with the frame.*/
+        mCurrentFrameEncoded = recvd_frame;
+        setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODING);
+    }
+
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+/* Called twice - 1st to play shutter sound and 2nd to configure
+   overlay/surfaceflinger for postview */
+void QCameraStream_Snapshot::notifyShutter(common_crop_t *crop,
+                                           bool mPlayShutterSoundOnly)
+{
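+    /* mPlayShutterSoundOnly == TRUE: first invocation, only play the shutter
+       sound. FALSE: second invocation, let the upper layer configure
+       overlay/surfaceflinger for postview. */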
+    ALOGD("%s: E", __func__);
+    if(!mActive && !isLiveSnapshot()) {
+      ALOGE("__debbug: Snapshot thread stopped \n");
+      return;
+    }
+    if(mHalCamCtrl->mNotifyCb)
+      mHalCamCtrl->mNotifyCb(CAMERA_MSG_SHUTTER, 0, mPlayShutterSoundOnly,
+                                 mHalCamCtrl->mCallbackCookie);
+    ALOGD("%s: X", __func__);
+}
+
+status_t  QCameraStream_Snapshot::
+encodeDisplayAndSave(mm_camera_ch_data_buf_t* recvd_frame,
+                     bool enqueued)
+{
+    status_t ret = NO_ERROR;
+    struct msm_frame *postview_frame;
+    struct ion_flush_data cache_inv_data;
+    int ion_fd;
+    int buf_index = 0;
+    ssize_t offset_addr = 0;
+    common_crop_t dummy_crop;
+    /* send frame for encoding */
+    ALOGE("%s: Send frame for encoding", __func__);
+    /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+      cropinfo. It'll be changed later.*/
+    if(!mActive) {
+        ALOGE("Cancel Picture.. Stop is called");
+        return NO_ERROR;
+    }
+    if(isZSLMode()){
+      ALOGE("%s: set JPEG rotation in ZSL mode", __func__);
+      mHalCamCtrl->setJpegRotation(isZSLMode());
+    }
+#ifdef USE_ION
+    /* Clean out (write back) the cache before sending the frame for JPEG encoding */
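+    /* ION_IOC_CLEAN_INV_CACHES writes back and invalidates the CPU cache
+       lines for the buffer so the JPEG encoder (and any other hardware
+       reading it) sees up-to-date pixel data. */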
+    memset(&cache_inv_data, 0, sizeof(struct ion_flush_data));
+    cache_inv_data.vaddr = (void*)recvd_frame->snapshot.main.frame->buffer;
+    cache_inv_data.fd = recvd_frame->snapshot.main.frame->fd;
+    cache_inv_data.handle = recvd_frame->snapshot.main.frame->fd_data.handle;
+    cache_inv_data.length = recvd_frame->snapshot.main.frame->ion_alloc.len;
+    ion_fd = recvd_frame->snapshot.main.frame->ion_dev_fd;
+    if(ion_fd > 0) {
+      if(ioctl(ion_fd, ION_IOC_CLEAN_INV_CACHES, &cache_inv_data) < 0)
+          ALOGE("%s: Cache Invalidate failed\n", __func__);
+      else {
+          ALOGD("%s: Successful cache invalidate\n", __func__);
+          if(!isFullSizeLiveshot()) {
+            ion_fd = recvd_frame->snapshot.thumbnail.frame->ion_dev_fd;
+            cache_inv_data.vaddr = (void*)recvd_frame->snapshot.thumbnail.frame->buffer;
+            cache_inv_data.fd = recvd_frame->snapshot.thumbnail.frame->fd;
+            cache_inv_data.handle = recvd_frame->snapshot.thumbnail.frame->fd_data.handle;
+            cache_inv_data.length = recvd_frame->snapshot.thumbnail.frame->ion_alloc.len;
+            if(ioctl(ion_fd, ION_IOC_CLEAN_INV_CACHES, &cache_inv_data) < 0)
+              ALOGE("%s: Cache Invalidate failed\n", __func__);
+            else
+              ALOGD("%s: Successful cache invalidate\n", __func__);
+          }
+      }
+    }
+#endif
+    memset(&dummy_crop,0,sizeof(common_crop_t));
+    ret = encodeData(recvd_frame, &dummy_crop, mSnapshotStreamBuf.frame_len,
+                     enqueued);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Failure configuring JPEG encoder", __func__);
+
+        goto end;
+    }
+
+    /* Display postview image*/
+    /* If it's burst mode, we won't be displaying postview of all the captured
+       images - only the first one */
+    ALOGD("%s: Burst mode flag  %d", __func__, mBurstModeFlag);
+
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::receiveRawPicture(mm_camera_ch_data_buf_t* recvd_frame)
+{
+    int buf_index = 0;
+    common_crop_t crop;
+    int rc = NO_ERROR;
+
+    camera_notify_callback         notifyCb;
+    camera_data_callback           dataCb, jpgDataCb;
+
+    ALOGD("%s: E ", __func__);
+    mStopCallbackLock.lock( );
+    if(!mActive) {
+        mStopCallbackLock.unlock();
+        ALOGD("%s: Stop receiving raw pic ", __func__);
+        return NO_ERROR;
+    }
+
+    if(getSnapshotState() == SNAPSHOT_STATE_ERROR) {
+        cam_evt_buf_done(mCameraId, recvd_frame);
+    }
+
+    mHalCamCtrl->dumpFrameToFile(recvd_frame->snapshot.main.frame, HAL_DUMP_FRM_MAIN);
+    if (!isFullSizeLiveshot())
+        mHalCamCtrl->dumpFrameToFile(recvd_frame->snapshot.thumbnail.frame,
+                                     HAL_DUMP_FRM_THUMBNAIL);
+
+    /* If it's raw snapshot, we just want to tell upperlayer to save the image*/
+    if(mSnapshotFormat == PICTURE_FORMAT_RAW) {
+        ALOGD("%s: Call notifyShutter 2nd time in case of RAW", __func__);
+        mStopCallbackLock.unlock();
+        if(!mHalCamCtrl->mShutterSoundPlayed) {
+            notifyShutter(&crop, TRUE);
+        }
+        notifyShutter(&crop, FALSE);
+        mHalCamCtrl->mShutterSoundPlayed = FALSE;
+
+        mStopCallbackLock.lock( );
+        ALOGD("%s: Sending Raw Snapshot Callback to Upperlayer", __func__);
+        buf_index = recvd_frame->def.idx;
+
+        if (mHalCamCtrl->mDataCb && mActive &&
+            (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)){
+          dataCb = mHalCamCtrl->mDataCb;
+        } else {
+          dataCb = NULL;
+        }
+        mStopCallbackLock.unlock();
+
+        if(dataCb) {
+            dataCb(
+                CAMERA_MSG_COMPRESSED_IMAGE,
+                mHalCamCtrl->mRawMemory.camera_memory[buf_index], 0, NULL,
+                mHalCamCtrl->mCallbackCookie);
+        }
+        /* TBD: Temp: To be removed once event handling is enabled */
+        mm_app_snapshot_done();
+    } else {
+        /*TBD: v4l2 doesn't have support to provide cropinfo along with
+          frame. We'll need to query.*/
+        memset(&crop, 0, sizeof(common_crop_t));
+
+        /*maftab*/
+        #if 0
+        crop.in1_w=mCrop.snapshot.thumbnail_crop.width;
+        crop.in1_h=mCrop.snapshot.thumbnail_crop.height;
+        crop.out1_w=mThumbnailWidth;
+        crop.out1_h=mThumbnailHeight;
+        #endif
+
+        ALOGD("%s: Call notifyShutter 2nd time", __func__);
+        /* The recvd_frame structure we receive from the lower library is a
+           local variable there. We need to save a copy of it so that we are
+           not left pointing to garbage data when that variable goes out of
+           scope. */
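+        /* Note: the heap copy is freed on the error path below; on success it
+           is presumably released once JPEG encoding of this frame completes. */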
+        mm_camera_ch_data_buf_t* frame =
+            (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+        if (frame == NULL) {
+            ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+            cam_evt_buf_done(mCameraId, recvd_frame);
+            mStopCallbackLock.unlock();
+            return BAD_VALUE;
+        }
+        memcpy(frame, recvd_frame, sizeof(mm_camera_ch_data_buf_t));
+
+        //mStopCallbackLock.lock();
+
+        // Only in ZSL mode with Wavelet Denoise enabled do we send the frame to the daemon for WDN
+        if (isZSLMode() && mHalCamCtrl->isWDenoiseEnabled()) {
+            if(mIsDoingWDN){
+                mWDNQueue.enqueue((void *)frame);
+                ALOGD("%s: Wavelet denoise is going on, queue frame", __func__);
+                rc = NO_ERROR;
+            } else {
+                ALOGD("%s: Start Wavelet denoise", __func__);
+                mIsDoingWDN = TRUE; // set the flag to TRUE because we are going to do WDN
+
+                // No WDN is going on so far, we will start it here
+                rc = doWaveletDenoise(frame);
+                if ( NO_ERROR != rc ) {
+                    ALOGE("%s: Error while doing wavelet denoise", __func__);
+                    mIsDoingWDN = FALSE;
+                }
+            }
+        }
+        else {
+          ALOGD("%s: encodeDisplayAndSave ", __func__);
+            rc = encodeDisplayAndSave(frame, 0);
+        }
+
+
+        // send upperlayer callback for raw image (data or notify, not both)
+        if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+          dataCb = mHalCamCtrl->mDataCb;
+        } else {
+          dataCb = NULL;
+        }
+        if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+          notifyCb = mHalCamCtrl->mNotifyCb;
+        } else {
+          notifyCb = NULL;
+        }
+
+        mStopCallbackLock.unlock();
+        if(!mHalCamCtrl->mShutterSoundPlayed) {
+            notifyShutter(&crop, TRUE);
+        }
+        notifyShutter(&crop, FALSE);
+        mHalCamCtrl->mShutterSoundPlayed = FALSE;
+
+
+        if (rc != NO_ERROR)
+        {
+            ALOGE("%s: Error while encoding/displaying/saving image", __func__);
+            cam_evt_buf_done(mCameraId, recvd_frame);
+
+            if(mHalCamCtrl->mDataCb &&
+                (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+                /* get picture failed. Give jpeg callback with NULL data
+                 * to the application to restore to preview mode
+                 */
+                jpgDataCb = mHalCamCtrl->mDataCb;
+            } else {
+                jpgDataCb = NULL;
+            }
+            ALOGE("%s: encode err so data cb", __func__);
+            //mStopCallbackLock.unlock();
+            if (dataCb) {
+              dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+                                   1, NULL, mHalCamCtrl->mCallbackCookie);
+            }
+            if (notifyCb) {
+              notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+            }
+            if (jpgDataCb) {
+              jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+                                       NULL, 0, NULL,
+                                       mHalCamCtrl->mCallbackCookie);
+            }
+
+            if (frame != NULL) {
+                free(frame);
+            }
+        } else {
+
+          //mStopCallbackLock.unlock();
+          if (dataCb) {
+            dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+                                 1, NULL, mHalCamCtrl->mCallbackCookie);
+          }
+          if (notifyCb) {
+            notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+          }
+        }
+    }
+
+    ALOGD("%s: X", __func__);
+    return NO_ERROR;
+}
+
+//-------------------------------------------------------------------
+// Helper Functions
+//-------------------------------------------------------------------
+void QCameraStream_Snapshot::handleError()
+{
+    mm_camera_channel_type_t ch_type;
+    ALOGD("%s: E", __func__);
+
+    /* Depending upon the state we'll have to
+       handle error */
+    switch(getSnapshotState()) {
+    case SNAPSHOT_STATE_JPEG_ENCODING:
+        if(mJpegHeap != NULL) mJpegHeap.clear();
+        mJpegHeap = NULL;
+
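+        /* intentional fall-through: continue cleanup for earlier states */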
+    case SNAPSHOT_STATE_YUV_RECVD:
+    case SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD:
+        stopPolling();
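+        /* intentional fall-through: continue cleanup for earlier states */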
+    case SNAPSHOT_STATE_INITIALIZED:
+    case SNAPSHOT_STATE_BUF_INITIALIZED:
+        if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+            deinitSnapshotBuffers();
+        }else
+        {
+            deinitRawSnapshotBuffers();
+        }
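+        /* intentional fall-through: continue cleanup for earlier states */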
+    case SNAPSHOT_STATE_BUF_NOTIF_REGD:
+    case SNAPSHOT_STATE_CH_ACQUIRED:
+        if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+            deinitSnapshotChannel(MM_CAMERA_CH_SNAPSHOT);
+        }else
+        {
+            deinitSnapshotChannel(MM_CAMERA_CH_RAW);
+        }
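+        /* intentional fall-through: mark the error state below */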
+    default:
+        /* Set the state to ERROR */
+        setSnapshotState(SNAPSHOT_STATE_ERROR);
+        break;
+    }
+
+    ALOGD("%s: X", __func__);
+}
+
+void QCameraStream_Snapshot::setSnapshotState(int state)
+{
+    ALOGD("%s: Setting snapshot state to: %d",
+         __func__, state);
+    mSnapshotState = state;
+}
+
+int QCameraStream_Snapshot::getSnapshotState()
+{
+    return mSnapshotState;
+}
+
+void QCameraStream_Snapshot::setModeLiveSnapshot(bool value)
+{
+    mModeLiveSnapshot = value;
+}
+
+bool QCameraStream_Snapshot::isLiveSnapshot(void)
+{
+    return mModeLiveSnapshot;
+}
+bool QCameraStream_Snapshot::isZSLMode()
+{
+    return (myMode & CAMERA_ZSL_MODE);
+}
+
+void QCameraStream_Snapshot::setFullSizeLiveshot(bool value)
+{
+    mFullLiveshot = value;
+}
+
+bool QCameraStream_Snapshot::isFullSizeLiveshot()
+{
+    return mFullLiveshot;
+}
+
+void QCameraStream_Snapshot::resetSnapshotCounters(void )
+{
+  mNumOfSnapshot = mHalCamCtrl->getNumOfSnapshots();
+  if (mNumOfSnapshot <= 0) {
+      mNumOfSnapshot = 1;
+  }
+  mNumOfRecievedJPEG = 0;
+  ALOGD("%s: Number of images to be captured: %d", __func__, mNumOfSnapshot);
+}
+
+//------------------------------------------------------------------
+// Constructor and Destructor
+//------------------------------------------------------------------
+QCameraStream_Snapshot::
+QCameraStream_Snapshot(int cameraId, camera_mode_t mode)
+  : QCameraStream(cameraId,mode),
+    mSnapshotFormat(PICTURE_FORMAT_JPEG),
+    mPictureWidth(0), mPictureHeight(0),
+    mPictureFormat(CAMERA_YUV_420_NV21),
+    mPostviewWidth(0), mPostviewHeight(0),
+    mThumbnailWidth(0), mThumbnailHeight(0),
+    mThumbnailFormat(CAMERA_YUV_420_NV21),
+    mJpegOffset(0),
+    mSnapshotState(SNAPSHOT_STATE_UNINIT),
+    mNumOfSnapshot(1),
+    mModeLiveSnapshot(false),
+    mBurstModeFlag(false),
+    mActualPictureWidth(0),
+    mActualPictureHeight(0),
+    mJpegDownscaling(false),
+    mJpegHeap(NULL),
+    mDisplayHeap(NULL),
+    mPostviewHeap(NULL),
+    mCurrentFrameEncoded(NULL),
+    mJpegSessionId(0),
+    mFullLiveshot(false),
+    mDropThumbnail(false)
+{
+    ALOGV("%s: E", __func__);
+
+    /*initialize snapshot queue*/
+    mSnapshotQueue.init();
+
+    /*initialize WDN queue*/
+    mWDNQueue.init();
+    mIsDoingWDN = FALSE;
+
+    memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+    memset(&mPostviewStreamBuf, 0, sizeof(mPostviewStreamBuf));
+    mSnapshotBufferNum = 0;
+    mMainSize = 0;
+    mThumbSize = 0;
+    for(int i = 0; i < mMaxSnapshotBufferCount; i++) {
+        mMainfd[i] = 0;
+        mThumbfd[i] = 0;
+        mCameraMemoryPtrMain[i] = NULL;
+        mCameraMemoryPtrThumb[i] = NULL;
+    }
+    /*load the jpeg lib*/
+    mJpegSessionId = omxJpegOpen( );
+    ALOGV("%s: X", __func__);
+}
+
+
+QCameraStream_Snapshot::~QCameraStream_Snapshot() {
+    ALOGV("%s: E", __func__);
+
+    /* deinit snapshot queue */
+    if (mSnapshotQueue.isInitialized()) {
+        mSnapshotQueue.deinit();
+    }
+    /* deinit WDN queue */
+    if (mWDNQueue.isInitialized()) {
+        mWDNQueue.deinit();
+    }
+
+    if(mActive) {
+        stop();
+    }
+    if(mInit) {
+        release();
+    }
+    mInit = false;
+    mActive = false;
+    if (mJpegSessionId > 0) {
+      omxJpegClose( );
+      mJpegSessionId = 0;
+    }
+    ALOGV("%s: X", __func__);
+
+}
+
+//------------------------------------------------------------------
+// Public Members
+//------------------------------------------------------------------
+status_t QCameraStream_Snapshot::init()
+{
+    status_t ret = NO_ERROR;
+    mm_camera_op_mode_type_t op_mode;
+
+    ALOGV("%s: E", __func__);
+    /* Check the state. If we have already started snapshot
+       process just return*/
+    if (getSnapshotState() != SNAPSHOT_STATE_UNINIT) {
+        ret = isZSLMode() ? NO_ERROR : INVALID_OPERATION;
+        ALOGE("%s: Trying to take picture while snapshot is in progress",
+             __func__);
+        goto end;
+    }
+    mInit = true;
+
+end:
+    /*if (ret == NO_ERROR) {
+        setSnapshotState(SNAPSHOT_STATE_INITIALIZED);
+    }*/
+    ALOGV("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::start(void) {
+    status_t ret = NO_ERROR;
+
+    ALOGV("%s: E", __func__);
+
+    Mutex::Autolock lock(mStopCallbackLock);
+
+    /* Keep track of number of snapshots to take - in case of
+       multiple snapshot/burst mode */
+
+    if(mHalCamCtrl->isRawSnapshot()) {
+        ALOGD("%s: Acquire Raw Snapshot Channel", __func__);
+        ret = cam_ops_ch_acquire(mCameraId, MM_CAMERA_CH_RAW);
+        if (NO_ERROR != ret) {
+            ALOGE("%s: Failure Acquiring Raw Snapshot Channel error =%d\n",
+                 __func__, ret);
+            ret = FAILED_TRANSACTION;
+            goto end;
+        }
+        /* Snapshot channel is acquired */
+        setSnapshotState(SNAPSHOT_STATE_CH_ACQUIRED);
+        ALOGD("%s: Register buffer notification. My object: %x",
+             __func__, (unsigned int) this);
+        (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_RAW,
+                                        snapshot_notify_cb,
+                                        MM_CAMERA_REG_BUF_CB_INFINITE,
+                                        0,
+                                        this);
+        /* Set the state to buffer notification completed */
+        setSnapshotState(SNAPSHOT_STATE_BUF_NOTIF_REGD);
+    }else{
+        ALOGD("%s: Acquire Snapshot Channel", __func__);
+        ret = cam_ops_ch_acquire(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+        if (NO_ERROR != ret) {
+            ALOGE("%s: Failure Acquiring Snapshot Channel error =%d\n", __func__, ret);
+            ret = FAILED_TRANSACTION;
+            goto end;
+        }
+        /* Snapshot channel is acquired */
+        setSnapshotState(SNAPSHOT_STATE_CH_ACQUIRED);
+        ALOGD("%s: Register buffer notification. My object: %x",
+             __func__, (unsigned int) this);
+        (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_SNAPSHOT,
+                                        snapshot_notify_cb,
+                                        MM_CAMERA_REG_BUF_CB_INFINITE,
+                                        0,
+                                        this);
+        /* Set the state to buffer notification completed */
+        setSnapshotState(SNAPSHOT_STATE_BUF_NOTIF_REGD);
+    }
+
+    if (isZSLMode()) {
+        prepareHardware();
+        ret = initZSLSnapshot();
+        if(ret != NO_ERROR) {
+            ALOGE("%s : Error while Initializing ZSL snapshot",__func__);
+            goto end;
+        }
+        mHalCamCtrl->setExifTags();
+        /* In case of ZSL, start will only start snapshot stream and
+           continuously queue the frames in a queue. When user clicks
+           shutter we'll call get buffer from the queue and pass it on */
+        ret = startStreamZSL();
+        goto end;
+    }
+
+    if (isFullSizeLiveshot())
+      ret = initFullLiveshot();
+
+    /* Check if it's a raw snapshot or JPEG*/
+    if(mHalCamCtrl->isRawSnapshot()) {
+        mSnapshotFormat = PICTURE_FORMAT_RAW;
+        ret = initRawSnapshot(mNumOfSnapshot);
+    }else{
+        //JPEG
+        mSnapshotFormat = PICTURE_FORMAT_JPEG;
+        ret = initJPEGSnapshot(mNumOfSnapshot);
+    }
+    if(ret != NO_ERROR) {
+        ALOGE("%s : Error while Initializing snapshot",__func__);
+        goto end;
+    }
+
+    //Update Exiftag values.
+    mHalCamCtrl->setExifTags();
+
+    if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+        ret = takePictureRaw();
+        goto end;
+    }
+    else{
+        ret = takePictureJPEG();
+        goto end;
+    }
+
+end:
+    if (ret == NO_ERROR) {
+        setSnapshotState(SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD);
+        mActive = true;
+    } else {
+        deInitBuffer();
+    }
+
+    ALOGV("%s: X", __func__);
+    return ret;
+}
+
+void QCameraStream_Snapshot::stopPolling(void)
+{
+    mm_camera_ops_type_t ops_type;
+
+    if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+        ops_type = isZSLMode() ? MM_CAMERA_OPS_ZSL : MM_CAMERA_OPS_SNAPSHOT;
+    }else
+        ops_type = MM_CAMERA_OPS_RAW;
+
+    if( NO_ERROR != cam_ops_action(mCameraId, FALSE,
+                                          ops_type, this)) {
+        ALOGE("%s: Failure stopping snapshot", __func__);
+    }
+}
+
+void QCameraStream_Snapshot::stop(void)
+{
+    mm_camera_ops_type_t ops_type;
+    status_t ret = NO_ERROR;
+
+    ALOGV("%s: E", __func__);
+    //Mutex::Autolock l(&snapshotLock);
+
+    if(!mActive) {
+      ALOGV("%s: Not Active return now", __func__);
+      return;
+    }
+    mActive = false;
+    Mutex::Autolock lock(mStopCallbackLock);
+    if (getSnapshotState() != SNAPSHOT_STATE_UNINIT) {
+        /* Stop polling for further frames */
+        stopPolling();
+
+        if(getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) {
+            ALOGV("Destroy Jpeg Instance");
+            omxJpegAbort();
+        }
+
+        /* Depending upon current state, we'll need to allocate-deallocate-deinit*/
+        deInitBuffer();
+    }
+
+    if(mSnapshotFormat == PICTURE_FORMAT_RAW) {
+        ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_RAW);
+        if(ret != MM_CAMERA_OK) {
+          ALOGE("%s:Deinit RAW channel failed=%d\n", __func__, ret);
+        }
+        (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_RAW,
+                                            NULL,
+                                            (mm_camera_register_buf_cb_type_t)NULL,
+                                            NULL,
+                                            NULL);
+    } else {
+        ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+        if(ret != MM_CAMERA_OK) {
+          ALOGE("%s:Deinit Snapshot channel failed=%d\n", __func__, ret);
+        }
+        (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_SNAPSHOT,
+                                            NULL,
+                                            (mm_camera_register_buf_cb_type_t)NULL,
+                                            NULL,
+                                            NULL);
+    }
+
+    /* stop() is also reached via release() during an explicit disconnect from
+       the upper layer, so finish any outstanding JPEG work whatever state we
+       are in */
+    ALOGV("Calling omxJpegFinish from stop\n");
+    omxJpegFinish();
+#if 0
+    omxJpegClose();
+#endif
+    mFullLiveshot = false;
+    ALOGV("%s: X", __func__);
+
+}
+
+void QCameraStream_Snapshot::release()
+{
+    status_t ret = NO_ERROR;
+    ALOGV("%s: E", __func__);
+    //Mutex::Autolock l(&snapshotLock);
+
+    if(isLiveSnapshot()) {
+        deInitBuffer();
+    }
+    if(!mInit){
+        ALOGE("%s : Stream not Initalized",__func__);
+        return;
+    }
+
+    if(mActive) {
+      this->stop();
+      mActive = FALSE;
+    }
+
+    /* release is generally called in case of explicit call from
+       upper-layer during disconnect. So we need to deinit everything
+       whatever state we are in */
+
+    //deinit();
+    mInit = false;
+    ALOGV("%s: X", __func__);
+
+}
+
+void QCameraStream_Snapshot::prepareHardware()
+{
+    ALOGV("%s: E", __func__);
+
+    /* Prepare snapshot*/
+    cam_ops_action(mCameraId,
+                          TRUE,
+                          MM_CAMERA_OPS_PREPARE_SNAPSHOT,
+                          this);
+    ALOGV("%s: X", __func__);
+}
+
+sp<IMemoryHeap> QCameraStream_Snapshot::getRawHeap() const
+{
+    return ((mDisplayHeap != NULL) ? mDisplayHeap->mHeap : NULL);
+}
+
+QCameraStream*
+QCameraStream_Snapshot::createInstance(int cameraId,
+                                      camera_mode_t mode)
+{
+
+  QCameraStream* pme = new QCameraStream_Snapshot(cameraId, mode);
+
+  return pme;
+}
+
+void QCameraStream_Snapshot::deleteInstance(QCameraStream *p)
+{
+  if (p){
+    p->release();
+    delete p;
+    p = NULL;
+  }
+}
+
+void QCameraStream_Snapshot::notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie)
+{
+    camera_notify_callback         notifyCb;
+    camera_data_callback           dataCb, jpgDataCb;
+    int rc = NO_ERROR;
+    mm_camera_ch_data_buf_t *frame = (mm_camera_ch_data_buf_t *)cookie;
+
+    ALOGI("%s: WDN Done status (%d) received",__func__,status);
+    Mutex::Autolock lock(mStopCallbackLock);
+    if (frame == NULL) {
+        ALOGE("%s: cookie is returned NULL", __func__);
+    } else {
+        // first unmapping the fds
+        mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, frame->snapshot.main.idx, mCameraId,
+                                      CAM_SOCK_MSG_TYPE_FD_UNMAPPING);
+        mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, frame->snapshot.thumbnail.idx, mCameraId,
+                                      CAM_SOCK_MSG_TYPE_FD_UNMAPPING);
+
+        // then do JPEG encoding
+        rc = encodeDisplayAndSave(frame, 0);
+    }
+
+    // send upperlayer callback for raw image (data or notify, not both)
+    if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+      dataCb = mHalCamCtrl->mDataCb;
+    } else {
+      dataCb = NULL;
+    }
+    if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+      notifyCb = mHalCamCtrl->mNotifyCb;
+    } else {
+      notifyCb = NULL;
+    }
+    if(mHalCamCtrl->mDataCb &&
+        (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+        /* Cache the JPEG callback so that, if encoding fails below, we can
+         * return NULL data to the application and let it restore preview mode.
+         */
+        jpgDataCb = mHalCamCtrl->mDataCb;
+    } else {
+      jpgDataCb = NULL;
+    }
+
+    // launch next WDN if there is more in WDN Queue
+    lauchNextWDenoiseFromQueue();
+
+    mStopCallbackLock.unlock();
+
+    if (rc != NO_ERROR)
+    {
+        ALOGE("%s: Error while encoding/displaying/saving image", __func__);
+        if (frame) {
+            cam_evt_buf_done(mCameraId, frame);
+        }
+
+        if (dataCb) {
+          dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+                               1, NULL, mHalCamCtrl->mCallbackCookie);
+        }
+        if (notifyCb) {
+          notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+        }
+        if (jpgDataCb) {
+          jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+                                   NULL, 0, NULL,
+                                   mHalCamCtrl->mCallbackCookie);
+        }
+
+        if (frame != NULL) {
+            free(frame);
+        }
+    }
+}
+
+void QCameraStream_Snapshot::lauchNextWDenoiseFromQueue()
+{
+    do {
+        mm_camera_ch_data_buf_t *frame = NULL;
+        if ( mWDNQueue.isEmpty() ||
+             (NULL == (frame = (mm_camera_ch_data_buf_t *)mWDNQueue.dequeue())) ) {
+            // set the flag back to FALSE when no WDN is going on
+            mIsDoingWDN = FALSE;
+            break;
+        }
+
+        if ( NO_ERROR != doWaveletDenoise(frame) ) {
+            ALOGE("%s: Error while doing wavelet denoise", __func__);
+            if (frame != NULL) {
+                free(frame);
+            }
+        } else {
+            // we sent out req for WDN, so we can break here
+            ALOGD("%s: Send out req for doing wavelet denoise, return here", __func__);
+            break;
+        }
+    } while (TRUE);
+}
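+
+/* Illustrative summary of the wavelet denoise (WDN) hand-off, based on the code
+ * in this class (not part of the original change):
+ *   1. doWaveletDenoise() (below) maps the main/thumbnail buffer fds to the
+ *      daemon, and sendWDenoiseStartMsg() sends CAM_SOCK_MSG_TYPE_WDN_START
+ *      with the frame pointer as the cookie.
+ *   2. When the daemon reports completion, notifyWDenoiseEvent() (above) unmaps
+ *      the fds, encodes/saves the frame and calls lauchNextWDenoiseFromQueue().
+ *   3. While one frame is out for WDN (mIsDoingWDN == TRUE), later frames are
+ *      held in mWDNQueue (enqueued elsewhere in this class) and drained here
+ *      one at a time.
+ */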
+
+status_t QCameraStream_Snapshot::doWaveletDenoise(mm_camera_ch_data_buf_t* frame)
+{
+    status_t ret = NO_ERROR;
+    cam_sock_packet_t packet;
+    cam_ctrl_dimension_t dim;
+
+    ALOGD("%s: E", __func__);
+
+    // get dim on the fly
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (NO_ERROR != ret) {
+        ALOGE("%s: error - can't get dimension!", __func__);
+        return FAILED_TRANSACTION;
+    }
+
+    // send main frame mapping through domain socket
+    if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN,
+                                                frame->snapshot.main.idx,
+                                                frame->snapshot.main.frame->fd,
+                                                dim.picture_frame_offset.frame_len, mCameraId,
+                                                CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+        ALOGE("%s: sending main frame mapping buf msg Failed", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    // send thumbnail frame mapping through domain socket
+    if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL,
+                                                frame->snapshot.thumbnail.idx,
+                                                frame->snapshot.thumbnail.frame->fd,
+                                                dim.display_frame_offset.frame_len, mCameraId,
+                                                CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+        ALOGE("%s: sending thumbnail frame mapping buf msg Failed", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    // ask daemon to start the WDN operation
+    if (NO_ERROR != sendWDenoiseStartMsg(frame)) {
+        ALOGE("%s: sending wavelet denoise start msg failed", __func__);
+        ret = FAILED_TRANSACTION;
+        goto end;
+    }
+
+end:
+    ALOGD("%s: X", __func__);
+    return ret;
+}
+
+status_t QCameraStream_Snapshot::sendWDenoiseStartMsg(mm_camera_ch_data_buf_t * frame)
+{
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_SOCK_MSG_TYPE_WDN_START;
+    packet.payload.wdn_start.cookie = (unsigned long)frame;
+    packet.payload.wdn_start.num_frames = MM_MAX_WDN_NUM;
+    packet.payload.wdn_start.ext_mode[0] = MSM_V4L2_EXT_CAPTURE_MODE_MAIN;
+    packet.payload.wdn_start.ext_mode[1] = MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL;
+    packet.payload.wdn_start.frame_idx[0] = frame->snapshot.main.idx;
+    packet.payload.wdn_start.frame_idx[1] = frame->snapshot.thumbnail.idx;
+    if ( cam_ops_sendmsg(mCameraId, &packet, sizeof(packet), 0) <= 0 ) {
+        ALOGE("%s: sending start wavelet denoise msg failed", __func__);
+        return FAILED_TRANSACTION;
+    }
+    return NO_ERROR;
+}
+
+}; // namespace android
+
diff --git a/camera/QCameraParameters.cpp b/camera/QCameraParameters.cpp
new file mode 100644
index 0000000..2a31b35
--- /dev/null
+++ b/camera/QCameraParameters.cpp
@@ -0,0 +1,397 @@
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define ALOG_TAG "QCameraParams"
+#include <utils/Log.h>
+#include <string.h>
+#include <stdlib.h>
+#include "QCameraParameters.h"
+
+namespace android {
+// Parameter keys to communicate between camera application and driver.
+const char QCameraParameters::KEY_SUPPORTED_HFR_SIZES[] = "hfr-size-values";
+const char QCameraParameters::KEY_PREVIEW_FRAME_RATE_MODE[] = "preview-frame-rate-mode";
+const char QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES[] = "preview-frame-rate-modes";
+const char QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE[] = "frame-rate-auto";
+const char QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE[] = "frame-rate-fixed";
+const char QCameraParameters::KEY_TOUCH_AF_AEC[] = "touch-af-aec";
+const char QCameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC[] = "touch-af-aec-values";
+const char QCameraParameters::KEY_TOUCH_INDEX_AEC[] = "touch-index-aec";
+const char QCameraParameters::KEY_TOUCH_INDEX_AF[] = "touch-index-af";
+const char QCameraParameters::KEY_SCENE_DETECT[] = "scene-detect";
+const char QCameraParameters::KEY_SUPPORTED_SCENE_DETECT[] = "scene-detect-values";
+const char QCameraParameters::KEY_ISO_MODE[] = "iso";
+const char QCameraParameters::KEY_SUPPORTED_ISO_MODES[] = "iso-values";
+const char QCameraParameters::KEY_LENSSHADE[] = "lensshade";
+const char QCameraParameters::KEY_SUPPORTED_LENSSHADE_MODES[] = "lensshade-values";
+const char QCameraParameters::KEY_AUTO_EXPOSURE[] = "auto-exposure";
+const char QCameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE[] = "auto-exposure-values";
+const char QCameraParameters::KEY_DENOISE[] = "denoise";
+const char QCameraParameters::KEY_SUPPORTED_DENOISE[] = "denoise-values";
+const char QCameraParameters::KEY_SELECTABLE_ZONE_AF[] = "selectable-zone-af";
+const char QCameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF[] = "selectable-zone-af-values";
+const char QCameraParameters::KEY_FACE_DETECTION[] = "face-detection";
+const char QCameraParameters::KEY_SUPPORTED_FACE_DETECTION[] = "face-detection-values";
+const char QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT[] = "mce";
+const char QCameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES[] = "mce-values";
+const char QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE[] = "video-hfr";
+const char QCameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[] = "video-hfr-values";
+const char QCameraParameters::KEY_REDEYE_REDUCTION[] = "redeye-reduction";
+const char QCameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION[] = "redeye-reduction-values";
+const char QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING[] = "hdr";
+const char QCameraParameters::KEY_SUPPORTED_HDR_IMAGING_MODES[] = "hdr-values";
+const char QCameraParameters::KEY_POWER_MODE_SUPPORTED[] = "power-mode-supported";
+const char QCameraParameters::KEY_ZSL[] = "zsl";
+const char QCameraParameters::KEY_SUPPORTED_ZSL_MODES[] = "zsl-values";
+const char QCameraParameters::KEY_CAMERA_MODE[] = "camera-mode";
+const char QCameraParameters::KEY_AE_BRACKET_HDR[] = "ae-bracket-hdr";
+const char QCameraParameters::KEY_POWER_MODE[] = "power-mode";
+/* only effective when KEY_AE_BRACKET_HDR is set to ae_bracketing */
+//const char QCameraParameters::KEY_AE_BRACKET_SETTING_KEY[] = "ae-bracket-setting";
+
+// Values for effect settings.
+const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
+const char QCameraParameters::EFFECT_SKETCH[] = "sketch";
+const char QCameraParameters::EFFECT_NEON[] = "neon";
+
+// Values for auto exposure settings.
+const char QCameraParameters::TOUCH_AF_AEC_OFF[] = "touch-off";
+const char QCameraParameters::TOUCH_AF_AEC_ON[] = "touch-on";
+
+// Values for scene mode settings.
+const char QCameraParameters::SCENE_MODE_ASD[] = "asd";   // corresponds to CAMERA_BESTSHOT_AUTO in HAL
+const char QCameraParameters::SCENE_MODE_BACKLIGHT[] = "backlight";
+const char QCameraParameters::SCENE_MODE_FLOWERS[] = "flowers";
+const char QCameraParameters::SCENE_MODE_AR[] = "AR";
+
+// Values for auto scene detection settings.
+const char QCameraParameters::SCENE_DETECT_OFF[] = "off";
+const char QCameraParameters::SCENE_DETECT_ON[] = "on";
+
+// Formats for setPreviewFormat and setPictureFormat.
+const char QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO[] = "yuv420sp-adreno";
+const char QCameraParameters::PIXEL_FORMAT_RAW[] = "raw";
+const char QCameraParameters::PIXEL_FORMAT_YV12[] = "yuv420p";
+const char QCameraParameters::PIXEL_FORMAT_NV12[] = "nv12";
+
+// Values for focus mode settings.
+const char QCameraParameters::FOCUS_MODE_NORMAL[] = "normal";
+const char QCameraParameters::KEY_SKIN_TONE_ENHANCEMENT[] = "skinToneEnhancement";
+const char QCameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] = "skinToneEnhancement-values";
+
+// Values for ISO Settings
+const char QCameraParameters::ISO_AUTO[] = "auto";
+const char QCameraParameters::ISO_HJR[] = "ISO_HJR";
+const char QCameraParameters::ISO_100[] = "ISO100";
+const char QCameraParameters::ISO_200[] = "ISO200";
+const char QCameraParameters::ISO_400[] = "ISO400";
+const char QCameraParameters::ISO_800[] = "ISO800";
+const char QCameraParameters::ISO_1600[] = "ISO1600";
+
+// Values for Lens Shading
+const char QCameraParameters::LENSSHADE_ENABLE[] = "enable";
+const char QCameraParameters::LENSSHADE_DISABLE[] = "disable";
+
+// Values for auto exposure settings.
+const char QCameraParameters::AUTO_EXPOSURE_FRAME_AVG[] = "frame-average";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING[] = "spot-metering";
+
+const char QCameraParameters::KEY_GPS_LATITUDE_REF[] = "gps-latitude-ref";
+const char QCameraParameters::KEY_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
+const char QCameraParameters::KEY_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
+const char QCameraParameters::KEY_GPS_STATUS[] = "gps-status";
+const char QCameraParameters::KEY_EXIF_DATETIME[] = "exif-datetime";
+
+const char QCameraParameters::KEY_HISTOGRAM[] = "histogram";
+const char QCameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES[] = "histogram-values";
+
+// Values for Histogram
+const char QCameraParameters::HISTOGRAM_ENABLE[] = "enable";
+const char QCameraParameters::HISTOGRAM_DISABLE[] = "disable";
+
+//Values for Skin Tone Enhancement Modes
+const char QCameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE[] = "enable";
+const char QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE[] = "disable";
+
+const char QCameraParameters::KEY_SHARPNESS[] = "sharpness";
+const char QCameraParameters::KEY_MAX_SHARPNESS[] = "max-sharpness";
+const char QCameraParameters::KEY_CONTRAST[] = "contrast";
+const char QCameraParameters::KEY_MAX_CONTRAST[] = "max-contrast";
+const char QCameraParameters::KEY_SATURATION[] = "saturation";
+const char QCameraParameters::KEY_MAX_SATURATION[] = "max-saturation";
+
+const char QCameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED[] = "single-isp-output-enabled";
+
+//Values for DENOISE
+const char QCameraParameters::DENOISE_OFF[] = "denoise-off";
+const char QCameraParameters::DENOISE_ON[] = "denoise-on";
+
+// Values for selectable zone af Settings
+const char QCameraParameters::SELECTABLE_ZONE_AF_AUTO[] = "auto";
+const char QCameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE[] = "frame-average";
+
+// Values for Face Detection settings.
+const char QCameraParameters::FACE_DETECTION_OFF[] = "off";
+const char QCameraParameters::FACE_DETECTION_ON[] = "on";
+
+// Values for MCE settings.
+const char QCameraParameters::MCE_ENABLE[] = "enable";
+const char QCameraParameters::MCE_DISABLE[] = "disable";
+
+// Values for HFR settings.
+const char QCameraParameters::VIDEO_HFR_OFF[] = "off";
+const char QCameraParameters::VIDEO_HFR_2X[] = "60";
+const char QCameraParameters::VIDEO_HFR_3X[] = "90";
+const char QCameraParameters::VIDEO_HFR_4X[] = "120";
+
+// Values for Redeye Reduction settings.
+const char QCameraParameters::REDEYE_REDUCTION_ENABLE[] = "enable";
+const char QCameraParameters::REDEYE_REDUCTION_DISABLE[] = "disable";
+
+// Values for HDR settings.
+const char QCameraParameters::HDR_ENABLE[] = "enable";
+const char QCameraParameters::HDR_DISABLE[] = "disable";
+
+// Values for ZSL settings.
+const char QCameraParameters::ZSL_OFF[] = "off";
+const char QCameraParameters::ZSL_ON[] = "on";
+
+// Values for HDR Bracketing settings.
+const char QCameraParameters::AE_BRACKET_HDR_OFF[] = "Off";
+const char QCameraParameters::AE_BRACKET_HDR[] = "HDR";
+const char QCameraParameters::AE_BRACKET[] = "AE-Bracket";
+
+const char QCameraParameters::LOW_POWER[] = "Low_Power";
+const char QCameraParameters::NORMAL_POWER[] = "Normal_Power";
+
+static const char* portrait = "portrait";
+static const char* landscape = "landscape";
+
+//QCameraParameters::QCameraParameters()
+//                : mMap()
+//{
+//}
+
+QCameraParameters::~QCameraParameters()
+{
+}
+
+int QCameraParameters::getOrientation() const
+{
+    const char* orientation = get("orientation");
+    if (orientation && !strcmp(orientation, portrait))
+        return CAMERA_ORIENTATION_PORTRAIT;
+    return CAMERA_ORIENTATION_LANDSCAPE;
+}
+void QCameraParameters::setOrientation(int orientation)
+{
+    if (orientation == CAMERA_ORIENTATION_PORTRAIT) {
+        set("orientation", portrait);
+    } else {
+         set("orientation", landscape);
+    }
+}
+
+        //XXX ALOGE("Key \"%s\"contains invalid character (= or ;)", key);
+        //XXX ALOGE("Value \"%s\"contains invalid character (= or ;)", value);
+    //snprintf(str, sizeof(str), "%d", value);
+        //ALOGE("Cannot find delimeter (%c) in str=%s", delim, str);
+		
+		
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+static int parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if(num == NULL) {
+        ALOGE("Invalid output array (num == NULL)");
+        return -1;
+    }
+    // check if the string starts and ends with parentheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        ALOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+        return -1;
+    }
+    start = (char*) str;
+    start++;
+    for(int i=0; i<N; i++) {
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            ALOGE("Cannot find delimeter '%c' in string \"%s\". end = %c", delim, str, *end);
+            return -1;
+        }
+        start = end+1;
+    }
+    return 0;
+}
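+
+// Illustrative use of parseNDimVector() with an assumed input string
+// (not part of the original change):
+//   int area[5];
+//   if (parseNDimVector("(100, 200, 300, 400, 1)", area, 5) == 0) {
+//       // area[] now holds {100, 200, 300, 400, 1}
+//   }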
+
+
+            //ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+    //snprintf(str, sizeof(str), "%dx%d", width, height);
+	
+	
+	
+// Parse string like "640x480" or "10000,20000"
+static int parse_pair(const char *str, int *first, int *second, char delim,
+                      char **endptr = NULL)
+{
+    // Find the first integer.
+    char *end;
+    int w = (int)strtol(str, &end, 10);
+    // If a delimiter does not immediately follow, give up.
+    if (*end != delim) {
+        ALOGE("Cannot find delimiter (%c) in str=%s", delim, str);
+        return -1;
+    }
+
+    // Find the second integer, immediately after the delimiter.
+    int h = (int)strtol(end+1, &end, 10);
+
+    *first = w;
+    *second = h;
+
+    if (endptr) {
+        *endptr = end;
+    }
+
+    return 0;
+}
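+
+// Illustrative use of parse_pair() with assumed inputs (not part of the
+// original change):
+//   int w, h;
+//   parse_pair("640x480", &w, &h, 'x');      // w = 640,   h = 480
+//   parse_pair("10000,20000", &w, &h, ',');  // w = 10000, h = 20000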
+
+static void parseSizesList(const char *sizesStr, Vector<Size> &sizes)
+{
+    if (sizesStr == 0) {
+        return;
+    }
+
+    char *sizeStartPtr = (char *)sizesStr;
+
+    while (true) {
+        int width, height;
+        int success = parse_pair(sizeStartPtr, &width, &height, 'x',
+                                 &sizeStartPtr);
+        if (success == -1 || (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) {
+            ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+            return;
+        }
+        sizes.push(Size(width, height));
+
+        if (*sizeStartPtr == '\0') {
+            return;
+        }
+        sizeStartPtr++;
+    }
+}
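+
+// Illustrative use of parseSizesList() with an assumed input (not part of the
+// original change):
+//   Vector<Size> sizes;
+//   parseSizesList("1920x1080,1280x720", sizes);
+//   // sizes now contains {1920x1080, 1280x720}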
+
+
+void QCameraParameters::getSupportedHfrSizes(Vector<Size> &sizes) const
+{
+    const char *hfrSizesStr = get(KEY_SUPPORTED_HFR_SIZES);
+    parseSizesList(hfrSizesStr, sizes);
+}
+
+void QCameraParameters::setPreviewFpsRange(int minFPS, int maxFPS)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%d,%d",minFPS,maxFPS);
+    set(KEY_PREVIEW_FPS_RANGE,str);
+}
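+
+// For example (assuming the framework convention of fps values scaled by 1000):
+//   setPreviewFpsRange(15000, 30000);  // sets KEY_PREVIEW_FPS_RANGE to "15000,30000"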
+
+void QCameraParameters::setPreviewFrameRateMode(const char *mode)
+{
+    set(KEY_PREVIEW_FRAME_RATE_MODE, mode);
+}
+
+const char *QCameraParameters::getPreviewFrameRateMode() const
+{
+    return get(KEY_PREVIEW_FRAME_RATE_MODE);
+}
+
+
+    //ALOGD("dump: mMap.size = %d", mMap.size());
+        //ALOGD("%s: %s\n", k.string(), v.string());
+    
+
+
+void QCameraParameters::setTouchIndexAec(int x, int y)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%dx%d", x, y);
+    set(KEY_TOUCH_INDEX_AEC, str);
+}
+
+void QCameraParameters::getTouchIndexAec(int *x, int *y) const
+{
+    *x = -1;
+    *y = -1;
+
+    // Get the current string; if it doesn't exist, leave the values at -1
+    const char *p = get(KEY_TOUCH_INDEX_AEC);
+    if (p == 0)
+        return;
+
+    int tempX, tempY;
+    if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+        *x = tempX;
+        *y = tempY;
+    }
+}
+
+void QCameraParameters::setTouchIndexAf(int x, int y)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%dx%d", x, y);
+    set(KEY_TOUCH_INDEX_AF, str);
+}
+
+void QCameraParameters::getTouchIndexAf(int *x, int *y) const
+{
+    *x = -1;
+    *y = -1;
+
+    // Get the current string; if it doesn't exist, leave the values at -1
+    const char *p = get(KEY_TOUCH_INDEX_AF);
+    if (p == 0)
+        return;
+
+    int tempX, tempY;
+    if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+        *x = tempX;
+        *y = tempY;
+	}
+}
+
+void QCameraParameters::getMeteringAreaCenter(int *x, int *y) const
+{
+    //Default invalid values
+    *x = -2000;
+    *y = -2000;
+
+    const char *p = get(KEY_METERING_AREAS);
+    if(p != NULL) {
+        int arr[5] = {-2000, -2000, -2000, -2000, 0};
+        parseNDimVector(p, arr, 5); //p = "(x1, y1, x2, y2, weight)"
+        *x = (arr[0] + arr[2])/2; //center_x = (x1+x2)/2
+        *y = (arr[1] + arr[3])/2; //center_y = (y1+y2)/2
+    }
+}
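+
+// For example, with KEY_METERING_AREAS set to "(-400, -200, 400, 200, 1)"
+// (assumed input, not part of the original change), the computed center is (0, 0).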
+
+
+}; // namespace android
+
diff --git a/camera/QCameraParameters.h b/camera/QCameraParameters.h
new file mode 100644
index 0000000..6920413
--- /dev/null
+++ b/camera/QCameraParameters.h
@@ -0,0 +1,254 @@
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+//#include <utils/KeyedVector.h>
+//#include <utils/String8.h>
+#include <camera/CameraParameters.h>
+
+namespace android {
+
+struct FPSRange{
+    int minFPS;
+    int maxFPS;
+    FPSRange(){
+        minFPS=0;
+        maxFPS=0;
+    };
+    FPSRange(int min,int max){
+        minFPS=min;
+        maxFPS=max;
+    };
+};
+class QCameraParameters: public CameraParameters
+{
+public:
+#if 1
+    QCameraParameters() : CameraParameters() {};
+    QCameraParameters(const String8 &params): CameraParameters(params) {};
+    #else
+    QCameraParameters() : CameraParameters() {};
+    QCameraParameters(const String8 &params) { unflatten(params); }
+#endif
+    ~QCameraParameters();
+
+    // Supported preview/recording sizes for high frame rate recording, in pixels.
+    // Example value: "800x480,432x320". Read only.
+    static const char KEY_SUPPORTED_HFR_SIZES[];
+    // The mode of preview frame rate.
+    // Example value: "frame-rate-auto, frame-rate-fixed".
+    static const char KEY_PREVIEW_FRAME_RATE_MODE[];
+    static const char KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+    static const char KEY_PREVIEW_FRAME_RATE_AUTO_MODE[];
+    static const char KEY_PREVIEW_FRAME_RATE_FIXED_MODE[];
+
+    static const char KEY_SKIN_TONE_ENHANCEMENT[] ;
+    static const char KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+    //Touch Af/AEC settings.
+    static const char KEY_TOUCH_AF_AEC[];
+    static const char KEY_SUPPORTED_TOUCH_AF_AEC[];
+    //Touch Index for AEC.
+    static const char KEY_TOUCH_INDEX_AEC[];
+    //Touch Index for AF.
+    static const char KEY_TOUCH_INDEX_AF[];
+    // Current auto scene detection mode.
+    // Example value: "off" or SCENE_DETECT_XXX constants. Read/write.
+    static const char KEY_SCENE_DETECT[];
+    // Supported auto scene detection settings.
+    // Example value: "off,backlight,snow/cloudy". Read only.
+    static const char KEY_SUPPORTED_SCENE_DETECT[];
+	   // Returns true if video snapshot is supported. That is, applications
+    static const char KEY_FULL_VIDEO_SNAP_SUPPORTED[];
+    static const char KEY_POWER_MODE_SUPPORTED[];
+
+    static const char KEY_ISO_MODE[];
+    static const char KEY_SUPPORTED_ISO_MODES[];
+    static const char KEY_LENSSHADE[] ;
+    static const char KEY_SUPPORTED_LENSSHADE_MODES[] ;
+
+    static const char KEY_AUTO_EXPOSURE[];
+    static const char KEY_SUPPORTED_AUTO_EXPOSURE[];
+
+    static const char KEY_GPS_LATITUDE_REF[];
+    static const char KEY_GPS_LONGITUDE_REF[];
+    static const char KEY_GPS_ALTITUDE_REF[];
+    static const char KEY_GPS_STATUS[];
+    static const char KEY_EXIF_DATETIME[];
+    static const char KEY_MEMORY_COLOR_ENHANCEMENT[];
+    static const char KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+
+
+    static const char KEY_POWER_MODE[];
+
+    static const char KEY_ZSL[];
+    static const char KEY_SUPPORTED_ZSL_MODES[];
+
+    static const char KEY_CAMERA_MODE[];
+
+    static const char KEY_VIDEO_HIGH_FRAME_RATE[];
+    static const char KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+    static const char KEY_HIGH_DYNAMIC_RANGE_IMAGING[];
+    static const char KEY_SUPPORTED_HDR_IMAGING_MODES[];
+    static const char KEY_AE_BRACKET_HDR[];
+
+
+    // DENOISE
+    static const char KEY_DENOISE[];
+    static const char KEY_SUPPORTED_DENOISE[];
+
+    //Selectable zone AF.
+    static const char KEY_SELECTABLE_ZONE_AF[];
+    static const char KEY_SUPPORTED_SELECTABLE_ZONE_AF[];
+
+    //Face Detection
+    static const char KEY_FACE_DETECTION[];
+    static const char KEY_SUPPORTED_FACE_DETECTION[];
+
+    //Redeye Reduction
+    static const char KEY_REDEYE_REDUCTION[];
+    static const char KEY_SUPPORTED_REDEYE_REDUCTION[];
+    static const char EFFECT_EMBOSS[];
+    static const char EFFECT_SKETCH[];
+    static const char EFFECT_NEON[];
+
+    // Values for Touch AF/AEC
+    static const char TOUCH_AF_AEC_OFF[] ;
+    static const char TOUCH_AF_AEC_ON[] ;
+    static const char SCENE_MODE_ASD[];
+    static const char SCENE_MODE_BACKLIGHT[];
+    static const char SCENE_MODE_FLOWERS[];
+    static const char SCENE_MODE_AR[];
+    static const char SCENE_DETECT_OFF[];
+    static const char SCENE_DETECT_ON[];
+    static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+    static const char PIXEL_FORMAT_RAW[];
+    static const char PIXEL_FORMAT_YV12[]; // YV12
+    static const char PIXEL_FORMAT_NV12[]; // NV12
+    // Normal focus mode. Applications should call
+    // CameraHardwareInterface.autoFocus to start the focus in this mode.
+    static const char FOCUS_MODE_NORMAL[];
+    static const char ISO_AUTO[];
+    static const char ISO_HJR[] ;
+    static const char ISO_100[];
+    static const char ISO_200[] ;
+    static const char ISO_400[];
+    static const char ISO_800[];
+    static const char ISO_1600[];
+    // Values for Lens Shading
+    static const char LENSSHADE_ENABLE[] ;
+    static const char LENSSHADE_DISABLE[] ;
+
+    // Values for auto exposure settings.
+    static const char AUTO_EXPOSURE_FRAME_AVG[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+    static const char AUTO_EXPOSURE_SPOT_METERING[];
+
+    static const char KEY_SHARPNESS[];
+    static const char KEY_MAX_SHARPNESS[];
+    static const char KEY_CONTRAST[];
+    static const char KEY_MAX_CONTRAST[];
+    static const char KEY_SATURATION[];
+    static const char KEY_MAX_SATURATION[];
+
+    static const char KEY_HISTOGRAM[] ;
+    static const char KEY_SUPPORTED_HISTOGRAM_MODES[] ;
+    // Values for HISTOGRAM
+    static const char HISTOGRAM_ENABLE[] ;
+    static const char HISTOGRAM_DISABLE[] ;
+
+    // Values for SKIN TONE ENHANCEMENT
+    static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+    static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+    // Values for Denoise
+    static const char DENOISE_OFF[] ;
+    static const char DENOISE_ON[] ;
+
+    // Values for auto exposure settings.
+    static const char SELECTABLE_ZONE_AF_AUTO[];
+    static const char SELECTABLE_ZONE_AF_SPOT_METERING[];
+    static const char SELECTABLE_ZONE_AF_CENTER_WEIGHTED[];
+    static const char SELECTABLE_ZONE_AF_FRAME_AVERAGE[];
+
+    // Values for Face Detection settings.
+    static const char FACE_DETECTION_OFF[];
+    static const char FACE_DETECTION_ON[];
+
+    // Values for MCE settings.
+    static const char MCE_ENABLE[];
+    static const char MCE_DISABLE[];
+
+    // Values for ZSL settings.
+    static const char ZSL_OFF[];
+    static const char ZSL_ON[];
+
+    // Values for HDR Bracketing settings.
+    static const char AE_BRACKET_HDR_OFF[];
+    static const char AE_BRACKET_HDR[];
+    static const char AE_BRACKET[];
+
+    // Values for Power mode settings.
+    static const char LOW_POWER[];
+    static const char NORMAL_POWER[];
+
+    // Values for HFR settings.
+    static const char VIDEO_HFR_OFF[];
+    static const char VIDEO_HFR_2X[];
+    static const char VIDEO_HFR_3X[];
+    static const char VIDEO_HFR_4X[];
+
+    // Values for Redeye Reduction settings.
+    static const char REDEYE_REDUCTION_ENABLE[];
+    static const char REDEYE_REDUCTION_DISABLE[];
+    // Values for HDR settings.
+    static const char HDR_ENABLE[];
+    static const char HDR_DISABLE[];
+
+
+
+   static const char KEY_SINGLE_ISP_OUTPUT_ENABLED[];
+
+    enum {
+        CAMERA_ORIENTATION_UNKNOWN = 0,
+        CAMERA_ORIENTATION_PORTRAIT = 1,
+        CAMERA_ORIENTATION_LANDSCAPE = 2,
+    };
+    int getOrientation() const;
+    void setOrientation(int orientation);
+    void getSupportedHfrSizes(Vector<Size> &sizes) const;
+    void setPreviewFpsRange(int minFPS, int maxFPS);
+    void setPreviewFrameRateMode(const char *mode);
+    const char *getPreviewFrameRateMode() const;
+    void setTouchIndexAec(int x, int y);
+    void getTouchIndexAec(int *x, int *y) const;
+    void setTouchIndexAf(int x, int y);
+    void getTouchIndexAf(int *x, int *y) const;
+    void getMeteringAreaCenter(int * x, int *y) const;
+
+};
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCameraStream.cpp b/camera/QCameraStream.cpp
new file mode 100644
index 0000000..f9e661b
--- /dev/null
+++ b/camera/QCameraStream.cpp
@@ -0,0 +1,364 @@
+/*
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG __FILE__
+#include <utils/Log.h>
+#include <utils/threads.h>
+
+
+#include "QCameraStream.h"
+
+/* QCameraStream class implementation goes here. */
+/* The following code implements the control logic of this class. */
+
+namespace android {
+
+StreamQueue::StreamQueue(){
+    mInitialized = false;
+}
+
+StreamQueue::~StreamQueue(){
+    flush();
+}
+
+void StreamQueue::init(){
+    Mutex::Autolock l(&mQueueLock);
+    mInitialized = true;
+    mQueueWait.signal();
+}
+
+void StreamQueue::deinit(){
+    Mutex::Autolock l(&mQueueLock);
+    mInitialized = false;
+    mQueueWait.signal();
+}
+
+bool StreamQueue::isInitialized(){
+   Mutex::Autolock l(&mQueueLock);
+   return mInitialized;
+}
+
+bool StreamQueue::enqueue(
+                 void * element){
+    Mutex::Autolock l(&mQueueLock);
+    if(mInitialized == false)
+        return false;
+
+    mContainer.add(element);
+    mQueueWait.signal();
+    return true;
+}
+
+bool StreamQueue::isEmpty(){
+    return (mInitialized && mContainer.isEmpty());
+}
+void* StreamQueue::dequeue(){
+
+    void *frame;
+    mQueueLock.lock();
+    while(mInitialized && mContainer.isEmpty()){
+        mQueueWait.wait(mQueueLock);
+    }
+
+    if(!mInitialized){
+        mQueueLock.unlock();
+        return NULL;
+    }
+
+    frame = mContainer.itemAt(0);
+    mContainer.removeAt(0);
+    mQueueLock.unlock();
+    return frame;
+}
+
+void StreamQueue::flush(){
+    Mutex::Autolock l(&mQueueLock);
+    mContainer.clear();
+}
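+
+/* Illustrative producer/consumer use of StreamQueue (sketch, not part of the
+ * original change):
+ *   StreamQueue q;
+ *   q.init();                  // allow enqueue()/dequeue()
+ *   q.enqueue((void *)frame);  // producer: signals one waiting consumer
+ *   void *f = q.dequeue();     // consumer: blocks until a frame arrives or deinit()
+ *   q.deinit();                // wakes blocked consumers; dequeue() then returns NULL
+ */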
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream
+// ---------------------------------------------------------------------------
+
+/* initialize a streaming channel*/
+status_t QCameraStream::initChannel(int cameraId,
+                                    uint32_t ch_type_mask)
+{
+#if 0
+    int rc = MM_CAMERA_OK;
+    int i;
+    status_t ret = NO_ERROR;
+    int width = 0;  /* width of channel      */
+    int height = 0; /* height of channel */
+    cam_ctrl_dimension_t dim;
+    mm_camera_ch_image_fmt_parm_t fmt;
+
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    rc = cam_config_get_parm(cameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (MM_CAMERA_OK != rc) {
+      ALOGE("%s: error - can't get camera dimension!", __func__);
+      ALOGE("%s: X", __func__);
+      return BAD_VALUE;
+    }
+
+    if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask) {
+        rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_PREVIEW);
+        ALOGV("%s:ch_acquire MM_CAMERA_CH_PREVIEW, rc=%d\n",__func__, rc);
+
+        if(MM_CAMERA_OK != rc) {
+                ALOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+                ALOGE("%s: X", __func__);
+                return BAD_VALUE;
+        }
+        else{
+            memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+            fmt.ch_type = MM_CAMERA_CH_PREVIEW;
+            fmt.def.fmt = CAMERA_YUV_420_NV12; //dim.prev_format;
+            fmt.def.dim.width = dim.display_width;
+            fmt.def.dim.height =  dim.display_height;
+            ALOGV("%s: preview channel fmt = %d", __func__,
+                     dim.prev_format);
+            ALOGV("%s: preview channel resolution = %d X %d", __func__,
+                     dim.display_width, dim.display_height);
+
+            rc = cam_config_set_parm(cameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+            ALOGV("%s: preview MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+            if(MM_CAMERA_OK != rc) {
+                    ALOGE("%s:set preview channel format err=%d\n", __func__, ret);
+                    ALOGE("%s: X", __func__);
+                    ret = BAD_VALUE;
+            }
+        }
+    }
+
+
+    if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask)
+    {
+        rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_VIDEO);
+        ALOGV("%s:ch_acquire MM_CAMERA_CH_VIDEO, rc=%d\n",__func__, rc);
+
+        if(MM_CAMERA_OK != rc) {
+                ALOGE("%s: video channel acquir error =%d\n", __func__, rc);
+                ALOGE("%s: X", __func__);
+                ret = BAD_VALUE;
+        }
+        else {
+            memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+            fmt.ch_type = MM_CAMERA_CH_VIDEO;
+            fmt.video.video.fmt = CAMERA_YUV_420_NV12; //dim.enc_format;
+            fmt.video.video.dim.width = dim.video_width;
+            fmt.video.video.dim.height = dim.video_height;
+            ALOGV("%s: video channel fmt = %d", __func__,
+                     dim.enc_format);
+            ALOGV("%s: video channel resolution = %d X %d", __func__,
+                 dim.video_width, dim.video_height);
+
+            rc = cam_config_set_parm(cameraId,  MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+
+            ALOGV("%s: video MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+            if(MM_CAMERA_OK != rc) {
+                ALOGE("%s:set video channel format err=%d\n", __func__, rc);
+                ALOGE("%s: X", __func__);
+                ret= BAD_VALUE;
+            }
+        }
+
+  } /*MM_CAMERA_CH_VIDEO*/
+#endif
+
+    int rc = MM_CAMERA_OK;
+    status_t ret = NO_ERROR;
+    mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+    int i;
+
+    ALOGV("QCameraStream::initChannel : E");
+    if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask){
+        rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_PREVIEW);
+        ALOGV("%s:ch_acquire MM_CAMERA_CH_PREVIEW, rc=%d\n",__func__, rc);
+        if(MM_CAMERA_OK != rc) {
+                ALOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+                ALOGE("%s: X", __func__);
+                return BAD_VALUE;
+        }
+        /*Callback register*/
+        /* register a notify into the mm_camera_t object*/
+       /* ret = cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                                preview_notify_cb,
+                                                this);
+        ALOGV("Buf notify MM_CAMERA_CH_PREVIEW, rc=%d\n",rc);*/
+    }else if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask){
+        rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_VIDEO);
+        ALOGV("%s:ch_acquire MM_CAMERA_CH_VIDEO, rc=%d\n",__func__, rc);
+        if(MM_CAMERA_OK != rc) {
+                ALOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+                ALOGE("%s: X", __func__);
+                return BAD_VALUE;
+        }
+        /*Callback register*/
+        /* register a notify into the mm_camera_t object*/
+        /*ret = cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                                record_notify_cb,
+                                                this);
+        ALOGV("Buf notify MM_CAMERA_CH_VIDEO, rc=%d\n",rc);*/
+    }
+
+    ret = (MM_CAMERA_OK==rc)? NO_ERROR : BAD_VALUE;
+    ALOGV("%s: X, ret = %d", __func__, ret);
+    return ret;
+}
+
+status_t QCameraStream::deinitChannel(int cameraId,
+                                    mm_camera_channel_type_t ch_type)
+{
+
+    int rc = MM_CAMERA_OK;
+
+    ALOGV("%s: E, channel = %d\n", __func__, ch_type);
+
+    if (MM_CAMERA_CH_MAX <= ch_type) {
+        ALOGE("%s: X: BAD_VALUE", __func__);
+        return BAD_VALUE;
+    }
+
+    cam_ops_ch_release(cameraId, ch_type);
+
+    ALOGV("%s: X, channel = %d\n", __func__, ch_type);
+    return NO_ERROR;
+}
+
+status_t QCameraStream::setMode(int enable) {
+  ALOGE("%s :myMode %x ", __func__, myMode);
+  if (enable) {
+      myMode = (camera_mode_t)(myMode | CAMERA_ZSL_MODE);
+  } else {
+      myMode = (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+  }
+  return NO_ERROR;
+}
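+
+// For example (illustrative): a stream created with myMode == CAMERA_MODE_2D ends
+// up with (CAMERA_MODE_2D | CAMERA_ZSL_MODE) after setMode(1); setMode(0) clears
+// the ZSL bit again.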
+
+status_t QCameraStream::setFormat(uint8_t ch_type_mask)
+{
+    int rc = MM_CAMERA_OK;
+    status_t ret = NO_ERROR;
+    int width = 0;  /* width of channel      */
+    int height = 0; /* height of channel */
+    cam_ctrl_dimension_t dim;
+    mm_camera_ch_image_fmt_parm_t fmt;
+    int preview_format;
+    ALOGE("%s: E",__func__);
+
+    memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+    rc = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+    if (MM_CAMERA_OK != rc) {
+      ALOGE("%s: error - can't get camera dimension!", __func__);
+      ALOGE("%s: X", __func__);
+      return BAD_VALUE;
+    }
+    char mDeviceName[PROPERTY_VALUE_MAX];
+    property_get("ro.product.device",mDeviceName," ");
+    memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+    if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask){
+        fmt.ch_type = MM_CAMERA_CH_PREVIEW;
+        ret = cam_config_get_parm(mCameraId,
+                  MM_CAMERA_PARM_PREVIEW_FORMAT, &preview_format);
+        fmt.def.fmt = (cam_format_t)preview_format;
+        fmt.def.dim.width = dim.display_width;
+        fmt.def.dim.height =  dim.display_height;
+    }else if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask){
+        fmt.ch_type = MM_CAMERA_CH_VIDEO;
+        fmt.video.video.fmt = CAMERA_YUV_420_NV21; //dim.enc_format;
+        fmt.video.video.dim.width = dim.video_width;
+        fmt.video.video.dim.height = dim.video_height;
+    }/*else if(MM_CAMERA_CH_SNAPSHOT_MASK & ch_type_mask){
+        if(mHalCamCtrl->isRawSnapshot()) {
+            fmt.ch_type = MM_CAMERA_CH_RAW;
+            fmt.def.fmt = CAMERA_BAYER_SBGGR10;
+            fmt.def.dim.width = dim.raw_picture_width;
+            fmt.def.dim.height = dim.raw_picture_height;
+        }else{
+            //Jpeg???
+            fmt.ch_type = MM_CAMERA_CH_SNAPSHOT;
+            fmt.snapshot.main.fmt = dim.main_img_format;
+            fmt.snapshot.main.dim.width = dim.picture_width;
+            fmt.snapshot.main.dim.height = dim.picture_height;
+
+            fmt.snapshot.thumbnail.fmt = dim.thumb_format;
+            fmt.snapshot.thumbnail.dim.width = dim.ui_thumbnail_width;
+            fmt.snapshot.thumbnail.dim.height = dim.ui_thumbnail_height;
+        }
+    }*/
+
+    rc = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+    ALOGV("%s: Stream MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+    if(MM_CAMERA_OK != rc) {
+        ALOGE("%s:set stream channel format err=%d\n", __func__, ret);
+        ALOGE("%s: X", __func__);
+        ret = BAD_VALUE;
+    }
+    ALOGE("%s: X",__func__);
+    return ret;
+}
+
+QCameraStream::QCameraStream (){
+    mInit = false;
+    mActive = false;
+    /* memset*/
+    memset(&mCrop, 0, sizeof(mm_camera_ch_crop_t));
+}
+
+QCameraStream::QCameraStream (int cameraId, camera_mode_t mode)
+              :mCameraId(cameraId),
+               myMode(mode)
+{
+    mInit = false;
+    mActive = false;
+
+    /* memset*/
+    memset(&mCrop, 0, sizeof(mm_camera_ch_crop_t));
+}
+
+QCameraStream::~QCameraStream () {;}
+
+
+status_t QCameraStream::init() {
+    return NO_ERROR;
+}
+
+status_t QCameraStream::start() {
+    return NO_ERROR;
+}
+
+void QCameraStream::stop() {
+    return;
+}
+
+void QCameraStream::release() {
+    return;
+}
+
+void QCameraStream::setHALCameraControl(QCameraHardwareInterface* ctrl) {
+
+    /* Store the HAL camera control object so the queue monitor thread
+       can call back into it when the busy queue is not empty. */
+    mHalCamCtrl = ctrl;
+}
+
+}; // namespace android
diff --git a/camera/QCameraStream.h b/camera/QCameraStream.h
new file mode 100644
index 0000000..4617375
--- /dev/null
+++ b/camera/QCameraStream.h
@@ -0,0 +1,375 @@
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2009-2012, Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_STREAM_H
+#define ANDROID_HARDWARE_QCAMERA_STREAM_H
+
+
+#include <utils/threads.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+
+#include "QCameraHWI.h"
+#include "QCameraHWI_Mem.h"
+#include "QCamera_Intf.h"
+extern "C" {
+#include <mm_camera_interface2.h>
+
+#define DEFAULT_STREAM_WIDTH 320
+#define DEFAULT_STREAM_HEIGHT 240
+#define DEFAULT_LIVESHOT_WIDTH 2592
+#define DEFAULT_LIVESHOT_HEIGHT 1944
+
+#define MM_CAMERA_CH_PREVIEW_MASK    (0x01 << MM_CAMERA_CH_PREVIEW)
+#define MM_CAMERA_CH_VIDEO_MASK      (0x01 << MM_CAMERA_CH_VIDEO)
+#define MM_CAMERA_CH_SNAPSHOT_MASK   (0x01 << MM_CAMERA_CH_SNAPSHOT)
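+
+/* Illustrative use (not part of the original change): initChannel() and
+ * setFormat() test these masks with '&', e.g.
+ *   stream->initChannel(cameraId, MM_CAMERA_CH_PREVIEW_MASK);
+ * acquires the preview channel only. */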
+
+} /* extern C*/
+
+
+namespace android {
+
+class QCameraHardwareInterface;
+
+class StreamQueue {
+private:
+    Mutex mQueueLock;
+    Condition mQueueWait;
+    bool mInitialized;
+
+    //Vector<struct msm_frame *> mContainer;
+    Vector<void *> mContainer;
+public:
+    StreamQueue();
+    virtual ~StreamQueue();
+    bool enqueue(void *element);
+    void flush();
+    void* dequeue();
+    void init();
+    void deinit();
+    bool isInitialized();
+    bool isEmpty();
+};
+
+
+class QCameraStream { //: public virtual RefBase{
+
+public:
+    bool mInit;
+    bool mActive;
+
+    virtual status_t    init();
+    virtual status_t    start();
+    virtual void        stop();
+    virtual void        release();
+
+    status_t setFormat(uint8_t ch_type_mask);
+    status_t setMode(int enable);
+
+    virtual void        setHALCameraControl(QCameraHardwareInterface* ctrl);
+
+    //static status_t     openChannel(mm_camera_t *, mm_camera_channel_type_t ch_type);
+    virtual status_t    initChannel(int cameraId, uint32_t ch_type_mask);
+    virtual status_t    deinitChannel(int cameraId, mm_camera_channel_type_t ch_type);
+    virtual void releaseRecordingFrame(const void *opaque)
+    {
+      ;
+    }
+#if 0 // mzhu
+    virtual status_t getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize)
+    {
+      return NO_ERROR;
+    }
+#endif // mzhu
+    virtual void prepareHardware()
+    {
+      ;
+    }
+    virtual sp<IMemoryHeap> getHeap() const{return NULL;}
+    virtual status_t    initDisplayBuffers(){return NO_ERROR;}
+    virtual status_t initPreviewOnlyBuffers(){return NO_ERROR;}
+    virtual sp<IMemoryHeap> getRawHeap() const {return NULL;}
+    virtual void *getLastQueuedFrame(void){return NULL;}
+    virtual status_t takePictureZSL(void){return NO_ERROR;}
+    virtual status_t takeLiveSnapshot(){return NO_ERROR;}
+    virtual status_t takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame,
+                                 cam_ctrl_dimension_t *dim,
+                                 int frame_len){return NO_ERROR;}
+	virtual void setModeLiveSnapshot(bool){;}
+    virtual status_t initSnapshotBuffers(cam_ctrl_dimension_t *dim,
+                                 int num_of_buf){return NO_ERROR;}
+
+    virtual void setFullSizeLiveshot(bool){};
+    /* Set the ANativeWindow */
+    virtual int setPreviewWindow(preview_stream_ops_t* window) {return NO_ERROR;}
+    virtual void notifyROIEvent(fd_roi_t roi) {;}
+    virtual void notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie) {;}
+    virtual void resetSnapshotCounters(void ){};
+
+    QCameraStream();
+    QCameraStream(int, camera_mode_t);
+    virtual             ~QCameraStream();
+    QCameraHardwareInterface*  mHalCamCtrl;
+    mm_camera_ch_crop_t mCrop;
+
+    int mCameraId;
+    camera_mode_t myMode;
+
+    mutable Mutex mStopCallbackLock;
+private:
+   StreamQueue mBusyQueue;
+   StreamQueue mFreeQueue;
+public:
+     friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+};
+
+/*
+*   Record Class
+*/
+class QCameraStream_record : public QCameraStream {
+public:
+  status_t    init();
+  status_t    start() ;
+  void        stop()  ;
+  void        release() ;
+
+  static QCameraStream*  createInstance(int cameraId, camera_mode_t);
+  static void            deleteInstance(QCameraStream *p);
+
+  QCameraStream_record() {};
+  virtual             ~QCameraStream_record();
+
+  status_t processRecordFrame(void *data);
+  status_t initEncodeBuffers();
+  status_t getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize);
+  //sp<IMemoryHeap> getHeap() const;
+
+  void releaseRecordingFrame(const void *opaque);
+  void debugShowVideoFPS() const;
+
+  status_t takeLiveSnapshot();
+private:
+  QCameraStream_record(int, camera_mode_t);
+  void releaseEncodeBuffer();
+
+  cam_ctrl_dimension_t             dim;
+  bool mDebugFps;
+
+  mm_camera_reg_buf_t              mRecordBuf;
+  //int                              record_frame_len;
+  //static const int                 maxFrameCnt = 16;
+  //camera_memory_t                 *mCameraMemoryPtr[maxFrameCnt];
+  //int                              mNumRecordFrames;
+  //sp<PmemPool>                     mRecordHeap[maxFrameCnt];
+  struct msm_frame                *recordframes;
+  //uint32_t                         record_offset[VIDEO_BUFFER_COUNT];
+  mm_camera_ch_data_buf_t          mRecordedFrames[MM_CAMERA_MAX_NUM_FRAMES];
+  //Mutex                            mRecordFreeQueueLock;
+  //Vector<mm_camera_ch_data_buf_t>  mRecordFreeQueue;
+
+  int mJpegMaxSize;
+  QCameraStream *mStreamSnap;
+
+};
+
+class QCameraStream_preview : public QCameraStream {
+public:
+    status_t    init();
+    status_t    start() ;
+    void        stop()  ;
+    void        release() ;
+
+    static QCameraStream*  createInstance(int, camera_mode_t);
+    static void            deleteInstance(QCameraStream *p);
+
+    QCameraStream_preview() {};
+    virtual             ~QCameraStream_preview();
+    void *getLastQueuedFrame(void);
+    /*init preview buffers with display case*/
+    status_t initDisplayBuffers();
+    /*init preview buffers without display case*/
+    status_t initPreviewOnlyBuffers();
+
+    status_t processPreviewFrame(mm_camera_ch_data_buf_t *frame);
+
+    /*init preview buffers with display case*/
+    status_t processPreviewFrameWithDisplay(mm_camera_ch_data_buf_t *frame);
+    /*init preview buffers without display case*/
+    status_t processPreviewFrameWithOutDisplay(mm_camera_ch_data_buf_t *frame);
+
+    int setPreviewWindow(preview_stream_ops_t* window);
+    void notifyROIEvent(fd_roi_t roi);
+    friend class QCameraHardwareInterface;
+
+private:
+    QCameraStream_preview(int cameraId, camera_mode_t);
+    /*allocate and free buffers with display case*/
+    status_t                 getBufferFromSurface();
+    status_t                 putBufferToSurface();
+
+    /*allocate and free buffers without display case*/
+    status_t                 getBufferNoDisplay();
+    status_t                 freeBufferNoDisplay();
+
+    void                     dumpFrameToFile(struct msm_frame* newFrame);
+    bool                     mFirstFrameRcvd;
+
+    int8_t                   my_id;
+    mm_camera_op_mode_type_t op_mode;
+    cam_ctrl_dimension_t     dim;
+    struct msm_frame        *mLastQueuedFrame;
+    mm_camera_reg_buf_t      mDisplayBuf;
+    mm_cameara_stream_buf_t  mDisplayStreamBuf;
+    Mutex                   mDisplayLock;
+    preview_stream_ops_t   *mPreviewWindow;
+    static const int        kPreviewBufferCount = PREVIEW_BUFFER_COUNT;
+    mm_camera_ch_data_buf_t mNotifyBuffer[16];
+    int8_t                  mNumFDRcvd;
+    int                     mVFEOutputs;
+    int                     mHFRFrameCnt;
+    int                     mHFRFrameSkip;
+};
+
+/* Snapshot Class - handle data flow*/
+class QCameraStream_Snapshot : public QCameraStream {
+public:
+    status_t    init();
+    status_t    start();
+    void        stop();
+    void        release();
+    void        prepareHardware();
+    static QCameraStream* createInstance(int cameraId, camera_mode_t);
+    static void deleteInstance(QCameraStream *p);
+
+    status_t takePictureZSL(void);
+    status_t takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame,
+                                 cam_ctrl_dimension_t *dim,
+                                 int frame_len);
+    status_t receiveRawPicture(mm_camera_ch_data_buf_t* recvd_frame);
+    void receiveCompleteJpegPicture(jpeg_event_t event);
+    void jpegErrorHandler(jpeg_event_t event);
+    void receiveJpegFragment(uint8_t *ptr, uint32_t size);
+    void deInitBuffer(void);
+    sp<IMemoryHeap> getRawHeap() const;
+    int getSnapshotState();
+    /*Temp: to be removed once event handling is enabled in mm-camera*/
+    void runSnapshotThread(void *data);
+    bool isZSLMode();
+    void setFullSizeLiveshot(bool);
+    void notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie);
+    friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+    void resetSnapshotCounters(void );
+
+private:
+    QCameraStream_Snapshot(int, camera_mode_t);
+    virtual ~QCameraStream_Snapshot();
+
+    /* snapshot related private members */
+    status_t initJPEGSnapshot(int num_of_snapshots);
+    status_t initRawSnapshot(int num_of_snapshots);
+    status_t initZSLSnapshot(void);
+    status_t initFullLiveshot(void);
+    status_t cancelPicture();
+    void notifyShutter(common_crop_t *crop,
+                       bool play_shutter_sound);
+    status_t initSnapshotBuffers(cam_ctrl_dimension_t *dim,
+                                 int num_of_buf);
+    status_t initRawSnapshotBuffers(cam_ctrl_dimension_t *dim,
+                                    int num_of_buf);
+    status_t deinitRawSnapshotBuffers(void);
+    status_t deinitSnapshotBuffers(void);
+    status_t initRawSnapshotChannel(cam_ctrl_dimension_t* dim,
+                                    int num_snapshots);
+    status_t initSnapshotFormat(cam_ctrl_dimension_t *dim);
+    status_t takePictureRaw(void);
+    status_t takePictureJPEG(void);
+    status_t startStreamZSL(void);
+    void deinitSnapshotChannel(mm_camera_channel_type_t);
+    status_t configSnapshotDimension(cam_ctrl_dimension_t* dim);
+    status_t encodeData(mm_camera_ch_data_buf_t* recvd_frame,
+                        common_crop_t *crop_info,
+                        int frame_len,
+                        bool enqueued);
+    status_t encodeDisplayAndSave(mm_camera_ch_data_buf_t* recvd_frame,
+                                  bool enqueued);
+    status_t setZSLChannelAttribute(void);
+    void handleError();
+    void setSnapshotState(int state);
+    void setModeLiveSnapshot(bool);
+    bool isLiveSnapshot(void);
+    void stopPolling(void);
+    bool isFullSizeLiveshot(void);
+    status_t doWaveletDenoise(mm_camera_ch_data_buf_t* frame);
+    status_t sendWDenoiseStartMsg(mm_camera_ch_data_buf_t * frame);
+    void lauchNextWDenoiseFromQueue();
+
+    /* Member variables */
+
+    int mSnapshotFormat;
+    int mPictureWidth;
+    int mPictureHeight;
+    cam_format_t mPictureFormat;
+    int mPostviewWidth;
+    int mPostviewHeight;
+    int mThumbnailWidth;
+    int mThumbnailHeight;
+    cam_format_t mThumbnailFormat;
+    int mJpegOffset;
+    int mSnapshotState;
+    int mNumOfSnapshot;
+    int mNumOfRecievedJPEG;
+    bool mModeLiveSnapshot;
+    bool mBurstModeFlag;
+    int mActualPictureWidth;
+    int mActualPictureHeight;
+    bool mJpegDownscaling;
+    sp<AshmemPool> mJpegHeap;
+    /*TBD:Bikas: This is defined in HWI too.*/
+#ifdef USE_ION
+    sp<IonPool>  mDisplayHeap;
+    sp<IonPool>  mPostviewHeap;
+#else
+    sp<PmemPool>  mDisplayHeap;
+    sp<PmemPool>  mPostviewHeap;
+#endif
+    mm_camera_ch_data_buf_t *mCurrentFrameEncoded;
+    mm_cameara_stream_buf_t mSnapshotStreamBuf;
+    mm_cameara_stream_buf_t mPostviewStreamBuf;
+    StreamQueue             mSnapshotQueue;
+    static const int        mMaxSnapshotBufferCount = 16;
+    int                     mSnapshotBufferNum;
+    int                     mMainfd[mMaxSnapshotBufferCount];
+    int                     mThumbfd[mMaxSnapshotBufferCount];
+    int                     mMainSize;
+    int                     mMainSize;
+    int                     mThumbSize;
+    camera_memory_t        *mCameraMemoryPtrMain[mMaxSnapshotBufferCount];
+    camera_memory_t        *mCameraMemoryPtrThumb[mMaxSnapshotBufferCount];
+    int                     mJpegSessionId;
+    int                     dump_fd;
+    bool                    mFullLiveshot;
+    StreamQueue             mWDNQueue;   // queue to hold frames while one frame is sent out for WDN
+    bool                    mIsDoingWDN; // flag to indicate if WDN is going on (one frame is sent out for WDN)
+    bool                    mDropThumbnail;
+    int                     mJpegQuality;
+}; // QCameraStream_Snapshot
+
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCamera_Intf.h b/camera/QCamera_Intf.h
new file mode 100644
index 0000000..f116a07
--- /dev/null
+++ b/camera/QCamera_Intf.h
@@ -0,0 +1,899 @@
+/* Copyright (c) 2012, Code Aurora Forum. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of Code Aurora Forum, Inc. nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+#include <stdint.h>
+#include <pthread.h>
+#include <inttypes.h>
+
+#define PAD_TO_WORD(a)               (((a)+3)&~3)
+#define PAD_TO_2K(a)                 (((a)+2047)&~2047)
+#define PAD_TO_4K(a)                 (((a)+4095)&~4095)
+#define PAD_TO_8K(a)                 (((a)+8191)&~8191)
+
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X)  (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X)  (((X) + 0x0001) & 0xFFFE)
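+
+/* Worked examples (added for illustration, not part of the original header):
+ * each macro rounds its argument up to the next multiple of the named
+ * alignment, e.g. PAD_TO_WORD(5) == 8, PAD_TO_2K(1) == 2048,
+ * CEILING16(17) == 32 and CEILING2(3) == 4. */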
+
+#define MAX_ROI 2
+#define MAX_NUM_PARM 5
+#define MAX_NUM_OPS 2
+#define VIDEO_MAX_PLANES 8
+#define MAX_SNAPSHOT_BUFFERS 5
+#define MAX_EXP_BRACKETING_LENGTH 32
+
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t;
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t;
+
+typedef enum {
+  BACK_CAMERA,
+  FRONT_CAMERA,
+}cam_position_t;
+
+typedef enum {
+  CAM_CTRL_FAILED,        /* Failure in doing operation */
+  CAM_CTRL_SUCCESS,       /* Operation succeeded */
+  CAM_CTRL_INVALID_PARM,  /* Invalid parameter provided */
+  CAM_CTRL_NOT_SUPPORTED, /* Parameter/operation not supported */
+  CAM_CTRL_ACCEPTED,      /* Parameter accepted */
+  CAM_CTRL_MAX,
+} cam_ctrl_status_t;
+
+typedef enum {
+  CAMERA_YUV_420_NV12,
+  CAMERA_YUV_420_NV21,
+  CAMERA_YUV_420_NV21_ADRENO,
+  CAMERA_BAYER_SBGGR10,
+  CAMERA_RDI,
+  CAMERA_YUV_420_YV12,
+  CAMERA_YUV_422_NV16,
+  CAMERA_YUV_422_NV61
+} cam_format_t;
+
+typedef enum {
+  CAMERA_PAD_NONE,
+  CAMERA_PAD_TO_WORD,   /*2 bytes*/
+  CAMERA_PAD_TO_LONG_WORD, /*4 bytes*/
+  CAMERA_PAD_TO_8, /*8 bytes*/
+  CAMERA_PAD_TO_16, /*16 bytes*/
+
+  CAMERA_PAD_TO_1K, /*1k bytes*/
+  CAMERA_PAD_TO_2K, /*2k bytes*/
+  CAMERA_PAD_TO_4K,
+  CAMERA_PAD_TO_8K
+} cam_pad_format_t;
+
+typedef struct {
+  int ext_mode;   /* preview, main, thumbnail, video, raw, etc */
+  int frame_idx;  /* frame index */
+  int fd;         /* origin fd */
+  uint32_t size;
+} mm_camera_frame_map_type;
+
+typedef struct {
+  int ext_mode;   /* preview, main, thumbnail, video, raw, etc */
+  int frame_idx;  /* frame index */
+} mm_camera_frame_unmap_type;
+
+typedef enum {
+  CAM_SOCK_MSG_TYPE_FD_MAPPING,
+  CAM_SOCK_MSG_TYPE_FD_UNMAPPING,
+  CAM_SOCK_MSG_TYPE_WDN_START,
+  CAM_SOCK_MSG_TYPE_HIST_MAPPING,
+  CAM_SOCK_MSG_TYPE_HIST_UNMAPPING,
+  CAM_SOCK_MSG_TYPE_MAX
+}mm_camera_socket_msg_type;
+
+#define MM_MAX_WDN_NUM 2
+typedef struct {
+  unsigned long cookie;
+  int num_frames;
+  int ext_mode[MM_MAX_WDN_NUM];
+  int frame_idx[MM_MAX_WDN_NUM];
+} mm_camera_wdn_start_type;
+
+typedef struct {
+  mm_camera_socket_msg_type msg_type;
+  union {
+    mm_camera_frame_map_type frame_fd_map;
+    mm_camera_frame_unmap_type frame_fd_unmap;
+    mm_camera_wdn_start_type wdn_start;
+  } payload;
+} cam_sock_packet_t;
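+
+/*
+ * Usage sketch (illustrative only, not part of this interface; buf_index,
+ * buf_fd and buf_len are placeholder variables): a client asking the camera
+ * daemon to map a stream buffer could fill the packet roughly as follows
+ * before sending it over the camera control socket.
+ *
+ *   cam_sock_packet_t pkt;
+ *   memset(&pkt, 0, sizeof(pkt));
+ *   pkt.msg_type = CAM_SOCK_MSG_TYPE_FD_MAPPING;
+ *   pkt.payload.frame_fd_map.ext_mode  = 0;          // stream type, e.g. preview
+ *   pkt.payload.frame_fd_map.frame_idx = buf_index;
+ *   pkt.payload.frame_fd_map.fd        = buf_fd;     // ion/pmem buffer fd
+ *   pkt.payload.frame_fd_map.size      = buf_len;
+ */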
+
+
+typedef enum {
+  CAM_VIDEO_FRAME,
+  CAM_SNAPSHOT_FRAME,
+  CAM_PREVIEW_FRAME,
+}cam_frame_type_t;
+
+
+typedef enum {
+  CAMERA_MODE_2D = (1<<0),
+  CAMERA_MODE_3D = (1<<1),
+  CAMERA_NONZSL_MODE = (1<<2),
+  CAMERA_ZSL_MODE = (1<<3),
+  CAMERA_MODE_MAX = CAMERA_ZSL_MODE,
+} camera_mode_t;
+
+
+typedef struct {
+  int  modes_supported;
+  int8_t camera_id;
+  cam_position_t position;
+  uint32_t sensor_mount_angle;
+}camera_info_t;
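+
+/* Note (added for clarity, not in the original header): modes_supported is
+ * presumably a bitmask of the camera_mode_t flags above, e.g.
+ * (CAMERA_MODE_2D | CAMERA_ZSL_MODE) for a 2D sensor that supports ZSL. */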
+
+typedef struct {
+  camera_mode_t mode;
+  int8_t camera_id;
+  camera_mode_t cammode;
+}config_params_t;
+
+typedef struct {
+  uint32_t len;
+  uint32_t y_offset;
+  uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+  uint32_t len;
+  uint32_t offset;
+} cam_mp_len_offset_t;
+
+typedef struct {
+  int num_planes;
+  union {
+    cam_sp_len_offset_t sp;
+    cam_mp_len_offset_t mp[8];
+  };
+  uint32_t frame_len;
+} cam_frame_len_offset_t;
+
+typedef struct {
+  uint32_t parm[MAX_NUM_PARM];
+  uint32_t ops[MAX_NUM_OPS];
+  uint8_t yuv_output;
+  uint8_t jpeg_capture;
+  uint32_t max_pict_width;
+  uint32_t max_pict_height;
+  uint32_t max_preview_width;
+  uint32_t max_preview_height;
+  uint32_t max_video_width;
+  uint32_t max_video_height;
+  uint32_t effect;
+  camera_mode_t modes;
+  uint8_t preview_format;
+  uint32_t preview_sizes_cnt;
+  uint32_t thumb_sizes_cnt;
+  uint32_t video_sizes_cnt;
+  uint32_t hfr_sizes_cnt;
+  uint8_t vfe_output_enable;
+  uint8_t hfr_frame_skip;
+  uint32_t default_preview_width;
+  uint32_t default_preview_height;
+  uint32_t bestshot_reconfigure;
+}cam_prop_t;
+
+typedef struct {
+  uint16_t video_width;         /* Video width seen by VFE could be different than orig. Ex. DIS */
+  uint16_t video_height;        /* Video height seen by VFE */
+  uint16_t picture_width;       /* Picture width seen by VFE */
+  uint16_t picture_height;      /* Picture height seen by VFE */
+  uint16_t display_width;       /* width of display */
+  uint16_t display_height;      /* height of display */
+  uint16_t orig_video_width;    /* original video width received */
+  uint16_t orig_video_height;   /* original video height received */
+  uint16_t orig_picture_dx;     /* original picture width received */
+  uint16_t orig_picture_dy;     /* original picture height received */
+  uint16_t ui_thumbnail_height; /* Just like orig_picture_dy */
+  uint16_t ui_thumbnail_width;  /* Just like orig_picture_dx */
+  uint16_t thumbnail_height;
+  uint16_t thumbnail_width;
+  uint16_t orig_picture_width;
+  uint16_t orig_picture_height;
+  uint16_t orig_thumb_width;
+  uint16_t orig_thumb_height;
+  uint16_t raw_picture_height;
+  uint16_t raw_picture_width;
+  uint32_t hjr_xtra_buff_for_bayer_filtering;
+  cam_format_t    prev_format;
+  cam_format_t    enc_format;
+  cam_format_t    thumb_format;
+  cam_format_t    main_img_format;
+  cam_pad_format_t prev_padding_format;
+  cam_pad_format_t enc_padding_format;
+  cam_pad_format_t thumb_padding_format;
+  cam_pad_format_t main_padding_format;
+  uint16_t display_luma_width;
+  uint16_t display_luma_height;
+  uint16_t display_chroma_width;
+  uint16_t display_chroma_height;
+  uint16_t video_luma_width;
+  uint16_t video_luma_height;
+  uint16_t video_chroma_width;
+  uint16_t video_chroma_height;
+  uint16_t thumbnail_luma_width;
+  uint16_t thumbnail_luma_height;
+  uint16_t thumbnail_chroma_width;
+  uint16_t thumbnail_chroma_height;
+  uint16_t main_img_luma_width;
+  uint16_t main_img_luma_height;
+  uint16_t main_img_chroma_width;
+  uint16_t main_img_chroma_height;
+  int rotation;
+  cam_frame_len_offset_t display_frame_offset;
+  cam_frame_len_offset_t video_frame_offset;
+  cam_frame_len_offset_t picture_frame_offset;
+  cam_frame_len_offset_t thumb_frame_offset;
+} cam_ctrl_dimension_t;
+
+/* Add enumerations at the bottom but before MM_CAMERA_PARM_MAX */
+typedef enum {
+    MM_CAMERA_PARM_PICT_SIZE,
+    MM_CAMERA_PARM_ZOOM_RATIO,
+    MM_CAMERA_PARM_HISTOGRAM,
+    MM_CAMERA_PARM_DIMENSION,
+    MM_CAMERA_PARM_FPS,
+    MM_CAMERA_PARM_FPS_MODE, /*5*/
+    MM_CAMERA_PARM_EFFECT,
+    MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+    MM_CAMERA_PARM_EXPOSURE,
+    MM_CAMERA_PARM_SHARPNESS,
+    MM_CAMERA_PARM_CONTRAST, /*10*/
+    MM_CAMERA_PARM_SATURATION,
+    MM_CAMERA_PARM_BRIGHTNESS,
+    MM_CAMERA_PARM_WHITE_BALANCE,
+    MM_CAMERA_PARM_LED_MODE,
+    MM_CAMERA_PARM_ANTIBANDING, /*15*/
+    MM_CAMERA_PARM_ROLLOFF,
+    MM_CAMERA_PARM_CONTINUOUS_AF,
+    MM_CAMERA_PARM_FOCUS_RECT,
+    MM_CAMERA_PARM_AEC_ROI,
+    MM_CAMERA_PARM_AF_ROI, /*20*/
+    MM_CAMERA_PARM_HJR,
+    MM_CAMERA_PARM_ISO,
+    MM_CAMERA_PARM_BL_DETECTION,
+    MM_CAMERA_PARM_SNOW_DETECTION,
+    MM_CAMERA_PARM_BESTSHOT_MODE, /*25*/
+    MM_CAMERA_PARM_ZOOM,
+    MM_CAMERA_PARM_VIDEO_DIS,
+    MM_CAMERA_PARM_VIDEO_ROT,
+    MM_CAMERA_PARM_SCE_FACTOR,
+    MM_CAMERA_PARM_FD, /*30*/
+    MM_CAMERA_PARM_MODE,
+    /* 2nd 32 bits */
+    MM_CAMERA_PARM_3D_FRAME_FORMAT,
+    MM_CAMERA_PARM_CAMERA_ID,
+    MM_CAMERA_PARM_CAMERA_INFO,
+    MM_CAMERA_PARM_PREVIEW_SIZE, /*35*/
+    MM_CAMERA_PARM_QUERY_FALSH4SNAP,
+    MM_CAMERA_PARM_FOCUS_DISTANCES,
+    MM_CAMERA_PARM_BUFFER_INFO,
+    MM_CAMERA_PARM_JPEG_ROTATION,
+    MM_CAMERA_PARM_JPEG_MAINIMG_QUALITY, /* 40 */
+    MM_CAMERA_PARM_JPEG_THUMB_QUALITY,
+    MM_CAMERA_PARM_ZSL_ENABLE,
+    MM_CAMERA_PARM_FOCAL_LENGTH,
+    MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+    MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE, /* 45 */
+    MM_CAMERA_PARM_MCE,
+    MM_CAMERA_PARM_RESET_LENS_TO_INFINITY,
+    MM_CAMERA_PARM_SNAPSHOTDATA,
+    MM_CAMERA_PARM_HFR,
+    MM_CAMERA_PARM_REDEYE_REDUCTION, /* 50 */
+    MM_CAMERA_PARM_WAVELET_DENOISE,
+    MM_CAMERA_PARM_3D_DISPLAY_DISTANCE,
+    MM_CAMERA_PARM_3D_VIEW_ANGLE,
+    MM_CAMERA_PARM_PREVIEW_FORMAT,
+    MM_CAMERA_PARM_HFR_SIZE, /* 55 */
+    MM_CAMERA_PARM_3D_EFFECT,
+    MM_CAMERA_PARM_3D_MANUAL_CONV_RANGE,
+    MM_CAMERA_PARM_3D_MANUAL_CONV_VALUE,
+    MM_CAMERA_PARM_ENABLE_3D_MANUAL_CONVERGENCE,
+    /* These are new parameters defined here */
+    MM_CAMERA_PARM_CH_IMAGE_FMT, /* 60 */       // mm_camera_ch_image_fmt_parm_t
+    MM_CAMERA_PARM_OP_MODE,             // camera state, sub state also
+    MM_CAMERA_PARM_SHARPNESS_CAP,       //
+    MM_CAMERA_PARM_SNAPSHOT_BURST_NUM,  // num shots per snapshot action
+    MM_CAMERA_PARM_LIVESHOT_MAIN,       // enable/disable full size live shot
+    MM_CAMERA_PARM_MAXZOOM, /* 65 */
+    MM_CAMERA_PARM_LUMA_ADAPTATION,     // enable/disable
+    MM_CAMERA_PARM_HDR,
+    MM_CAMERA_PARM_CROP,
+    MM_CAMERA_PARM_MAX_PICTURE_SIZE,
+    MM_CAMERA_PARM_MAX_PREVIEW_SIZE, /* 70 */
+    MM_CAMERA_PARM_ASD_ENABLE,
+    MM_CAMERA_PARM_RECORDING_HINT,
+    MM_CAMERA_PARM_CAF_ENABLE,
+    MM_CAMERA_PARM_FULL_LIVESHOT,
+    MM_CAMERA_PARM_DIS_ENABLE, /* 75 */
+    MM_CAMERA_PARM_AEC_LOCK,
+    MM_CAMERA_PARM_AWB_LOCK,
+    MM_CAMERA_PARM_AF_MTR_AREA,
+    MM_CAMERA_PARM_AEC_MTR_AREA,
+    MM_CAMERA_PARM_LOW_POWER_MODE,
+    MM_CAMERA_PARM_MAX_HFR_MODE, /* 80 */
+    MM_CAMERA_PARM_MAX_VIDEO_SIZE,
+    MM_CAMERA_PARM_DEF_PREVIEW_SIZES,
+    MM_CAMERA_PARM_DEF_VIDEO_SIZES,
+    MM_CAMERA_PARM_DEF_THUMB_SIZES,
+    MM_CAMERA_PARM_DEF_HFR_SIZES,
+    MM_CAMERA_PARM_PREVIEW_SIZES_CNT,
+    MM_CAMERA_PARM_VIDEO_SIZES_CNT,
+    MM_CAMERA_PARM_THUMB_SIZES_CNT,
+    MM_CAMERA_PARM_HFR_SIZES_CNT,
+    MM_CAMERA_PARM_GRALLOC_USAGE,
+    MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, //to check whether both outputs are
+    MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+    MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+    MM_CAMERA_PARM_FOCUS_MODE,
+    MM_CAMERA_PARM_HFR_FRAME_SKIP,
+    //or a single output is enabled, to differentiate 7x27a from others
+    MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+    MM_CAMERA_MAX_NUM_FACES_DECT,
+    MM_CAMERA_PARM_FPS_RANGE,
+    MM_CAMERA_PARM_MAX
+} mm_camera_parm_type_t;
+
+typedef enum {
+  CAMERA_SET_PARM_DISPLAY_INFO,
+  CAMERA_SET_PARM_DIMENSION,
+
+  CAMERA_SET_PARM_ZOOM,
+  CAMERA_SET_PARM_SENSOR_POSITION,
+  CAMERA_SET_PARM_FOCUS_RECT,
+  CAMERA_SET_PARM_LUMA_ADAPTATION,
+  CAMERA_SET_PARM_CONTRAST,
+  CAMERA_SET_PARM_BRIGHTNESS,
+  CAMERA_SET_PARM_EXPOSURE_COMPENSATION,
+  CAMERA_SET_PARM_SHARPNESS,
+  CAMERA_SET_PARM_HUE,  /* 10 */
+  CAMERA_SET_PARM_SATURATION,
+  CAMERA_SET_PARM_EXPOSURE,
+  CAMERA_SET_PARM_AUTO_FOCUS,
+  CAMERA_SET_PARM_WB,
+  CAMERA_SET_PARM_EFFECT,
+  CAMERA_SET_PARM_FPS,
+  CAMERA_SET_PARM_FLASH,
+  CAMERA_SET_PARM_NIGHTSHOT_MODE,
+  CAMERA_SET_PARM_REFLECT,
+  CAMERA_SET_PARM_PREVIEW_MODE,  /* 20 */
+  CAMERA_SET_PARM_ANTIBANDING,
+  CAMERA_SET_PARM_RED_EYE_REDUCTION,
+  CAMERA_SET_PARM_FOCUS_STEP,
+  CAMERA_SET_PARM_EXPOSURE_METERING,
+  CAMERA_SET_PARM_AUTO_EXPOSURE_MODE,
+  CAMERA_SET_PARM_ISO,
+  CAMERA_SET_PARM_BESTSHOT_MODE,
+  CAMERA_SET_PARM_ENCODE_ROTATION,
+
+  CAMERA_SET_PARM_PREVIEW_FPS,
+  CAMERA_SET_PARM_AF_MODE,  /* 30 */
+  CAMERA_SET_PARM_HISTOGRAM,
+  CAMERA_SET_PARM_FLASH_STATE,
+  CAMERA_SET_PARM_FRAME_TIMESTAMP,
+  CAMERA_SET_PARM_STROBE_FLASH,
+  CAMERA_SET_PARM_FPS_LIST,
+  CAMERA_SET_PARM_HJR,
+  CAMERA_SET_PARM_ROLLOFF,
+
+  CAMERA_STOP_PREVIEW,
+  CAMERA_START_PREVIEW,
+  CAMERA_START_SNAPSHOT, /* 40 */
+  CAMERA_START_RAW_SNAPSHOT,
+  CAMERA_STOP_SNAPSHOT,
+  CAMERA_EXIT,
+  CAMERA_ENABLE_BSM,
+  CAMERA_DISABLE_BSM,
+  CAMERA_GET_PARM_ZOOM,
+  CAMERA_GET_PARM_MAXZOOM,
+  CAMERA_GET_PARM_ZOOMRATIOS,
+  CAMERA_GET_PARM_AF_SHARPNESS,
+  CAMERA_SET_PARM_LED_MODE, /* 50 */
+  CAMERA_SET_MOTION_ISO,
+  CAMERA_AUTO_FOCUS_CANCEL,
+  CAMERA_GET_PARM_FOCUS_STEP,
+  CAMERA_ENABLE_AFD,
+  CAMERA_PREPARE_SNAPSHOT,
+  CAMERA_SET_FPS_MODE,
+  CAMERA_START_VIDEO,
+  CAMERA_STOP_VIDEO,
+  CAMERA_START_RECORDING,
+  CAMERA_STOP_RECORDING, /* 60 */
+  CAMERA_SET_VIDEO_DIS_PARAMS,
+  CAMERA_SET_VIDEO_ROT_PARAMS,
+  CAMERA_SET_PARM_AEC_ROI,
+  CAMERA_SET_CAF,
+  CAMERA_SET_PARM_BL_DETECTION_ENABLE,
+  CAMERA_SET_PARM_SNOW_DETECTION_ENABLE,
+  CAMERA_SET_PARM_STROBE_FLASH_MODE,
+  CAMERA_SET_PARM_AF_ROI,
+  CAMERA_START_LIVESHOT,
+  CAMERA_SET_SCE_FACTOR, /* 70 */
+  CAMERA_GET_CAPABILITIES,
+  CAMERA_GET_PARM_DIMENSION,
+  CAMERA_GET_PARM_LED_MODE,
+  CAMERA_SET_PARM_FD,
+  CAMERA_GET_PARM_3D_FRAME_FORMAT,
+  CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+  CAMERA_GET_PARM_FOCUS_DISTANCES,
+  CAMERA_START_ZSL,
+  CAMERA_STOP_ZSL,
+  CAMERA_ENABLE_ZSL, /* 80 */
+  CAMERA_GET_PARM_FOCAL_LENGTH,
+  CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+  CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+  CAMERA_SET_PARM_WAVELET_DENOISE,
+  CAMERA_SET_PARM_MCE,
+  CAMERA_ENABLE_STEREO_CAM,
+  CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+  CAMERA_GET_PARM_SNAPSHOTDATA,
+  CAMERA_SET_PARM_HFR,
+  CAMERA_SET_REDEYE_REDUCTION, /* 90 */
+  CAMERA_SET_PARM_3D_DISPLAY_DISTANCE,
+  CAMERA_SET_PARM_3D_VIEW_ANGLE,
+  CAMERA_SET_PARM_3D_EFFECT,
+  CAMERA_SET_PARM_PREVIEW_FORMAT,
+  CAMERA_GET_PARM_3D_DISPLAY_DISTANCE, /* 95 */
+  CAMERA_GET_PARM_3D_VIEW_ANGLE,
+  CAMERA_GET_PARM_3D_EFFECT,
+  CAMERA_GET_PARM_3D_MANUAL_CONV_RANGE,
+  CAMERA_SET_PARM_3D_MANUAL_CONV_VALUE,
+  CAMERA_ENABLE_3D_MANUAL_CONVERGENCE, /* 100 */
+  CAMERA_SET_PARM_HDR,
+  CAMERA_SET_ASD_ENABLE,
+  CAMERA_POSTPROC_ABORT,
+  CAMERA_SET_AEC_MTR_AREA,
+  CAMERA_SET_AEC_LOCK,       /*105*/
+  CAMERA_SET_AWB_LOCK,
+  CAMERA_SET_RECORDING_HINT,
+  CAMERA_SET_PARM_CAF,
+  CAMERA_SET_FULL_LIVESHOT,
+  CAMERA_SET_DIS_ENABLE,  /*110*/
+  CAMERA_GET_PARM_MAX_HFR_MODE,
+  CAMERA_SET_LOW_POWER_MODE,
+  CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+  CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+  CAMERA_GET_PARM_DEF_THUMB_SIZES, /*115*/
+  CAMERA_GET_PARM_DEF_HFR_SIZES,
+  CAMERA_GET_PARM_MAX_LIVESHOT_SIZE,
+  CAMERA_GET_PARM_FPS_RANGE,
+  CAMERA_SET_3A_CONVERGENCE,
+  CAMERA_SET_PREVIEW_HFR, /*120*/
+  CAMERA_GET_MAX_DIMENSION,
+  CAMERA_GET_MAX_NUM_FACES_DECT,
+  CAMERA_CTRL_PARM_MAX
+} cam_ctrl_type;
+
+typedef enum {
+  CAMERA_ERROR_NO_MEMORY,
+  CAMERA_ERROR_EFS_FAIL,                /* Low-level operation failed */
+  CAMERA_ERROR_EFS_FILE_OPEN,           /* File already opened */
+  CAMERA_ERROR_EFS_FILE_NOT_OPEN,       /* File not opened */
+  CAMERA_ERROR_EFS_FILE_ALREADY_EXISTS, /* File already exists */
+  CAMERA_ERROR_EFS_NONEXISTENT_DIR,     /* User directory doesn't exist */
+  CAMERA_ERROR_EFS_NONEXISTENT_FILE,    /* File doesn't exist */
+  CAMERA_ERROR_EFS_BAD_FILE_NAME,       /* Client specified invalid file/directory name*/
+  CAMERA_ERROR_EFS_BAD_FILE_HANDLE,     /* Client specified invalid file handle */
+  CAMERA_ERROR_EFS_SPACE_EXHAUSTED,     /* Out of file system space */
+  CAMERA_ERROR_EFS_OPEN_TABLE_FULL,     /* Out of open-file table slots                */
+  CAMERA_ERROR_EFS_OTHER_ERROR,         /* Other error                                 */
+  CAMERA_ERROR_CONFIG,
+  CAMERA_ERROR_EXIF_ENCODE,
+  CAMERA_ERROR_VIDEO_ENGINE,
+  CAMERA_ERROR_IPL,
+  CAMERA_ERROR_INVALID_FORMAT,
+  CAMERA_ERROR_TIMEOUT,
+  CAMERA_ERROR_ESD,
+  CAMERA_ERROR_MAX
+} camera_error_type;
+
+#if defined CAMERA_ANTIBANDING_OFF
+#undef CAMERA_ANTIBANDING_OFF
+#endif
+
+#if defined CAMERA_ANTIBANDING_60HZ
+#undef CAMERA_ANTIBANDING_60HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_50HZ
+#undef CAMERA_ANTIBANDING_50HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_AUTO
+#undef CAMERA_ANTIBANDING_AUTO
+#endif
+
+typedef enum {
+  CAMERA_ANTIBANDING_OFF,
+  CAMERA_ANTIBANDING_60HZ,
+  CAMERA_ANTIBANDING_50HZ,
+  CAMERA_ANTIBANDING_AUTO,
+  CAMERA_ANTIBANDING_AUTO_50HZ,
+  CAMERA_ANTIBANDING_AUTO_60HZ,
+  CAMERA_MAX_ANTIBANDING,
+} camera_antibanding_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+  CAMERA_ISO_AUTO = 0,
+  CAMERA_ISO_DEBLUR,
+  CAMERA_ISO_100,
+  CAMERA_ISO_200,
+  CAMERA_ISO_400,
+  CAMERA_ISO_800,
+  CAMERA_ISO_1600,
+  CAMERA_ISO_MAX
+} camera_iso_mode_type;
+
+
+typedef enum {
+  AEC_ROI_OFF,
+  AEC_ROI_ON
+} aec_roi_ctrl_t;
+
+typedef enum {
+  AEC_ROI_BY_INDEX,
+  AEC_ROI_BY_COORDINATE,
+} aec_roi_type_t;
+
+typedef struct {
+  uint32_t x;
+  uint32_t y;
+} cam_coordinate_type_t;
+
+/*
+ * Define DRAW_RECTANGLES to draw rectangles on screen. Just for test purpose.
+ */
+//#define DRAW_RECTANGLES
+
+typedef struct {
+  uint16_t x;
+  uint16_t y;
+  uint16_t dx;
+  uint16_t dy;
+} roi_t;
+
+typedef struct {
+  aec_roi_ctrl_t aec_roi_enable;
+  aec_roi_type_t aec_roi_type;
+  union {
+    cam_coordinate_type_t coordinate;
+    uint32_t aec_roi_idx;
+  } aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+  uint32_t frm_id;
+  uint8_t num_roi;
+  roi_t roi[MAX_ROI];
+  uint8_t is_multiwindow;
+} roi_info_t;
+
+/* Exif Tag Data Type */
+typedef enum
+{
+    EXIF_BYTE      = 1,
+    EXIF_ASCII     = 2,
+    EXIF_SHORT     = 3,
+    EXIF_LONG      = 4,
+    EXIF_RATIONAL  = 5,
+    EXIF_UNDEFINED = 7,
+    EXIF_SLONG     = 9,
+    EXIF_SRATIONAL = 10
+} exif_tag_type_t;
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+    uint32_t  num;    // Numerator
+    uint32_t  denom;  // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+    int32_t  num;    // Numerator
+    int32_t  denom;  // Denominator
+
+} srat_t;
+
+typedef struct
+{
+  exif_tag_type_t type;
+  uint8_t copy;
+  uint32_t count;
+  union
+  {
+    char      *_ascii;
+    uint8_t   *_bytes;
+    uint8_t    _byte;
+    uint16_t  *_shorts;
+    uint16_t   _short;
+    uint32_t  *_longs;
+    uint32_t   _long;
+    rat_t     *_rats;
+    rat_t      _rat;
+    uint8_t   *_undefined;
+    int32_t   *_slongs;
+    int32_t    _slong;
+    srat_t    *_srats;
+    srat_t     _srat;
+  } data;
+} exif_tag_entry_t;
+
+typedef struct {
+    uint32_t      tag_id;
+    exif_tag_entry_t  tag_entry;
+} exif_tags_info_t;
+
+
+typedef enum {
+ HDR_BRACKETING_OFF,
+ HDR_MODE,
+ EXP_BRACKETING_MODE
+ } hdr_mode;
+
+typedef struct {
+  hdr_mode mode;
+  uint32_t hdr_enable;
+  uint32_t total_frames;
+  uint32_t total_hal_frames;
+  char values[MAX_EXP_BRACKETING_LENGTH];  /* user defined values */
+} exp_bracketing_t;
+typedef struct {
+  roi_t      mtr_area[MAX_ROI];
+  uint32_t   num_area;
+  int        weight[MAX_ROI];
+} aec_mtr_area_t;
+
+typedef struct {
+  int denoise_enable;
+  int process_plates;
+} denoise_param_t;
+
+#ifndef HAVE_CAMERA_SIZE_TYPE
+  #define HAVE_CAMERA_SIZE_TYPE
+struct camera_size_type {
+  int width;
+  int height;
+};
+#endif
+
+typedef struct {
+  uint32_t yoffset;
+  uint32_t cbcr_offset;
+  uint32_t size;
+  struct camera_size_type resolution;
+}cam_buf_info_t;
+
+typedef struct {
+  int x;
+  int y;
+}cam_point_t;
+
+typedef struct {
+  /* AF parameters */
+  uint8_t focus_position;
+  /* AEC parameters */
+  uint32_t line_count;
+  uint8_t luma_target;
+  /* AWB parameters */
+  int32_t r_gain;
+  int32_t b_gain;
+  int32_t g_gain;
+  uint8_t exposure_mode;
+  uint8_t exposure_program;
+  float exposure_time;
+  uint32_t iso_speed;
+} snapshotData_info_t;
+
+
+typedef enum {
+  CAMERA_HFR_MODE_OFF = 1,
+  CAMERA_HFR_MODE_60FPS,
+  CAMERA_HFR_MODE_90FPS,
+  CAMERA_HFR_MODE_120FPS,
+  CAMERA_HFR_MODE_150FPS,
+} camera_hfr_mode_t;
+
+/* frame Q*/
+struct fifo_node
+{
+  struct fifo_node *next;
+  void *f;
+};
+
+struct fifo_queue
+{
+  int num_of_frames;
+  struct fifo_node *front;
+  struct fifo_node *back;
+  pthread_mutex_t mut;
+  pthread_cond_t wait;
+  char* name;
+};
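+
+/*
+ * Minimal enqueue sketch for the queue above (added for illustration, not part
+ * of the original interface; "q" and "node" are caller-provided):
+ *
+ *   pthread_mutex_lock(&q->mut);
+ *   node->next = NULL;
+ *   if (q->back) q->back->next = node; else q->front = node;
+ *   q->back = node;
+ *   q->num_of_frames++;
+ *   pthread_cond_signal(&q->wait);
+ *   pthread_mutex_unlock(&q->mut);
+ */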
+
+typedef struct {
+  uint32_t buf_len;
+  uint8_t num;
+  uint8_t pmem_type;
+  uint32_t vaddr[8];
+} mm_camera_histo_mem_info_t;
+
+typedef enum {
+  MM_CAMERA_CTRL_EVT_ZOOM_DONE,
+  MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE,
+  MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT,
+  MM_CAMERA_CTRL_EVT_SNAPSHOT_CONFIG_DONE,
+  MM_CAMERA_CTRL_EVT_WDN_DONE, // wavelet denoise done
+  MM_CAMERA_CTRL_EVT_ERROR,
+  MM_CAMERA_CTRL_EVT_MAX
+}mm_camera_ctrl_event_type_t;
+
+typedef struct {
+  mm_camera_ctrl_event_type_t evt;
+  cam_ctrl_status_t status;
+  unsigned long cookie;
+} mm_camera_ctrl_event_t;
+
+typedef enum {
+  MM_CAMERA_CH_EVT_STREAMING_ON,
+  MM_CAMERA_CH_EVT_STREAMING_OFF,
+  MM_CAMERA_CH_EVT_STREAMING_ERR,
+  MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE,
+  MM_CAMERA_CH_EVT_DATA_REQUEST_MORE,
+  MM_CAMERA_CH_EVT_MAX
+}mm_camera_ch_event_type_t;
+
+typedef struct {
+  uint32_t ch;
+  mm_camera_ch_event_type_t evt;
+} mm_camera_ch_event_t;
+
+typedef struct {
+  uint32_t index;
+  /* TBD: need more fields for histo stats? */
+} mm_camera_stats_histo_t;
+
+typedef struct  {
+  uint32_t event_id;
+  union {
+    mm_camera_stats_histo_t    stats_histo;
+  } e;
+} mm_camera_stats_event_t;
+
+typedef enum {
+  FD_ROI_TYPE_HEADER,
+  FD_ROI_TYPE_DATA
+} fd_roi_type_t;
+
+typedef struct {
+  uint32_t frame_id;
+  int16_t num_face_detected;
+} fd_roi_header_type;
+
+struct fd_rect_t {
+  uint16_t x;
+  uint16_t y;
+  uint16_t dx;
+  uint16_t dy;
+};
+
+typedef struct {
+  struct fd_rect_t face_boundary;
+  uint16_t left_eye_center[2];
+  uint16_t right_eye_center[2];
+  uint16_t mouth_center[2];
+  uint8_t smile_degree;  //0 -100
+  uint8_t smile_confidence;  //
+  uint8_t blink_detected;  // 0 or 1
+  uint8_t is_face_recognised;  // 0 or 1
+  int8_t gaze_angle;  // -90 -45 0 45 90 for head left to right tilt
+  int8_t updown_dir;  // -90 to 90
+  int8_t leftright_dir;  //-90 to 90
+  int8_t roll_dir;  // -90 to 90
+  int8_t left_right_gaze;  // -50 to 50
+  int8_t top_bottom_gaze;  // -50 to 50
+  uint8_t left_blink;  // 0 - 100
+  uint8_t right_blink;  // 0 - 100
+  int8_t id;  // unique id for face tracking within view unless view changes
+  int8_t score;  // score of confidence( 0 -100)
+} fd_face_type;
+
+typedef struct {
+  uint32_t frame_id;
+  uint8_t idx;
+  fd_face_type face;
+} fd_roi_data_type;
+
+struct fd_roi_t {
+  fd_roi_type_t type;
+  union {
+    fd_roi_header_type hdr;
+    fd_roi_data_type data;
+  } d;
+};
+
+typedef struct  {
+  uint32_t event_id;
+  union {
+    mm_camera_histo_mem_info_t histo_mem_info;
+    struct fd_roi_t roi;
+  } e;
+} mm_camera_info_event_t;
+
+
+typedef enum {
+  MM_CAMERA_EVT_TYPE_CH,
+  MM_CAMERA_EVT_TYPE_CTRL,
+  MM_CAMERA_EVT_TYPE_STATS,
+  MM_CAMERA_EVT_TYPE_INFO,
+  MM_CAMERA_EVT_TYPE_MAX
+} mm_camera_event_type_t;
+
+/******************************************************************************
+ * Function: exif_set_tag
+ * Description: Inserts or modifies an Exif tag in the Exif Info object. Typical
+ *              use is to call this function multiple times to insert all the
+ *              desired Exif tags individually into the Exif Info object and
+ *              then pass the info object to the Jpeg Encoder object so that
+ *              the inserted tags are emitted as tags in the Exif header.
+ * Input parameters:
+ *   obj       - The Exif Info object where the tag would be inserted to or
+ *               modified from.
+ *   tag_id    - The Exif Tag ID of the tag to be inserted/modified.
+ *   p_entry   - The pointer to the tag entry structure which contains the
+ *               details of the tag. The pointer can be set to NULL to undo a
+ *               previous insertion for a certain tag.
+ * Return values:
+ *     JPEGERR_SUCCESS
+ *     JPEGERR_ENULLPTR
+ *     JPEGERR_EFAILED
+ * (See jpegerr.h for description of error values.)
+ * Notes: none
+ *****************************************************************************/
+int exif_set_tag(exif_info_obj_t    obj,
+                 exif_tag_id_t      tag_id,
+                 exif_tag_entry_t  *p_entry);
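+
+/*
+ * Usage sketch (illustrative only; exif_obj and EXIFTAGID_MAKE are
+ * placeholders - the real tag-id constants come from the mm-still Exif
+ * headers, not from this file):
+ *
+ *   exif_tag_entry_t entry;
+ *   entry.type  = EXIF_ASCII;
+ *   entry.copy  = 1;                    // assumed: ask the encoder to copy the data
+ *   entry.count = strlen("QCOM") + 1;   // Exif ASCII count includes the NUL
+ *   entry.data._ascii = (char *)"QCOM";
+ *   if (exif_set_tag(exif_obj, EXIFTAGID_MAKE, &entry) != JPEGERR_SUCCESS)
+ *       ; // handle failure (see jpegerr.h)
+ */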
+
+
+#endif /* __QCAMERA_INTF_H__ */
diff --git a/camera/QualcommCamera.cpp b/camera/QualcommCamera.cpp
new file mode 100644
index 0000000..3a317f1
--- /dev/null
+++ b/camera/QualcommCamera.cpp
@@ -0,0 +1,702 @@
+/*
+** Copyright (c) 2011 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QualcommCamera"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+/* include QCamera Hardware Interface Header*/
+#include "QualcommCamera.h"
+#include "QualcommCameraHardware.h"
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
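+
+/*
+ * Enumeration sketch (illustrative only): a caller holding this module could
+ * walk the valid camera ids as follows.
+ *
+ *   int n = get_number_of_cameras();
+ *   for (int i = 0; i < n; i++) {
+ *       struct camera_info info;
+ *       if (get_camera_info(i, &info) == 0)
+ *           ALOGI("camera %d: facing=%d orientation=%d",
+ *                 i, info.facing, info.orientation);
+ *   }
+ */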
+
+static hw_module_methods_t camera_module_methods = {
+    open: camera_device_open,
+};
+
+
+static hw_module_t camera_common  = {
+  tag: HARDWARE_MODULE_TAG,
+  version_major: 0,
+  version_minor: 01,
+  id: CAMERA_HARDWARE_MODULE_ID,
+  name: "Qcamera",
+  author:"Qcom",
+  methods: &camera_module_methods,
+  dso: NULL,
+  //reserved[0]:  0,
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+  common: camera_common,
+  get_number_of_cameras: get_number_of_cameras,
+  get_camera_info: get_camera_info,
+};
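+
+/*
+ * How this module is typically consumed (illustrative only): the framework
+ * loads camera.<platform>.so, resolves HAL_MODULE_INFO_SYM via hw_get_module()
+ * and opens a device through the method table above, roughly:
+ *
+ *   const hw_module_t *mod;
+ *   hw_device_t *dev;
+ *   if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &mod) == 0 &&
+ *       mod->methods->open(mod, "0", &dev) == 0) {
+ *       camera_device_t *cam = (camera_device_t *)dev;  // ops in cam->ops
+ *       ...
+ *       dev->close(dev);
+ *   }
+ */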
+
+camera_device_ops_t camera_ops = {
+  set_preview_window: android::set_preview_window,
+  set_callbacks:      android::set_callbacks,
+  enable_msg_type:    android::enable_msg_type,
+  disable_msg_type:   android::disable_msg_type,
+  msg_type_enabled:   android::msg_type_enabled,
+
+  start_preview:      android::start_preview,
+  stop_preview:       android::stop_preview,
+  preview_enabled:    android::preview_enabled,
+  store_meta_data_in_buffers: android::store_meta_data_in_buffers,
+
+  start_recording:            android::start_recording,
+  stop_recording:             android::stop_recording,
+  recording_enabled:          android::recording_enabled,
+  release_recording_frame:    android::release_recording_frame,
+
+  auto_focus:                 android::auto_focus,
+  cancel_auto_focus:          android::cancel_auto_focus,
+
+  take_picture:               android::take_picture,
+  cancel_picture:             android::cancel_picture,
+
+  set_parameters:             android::set_parameters,
+  get_parameters:             android::get_parameters,
+  put_parameters:             android::put_parameters,
+  send_command:               android::send_command,
+
+  release:                    android::release,
+  dump:                       android::dump,
+};
+
+namespace android {
+
+typedef struct {
+  QualcommCameraHardware *hardware;
+  int camera_released;
+  QCameraParameters parameters;
+  #if 1
+  camera_notify_callback notify_cb;
+  camera_data_callback data_cb;
+  camera_data_timestamp_callback data_cb_timestamp;
+  camera_request_memory get_memory;
+  void *user_data;
+  #endif
+} camera_hardware_t;
+
+typedef struct {
+  camera_memory_t mem;
+  int32_t msgType;
+  sp<IMemory> dataPtr;
+  void* user;
+  unsigned int index;
+} q_cam_memory_t;
+
+
+static void camera_release_memory(struct camera_memory *mem)
+{
+}
+
+void cam_notify_callback(int32_t msgType,
+                                int32_t ext1,
+                                int32_t ext2,
+                                void* user)
+{
+  ALOGE("Q%s: E", __func__);
+  camera_device * device = (camera_device *)user;
+  if(device) {
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal) {
+      camera_notify_callback notify_cb = camHal->notify_cb;
+      void *user_data = camHal->user_data;
+      if(notify_cb) {
+        notify_cb(msgType, ext1, ext2, user_data);
+      }
+    }
+  }
+}
+
+camera_memory_t* get_mem(int fd,size_t buf_size,
+                                unsigned int num_bufs,
+                                void *user)
+{
+  ALOGE("Q%s: E", __func__);
+  camera_device * device = (camera_device *)user;
+  if(device) {
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal) {
+      camera_request_memory getmem_cb = camHal->get_memory;
+      void *user_data = camHal->user_data;
+      if(getmem_cb) {
+        return getmem_cb(fd, buf_size, num_bufs, user_data);
+      }
+    }
+  }
+  return NULL;
+}
+#if 0
+void native_send_data_callback(int32_t msgType,
+                              camera_memory_t * framebuffer,
+                              void* user)
+{
+  ALOGE("Q%s: E", __func__);
+  static unsigned int counter = 0;
+#if 0
+  camera_device * device = (camera_device *)user;
+  if(device) {
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal) {
+      camera_data_callback data_cb = camHal->data_cb;
+      void *user_data = camHal->user_data;
+      if(data_cb) {
+        q_cam_memory_t *qmem = (q_cam_memory_t *)malloc(sizeof(q_cam_memory_t));
+        if (qmem) {
+          qmem->dataPtr = dataPtr;
+          qmem->mem.data = (void *)((int)dataPtr->pointer() + dataPtr->offset());
+          qmem->mem.handle = NULL; //(void *)dataPtr->getHeapID();
+          qmem->mem.size = dataPtr->size( );
+          qmem->mem.release = camera_release_memory;
+          qmem->msgType = msgType;
+          qmem->index = counter;
+#endif
+          data_cb(msgType, framebuffer, counter, NULL, user);
+          counter++;
+#if 0
+        } else {
+          ALOGE("%s: out of memory", __func__);
+        }
+#endif
+//      }
+//    }
+//  }
+}
+#endif
+
+static void cam_data_callback(int32_t msgType,
+                              const sp<IMemory>& dataPtr,
+                              void* user)
+{
+  ALOGE("Q%s: E", __func__);
+  static unsigned int counter = 0;
+  camera_device * device = (camera_device *)user;
+  if(device) {
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal) {
+      camera_data_callback data_cb = camHal->data_cb;
+      void *user_data = camHal->user_data;
+      if(data_cb) {
+        q_cam_memory_t *qmem = (q_cam_memory_t *)malloc(sizeof(q_cam_memory_t));
+        if (qmem) {
+          qmem->dataPtr = dataPtr;
+          qmem->mem.data = (void *)((int)dataPtr->pointer() + dataPtr->offset());
+          qmem->mem.handle = NULL; //(void *)dataPtr->getHeapID();
+          qmem->mem.size = dataPtr->size( );
+          qmem->mem.release = camera_release_memory;
+          qmem->msgType = msgType;
+          qmem->index = counter;
+          counter++;
+          data_cb(msgType, (camera_memory_t *)qmem, counter, NULL, user_data);
+        } else {
+          ALOGE("%s: out of memory", __func__);
+        }
+      }
+    }
+  }
+}
+
+static void cam_data_callback_timestamp(nsecs_t timestamp,
+                                        int32_t msgType,
+                                        const sp<IMemory>& dataPtr,
+                                        void* user)
+{
+  ALOGE("Q%s: E", __func__);
+
+  static unsigned int counter = 0;
+  camera_device * device = (camera_device *)user;
+  if(device) {
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal) {
+      camera_data_timestamp_callback data_cb_timestamp = camHal->data_cb_timestamp;
+      void *user_data = camHal->user_data;
+      if(data_cb_timestamp) {
+        q_cam_memory_t *qmem = (q_cam_memory_t *)malloc(sizeof(q_cam_memory_t));
+        if (qmem) {
+          qmem->dataPtr = dataPtr;
+          qmem->mem.data = (void *)((int)dataPtr->pointer() + dataPtr->offset());
+          qmem->mem.handle = NULL; //(void *)dataPtr->getHeapID();
+          qmem->mem.size = dataPtr->size( );
+          qmem->mem.release = camera_release_memory;
+          qmem->msgType = msgType;
+          qmem->index = counter;
+          counter++;
+          data_cb_timestamp(timestamp, msgType, (camera_memory_t *)qmem, counter, user_data);
+        } else {
+          ALOGE("%s: out of memory", __func__);
+        }
+      }
+    }
+  }
+}
+
+QualcommCameraHardware * util_get_Hal_obj( struct camera_device * device)
+{
+  QualcommCameraHardware* hardware = NULL;
+  if(device && device->priv){
+      camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+      hardware = camHal->hardware;
+  }
+  return hardware;
+}
+void close_Hal_obj( struct camera_device * device)
+{
+  ALOGI("%s: E", __func__);
+  QualcommCameraHardware* hardware = NULL;
+  if(device && device->priv){
+      camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+      ALOGI("%s: clear hw", __func__);
+      hardware = camHal->hardware;
+      delete hardware;
+  }
+  ALOGI("%s: X", __func__);
+}
+
+
+QCameraParameters* util_get_HAL_parameter( struct camera_device * device)
+{
+  QCameraParameters *param = NULL;
+  if(device && device->priv){
+      camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+      param = &(camHal->parameters);
+  }
+  return param;
+}
+
+
+extern "C" int get_number_of_cameras()
+{
+    /* try to query every time we get the call!*/
+
+    ALOGE("Q%s: E", __func__);
+    return android::HAL_getNumberOfCameras( );
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+  int rc = -1;
+  ALOGE("Q%s: E", __func__);
+  if(info) {
+    struct CameraInfo camInfo;
+    memset(&camInfo, -1, sizeof (struct CameraInfo));
+    HAL_getCameraInfo(camera_id, &camInfo);
+    if (camInfo.facing >= 0) {
+      rc = 0;
+      info->facing = camInfo.facing;
+      info->orientation = camInfo.orientation;
+    }
+  }
+   ALOGV("Q%s: X", __func__);
+   return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int  camera_device_open(
+  const struct hw_module_t* module, const char* id,
+          struct hw_device_t** hw_device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    camera_device *device = NULL;
+    if(module && id && hw_device) {
+      int cameraId = atoi(id);
+
+      if (!strcmp(module->name, camera_common.name)) {
+        device =
+          (camera_device *)malloc(sizeof (struct camera_device));
+        if(device) {
+          camera_hardware_t *camHal =
+            (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+          if(camHal) {
+            memset(camHal, 0, sizeof (camera_hardware_t));
+            camHal->hardware = HAL_openCameraHardware(cameraId);
+            if (camHal->hardware != NULL) {
+              /*To Do: populate camHal*/
+              device->common.close = close_camera_device;
+              device->ops = &camera_ops;
+              device->priv = (void *)camHal;
+              rc =  0;
+            } else {
+              free(camHal);
+              free(device);
+              device = NULL;
+            }
+          } else {
+            free (device);
+            device = NULL;
+          }
+        }
+      }
+    }
+    *hw_device = (hw_device_t*)device;
+    return rc;
+}
+
+extern "C"  int close_camera_device( hw_device_t *hw_dev)
+{
+  ALOGE("Q%s: device =%p E", __func__, hw_dev);
+  int rc =  -1;
+  camera_device_t *device = (camera_device_t *)hw_dev;
+  if(device) {
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal ) {
+      //if(!camHal->camera_released) {
+      QualcommCameraHardware* hardware = util_get_Hal_obj(device);
+      if(hardware != NULL) {
+        if(camHal->camera_released != true) {
+          hardware->release( );
+        }
+        //hardware.clear( );
+      }
+      //}
+      close_Hal_obj(device);
+      free(device->priv);
+      device->priv = NULL;
+    }
+    free(device);
+    rc = 0;
+  }
+  return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+        struct preview_stream_ops *window)
+{
+  ALOGE("Q%s: E window = %p", __func__, window);
+  int rc = -1;
+  QualcommCameraHardware *hardware = util_get_Hal_obj(device);
+  if(hardware != NULL) {
+   rc = hardware->set_PreviewWindow((void *)window);
+  }
+  return rc;
+}
+
+void set_callbacks(struct camera_device * device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+  ALOGE("Q%s: E", __func__);
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    if(camHal) {
+      camera_notify_callback cam_nt_cb = NULL;
+      camera_data_callback cam_dt_cb = NULL;
+      camera_data_timestamp_callback cam_dt_timestamp_cb = NULL;
+
+      camHal->notify_cb = notify_cb;
+      camHal->data_cb = data_cb;
+      camHal->data_cb_timestamp = data_cb_timestamp;
+      camHal->user_data = user;
+      camHal->get_memory = get_memory;
+      #if 0
+      if(notify_cb) {
+        cam_nt_cb = cam_notify_callback;
+      } else {
+        cam_nt_cb = NULL;
+      }
+
+      if(data_cb) {
+        cam_dt_cb = cam_data_callback;
+      } else {
+        cam_dt_cb = NULL;
+      }
+
+      if(data_cb_timestamp) {
+        cam_dt_timestamp_cb = cam_data_callback_timestamp;
+      } else {
+        cam_dt_timestamp_cb = NULL;
+      }
+      #endif
+      ALOGE("cam_nt_cb =%p,cam_dt_cb=%p,cam_dt_timestamp_cb=%p",  cam_nt_cb, cam_dt_cb, cam_dt_timestamp_cb);
+      hardware->setCallbacks(notify_cb,data_cb,data_cb_timestamp,get_memory, user);
+    }
+  }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    hardware->enableMsgType(msg_type);
+  }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  ALOGE("Q%s: E", __func__);
+  if(hardware != NULL){
+    hardware->disableMsgType(msg_type);
+  }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->msgTypeEnabled(msg_type);
+  }
+  return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->startPreview( );
+  }
+  ALOGE("Q%s: X", __func__);
+  return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    hardware->stopPreview( );
+  }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware* hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->previewEnabled( );
+  }
+  return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->storeMetaDataInBuffers( enable);
+  }
+  return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->startRecording( );
+  }
+  return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  QualcommCameraHardware* hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    hardware->stopRecording( );
+  }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->recordingEnabled( );
+  }
+  return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+  ALOGE("Q%s: E", __func__);
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    hardware->releaseRecordingFrame( opaque);
+  }
+}
+
+int auto_focus(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->autoFocus( );
+  }
+  return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->cancelAutoFocus( );
+  }
+  return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->takePicture( );
+  }
+  return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->cancelPicture( );
+  }
+  return rc;
+}
+
+QCameraParameters g_param;
+String8 g_str;
+int set_parameters(struct camera_device * device, const char *parms)
+
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL && parms){
+    // = util_get_HAL_parameter(device);
+    g_str = String8(parms);
+
+    g_param.unflatten(g_str);
+    rc = hardware->setParameters( g_param );
+  }
+  return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  char* rc = NULL;
+
+  QCameraParameters param;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    g_param = hardware->getParameters( );
+    g_str = g_param.flatten( );
+    rc = (char *)g_str.string( );
+    if (!rc) {
+      ALOGE("get_parameters: NULL string");
+    } else {
+      //ALOGE("get_parameters: %s", rc);
+    }
+  }
+  ALOGE("get_parameters X");
+  return rc;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+
+{
+  ALOGE("Q%s: E", __func__);
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    //rc = hardware->putParameters(parm );
+  }
+  ALOGE("put_parameters X");
+}
+
+int send_command(struct camera_device * device,
+            int32_t cmd, int32_t arg1, int32_t arg2)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    rc = hardware->sendCommand( cmd, arg1, arg2);
+  }
+  return rc;
+}
+
+void release(struct camera_device * device)
+{
+  ALOGE("Q%s: E", __func__);
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+    hardware->release( );
+    camHal->camera_released = true;
+  }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+  ALOGE("Q%s: E", __func__);
+  int rc = -1;
+  QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+  if(hardware != NULL){
+    //rc = hardware->dump( fd );
+    rc = 0;
+  }
+  return rc;
+}
+
+}; // namespace android
diff --git a/camera/QualcommCamera.h b/camera/QualcommCamera.h
new file mode 100644
index 0000000..36d56c7
--- /dev/null
+++ b/camera/QualcommCamera.h
@@ -0,0 +1,95 @@
+/*
+** Copyright (c) 2011 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <hardware/camera.h>
+
+  int get_number_of_cameras();
+  int get_camera_info(int camera_id, struct camera_info *info);
+
+  int camera_device_open(const struct hw_module_t* module, const char* id,
+          struct hw_device_t** device);
+
+  hw_device_t * open_camera_device(int cameraId);
+
+  int close_camera_device( hw_device_t *);
+
+namespace android {
+  int set_preview_window(struct camera_device *,
+          struct preview_stream_ops *window);
+  void set_callbacks(struct camera_device *,
+          camera_notify_callback notify_cb,
+          camera_data_callback data_cb,
+          camera_data_timestamp_callback data_cb_timestamp,
+          camera_request_memory get_memory,
+          void *user);
+
+  void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+  void disable_msg_type(struct camera_device *, int32_t msg_type);
+  int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+  int start_preview(struct camera_device *);
+
+  void stop_preview(struct camera_device *);
+
+  int preview_enabled(struct camera_device *);
+  int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+  int start_recording(struct camera_device *);
+
+  void stop_recording(struct camera_device *);
+
+  int recording_enabled(struct camera_device *);
+
+  void release_recording_frame(struct camera_device *,
+                  const void *opaque);
+
+  int auto_focus(struct camera_device *);
+
+  int cancel_auto_focus(struct camera_device *);
+
+  int take_picture(struct camera_device *);
+
+  int cancel_picture(struct camera_device *);
+
+  int set_parameters(struct camera_device *, const char *parms);
+
+  char* get_parameters(struct camera_device *);
+
+  void put_parameters(struct camera_device *, char *);
+
+  int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+
+  void release(struct camera_device *);
+
+  int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/camera/QualcommCamera2.cpp b/camera/QualcommCamera2.cpp
new file mode 100644
index 0000000..45896ff
--- /dev/null
+++ b/camera/QualcommCamera2.cpp
@@ -0,0 +1,506 @@
+/* Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of Code Aurora Forum, Inc. nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define ALOG_TAG "QualcommCamera"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+/* include QCamera Hardware Interface Header*/
+#include "QualcommCamera2.h"
+//#include "QualcommCameraHardware.h"
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+static hw_module_methods_t camera_module_methods = {
+    open: camera_device_open,
+};
+
+static hw_module_t camera_common  = {
+  tag: HARDWARE_MODULE_TAG,
+  version_major: 0,
+  version_minor: 01,
+  id: CAMERA_HARDWARE_MODULE_ID,
+  name: "Qcamera",
+  author:"Qcom",
+  methods: &camera_module_methods,
+  dso: NULL,
+  //reserved[0]:  0,
+};
+camera_module_t HAL_MODULE_INFO_SYM = {
+  common: camera_common,
+  get_number_of_cameras: get_number_of_cameras,
+  get_camera_info: get_camera_info,
+};
+
+camera_device_ops_t camera_ops = {
+  set_preview_window:         android::set_preview_window,
+  set_callbacks:              android::set_CallBacks,
+  enable_msg_type:            android::enable_msg_type,
+  disable_msg_type:           android::disable_msg_type,
+  msg_type_enabled:           android::msg_type_enabled,
+
+  start_preview:              android::start_preview,
+  stop_preview:               android::stop_preview,
+  preview_enabled:            android::preview_enabled,
+  store_meta_data_in_buffers: android::store_meta_data_in_buffers,
+
+  start_recording:            android::start_recording,
+  stop_recording:             android::stop_recording,
+  recording_enabled:          android::recording_enabled,
+  release_recording_frame:    android::release_recording_frame,
+
+  auto_focus:                 android::auto_focus,
+  cancel_auto_focus:          android::cancel_auto_focus,
+
+  take_picture:               android::take_picture,
+  cancel_picture:             android::cancel_picture,
+
+  set_parameters:             android::set_parameters,
+  get_parameters:             android::get_parameters,
+  put_parameters:             android::put_parameters,
+  send_command:               android::send_command,
+
+  release:                    android::release,
+  dump:                       android::dump,
+};
+
+namespace android {
+
+typedef struct {
+  camera_device hw_dev;
+  //sp<CameraHardwareInterface> hardware;
+  QCameraHardwareInterface *hardware;
+  int camera_released;
+  int cameraId;
+  //QCameraParameters parameters;
+} camera_hardware_t;
+
+typedef struct {
+  camera_memory_t mem;
+  int32_t msgType;
+  sp<IMemory> dataPtr;
+  void* user;
+  unsigned int index;
+} q_cam_memory_t;
+
+QCameraHardwareInterface *util_get_Hal_obj( struct camera_device * device)
+{
+    QCameraHardwareInterface *hardware = NULL;
+    if(device && device->priv){
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        hardware = camHal->hardware;
+    }
+    return hardware;
+}
+
+#if 0 //mzhu
+QCameraParameters* util_get_HAL_parameter( struct camera_device * device)
+{
+    QCameraParameters *param = NULL;
+    if(device && device->priv){
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        param = &(camHal->parameters);
+    }
+    return param;
+}
+#endif //mzhu
+
+extern "C" int get_number_of_cameras()
+{
+    /* try to query every time we get the call!*/
+
+    ALOGE("Q%s: E", __func__);
+    return android::HAL_getNumberOfCameras( );
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+    int rc = -1;
+    ALOGE("Q%s: E", __func__);
+    if(info) {
+        struct CameraInfo camInfo;
+        memset(&camInfo, -1, sizeof (struct CameraInfo));
+        android::HAL_getCameraInfo(camera_id, &camInfo);
+        if (camInfo.facing >= 0) {
+            rc = 0;
+            info->facing = camInfo.facing;
+            info->orientation = camInfo.orientation;
+        }
+    }
+    ALOGV("Q%s: X", __func__);
+    return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int  camera_device_open(
+  const struct hw_module_t* module, const char* id,
+          struct hw_device_t** hw_device)
+{
+    int rc = -1;
+    int mode = 0; // TODO: need to add 3d/2d mode, etc
+    camera_device *device = NULL;
+    if(module && id && hw_device) {
+        int cameraId = atoi(id);
+
+        if (!strcmp(module->name, camera_common.name)) {
+            camera_hardware_t *camHal =
+                (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+            if(!camHal) {
+                *hw_device = NULL;
+                ALOGE("%s:  end in no mem", __func__);
+                return rc;
+            }
+            /* we have the camera_hardware obj malloced */
+            memset(camHal, 0, sizeof (camera_hardware_t));
+            camHal->hardware = new QCameraHardwareInterface(cameraId, mode); //HAL_openCameraHardware(cameraId);
+            if (camHal->hardware && camHal->hardware->isCameraReady()) {
+                camHal->cameraId = cameraId;
+                device = &camHal->hw_dev;
+                device->common.close = close_camera_device;
+                device->ops = &camera_ops;
+                device->priv = (void *)camHal;
+                rc =  0;
+            } else {
+                if (camHal->hardware) {
+                    delete camHal->hardware;
+                    camHal->hardware = NULL;
+                }
+                free(camHal);
+                device = NULL;
+            }
+        }
+    }
+    /* Pass the actual hw_device ptr to the framework so that the containing
+     * camera_hardware_t can later be recovered from it. */
+    *hw_device = (device != NULL) ? &device->common : NULL;
+    ALOGE("%s:  end rc %d", __func__, rc);
+    return rc;
+}
+
+extern "C"  int close_camera_device( hw_device_t *hw_dev)
+{
+    ALOGE("Q%s: device =%p E", __func__, hw_dev);
+    int rc =  -1;
+    camera_device_t *device = (camera_device_t *)hw_dev;
+
+    if(device) {
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        if(camHal ) {
+            QCameraHardwareInterface *hardware = util_get_Hal_obj( device);
+            if(!camHal->camera_released) {
+                if(hardware != NULL) {
+                    hardware->release( );
+                }
+            }
+            if(hardware != NULL)
+                delete hardware;
+            free(camHal);
+        }
+        rc = 0;
+    }
+    return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+        struct preview_stream_ops *window)
+{
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+
+    if(hardware != NULL) {
+        rc = hardware->setPreviewWindow(window);
+    }
+    return rc;
+}
+
+void set_CallBacks(struct camera_device * device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    ALOGE("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->setCallbacks(notify_cb,data_cb, data_cb_timestamp, get_memory, user);
+    }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->enableMsgType(msg_type);
+    }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    ALOGE("Q%s: E", __func__);
+    if(hardware != NULL){
+        hardware->disableMsgType(msg_type);
+    }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->msgTypeEnabled(msg_type);
+    }
+    return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->startPreview( );
+    }
+    ALOGE("Q%s: X", __func__);
+    return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->stopPreview( );
+    }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->previewEnabled( );
+    }
+    return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+      rc = hardware->storeMetaDataInBuffers(enable);
+    }
+    return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->startRecording( );
+    }
+    return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->stopRecording( );
+    }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->recordingEnabled( );
+    }
+    return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+                const void *opaque)
+{
+    ALOGV("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        hardware->releaseRecordingFrame(opaque);
+    }
+}
+
+int auto_focus(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->autoFocus( );
+    }
+    return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->cancelAutoFocus( );
+    }
+    return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->takePicture( );
+    }
+    return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->cancelPicture( );
+    }
+    return rc;
+}
+
+int set_parameters(struct camera_device * device, const char *parms)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL && parms){
+        //QCameraParameters param;// = util_get_HAL_parameter(device);
+        //String8 str = String8(parms);
+
+        //param.unflatten(str);
+        rc = hardware->setParameters(parms);
+        //rc = 0;
+    }
+    return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        char *parms = NULL;
+        hardware->getParameters(&parms);
+        return parms;
+    }
+    return NULL;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+{
+    ALOGE("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+      hardware->putParameters(parm);
+    }
+}
+
+int send_command(struct camera_device * device,
+            int32_t cmd, int32_t arg1, int32_t arg2)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->sendCommand( cmd, arg1, arg2);
+    }
+    return rc;
+}
+
+void release(struct camera_device * device)
+{
+    ALOGE("Q%s: E", __func__);
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+        hardware->release( );
+        camHal->camera_released = true;
+    }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+    ALOGE("Q%s: E", __func__);
+    int rc = -1;
+    QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+    if(hardware != NULL){
+        rc = hardware->dump( fd );
+      //rc = 0;
+    }
+    return rc;
+}
+
+}; // namespace android
diff --git a/camera/QualcommCamera2.h b/camera/QualcommCamera2.h
new file mode 100644
index 0000000..81dbf93
--- /dev/null
+++ b/camera/QualcommCamera2.h
@@ -0,0 +1,108 @@
+/* Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of Code Aurora Forum, Inc. nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+
+#include "QCameraHWI.h"
+
+extern "C" {
+/*#include <hardware/camera.h>*/
+
+  int get_number_of_cameras();
+  int get_camera_info(int camera_id, struct camera_info *info);
+
+  int camera_device_open(const struct hw_module_t* module, const char* id,
+          struct hw_device_t** device);
+
+  hw_device_t * open_camera_device(int cameraId);
+
+  int close_camera_device( hw_device_t *);
+
+namespace android {
+  int set_preview_window(struct camera_device *,
+          struct preview_stream_ops *window);
+  void set_CallBacks(struct camera_device *,
+          camera_notify_callback notify_cb,
+          camera_data_callback data_cb,
+          camera_data_timestamp_callback data_cb_timestamp,
+          camera_request_memory get_memory,
+          void *user);
+
+  void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+  void disable_msg_type(struct camera_device *, int32_t msg_type);
+  int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+  int start_preview(struct camera_device *);
+
+  void stop_preview(struct camera_device *);
+
+  int preview_enabled(struct camera_device *);
+  int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+  int start_recording(struct camera_device *);
+
+  void stop_recording(struct camera_device *);
+
+  int recording_enabled(struct camera_device *);
+
+  void release_recording_frame(struct camera_device *,
+                  const void *opaque);
+
+  int auto_focus(struct camera_device *);
+
+  int cancel_auto_focus(struct camera_device *);
+
+  int take_picture(struct camera_device *);
+
+  int cancel_picture(struct camera_device *);
+
+  int set_parameters(struct camera_device *, const char *parms);
+
+  char* get_parameters(struct camera_device *);
+
+  void put_parameters(struct camera_device *, char *);
+
+  int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+
+  void release(struct camera_device *);
+
+  int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/camera/QualcommCameraHardware.cpp b/camera/QualcommCameraHardware.cpp
new file mode 100644
index 0000000..80775b4
--- /dev/null
+++ b/camera/QualcommCameraHardware.cpp
@@ -0,0 +1,10025 @@
+
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2011-2012 Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QualcommCameraHardware"
+#include <utils/Log.h>
+#include "QualcommCameraHardware.h"
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+
+#include <binder/MemoryHeapPmem.h>
+#if 0
+#include <binder/MemoryHeapIon.h>
+#endif
+#include <camera/Camera.h>
+#include <hardware/camera.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include "QCameraParameters.h"
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+#include <genlock.h>
+
+#include "linux/msm_mdp.h"
+#include <linux/fb.h>
+#define LIKELY(exp)   __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+#define CAMERA_HAL_UNUSED(expr) do { (void)(expr); } while (0)
+
+extern "C" {
+#include <fcntl.h>
+#include <time.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/system_properties.h>
+#include <sys/time.h>
+#include <stdlib.h>
+
+
+#include <camera.h>
+#include <cam_fifo.h>
+#include <liveshot.h>
+#include <jpege.h>
+#include <jpeg_encoder.h>
+
+#define DUMP_LIVESHOT_JPEG_FILE 0
+
+#define DEFAULT_PICTURE_WIDTH  640
+#define DEFAULT_PICTURE_HEIGHT 480
+#define DEFAULT_PICTURE_WIDTH_3D 1920
+#define DEFAULT_PICTURE_HEIGHT_3D 1080
+#define INITIAL_PREVIEW_HEIGHT 144
+#define INITIAL_PREVIEW_WIDTH 176
+
+#define THUMBNAIL_BUFFER_SIZE (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define MAX_ZOOM_LEVEL 5
+#define NOT_FOUND -1
+// Number of video buffers held by the kernel (initially 1, 2 & 3)
+#define ACTIVE_VIDEO_BUFFERS 3
+#define ACTIVE_PREVIEW_BUFFERS 3
+#define ACTIVE_ZSL_BUFFERS 3
+#define APP_ORIENTATION 90
+#define HDR_HAL_FRAME 2
+
+#define FLASH_AUTO 24
+#define FLASH_SNAP 32
+
+#define DUMMY_CAMERA_STARTED 1
+#define DUMMY_CAMERA_STOPPED 0
+#define FLOOR16(X) ((X) & 0xFFF0)
+#if DLOPEN_LIBMMCAMERA
+#include <dlfcn.h>
+
+
+// Conversion routines from YV420sp to YV12 format
+int (*LINK_yuv_convert_ycrcb420sp_to_yv12_inplace) (yuv_image_type* yuvStructPtr);
+int (*LINK_yuv_convert_ycrcb420sp_to_yv12) (yuv_image_type* yuvStructPtrin, yuv_image_type* yuvStructPtrout);
+#define NUM_YV12_FRAMES 1
+#define FOCUS_AREA_INIT "(-1000,-1000,1000,1000,1000)"
+
+void *libmmcamera;
+void* (*LINK_cam_conf)(void *data);
+void* (*LINK_cam_frame)(void *data);
+void* (*LINK_wait_cam_frame_thread_ready)(void);
+void* (*LINK_cam_frame_set_exit_flag)(int flag);
+bool  (*LINK_jpeg_encoder_init)();
+void  (*LINK_jpeg_encoder_join)();
+bool  (*LINK_jpeg_encoder_encode)(const cam_ctrl_dimension_t *dimen,
+                const uint8_t *thumbnailbuf, int thumbnailfd,
+                const uint8_t *snapshotbuf, int snapshotfd,
+                common_crop_t *scaling_parms, exif_tags_info_t *exif_data,
+                int exif_table_numEntries, int jpegPadding, const int32_t cbcroffset,int zsl_enable);
+void (*LINK_camframe_terminate)(void);
+//for 720p
+// Function pointer, called by camframe when a video frame is available.
+void (**LINK_camframe_video_callback)(struct msm_frame * frame);
+// Function to add a frame to free Q
+void (*LINK_camframe_add_frame)(cam_frame_type_t type,struct msm_frame *frame);
+
+void (*LINK_camframe_release_all_frames)(cam_frame_type_t type);
+
+int8_t (*LINK_jpeg_encoder_setMainImageQuality)(uint32_t quality);
+int8_t (*LINK_jpeg_encoder_setThumbnailQuality)(uint32_t quality);
+int8_t (*LINK_jpeg_encoder_setRotation)(uint32_t rotation);
+int8_t (*LINK_jpeg_encoder_get_buffer_offset)(uint32_t width, uint32_t height,
+                                              uint32_t* p_y_offset,
+                                              uint32_t* p_cbcr_offset,
+                                              uint32_t* p_buf_size);
+int8_t (*LINK_jpeg_encoder_setLocation)(const camera_position_type *location);
+void (*LINK_jpeg_encoder_set_3D_info)(cam_3d_frame_format_t format);
+const struct camera_size_type *(*LINK_default_sensor_get_snapshot_sizes)(int *len);
+int (*LINK_launch_cam_conf_thread)(void);
+int (*LINK_release_cam_conf_thread)(void);
+mm_camera_status_t (*LINK_mm_camera_init)(mm_camera_config *, mm_camera_notify*, mm_camera_ops*,uint8_t);
+mm_camera_status_t (*LINK_mm_camera_deinit)();
+mm_camera_status_t (*LINK_mm_camera_destroy)();
+mm_camera_status_t (*LINK_mm_camera_exec)();
+mm_camera_status_t (*LINK_mm_camera_get_camera_info) (camera_info_t* p_cam_info, int* p_num_cameras);
+
+int8_t (*LINK_zoom_crop_upscale)(uint32_t width, uint32_t height,
+    uint32_t cropped_width, uint32_t cropped_height, uint8_t *img_buf);
+
+// callbacks
+void  (**LINK_mmcamera_shutter_callback)(common_crop_t *crop);
+void  (**LINK_cancel_liveshot)(void);
+int8_t  (*LINK_set_liveshot_params)(uint32_t a_width, uint32_t a_height, exif_tags_info_t *a_exif_data,
+                         int a_exif_numEntries, uint8_t* a_out_buffer, uint32_t a_outbuffer_size);
+void (*LINK_set_liveshot_frame)(struct msm_frame *liveshot_frame);
+#else
+#define LINK_cam_conf cam_conf
+#define LINK_cam_frame cam_frame
+#define LINK_wait_cam_frame_thread_ready wait_cam_frame_thread_ready
+#define LINK_cam_frame_set_exit_flag cam_frame_set_exit_flag
+#define LINK_jpeg_encoder_init jpeg_encoder_init
+#define LINK_jpeg_encoder_join jpeg_encoder_join
+#define LINK_jpeg_encoder_encode jpeg_encoder_encode
+#define LINK_camframe_terminate camframe_terminate
+#define LINK_jpeg_encoder_setMainImageQuality jpeg_encoder_setMainImageQuality
+#define LINK_jpeg_encoder_setThumbnailQuality jpeg_encoder_setThumbnailQuality
+#define LINK_jpeg_encoder_setRotation jpeg_encoder_setRotation
+#define LINK_jpeg_encoder_get_buffer_offset jpeg_encoder_get_buffer_offset
+#define LINK_jpeg_encoder_setLocation jpeg_encoder_setLocation
+#define LINK_jpeg_encoder_set_3D_info jpeg_encoder_set_3D_info
+#define LINK_default_sensor_get_snapshot_sizes default_sensor_get_snapshot_sizes
+#define LINK_launch_cam_conf_thread launch_cam_conf_thread
+#define LINK_release_cam_conf_thread release_cam_conf_thread
+#define LINK_zoom_crop_upscale zoom_crop_upscale
+#define LINK_mm_camera_init mm_camera_config_init
+#define LINK_mm_camera_deinit mm_camera_config_deinit
+#define LINK_mm_camera_destroy mm_camera_config_destroy
+#define LINK_mm_camera_exec mm_camera_exec
+#define LINK_camframe_add_frame camframe_add_frame
+#define LINK_camframe_release_all_frames camframe_release_all_frames
+#define LINK_mm_camera_get_camera_info mm_camera_get_camera_info
+
+extern void (*mmcamera_camframe_callback)(struct msm_frame *frame);
+extern void (*mmcamera_camstats_callback)(camstats_type stype, camera_preview_histogram_info* histinfo);
+extern void (*mmcamera_jpegfragment_callback)(uint8_t *buff_ptr,
+                                      uint32_t buff_size);
+extern void (*mmcamera_jpeg_callback)(jpeg_event_t status);
+extern void (*mmcamera_shutter_callback)(common_crop_t *crop);
+extern void (*mmcamera_liveshot_callback)(liveshot_status status, uint32_t jpeg_size);
+#define LINK_set_liveshot_params set_liveshot_params
+#define LINK_set_liveshot_frame set_liveshot_frame
+#endif
+
+} // extern "C"
+
+#ifndef HAVE_CAMERA_SIZE_TYPE
+struct camera_size_type {
+    int width;
+    int height;
+};
+#endif
+#if 0
+typedef struct crop_info_struct {
+    int32_t x;
+    int32_t y;
+    int32_t w;
+    int32_t h;
+} zoom_crop_info;
+#endif
+union zoomimage
+{
+    char d[sizeof(struct mdp_blit_req_list) + sizeof(struct mdp_blit_req) * 1];
+    struct mdp_blit_req_list list;
+} zoomImage;
+
+//Default to VGA
+#define DEFAULT_PREVIEW_WIDTH 640
+#define DEFAULT_PREVIEW_HEIGHT 480
+#define DEFAULT_PREVIEW_WIDTH_3D 1280
+#define DEFAULT_PREVIEW_HEIGHT_3D 720
+
+//Default FPS
+#define MINIMUM_FPS 5
+#define MAXIMUM_FPS 31
+#define DEFAULT_FPS MAXIMUM_FPS
+#define DEFAULT_FIXED_FPS_VALUE 30
+/*
+ * Modifying the preview size tables requires a corresponding change
+ * to the bitmasks in boardProperties below.
+ */
+static uint32_t  PREVIEW_SIZE_COUNT;
+static uint32_t  HFR_SIZE_COUNT;
+
+board_property boardProperties[] = {
+        {TARGET_MSM7625, 0x00000fff, false, false, false},
+        {TARGET_MSM7625A, 0x00000fff, false, false, false},
+        {TARGET_MSM7627, 0x000006ff, false, false, false},
+        {TARGET_MSM7627A, 0x000006ff, false, false, false},
+        {TARGET_MSM7630, 0x00000fff, true, true, false},
+        {TARGET_MSM8660, 0x00001fff, true, true, false},
+        {TARGET_QSD8250, 0x00000fff, false, false, false}
+};
+
+//static const camera_size_type* picture_sizes;
+//static int PICTURE_SIZE_COUNT;
+/* TODO
+ * Ideally this should be populated by the lower layers, but currently there
+ * is no API to do that at the lower layer. Hence we populate default sizes
+ * for now; this needs to be changed once the API is supported.
+ */
+// Sorted by width in descending order
+static struct camera_size_type zsl_picture_sizes[] = {
+  { 1024, 768}, // 1MP XGA
+  { 800, 600}, //SVGA
+  { 800, 480}, // WVGA
+  { 640, 480}, // VGA
+  { 352, 288}, //CIF
+  { 320, 240}, // QVGA
+  { 176, 144} // QCIF
+};
+
+static struct camera_size_type for_3D_picture_sizes[] = {
+  { 1920, 1080},
+};
+
+static int data_counter = 0;
+static int sensor_rotation = 0;
+static int record_flag = 0;
+static camera_size_type* picture_sizes;
+static camera_size_type* preview_sizes;
+static camera_size_type* hfr_sizes;
+static unsigned int PICTURE_SIZE_COUNT;
+static const camera_size_type * picture_sizes_ptr;
+static int supportedPictureSizesCount;
+static liveshotState liveshot_state = LIVESHOT_DONE;
+
+#ifdef Q12
+#undef Q12
+#endif
+
+#define Q12 4096
+
+static const target_map targetList [] = {
+    { "msm7625", TARGET_MSM7625 },
+    { "msm7625a", TARGET_MSM7625A },
+    { "msm7627", TARGET_MSM7627 },
+    { "msm7627a", TARGET_MSM7627A },
+    { "qsd8250", TARGET_QSD8250 },
+    { "msm7630", TARGET_MSM7630 },
+    { "msm8660", TARGET_MSM8660 }
+
+};
+static targetType mCurrentTarget = TARGET_MAX;
+
+typedef struct {
+    uint32_t aspect_ratio;
+    uint32_t width;
+    uint32_t height;
+} thumbnail_size_type;
+
+static thumbnail_size_type thumbnail_sizes[] = {
+    { 7281, 512, 288 }, //1.777778
+    { 6826, 480, 288 }, //1.666667
+    { 6808, 256, 154 }, //1.662337
+    { 6144, 432, 288 }, //1.5
+    { 5461, 512, 384 }, //1.333333
+    { 5006, 352, 288 }, //1.222222
+};
+#define THUMBNAIL_SIZE_COUNT (sizeof(thumbnail_sizes)/sizeof(thumbnail_size_type))
+#define DEFAULT_THUMBNAIL_SETTING 4
+#define THUMBNAIL_WIDTH_STR "512"
+#define THUMBNAIL_HEIGHT_STR "384"
+#define THUMBNAIL_SMALL_HEIGHT 144
+static camera_size_type jpeg_thumbnail_sizes[]  = {
+    { 512, 288 },
+    { 480, 288 },
+    { 432, 288 },
+    { 512, 384 },
+    { 352, 288 },
+    {0,0}
+};
+//supported preview fps ranges should be added to this array in the form (minFps,maxFps)
+static  android::FPSRange FpsRangesSupported[] = {{MINIMUM_FPS*1000,MAXIMUM_FPS*1000}};
+
+#define FPS_RANGES_SUPPORTED_COUNT (sizeof(FpsRangesSupported)/sizeof(FpsRangesSupported[0]))
+
+#define JPEG_THUMBNAIL_SIZE_COUNT (sizeof(jpeg_thumbnail_sizes)/sizeof(camera_size_type))
+static int attr_lookup(const str_map arr[], int len, const char *name)
+{
+    if (name) {
+        for (int i = 0; i < len; i++) {
+            if (!strcmp(arr[i].desc, name))
+                return arr[i].val;
+        }
+    }
+    return NOT_FOUND;
+}
+
+// round to the next power of two
+static inline unsigned clp2(unsigned x)
+{
+    x = x - 1;
+    x = x | (x >> 1);
+    x = x | (x >> 2);
+    x = x | (x >> 4);
+    x = x | (x >> 8);
+    x = x | (x >>16);
+    return x + 1;
+}
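+// Worked examples (added for clarity): clp2(5) == 8, clp2(64) == 64;
+// clp2(0) wraps back to 0 because (x - 1) underflows to UINT_MAX.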
+
+static int exif_table_numEntries = 0;
+#define MAX_EXIF_TABLE_ENTRIES 14
+exif_tags_info_t exif_data[MAX_EXIF_TABLE_ENTRIES];
+//static zoom_crop_info zoomCropInfo;
+static android_native_rect_t zoomCropInfo;
+static void *mLastQueuedFrame = NULL;
+#define RECORD_BUFFERS 9
+#define RECORD_BUFFERS_8x50 8
+static int kRecordBufferCount;
+/* Controls whether VPE is available for the target
+ * under consideration.
+ * 1: VPE support is available
+ * 0: VPE support is not available (default)
+ */
+static bool mVpeEnabled;
+static cam_frame_start_parms camframeParams;
+
+static int HAL_numOfCameras;
+static camera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+static int HAL_currentCameraId;
+static int HAL_currentCameraMode;
+static mm_camera_config mCfgControl;
+static bool mCameraOpen;
+
+static int HAL_currentSnapshotMode;
+static int previewWidthToNativeZoom;
+static int previewHeightToNativeZoom;
+#define CAMERA_SNAPSHOT_NONZSL 0x04
+#define CAMERA_SNAPSHOT_ZSL 0x08
+
+namespace android {
+    extern void native_send_data_callback(int32_t msgType,
+                                          camera_memory_t * framebuffer,
+                                          void* user);
+
+    extern camera_memory_t* get_mem(int fd, size_t buf_size,
+                                    unsigned int num_bufs,
+                                    void *user);
+
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+
+// from aeecamera.h
+static const str_map whitebalance[] = {
+    { QCameraParameters::WHITE_BALANCE_AUTO,            CAMERA_WB_AUTO },
+    { QCameraParameters::WHITE_BALANCE_INCANDESCENT,    CAMERA_WB_INCANDESCENT },
+    { QCameraParameters::WHITE_BALANCE_FLUORESCENT,     CAMERA_WB_FLUORESCENT },
+    { QCameraParameters::WHITE_BALANCE_DAYLIGHT,        CAMERA_WB_DAYLIGHT },
+    { QCameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, CAMERA_WB_CLOUDY_DAYLIGHT }
+};
+
+// from camera_effect_t. This list must match aeecamera.h
+static const str_map effects[] = {
+    { QCameraParameters::EFFECT_NONE,       CAMERA_EFFECT_OFF },
+    { QCameraParameters::EFFECT_MONO,       CAMERA_EFFECT_MONO },
+    { QCameraParameters::EFFECT_NEGATIVE,   CAMERA_EFFECT_NEGATIVE },
+    { QCameraParameters::EFFECT_SOLARIZE,   CAMERA_EFFECT_SOLARIZE },
+    { QCameraParameters::EFFECT_SEPIA,      CAMERA_EFFECT_SEPIA },
+    { QCameraParameters::EFFECT_POSTERIZE,  CAMERA_EFFECT_POSTERIZE },
+    { QCameraParameters::EFFECT_WHITEBOARD, CAMERA_EFFECT_WHITEBOARD },
+    { QCameraParameters::EFFECT_BLACKBOARD, CAMERA_EFFECT_BLACKBOARD },
+    { QCameraParameters::EFFECT_AQUA,       CAMERA_EFFECT_AQUA }
+};
+
+// from qcamera/common/camera.h
+static const str_map autoexposure[] = {
+    { QCameraParameters::AUTO_EXPOSURE_FRAME_AVG,  CAMERA_AEC_FRAME_AVERAGE },
+    { QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED, CAMERA_AEC_CENTER_WEIGHTED },
+    { QCameraParameters::AUTO_EXPOSURE_SPOT_METERING, CAMERA_AEC_SPOT_METERING }
+};
+
+// from qcamera/common/camera.h
+static const str_map antibanding[] = {
+    { QCameraParameters::ANTIBANDING_OFF,  CAMERA_ANTIBANDING_OFF },
+    { QCameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+    { QCameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ },
+    { QCameraParameters::ANTIBANDING_AUTO, CAMERA_ANTIBANDING_AUTO }
+};
+
+static const str_map antibanding_3D[] = {
+    { QCameraParameters::ANTIBANDING_OFF,  CAMERA_ANTIBANDING_OFF },
+    { QCameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+    { QCameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ }
+};
+
+/* Mapping from MCC to antibanding type */
+struct country_map {
+    uint32_t country_code;
+    camera_antibanding_type type;
+};
+
+#if 0 // Not currently used; kept as-is since it came from Google.
+static struct country_map country_numeric[] = {
+    { 202, CAMERA_ANTIBANDING_50HZ }, // Greece
+    { 204, CAMERA_ANTIBANDING_50HZ }, // Netherlands
+    { 206, CAMERA_ANTIBANDING_50HZ }, // Belgium
+    { 208, CAMERA_ANTIBANDING_50HZ }, // France
+    { 212, CAMERA_ANTIBANDING_50HZ }, // Monaco
+    { 213, CAMERA_ANTIBANDING_50HZ }, // Andorra
+    { 214, CAMERA_ANTIBANDING_50HZ }, // Spain
+    { 216, CAMERA_ANTIBANDING_50HZ }, // Hungary
+    { 219, CAMERA_ANTIBANDING_50HZ }, // Croatia
+    { 220, CAMERA_ANTIBANDING_50HZ }, // Serbia
+    { 222, CAMERA_ANTIBANDING_50HZ }, // Italy
+    { 226, CAMERA_ANTIBANDING_50HZ }, // Romania
+    { 228, CAMERA_ANTIBANDING_50HZ }, // Switzerland
+    { 230, CAMERA_ANTIBANDING_50HZ }, // Czech Republic
+    { 231, CAMERA_ANTIBANDING_50HZ }, // Slovakia
+    { 232, CAMERA_ANTIBANDING_50HZ }, // Austria
+    { 234, CAMERA_ANTIBANDING_50HZ }, // United Kingdom
+    { 235, CAMERA_ANTIBANDING_50HZ }, // United Kingdom
+    { 238, CAMERA_ANTIBANDING_50HZ }, // Denmark
+    { 240, CAMERA_ANTIBANDING_50HZ }, // Sweden
+    { 242, CAMERA_ANTIBANDING_50HZ }, // Norway
+    { 244, CAMERA_ANTIBANDING_50HZ }, // Finland
+    { 246, CAMERA_ANTIBANDING_50HZ }, // Lithuania
+    { 247, CAMERA_ANTIBANDING_50HZ }, // Latvia
+    { 248, CAMERA_ANTIBANDING_50HZ }, // Estonia
+    { 250, CAMERA_ANTIBANDING_50HZ }, // Russian Federation
+    { 255, CAMERA_ANTIBANDING_50HZ }, // Ukraine
+    { 257, CAMERA_ANTIBANDING_50HZ }, // Belarus
+    { 259, CAMERA_ANTIBANDING_50HZ }, // Moldova
+    { 260, CAMERA_ANTIBANDING_50HZ }, // Poland
+    { 262, CAMERA_ANTIBANDING_50HZ }, // Germany
+    { 266, CAMERA_ANTIBANDING_50HZ }, // Gibraltar
+    { 268, CAMERA_ANTIBANDING_50HZ }, // Portugal
+    { 270, CAMERA_ANTIBANDING_50HZ }, // Luxembourg
+    { 272, CAMERA_ANTIBANDING_50HZ }, // Ireland
+    { 274, CAMERA_ANTIBANDING_50HZ }, // Iceland
+    { 276, CAMERA_ANTIBANDING_50HZ }, // Albania
+    { 278, CAMERA_ANTIBANDING_50HZ }, // Malta
+    { 280, CAMERA_ANTIBANDING_50HZ }, // Cyprus
+    { 282, CAMERA_ANTIBANDING_50HZ }, // Georgia
+    { 283, CAMERA_ANTIBANDING_50HZ }, // Armenia
+    { 284, CAMERA_ANTIBANDING_50HZ }, // Bulgaria
+    { 286, CAMERA_ANTIBANDING_50HZ }, // Turkey
+    { 288, CAMERA_ANTIBANDING_50HZ }, // Faroe Islands
+    { 290, CAMERA_ANTIBANDING_50HZ }, // Greenland
+    { 293, CAMERA_ANTIBANDING_50HZ }, // Slovenia
+    { 294, CAMERA_ANTIBANDING_50HZ }, // Macedonia
+    { 295, CAMERA_ANTIBANDING_50HZ }, // Liechtenstein
+    { 297, CAMERA_ANTIBANDING_50HZ }, // Montenegro
+    { 302, CAMERA_ANTIBANDING_60HZ }, // Canada
+    { 310, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 311, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 312, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 313, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 314, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 315, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 316, CAMERA_ANTIBANDING_60HZ }, // United States of America
+    { 330, CAMERA_ANTIBANDING_60HZ }, // Puerto Rico
+    { 334, CAMERA_ANTIBANDING_60HZ }, // Mexico
+    { 338, CAMERA_ANTIBANDING_50HZ }, // Jamaica
+    { 340, CAMERA_ANTIBANDING_50HZ }, // Martinique
+    { 342, CAMERA_ANTIBANDING_50HZ }, // Barbados
+    { 346, CAMERA_ANTIBANDING_60HZ }, // Cayman Islands
+    { 350, CAMERA_ANTIBANDING_60HZ }, // Bermuda
+    { 352, CAMERA_ANTIBANDING_50HZ }, // Grenada
+    { 354, CAMERA_ANTIBANDING_60HZ }, // Montserrat
+    { 362, CAMERA_ANTIBANDING_50HZ }, // Netherlands Antilles
+    { 363, CAMERA_ANTIBANDING_60HZ }, // Aruba
+    { 364, CAMERA_ANTIBANDING_60HZ }, // Bahamas
+    { 365, CAMERA_ANTIBANDING_60HZ }, // Anguilla
+    { 366, CAMERA_ANTIBANDING_50HZ }, // Dominica
+    { 368, CAMERA_ANTIBANDING_60HZ }, // Cuba
+    { 370, CAMERA_ANTIBANDING_60HZ }, // Dominican Republic
+    { 372, CAMERA_ANTIBANDING_60HZ }, // Haiti
+    { 401, CAMERA_ANTIBANDING_50HZ }, // Kazakhstan
+    { 402, CAMERA_ANTIBANDING_50HZ }, // Bhutan
+    { 404, CAMERA_ANTIBANDING_50HZ }, // India
+    { 405, CAMERA_ANTIBANDING_50HZ }, // India
+    { 410, CAMERA_ANTIBANDING_50HZ }, // Pakistan
+    { 413, CAMERA_ANTIBANDING_50HZ }, // Sri Lanka
+    { 414, CAMERA_ANTIBANDING_50HZ }, // Myanmar
+    { 415, CAMERA_ANTIBANDING_50HZ }, // Lebanon
+    { 416, CAMERA_ANTIBANDING_50HZ }, // Jordan
+    { 417, CAMERA_ANTIBANDING_50HZ }, // Syria
+    { 418, CAMERA_ANTIBANDING_50HZ }, // Iraq
+    { 419, CAMERA_ANTIBANDING_50HZ }, // Kuwait
+    { 420, CAMERA_ANTIBANDING_60HZ }, // Saudi Arabia
+    { 421, CAMERA_ANTIBANDING_50HZ }, // Yemen
+    { 422, CAMERA_ANTIBANDING_50HZ }, // Oman
+    { 424, CAMERA_ANTIBANDING_50HZ }, // United Arab Emirates
+    { 425, CAMERA_ANTIBANDING_50HZ }, // Israel
+    { 426, CAMERA_ANTIBANDING_50HZ }, // Bahrain
+    { 427, CAMERA_ANTIBANDING_50HZ }, // Qatar
+    { 428, CAMERA_ANTIBANDING_50HZ }, // Mongolia
+    { 429, CAMERA_ANTIBANDING_50HZ }, // Nepal
+    { 430, CAMERA_ANTIBANDING_50HZ }, // United Arab Emirates
+    { 431, CAMERA_ANTIBANDING_50HZ }, // United Arab Emirates
+    { 432, CAMERA_ANTIBANDING_50HZ }, // Iran
+    { 434, CAMERA_ANTIBANDING_50HZ }, // Uzbekistan
+    { 436, CAMERA_ANTIBANDING_50HZ }, // Tajikistan
+    { 437, CAMERA_ANTIBANDING_50HZ }, // Kyrgyz Rep
+    { 438, CAMERA_ANTIBANDING_50HZ }, // Turkmenistan
+    { 440, CAMERA_ANTIBANDING_60HZ }, // Japan
+    { 441, CAMERA_ANTIBANDING_60HZ }, // Japan
+    { 452, CAMERA_ANTIBANDING_50HZ }, // Vietnam
+    { 454, CAMERA_ANTIBANDING_50HZ }, // Hong Kong
+    { 455, CAMERA_ANTIBANDING_50HZ }, // Macao
+    { 456, CAMERA_ANTIBANDING_50HZ }, // Cambodia
+    { 457, CAMERA_ANTIBANDING_50HZ }, // Laos
+    { 460, CAMERA_ANTIBANDING_50HZ }, // China
+    { 466, CAMERA_ANTIBANDING_60HZ }, // Taiwan
+    { 470, CAMERA_ANTIBANDING_50HZ }, // Bangladesh
+    { 472, CAMERA_ANTIBANDING_50HZ }, // Maldives
+    { 502, CAMERA_ANTIBANDING_50HZ }, // Malaysia
+    { 505, CAMERA_ANTIBANDING_50HZ }, // Australia
+    { 510, CAMERA_ANTIBANDING_50HZ }, // Indonesia
+    { 514, CAMERA_ANTIBANDING_50HZ }, // East Timor
+    { 515, CAMERA_ANTIBANDING_60HZ }, // Philippines
+    { 520, CAMERA_ANTIBANDING_50HZ }, // Thailand
+    { 525, CAMERA_ANTIBANDING_50HZ }, // Singapore
+    { 530, CAMERA_ANTIBANDING_50HZ }, // New Zealand
+    { 535, CAMERA_ANTIBANDING_60HZ }, // Guam
+    { 536, CAMERA_ANTIBANDING_50HZ }, // Nauru
+    { 537, CAMERA_ANTIBANDING_50HZ }, // Papua New Guinea
+    { 539, CAMERA_ANTIBANDING_50HZ }, // Tonga
+    { 541, CAMERA_ANTIBANDING_50HZ }, // Vanuatu
+    { 542, CAMERA_ANTIBANDING_50HZ }, // Fiji
+    { 544, CAMERA_ANTIBANDING_60HZ }, // American Samoa
+    { 545, CAMERA_ANTIBANDING_50HZ }, // Kiribati
+    { 546, CAMERA_ANTIBANDING_50HZ }, // New Caledonia
+    { 548, CAMERA_ANTIBANDING_50HZ }, // Cook Islands
+    { 602, CAMERA_ANTIBANDING_50HZ }, // Egypt
+    { 603, CAMERA_ANTIBANDING_50HZ }, // Algeria
+    { 604, CAMERA_ANTIBANDING_50HZ }, // Morocco
+    { 605, CAMERA_ANTIBANDING_50HZ }, // Tunisia
+    { 606, CAMERA_ANTIBANDING_50HZ }, // Libya
+    { 607, CAMERA_ANTIBANDING_50HZ }, // Gambia
+    { 608, CAMERA_ANTIBANDING_50HZ }, // Senegal
+    { 609, CAMERA_ANTIBANDING_50HZ }, // Mauritania
+    { 610, CAMERA_ANTIBANDING_50HZ }, // Mali
+    { 611, CAMERA_ANTIBANDING_50HZ }, // Guinea
+    { 613, CAMERA_ANTIBANDING_50HZ }, // Burkina Faso
+    { 614, CAMERA_ANTIBANDING_50HZ }, // Niger
+    { 616, CAMERA_ANTIBANDING_50HZ }, // Benin
+    { 617, CAMERA_ANTIBANDING_50HZ }, // Mauritius
+    { 618, CAMERA_ANTIBANDING_50HZ }, // Liberia
+    { 619, CAMERA_ANTIBANDING_50HZ }, // Sierra Leone
+    { 620, CAMERA_ANTIBANDING_50HZ }, // Ghana
+    { 621, CAMERA_ANTIBANDING_50HZ }, // Nigeria
+    { 622, CAMERA_ANTIBANDING_50HZ }, // Chad
+    { 623, CAMERA_ANTIBANDING_50HZ }, // Central African Republic
+    { 624, CAMERA_ANTIBANDING_50HZ }, // Cameroon
+    { 625, CAMERA_ANTIBANDING_50HZ }, // Cape Verde
+    { 627, CAMERA_ANTIBANDING_50HZ }, // Equatorial Guinea
+    { 631, CAMERA_ANTIBANDING_50HZ }, // Angola
+    { 633, CAMERA_ANTIBANDING_50HZ }, // Seychelles
+    { 634, CAMERA_ANTIBANDING_50HZ }, // Sudan
+    { 636, CAMERA_ANTIBANDING_50HZ }, // Ethiopia
+    { 637, CAMERA_ANTIBANDING_50HZ }, // Somalia
+    { 638, CAMERA_ANTIBANDING_50HZ }, // Djibouti
+    { 639, CAMERA_ANTIBANDING_50HZ }, // Kenya
+    { 640, CAMERA_ANTIBANDING_50HZ }, // Tanzania
+    { 641, CAMERA_ANTIBANDING_50HZ }, // Uganda
+    { 642, CAMERA_ANTIBANDING_50HZ }, // Burundi
+    { 643, CAMERA_ANTIBANDING_50HZ }, // Mozambique
+    { 645, CAMERA_ANTIBANDING_50HZ }, // Zambia
+    { 646, CAMERA_ANTIBANDING_50HZ }, // Madagascar
+    { 647, CAMERA_ANTIBANDING_50HZ }, // France
+    { 648, CAMERA_ANTIBANDING_50HZ }, // Zimbabwe
+    { 649, CAMERA_ANTIBANDING_50HZ }, // Namibia
+    { 650, CAMERA_ANTIBANDING_50HZ }, // Malawi
+    { 651, CAMERA_ANTIBANDING_50HZ }, // Lesotho
+    { 652, CAMERA_ANTIBANDING_50HZ }, // Botswana
+    { 653, CAMERA_ANTIBANDING_50HZ }, // Swaziland
+    { 654, CAMERA_ANTIBANDING_50HZ }, // Comoros
+    { 655, CAMERA_ANTIBANDING_50HZ }, // South Africa
+    { 657, CAMERA_ANTIBANDING_50HZ }, // Eritrea
+    { 702, CAMERA_ANTIBANDING_60HZ }, // Belize
+    { 704, CAMERA_ANTIBANDING_60HZ }, // Guatemala
+    { 706, CAMERA_ANTIBANDING_60HZ }, // El Salvador
+    { 708, CAMERA_ANTIBANDING_60HZ }, // Honduras
+    { 710, CAMERA_ANTIBANDING_60HZ }, // Nicaragua
+    { 712, CAMERA_ANTIBANDING_60HZ }, // Costa Rica
+    { 714, CAMERA_ANTIBANDING_60HZ }, // Panama
+    { 722, CAMERA_ANTIBANDING_50HZ }, // Argentina
+    { 724, CAMERA_ANTIBANDING_60HZ }, // Brazil
+    { 730, CAMERA_ANTIBANDING_50HZ }, // Chile
+    { 732, CAMERA_ANTIBANDING_60HZ }, // Colombia
+    { 734, CAMERA_ANTIBANDING_60HZ }, // Venezuela
+    { 736, CAMERA_ANTIBANDING_50HZ }, // Bolivia
+    { 738, CAMERA_ANTIBANDING_60HZ }, // Guyana
+    { 740, CAMERA_ANTIBANDING_60HZ }, // Ecuador
+    { 742, CAMERA_ANTIBANDING_50HZ }, // French Guiana
+    { 744, CAMERA_ANTIBANDING_50HZ }, // Paraguay
+    { 746, CAMERA_ANTIBANDING_60HZ }, // Suriname
+    { 748, CAMERA_ANTIBANDING_50HZ }, // Uruguay
+    { 750, CAMERA_ANTIBANDING_50HZ }, // Falkland Islands
+};
+#define country_number (sizeof(country_numeric) / sizeof(country_map))
+/* Look up pre-sorted antibanding_type table by current MCC. */
+static camera_antibanding_type camera_get_location(void) {
+    char value[PROP_VALUE_MAX];
+    char country_value[PROP_VALUE_MAX];
+    uint32_t country_code;
+    memset(value, 0x00, sizeof(value));
+    memset(country_value, 0x00, sizeof(country_value));
+    if (!__system_property_get("gsm.operator.numeric", value)) {
+        return CAMERA_ANTIBANDING_60HZ;
+    }
+    memcpy(country_value, value, 3);
+    country_code = atoi(country_value);
+    ALOGD("value:%s, country value:%s, country code:%d\n",
+            value, country_value, country_code);
+    int left = 0;
+    int right = country_number - 1;
+    while (left <= right) {
+        int index = (left + right) >> 1;
+        if (country_numeric[index].country_code == country_code)
+            return country_numeric[index].type;
+        else if (country_numeric[index].country_code > country_code)
+            right = index - 1;
+        else
+            left = index + 1;
+    }
+    return CAMERA_ANTIBANDING_60HZ;
+}
+#endif
+
+static const str_map scenemode[] = {
+    { QCameraParameters::SCENE_MODE_AUTO,           CAMERA_BESTSHOT_OFF },
+    { QCameraParameters::SCENE_MODE_ASD,           CAMERA_BESTSHOT_AUTO },
+    { QCameraParameters::SCENE_MODE_ACTION,         CAMERA_BESTSHOT_ACTION },
+    { QCameraParameters::SCENE_MODE_PORTRAIT,       CAMERA_BESTSHOT_PORTRAIT },
+    { QCameraParameters::SCENE_MODE_LANDSCAPE,      CAMERA_BESTSHOT_LANDSCAPE },
+    { QCameraParameters::SCENE_MODE_NIGHT,          CAMERA_BESTSHOT_NIGHT },
+    { QCameraParameters::SCENE_MODE_NIGHT_PORTRAIT, CAMERA_BESTSHOT_NIGHT_PORTRAIT },
+    { QCameraParameters::SCENE_MODE_THEATRE,        CAMERA_BESTSHOT_THEATRE },
+    { QCameraParameters::SCENE_MODE_BEACH,          CAMERA_BESTSHOT_BEACH },
+    { QCameraParameters::SCENE_MODE_SNOW,           CAMERA_BESTSHOT_SNOW },
+    { QCameraParameters::SCENE_MODE_SUNSET,         CAMERA_BESTSHOT_SUNSET },
+    { QCameraParameters::SCENE_MODE_STEADYPHOTO,    CAMERA_BESTSHOT_ANTISHAKE },
+    { QCameraParameters::SCENE_MODE_FIREWORKS ,     CAMERA_BESTSHOT_FIREWORKS },
+    { QCameraParameters::SCENE_MODE_SPORTS ,        CAMERA_BESTSHOT_SPORTS },
+    { QCameraParameters::SCENE_MODE_PARTY,          CAMERA_BESTSHOT_PARTY },
+    { QCameraParameters::SCENE_MODE_CANDLELIGHT,    CAMERA_BESTSHOT_CANDLELIGHT },
+    { QCameraParameters::SCENE_MODE_BACKLIGHT,      CAMERA_BESTSHOT_BACKLIGHT },
+    { QCameraParameters::SCENE_MODE_FLOWERS,        CAMERA_BESTSHOT_FLOWERS },
+    { QCameraParameters::SCENE_MODE_AR,             CAMERA_BESTSHOT_AR },
+};
+
+static const str_map scenedetect[] = {
+    { QCameraParameters::SCENE_DETECT_OFF, FALSE  },
+    { QCameraParameters::SCENE_DETECT_ON, TRUE },
+};
+
+// from camera.h, led_mode_t
+static const str_map flash[] = {
+    { QCameraParameters::FLASH_MODE_OFF,  LED_MODE_OFF },
+    { QCameraParameters::FLASH_MODE_AUTO, LED_MODE_AUTO },
+    { QCameraParameters::FLASH_MODE_ON, LED_MODE_ON },
+    { QCameraParameters::FLASH_MODE_TORCH, LED_MODE_TORCH}
+};
+
+// from mm-camera/common/camera.h.
+static const str_map iso[] = {
+    { QCameraParameters::ISO_AUTO,  CAMERA_ISO_AUTO},
+    { QCameraParameters::ISO_HJR,   CAMERA_ISO_DEBLUR},
+    { QCameraParameters::ISO_100,   CAMERA_ISO_100},
+    { QCameraParameters::ISO_200,   CAMERA_ISO_200},
+    { QCameraParameters::ISO_400,   CAMERA_ISO_400},
+    { QCameraParameters::ISO_800,   CAMERA_ISO_800 },
+    { QCameraParameters::ISO_1600,  CAMERA_ISO_1600 }
+};
+
+static const str_map iso_3D[] = {
+    { QCameraParameters::ISO_AUTO,  CAMERA_ISO_AUTO},
+    { QCameraParameters::ISO_100,   CAMERA_ISO_100},
+    { QCameraParameters::ISO_200,   CAMERA_ISO_200},
+    { QCameraParameters::ISO_400,   CAMERA_ISO_400},
+    { QCameraParameters::ISO_800,   CAMERA_ISO_800 },
+    { QCameraParameters::ISO_1600,  CAMERA_ISO_1600 }
+};
+
+
+#define DONT_CARE AF_MODE_MAX
+static const str_map focus_modes[] = {
+    { QCameraParameters::FOCUS_MODE_AUTO,     AF_MODE_AUTO},
+    { QCameraParameters::FOCUS_MODE_INFINITY, DONT_CARE },
+    { QCameraParameters::FOCUS_MODE_NORMAL,   AF_MODE_NORMAL },
+    { QCameraParameters::FOCUS_MODE_MACRO,    AF_MODE_MACRO },
+    { QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, AF_MODE_CAF },
+    { QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, DONT_CARE }
+};
+
+static const str_map lensshade[] = {
+    { QCameraParameters::LENSSHADE_ENABLE, TRUE },
+    { QCameraParameters::LENSSHADE_DISABLE, FALSE }
+};
+
+static const str_map hfr[] = {
+    { QCameraParameters::VIDEO_HFR_OFF, CAMERA_HFR_MODE_OFF },
+    { QCameraParameters::VIDEO_HFR_2X, CAMERA_HFR_MODE_60FPS },
+    { QCameraParameters::VIDEO_HFR_3X, CAMERA_HFR_MODE_90FPS },
+    { QCameraParameters::VIDEO_HFR_4X, CAMERA_HFR_MODE_120FPS },
+};
+
+static const str_map mce[] = {
+    { QCameraParameters::MCE_ENABLE, TRUE },
+    { QCameraParameters::MCE_DISABLE, FALSE }
+};
+
+static const str_map hdr[] = {
+    { QCameraParameters::HDR_ENABLE, TRUE },
+    { QCameraParameters::HDR_DISABLE, FALSE }
+};
+
+static const str_map histogram[] = {
+    { QCameraParameters::HISTOGRAM_ENABLE, TRUE },
+    { QCameraParameters::HISTOGRAM_DISABLE, FALSE }
+};
+
+static const str_map skinToneEnhancement[] = {
+    { QCameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE, TRUE },
+    { QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE, FALSE }
+};
+
+static const str_map denoise[] = {
+    { QCameraParameters::DENOISE_OFF, FALSE },
+    { QCameraParameters::DENOISE_ON, TRUE }
+};
+
+static const str_map selectable_zone_af[] = {
+    { QCameraParameters::SELECTABLE_ZONE_AF_AUTO,  AUTO },
+    { QCameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING, SPOT },
+    { QCameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED, CENTER_WEIGHTED },
+    { QCameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE, AVERAGE }
+};
+
+static const str_map facedetection[] = {
+    { QCameraParameters::FACE_DETECTION_OFF, FALSE },
+    { QCameraParameters::FACE_DETECTION_ON, TRUE }
+};
+
+#define DONT_CARE_COORDINATE -1
+static const str_map touchafaec[] = {
+    { QCameraParameters::TOUCH_AF_AEC_OFF, FALSE },
+    { QCameraParameters::TOUCH_AF_AEC_ON, TRUE }
+};
+
+static const str_map redeye_reduction[] = {
+    { QCameraParameters::REDEYE_REDUCTION_ENABLE, TRUE },
+    { QCameraParameters::REDEYE_REDUCTION_DISABLE, FALSE }
+};
+
+static const str_map zsl_modes[] = {
+    { QCameraParameters::ZSL_OFF, FALSE  },
+    { QCameraParameters::ZSL_ON, TRUE },
+};
+
+/*
+ * Values based on aec.c
+ */
+#define DONT_CARE_COORDINATE -1
+#define CAMERA_HISTOGRAM_ENABLE 1
+#define CAMERA_HISTOGRAM_DISABLE 0
+#define HISTOGRAM_STATS_SIZE 257
+
+/*
+ * Values based on aec.c
+ */
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+#define EXPOSURE_COMPENSATION_STEP ((float (1))/EXPOSURE_COMPENSATION_DENOMINATOR)
+
+static const str_map picture_formats[] = {
+        {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG},
+        {QCameraParameters::PIXEL_FORMAT_RAW, PICTURE_FORMAT_RAW}
+};
+
+static const str_map recording_Hints[] = {
+        {"false", FALSE},
+        {"true",  TRUE}
+};
+
+static const str_map picture_formats_zsl[] = {
+        {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG}
+};
+
+static const str_map frame_rate_modes[] = {
+        {QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE, FPS_MODE_AUTO},
+        {QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE, FPS_MODE_FIXED}
+};
+
+static int mPreviewFormat;
+static const str_map preview_formats[] = {
+        {QCameraParameters::PIXEL_FORMAT_YUV420SP,   CAMERA_YUV_420_NV21},
+        {QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, CAMERA_YUV_420_NV21_ADRENO},
+        {QCameraParameters::PIXEL_FORMAT_YUV420P, CAMERA_YUV_420_YV12}
+};
+static const str_map preview_formats1[] = {
+        {QCameraParameters::PIXEL_FORMAT_YUV420SP,   CAMERA_YUV_420_NV21},
+        {QCameraParameters::PIXEL_FORMAT_YUV420P, CAMERA_YUV_420_YV12}
+};
+
+static const str_map app_preview_formats[] = {
+    {QCameraParameters::PIXEL_FORMAT_YUV420SP,   HAL_PIXEL_FORMAT_YCrCb_420_SP}, //nv21
+    //{QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO}, //nv21_adreno
+    {QCameraParameters::PIXEL_FORMAT_YUV420P, HAL_PIXEL_FORMAT_YV12}, //YV12
+};
+
+
+static bool parameter_string_initialized = false;
+static String8 preview_size_values;
+static String8 hfr_size_values;
+static String8 picture_size_values;
+static String8 fps_ranges_supported_values;
+static String8 jpeg_thumbnail_size_values;
+static String8 antibanding_values;
+static String8 effect_values;
+static String8 autoexposure_values;
+static String8 whitebalance_values;
+static String8 flash_values;
+static String8 focus_mode_values;
+static String8 iso_values;
+static String8 lensshade_values;
+static String8 mce_values;
+static String8 hdr_values;
+static String8 histogram_values;
+static String8 skinToneEnhancement_values;
+static String8 touchafaec_values;
+static String8 picture_format_values;
+static String8 scenemode_values;
+static String8 denoise_values;
+static String8 zoom_ratio_values;
+static String8 preview_frame_rate_values;
+static String8 frame_rate_mode_values;
+static String8 scenedetect_values;
+static String8 preview_format_values;
+static String8 selectable_zone_af_values;
+static String8 facedetection_values;
+static String8 hfr_values;
+static String8 redeye_reduction_values;
+static String8 zsl_values;
+
+mm_camera_notify mCamNotify;
+mm_camera_ops mCamOps;
+static mm_camera_buffer_t mEncodeOutputBuffer[MAX_SNAPSHOT_BUFFERS];
+static encode_params_t mImageEncodeParms;
+static capture_params_t mImageCaptureParms;
+static raw_capture_params_t mRawCaptureParms;
+static zsl_capture_params_t mZslCaptureParms;
+static zsl_params_t mZslParms;
+static yv12_format_parms_t myv12_params;
+
+static String8 create_sizes_str(const camera_size_type *sizes, int len) {
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        sprintf(buffer, "%dx%d", sizes[0].width, sizes[0].height);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        sprintf(buffer, ",%dx%d", sizes[i].width, sizes[i].height);
+        str.append(buffer);
+    }
+    return str;
+}
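+/*
+ * Illustrative only: for a table { {640, 480}, {352, 288}, {176, 144} } the
+ * helper above produces "640x480,352x288,176x144", the comma-separated form
+ * the framework expects for supported-size parameter values.
+ */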
+
+static String8 create_fps_str(const android:: FPSRange* fps, int len) {
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        sprintf(buffer, "(%d,%d)", fps[0].minFPS, fps[0].maxFPS);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        sprintf(buffer, ",(%d,%d)", fps[i].minFPS, fps[i].maxFPS);
+        str.append(buffer);
+    }
+    return str;
+}
+
+static String8 create_values_str(const str_map *values, int len) {
+    String8 str;
+
+    if (len > 0) {
+        str.append(values[0].desc);
+    }
+    for (int i = 1; i < len; i++) {
+        str.append(",");
+        str.append(values[i].desc);
+    }
+    return str;
+}
+
+
+static String8 create_str(int16_t *arr, int length){
+    String8 str;
+    char buffer[32];
+
+    if(length > 0){
+        snprintf(buffer, sizeof(buffer), "%d", arr[0]);
+        str.append(buffer);
+    }
+
+    for (int i =1;i<length;i++){
+        snprintf(buffer, sizeof(buffer), ",%d",arr[i]);
+        str.append(buffer);
+    }
+    return str;
+}
+
+static String8 create_values_range_str(int min, int max){
+    String8 str;
+    char buffer[32];
+
+    if(min <= max){
+        snprintf(buffer, sizeof(buffer), "%d", min);
+        str.append(buffer);
+
+        for (int i = min + 1; i <= max; i++) {
+            snprintf(buffer, sizeof(buffer), ",%d", i);
+            str.append(buffer);
+        }
+    }
+    return str;
+}
+
+extern "C" {
+//------------------------------------------------------------------------
+//   : 720p busyQ functions
+//   --------------------------------------------------------------------
+static struct fifo_queue g_busy_frame_queue =
+    {0, 0, 0, PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, (char *)"video_busy_q"};
+};
+
+static void cam_frame_wait_video (void)
+{
+    ALOGV("cam_frame_wait_video E ");
+    if ((g_busy_frame_queue.num_of_frames) <=0){
+        pthread_cond_wait(&(g_busy_frame_queue.wait), &(g_busy_frame_queue.mut));
+    }
+    ALOGV("cam_frame_wait_video X");
+    return;
+}
+
+void cam_frame_flush_video (void)
+{
+    ALOGV("cam_frame_flush_video: in n = %d\n", g_busy_frame_queue.num_of_frames);
+    pthread_mutex_lock(&(g_busy_frame_queue.mut));
+
+    while (g_busy_frame_queue.front)
+    {
+       //dequeue from the busy queue
+       struct fifo_node *node  = dequeue (&g_busy_frame_queue);
+       if(node)
+           free(node);
+
+       ALOGV("cam_frame_flush_video: node \n");
+    }
+    pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+    ALOGV("cam_frame_flush_video: out n = %d\n", g_busy_frame_queue.num_of_frames);
+    return ;
+}
+
+static struct msm_frame * cam_frame_get_video()
+{
+    struct msm_frame *p = NULL;
+    ALOGV("cam_frame_get_video... in\n");
+    ALOGV("cam_frame_get_video... got lock\n");
+    if (g_busy_frame_queue.front)
+    {
+        //dequeue
+       struct fifo_node *node  = dequeue (&g_busy_frame_queue);
+       if (node)
+       {
+           p = (struct msm_frame *)node->f;
+           free (node);
+       }
+       ALOGV("cam_frame_get_video... out = %x\n", p->buffer);
+    }
+    return p;
+}
+
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+static int parseNDimVector_HAL(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if(num == NULL) {
+        ALOGE("Invalid output array (num == NULL)");
+        return -1;
+    }
+    //check if string starts and ends with parentheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        ALOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+        return -1;
+    }
+    start = (char*) str;
+    start++;
+    for(int i=0; i<N; i++) {
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            ALOGE("Cannot find delimiter '%c' in string \"%s\". end = %c", delim, str, *end);
+            return -1;
+        }
+        start = end+1;
+    }
+    return 0;
+}
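+
+/*
+ * Illustrative usage (FOCUS_AREA_INIT is defined earlier in this file when
+ * DLOPEN_LIBMMCAMERA is set; the variable name `area` is hypothetical):
+ *
+ *   int area[5];
+ *   if (parseNDimVector_HAL(FOCUS_AREA_INIT, area, 5) == 0) {
+ *       // area now holds { -1000, -1000, 1000, 1000, 1000 }
+ *   }
+ */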
+static int countChar(const char *str , char ch )
+{
+    int noOfChar = 0;
+
+    for ( int i = 0; str[i] != '\0'; i++) {
+        if ( str[i] == ch )
+          noOfChar = noOfChar + 1;
+    }
+
+    return noOfChar;
+}
+int checkAreaParameters(const char *str)
+{
+    int areaValues[6];
+    int left, right, top, bottom, weight;
+
+    if(countChar(str, ',') > 4) {
+        ALOGE("%s: No of area parameters exceeding the expected number %s", __FUNCTION__, str);
+        return -1;
+    }
+
+    if(parseNDimVector_HAL(str, areaValues, 5) !=0) {
+        ALOGE("%s: Failed to parse the input string %s", __FUNCTION__, str);
+        return -1;
+    }
+
+    ALOGV("%s: Area values are %d,%d,%d,%d,%d", __FUNCTION__,
+          areaValues[0], areaValues[1], areaValues[2], areaValues[3], areaValues[4]);
+
+    left = areaValues[0];
+    top = areaValues[1];
+    right = areaValues[2];
+    bottom = areaValues[3];
+    weight = areaValues[4];
+
+    // left should >= -1000
+    if (!(left >= -1000))
+        return -1;
+    // top should >= -1000
+    if(!(top >= -1000))
+        return -1;
+    // right should <= 1000
+    if(!(right <= 1000))
+        return -1;
+    // bottom should <= 1000
+    if(!(bottom <= 1000))
+        return -1;
+    // weight should >= 1
+    // weight should <= 1000
+    if(!((1 <= weight) && (weight <= 1000)))
+        return -1;
+    // left should < right
+    if(!(left < right))
+        return -1;
+    // top should < bottom
+    if(!(top < bottom))
+        return -1;
+
+    return 0;
+}
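+/*
+ * Illustrative only: "(-400,-400,400,400,500)" passes every check above,
+ * while "(0,0,0,0,1)" is rejected because left must be strictly less than
+ * right and top strictly less than bottom.
+ */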
+
+static void cam_frame_post_video (struct msm_frame *p)
+{
+    if (!p)
+    {
+        ALOGE("post video , buffer is null");
+        return;
+    }
+    ALOGV("cam_frame_post_video... in = %x\n", (unsigned int)(p->buffer));
+    pthread_mutex_lock(&(g_busy_frame_queue.mut));
+    ALOGV("post_video got lock. q count before enQ %d", g_busy_frame_queue.num_of_frames);
+    //enqueue to busy queue
+    struct fifo_node *node = (struct fifo_node *)malloc (sizeof (struct fifo_node));
+    if (node)
+    {
+        ALOGV(" post video , enqueing in busy queue");
+        node->f = p;
+        node->next = NULL;
+        enqueue (&g_busy_frame_queue, node);
+        ALOGV("post_video got lock. q count after enQ %d", g_busy_frame_queue.num_of_frames);
+    }
+    else
+    {
+        ALOGE("cam_frame_post_video error... out of memory\n");
+    }
+
+    pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+    pthread_cond_signal(&(g_busy_frame_queue.wait));
+
+    ALOGV("cam_frame_post_video... out = %x\n", p->buffer);
+
+    return;
+}
+
+QualcommCameraHardware::FrameQueue::FrameQueue(){
+    mInitialized = false;
+}
+
+QualcommCameraHardware::FrameQueue::~FrameQueue(){
+    flush();
+}
+
+void QualcommCameraHardware::FrameQueue::init(){
+    Mutex::Autolock l(&mQueueLock);
+    mInitialized = true;
+    mQueueWait.signal();
+}
+
+void QualcommCameraHardware::FrameQueue::deinit(){
+    Mutex::Autolock l(&mQueueLock);
+    mInitialized = false;
+    mQueueWait.signal();
+}
+
+bool QualcommCameraHardware::FrameQueue::isInitialized(){
+   Mutex::Autolock l(&mQueueLock);
+   return mInitialized;
+}
+
+bool QualcommCameraHardware::FrameQueue::add(
+                struct msm_frame * element){
+    Mutex::Autolock l(&mQueueLock);
+    if(mInitialized == false)
+        return false;
+
+    mContainer.add(element);
+    mQueueWait.signal();
+    return true;
+}
+
+struct msm_frame * QualcommCameraHardware::FrameQueue::get(){
+
+    struct msm_frame *frame;
+    mQueueLock.lock();
+    while(mInitialized && mContainer.isEmpty()){
+        mQueueWait.wait(mQueueLock);
+    }
+
+    if(!mInitialized){
+        mQueueLock.unlock();
+        return NULL;
+    }
+
+    frame = mContainer.itemAt(0);
+    mContainer.removeAt(0);
+    mQueueLock.unlock();
+    return frame;
+}
+
+void QualcommCameraHardware::FrameQueue::flush(){
+    Mutex::Autolock l(&mQueueLock);
+    mContainer.clear();
+
+}
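+
+/*
+ * Illustrative usage sketch (producer/consumer names are hypothetical, not
+ * part of this HAL): a frame callback acts as the producer and a worker
+ * thread as the consumer.
+ *
+ *   QualcommCameraHardware::FrameQueue q;
+ *   q.init();
+ *   // producer (frame callback):  q.add(frame);
+ *   // consumer thread:            struct msm_frame *f = q.get();
+ *   //                             f is NULL once deinit() has been called
+ *   q.deinit();   // wakes a blocked get() and makes it return NULL
+ *   q.flush();    // drops any frames still queued
+ */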
+
+
+void QualcommCameraHardware::storeTargetType(void) {
+    char mDeviceName[PROPERTY_VALUE_MAX];
+    property_get("ro.product.device",mDeviceName," ");
+    mCurrentTarget = TARGET_MAX;
+    for (int i = 0; i < TARGET_MAX; i++) {
+        if (!strncmp(mDeviceName, targetList[i].targetStr, 7)) {
+            mCurrentTarget = targetList[i].targetEnum;
+            if (mCurrentTarget == TARGET_MSM7625) {
+                if (!strncmp(mDeviceName, "msm7625a", 8))
+                    mCurrentTarget = TARGET_MSM7625A;
+            }
+            if (mCurrentTarget == TARGET_MSM7627) {
+                if (!strncmp(mDeviceName, "msm7627a", 8))
+                    mCurrentTarget = TARGET_MSM7627A;
+            }
+            break;
+        }
+    }
+    ALOGV(" Storing the current target type as %d ", mCurrentTarget );
+    return;
+}
+
+void *openCamera(void *data) {
+    ALOGV(" openCamera : E");
+    mCameraOpen = false;
+
+    if (!libmmcamera) {
+        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+        return NULL;
+    }
+
+    *(void **)&LINK_mm_camera_init =
+        ::dlsym(libmmcamera, "mm_camera_init");
+
+    *(void **)&LINK_mm_camera_exec =
+        ::dlsym(libmmcamera, "mm_camera_exec");
+
+    *(void **)&LINK_mm_camera_deinit =
+        ::dlsym(libmmcamera, "mm_camera_deinit");
+
+
+    if (MM_CAMERA_SUCCESS != LINK_mm_camera_init(&mCfgControl, &mCamNotify, &mCamOps, 0)) {
+        ALOGE("startCamera: mm_camera_init failed:");
+        return NULL;
+        //pthread_exit((void*) ret_val);
+    }
+
+    uint8_t camera_id8 = (uint8_t)HAL_currentCameraId;
+    if (MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_CAMERA_ID, &camera_id8)) {
+        ALOGE("setting camera id failed");
+        LINK_mm_camera_deinit();
+        return false;
+        //pthread_exit((void*) ret_val);
+    }
+
+    //camera_mode_t mode = (camera_mode_t)HAL_currentCameraMode;
+    camera_mode_t mode = CAMERA_MODE_2D;
+    if (MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_MODE, &mode)) {
+        ALOGE("startCamera: CAMERA_PARM_MODE failed:");
+        LINK_mm_camera_deinit();
+        return false;
+        //pthread_exit((void*) ret_val);
+    }
+
+    if (MM_CAMERA_SUCCESS != LINK_mm_camera_exec()) {
+        ALOGE("startCamera: mm_camera_exec failed:");
+        return false;
+        //pthread_exit((void*) ret_val);
+    }
+    mCameraOpen = true;
+    ALOGV(" openCamera : X");
+    if (CAMERA_MODE_3D == mode) {
+        camera_3d_frame_t snapshotFrame;
+        snapshotFrame.frame_type = CAM_SNAPSHOT_FRAME;
+        if(MM_CAMERA_SUCCESS !=
+            mCfgControl.mm_camera_get_parm(CAMERA_PARM_3D_FRAME_FORMAT,
+                (void *)&snapshotFrame)){
+            ALOGE("%s: get 3D format failed", __func__);
+            LINK_mm_camera_deinit();
+            return NULL;
+        }
+        QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+        if (obj != 0) {
+            obj->mSnapshot3DFormat = snapshotFrame.format;
+            ALOGI("%s: 3d format  snapshot %d", __func__, obj->mSnapshot3DFormat);
+        }
+    }
+
+    ALOGV("openCamera : X");
+
+    return NULL;
+}
+//-------------------------------------------------------------------------------------
+static Mutex singleton_lock;
+static bool singleton_releasing;
+static nsecs_t singleton_releasing_start_time;
+static const nsecs_t SINGLETON_RELEASING_WAIT_TIME = seconds_to_nanoseconds(5);
+static const nsecs_t SINGLETON_RELEASING_RECHECK_TIMEOUT = seconds_to_nanoseconds(1);
+static Condition singleton_wait;
+
+static void receive_camframe_callback(struct msm_frame *frame);
+static void receive_liveshot_callback(liveshot_status status, uint32_t jpeg_size);
+static void receive_camstats_callback(camstats_type stype, camera_preview_histogram_info* histinfo);
+static void receive_camframe_video_callback(struct msm_frame *frame); // 720p
+static int8_t receive_event_callback(mm_camera_event* event);
+static void receive_shutter_callback(common_crop_t *crop);
+static void receive_camframe_error_callback(camera_error_type err);
+static int fb_fd = -1;
+static int32_t mMaxZoom = 0;
+static bool zoomSupported = false;
+static int dstOffset = 0;
+
+static int16_t * zoomRatios;
+
+
+/* When using MDP zoom, double the preview buffers. These buffers are used
+ * as follows:
+ * 1. All the buffers share a single FD. Since that FD is passed to surface
+ * flinger at initial registration, surface flinger can access any of the
+ * buffers when needed.
+ * 2. Only "kPreviewBufferCount" buffers (SrcSet) are registered with the
+ * camera driver to receive preview frames. The remaining buffers (DstSet)
+ * are used by the HAL and by surface flinger only when crop information
+ * is present in the frame.
+ * 3. When there is no crop information, MDP zoom is not called, and the
+ * buffers in SrcSet are passed directly to surface flinger for display.
+ * 4. When crop information is present, MDP zoom is called, the final data
+ * is placed in a buffer from DstSet, and that buffer is given to surface
+ * flinger to display. The selection logic is sketched below.
+ */
+#define NUM_MORE_BUFS 2
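+
+/* A minimal sketch of the buffer selection described above. mdp_zoom,
+ * srcBuffer, dstBuffer, cropRect, cropInfoPresent and frameIndex are
+ * illustrative names only; the real logic lives in the preview/display path.
+ *
+ *   int displayIndex = frameIndex;                       // SrcSet buffer by default
+ *   if (cropInfoPresent) {
+ *       int dstIndex = kPreviewBufferCount + dstOffset;  // pick a DstSet buffer
+ *       mdp_zoom(srcBuffer[frameIndex], dstBuffer[dstIndex], cropRect);
+ *       dstOffset = (dstOffset + 1) % NUM_MORE_BUFS;
+ *       displayIndex = dstIndex;                         // hand the DstSet buffer to surface flinger
+ *   }
+ */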
+
+QualcommCameraHardware::QualcommCameraHardware()
+    : mParameters(),
+      mCameraRunning(false),
+      mPreviewInitialized(false),
+      mPreviewThreadRunning(false),
+      mHFRThreadRunning(false),
+      mFrameThreadRunning(false),
+      mVideoThreadRunning(false),
+      mSnapshotThreadRunning(false),
+      mJpegThreadRunning(false),
+      mSmoothzoomThreadRunning(false),
+      mSmoothzoomThreadExit(false),
+      mInSnapshotMode(false),
+      mEncodePending(false),
+      mBuffersInitialized(false),
+      mSnapshotFormat(0),
+      mFirstFrame(true),
+      mReleasedRecordingFrame(false),
+      mPreviewFrameSize(0),
+      mRawSize(0),
+      mCbCrOffsetRaw(0),
+      mYOffset(0),
+      mAutoFocusThreadRunning(false),
+      mInitialized(false),
+      mBrightness(0),
+      mSkinToneEnhancement(0),
+      mHJR(0),
+      mInPreviewCallback(false),
+      //mUseOverlay(0),
+      mIs3DModeOn(0),
+      //mOverlay(0),
+      mMsgEnabled(0),
+      mNotifyCallback(0),
+      mDataCallback(0),
+      mDataCallbackTimestamp(0),
+      mCallbackCookie(0),
+      mDebugFps(0),
+      mSnapshotDone(0),
+      maxSnapshotWidth(0),
+      maxSnapshotHeight(0),
+      mHasAutoFocusSupport(0),
+      mDisEnabled(0),
+      mRotation(0),
+      mResetWindowCrop(false),
+      mThumbnailWidth(0),
+      mThumbnailHeight(0),
+      strTexturesOn(false),
+      mPictureWidth(0),
+      mPictureHeight(0),
+      mPostviewWidth(0),
+      mPostviewHeight(0),
+      mPreviewWindow(NULL),
+      mTotalPreviewBufferCount(0),
+      mZslFlashEnable(false),
+      mZslPanorama(false),
+      mSnapshotCancel(false),
+      mHFRMode(false),
+      mActualPictWidth(0),
+      mActualPictHeight(0),
+      mDenoiseValue(0),
+      mPreviewStopping(false),
+      mInHFRThread(false),
+      mPrevHeapDeallocRunning(false),
+      mHdrMode(false ),
+      mExpBracketMode(false),
+      mZslEnable(false),
+      mStoreMetaDataInFrame(0),
+      mRecordingState(0)
+{
+    ALOGI("QualcommCameraHardware constructor E");
+    mMMCameraDLRef = MMCameraDL::getInstance();
+    libmmcamera = mMMCameraDLRef->pointer();
+    char value[PROPERTY_VALUE_MAX];
+    mCameraOpen = false;
+    /*if(HAL_currentSnapshotMode == CAMERA_SNAPSHOT_ZSL) {
+        ALOGI("%s: this is ZSL mode", __FUNCTION__);
+        mZslEnable = true;
+    }*/
+
+    property_get("persist.camera.hal.multitouchaf", value, "0");
+    mMultiTouch = atoi(value);
+
+    storeTargetType();
+    for(int i=0; i< MAX_SNAPSHOT_BUFFERS; i++) {
+       mRawMapped[i] = NULL;
+       mJpegMapped[i] = NULL;
+       mThumbnailMapped[i] = NULL;
+    }
+    mRawSnapshotMapped = NULL;
+    mJpegCopyMapped = NULL;
+    for(int i=0; i< RECORD_BUFFERS; i++) {
+        mRecordMapped[i] = NULL;
+    }
+
+    for(int i=0; i<3; i++)
+        mStatsMapped[i] = NULL;
+
+    mJpegLiveSnapMapped = NULL;
+    if(HAL_currentCameraMode == CAMERA_SUPPORT_MODE_3D){
+        mIs3DModeOn = true;
+    }
+    /* TODO: Will remove this command line interface at end */
+    property_get("persist.camera.hal.3dmode", value, "0");
+    int mode = atoi(value);
+    if( mode  == 1) {
+        mIs3DModeOn = true;
+        HAL_currentCameraMode = CAMERA_MODE_3D;
+    }
+
+    if( (pthread_create(&mDeviceOpenThread, NULL, openCamera, NULL)) != 0) {
+        ALOGE(" openCamera thread creation failed ");
+    }
+    memset(&mDimension, 0, sizeof(mDimension));
+    memset(&mCrop, 0, sizeof(mCrop));
+    memset(&zoomCropInfo, 0, sizeof(android_native_rect_t));
+    property_get("persist.debug.sf.showfps", value, "0");
+    mDebugFps = atoi(value);
+    if( mCurrentTarget == TARGET_MSM7630 || mCurrentTarget == TARGET_MSM8660 ) {
+        kPreviewBufferCountActual = kPreviewBufferCount;
+        kRecordBufferCount = RECORD_BUFFERS;
+        recordframes = new msm_frame[kRecordBufferCount];
+        record_buffers_tracking_flag = new bool[kRecordBufferCount];
+    }
+    else {
+        kPreviewBufferCountActual = kPreviewBufferCount + NUM_MORE_BUFS;
+        if( mCurrentTarget == TARGET_QSD8250 ) {
+            kRecordBufferCount = RECORD_BUFFERS_8x50;
+            recordframes = new msm_frame[kRecordBufferCount];
+            record_buffers_tracking_flag = new bool[kRecordBufferCount];
+        }
+    }
+    mTotalPreviewBufferCount = kTotalPreviewBufferCount;
+    if((mCurrentTarget != TARGET_MSM7630 ) &&  (mCurrentTarget != TARGET_QSD8250)
+      && (mCurrentTarget != TARGET_MSM8660)) {
+        for (int i = 0; i < mTotalPreviewBufferCount; i++)
+          metadata_memory[i] = NULL;
+    }
+    else {
+        for (int i = 0; i < kRecordBufferCount; i++)
+          metadata_memory[i] = NULL;
+    }
+    switch(mCurrentTarget){
+        case TARGET_MSM7627:
+        case TARGET_MSM7627A:
+            jpegPadding = 0; // to be checked.
+            break;
+        case TARGET_QSD8250:
+        case TARGET_MSM7630:
+        case TARGET_MSM8660:
+            jpegPadding = 0;
+            break;
+        default:
+            jpegPadding = 0;
+            break;
+    }
+    // Initialize with default format values. The format values can be
+    // overriden when application requests.
+    mDimension.prev_format     = CAMERA_YUV_420_NV21;
+    mPreviewFormat             = CAMERA_YUV_420_NV21;
+    mDimension.enc_format      = CAMERA_YUV_420_NV21;
+    if((mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_MSM8660))
+        mDimension.enc_format  = CAMERA_YUV_420_NV12;
+    mDimension.main_img_format = CAMERA_YUV_420_NV21;
+    mDimension.thumb_format    = CAMERA_YUV_420_NV21;
+
+    if( (mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_MSM8660) ){
+        /* DIS is disabled all the time in VPE support targets.
+         * No provision for the user to control this.
+         */
+        mDisEnabled = 0;
+        /* Get the DIS value from properties, to check whether
+         * DIS is disabled or not. If the property is not found
+         * default to DIS disabled.*/
+        property_get("persist.camera.hal.dis", value, "0");
+        mDisEnabled = atoi(value);
+        mVpeEnabled = 1;
+    }
+    if(mIs3DModeOn) {
+        mDisEnabled = 0;
+    }
+    ALOGV("constructor EX");
+}
+
+void QualcommCameraHardware::hasAutoFocusSupport(){
+    if( !mCamOps.mm_camera_is_supported(CAMERA_OPS_FOCUS)){
+        ALOGI("AutoFocus is not supported");
+        mHasAutoFocusSupport = false;
+    }else {
+        mHasAutoFocusSupport = true;
+    }
+    if(mZslEnable)
+        mHasAutoFocusSupport = false;
+}
+
+//filter Picture sizes based on max width and height
+void QualcommCameraHardware::filterPictureSizes(){
+    unsigned int i;
+    if(PICTURE_SIZE_COUNT <= 0)
+        return;
+    maxSnapshotWidth = picture_sizes[0].width;
+    maxSnapshotHeight = picture_sizes[0].height;
+   // Iterate through all the width and height to find the max value
+    for(i =0; i<PICTURE_SIZE_COUNT;i++){
+        if(((maxSnapshotWidth < picture_sizes[i].width) &&
+            (maxSnapshotHeight <= picture_sizes[i].height))){
+            maxSnapshotWidth = picture_sizes[i].width;
+            maxSnapshotHeight = picture_sizes[i].height;
+        }
+    }
+    if(mZslEnable){
+        // due to lack of PMEM we restrict to lower resolution
+        picture_sizes_ptr = zsl_picture_sizes;
+        supportedPictureSizesCount = 7;
+    }
+    else if(mIs3DModeOn){
+     // In 3D mode we only want 1080p picture size
+      picture_sizes_ptr = for_3D_picture_sizes;
+      supportedPictureSizesCount = 1;
+    }
+    else{
+        picture_sizes_ptr = picture_sizes;
+        supportedPictureSizesCount = PICTURE_SIZE_COUNT;
+    }
+}
+
+bool QualcommCameraHardware::supportsSceneDetection() {
+   unsigned int prop = 0;
+   for(prop=0; prop<sizeof(boardProperties)/sizeof(board_property); prop++) {
+       if((mCurrentTarget == boardProperties[prop].target)
+          && boardProperties[prop].hasSceneDetect == true) {
+           return true;
+       }
+   }
+   return false;
+}
+
+bool QualcommCameraHardware::supportsSelectableZoneAf() {
+   unsigned int prop = 0;
+   for(prop=0; prop<sizeof(boardProperties)/sizeof(board_property); prop++) {
+       if((mCurrentTarget == boardProperties[prop].target)
+          && boardProperties[prop].hasSelectableZoneAf == true) {
+           return true;
+       }
+   }
+   return false;
+}
+
+bool QualcommCameraHardware::supportsFaceDetection() {
+   unsigned int prop = 0;
+   for(prop=0; prop<sizeof(boardProperties)/sizeof(board_property); prop++) {
+       if((mCurrentTarget == boardProperties[prop].target)
+          && boardProperties[prop].hasFaceDetect == true) {
+           return true;
+       }
+   }
+   return false;
+}
+
+void QualcommCameraHardware::initDefaultParameters()
+{
+    ALOGI("initDefaultParameters E");
+    mDimension.picture_width = DEFAULT_PICTURE_WIDTH;
+    mDimension.picture_height = DEFAULT_PICTURE_HEIGHT;
+    mDimension.ui_thumbnail_width =
+            thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+    mDimension.ui_thumbnail_height =
+            thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+    bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+               sizeof(cam_ctrl_dimension_t),(void *) &mDimension);
+    if(ret != true) {
+        ALOGE("CAMERA_PARM_DIMENSION failed!!!");
+        return;
+    }
+    hasAutoFocusSupport();
+    //Disable DIS for Web Camera
+    if( !mCfgControl.mm_camera_is_supported(CAMERA_PARM_VIDEO_DIS)){
+        ALOGV("DISABLE DIS");
+        mDisEnabled = 0;
+    }else {
+        ALOGV("Enable DIS");
+    }
+    // Initialize constant parameter strings. This will happen only once in the
+    // lifetime of the mediaserver process.
+    if (!parameter_string_initialized) {
+        if(mIs3DModeOn){
+          antibanding_values = create_values_str(
+            antibanding_3D, sizeof(antibanding_3D) / sizeof(str_map));
+        } else {
+            antibanding_values = create_values_str(
+                antibanding, sizeof(antibanding) / sizeof(str_map));
+        }
+        effect_values = create_values_str(
+            effects, sizeof(effects) / sizeof(str_map));
+        autoexposure_values = create_values_str(
+            autoexposure, sizeof(autoexposure) / sizeof(str_map));
+        whitebalance_values = create_values_str(
+            whitebalance, sizeof(whitebalance) / sizeof(str_map));
+        //filter picture sizes
+        filterPictureSizes();
+        picture_size_values = create_sizes_str(
+                picture_sizes_ptr, supportedPictureSizesCount);
+        preview_size_values = create_sizes_str(
+                preview_sizes,  PREVIEW_SIZE_COUNT);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+                            preview_size_values.string());
+
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
+                            preview_size_values.string());
+
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+                            picture_size_values.string());
+        mParameters.set(QCameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED,
+                            "true");
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                       QCameraParameters::FOCUS_MODE_INFINITY);
+        mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                       QCameraParameters::FOCUS_MODE_INFINITY);
+        mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+
+        mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, FOCUS_AREA_INIT);
+        mParameters.set(QCameraParameters::KEY_METERING_AREAS, FOCUS_AREA_INIT);
+
+        if(!mIs3DModeOn){
+            hfr_size_values = create_sizes_str(
+                hfr_sizes, HFR_SIZE_COUNT);
+        }
+        fps_ranges_supported_values = create_fps_str(
+            FpsRangesSupported,FPS_RANGES_SUPPORTED_COUNT );
+        mParameters.set(
+            QCameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+            fps_ranges_supported_values);
+        mParameters.setPreviewFpsRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000);
+
+        flash_values = create_values_str(
+            flash, sizeof(flash) / sizeof(str_map));
+        if(mHasAutoFocusSupport){
+            focus_mode_values = create_values_str(
+                    focus_modes, sizeof(focus_modes) / sizeof(str_map));
+        }
+        if(mIs3DModeOn){
+          iso_values = create_values_str(
+              iso_3D,sizeof(iso_3D)/sizeof(str_map));
+        } else{
+           iso_values = create_values_str(
+              iso,sizeof(iso)/sizeof(str_map));
+        }
+        lensshade_values = create_values_str(
+            lensshade,sizeof(lensshade)/sizeof(str_map));
+        mce_values = create_values_str(
+            mce,sizeof(mce)/sizeof(str_map));
+        if(!mIs3DModeOn){
+          hfr_values = create_values_str(
+            hfr,sizeof(hfr)/sizeof(str_map));
+        }
+        if(mCurrentTarget == TARGET_MSM8660)
+            hdr_values = create_values_str(
+                hdr,sizeof(hdr)/sizeof(str_map));
+        //Currently Enabling Histogram for 8x60
+        if(mCurrentTarget == TARGET_MSM8660) {
+            histogram_values = create_values_str(
+                histogram,sizeof(histogram)/sizeof(str_map));
+        }
+        //Currently Enabling Skin Tone Enhancement for 8x60 and 7630
+        if((mCurrentTarget == TARGET_MSM8660)||(mCurrentTarget == TARGET_MSM7630)) {
+            skinToneEnhancement_values = create_values_str(
+                skinToneEnhancement,sizeof(skinToneEnhancement)/sizeof(str_map));
+        }
+        if(mHasAutoFocusSupport){
+            touchafaec_values = create_values_str(
+                touchafaec,sizeof(touchafaec)/sizeof(str_map));
+        }
+        zsl_values = create_values_str(
+            zsl_modes,sizeof(zsl_modes)/sizeof(str_map));
+
+        if(mZslEnable){
+           picture_format_values = create_values_str(
+               picture_formats_zsl, sizeof(picture_formats_zsl)/sizeof(str_map));
+        } else{
+           picture_format_values = create_values_str(
+               picture_formats, sizeof(picture_formats)/sizeof(str_map));
+        }
+        if(mCurrentTarget == TARGET_MSM8660 ||
+          (mCurrentTarget == TARGET_MSM7625A ||
+           mCurrentTarget == TARGET_MSM7627A)) {
+            denoise_values = create_values_str(
+                denoise, sizeof(denoise) / sizeof(str_map));
+        }
+       if(mCfgControl.mm_camera_query_parms(CAMERA_PARM_ZOOM_RATIO,
+           (void **)&zoomRatios, (uint32_t *) &mMaxZoom) == MM_CAMERA_SUCCESS) {
+            zoomSupported = true;
+            if( mMaxZoom >0) {
+                ALOGE("Maximum zoom value is %d", mMaxZoom);
+                if(zoomRatios != NULL) {
+                    zoom_ratio_values =  create_str(zoomRatios, mMaxZoom);
+                } else {
+                    ALOGE("Failed to get zoomratios ..");
+                }
+           } else {
+               zoomSupported = false;
+           }
+       } else {
+            zoomSupported = false;
+            ALOGE("Failed to get maximum zoom value...setting max "
+                    "zoom to zero");
+            mMaxZoom = 0;
+        }
+        preview_frame_rate_values = create_values_range_str(
+            MINIMUM_FPS, MAXIMUM_FPS);
+
+        scenemode_values = create_values_str(
+            scenemode, sizeof(scenemode) / sizeof(str_map));
+
+        if(supportsSceneDetection()) {
+            scenedetect_values = create_values_str(
+                scenedetect, sizeof(scenedetect) / sizeof(str_map));
+        }
+
+        if(mHasAutoFocusSupport && supportsSelectableZoneAf()){
+            selectable_zone_af_values = create_values_str(
+                selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map));
+        }
+
+        if(mHasAutoFocusSupport && supportsFaceDetection()) {
+            facedetection_values = create_values_str(
+                facedetection, sizeof(facedetection) / sizeof(str_map));
+        }
+
+        redeye_reduction_values = create_values_str(
+            redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map));
+
+        parameter_string_initialized = true;
+    }
+    //set video size
+    if(( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+       String8 vSize = create_sizes_str(preview_sizes, 1);
+       mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, vSize.string());
+    }
+    if(mIs3DModeOn){
+       ALOGE("In initDefaultParameters - 3D mode on so set the default preview to 1280 x 720");
+       mParameters.setPreviewSize(DEFAULT_PREVIEW_WIDTH_3D, DEFAULT_PREVIEW_HEIGHT_3D);
+       mDimension.display_width = DEFAULT_PREVIEW_WIDTH_3D;
+       mDimension.display_height = DEFAULT_PREVIEW_HEIGHT_3D;
+    } else{
+       mParameters.setPreviewSize(DEFAULT_PREVIEW_WIDTH, DEFAULT_PREVIEW_HEIGHT);
+       mDimension.display_width = DEFAULT_PREVIEW_WIDTH;
+       mDimension.display_height = DEFAULT_PREVIEW_HEIGHT;
+    }
+    mParameters.setPreviewFrameRate(DEFAULT_FPS);
+    if( mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS)){
+        mParameters.set(
+            QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+            preview_frame_rate_values.string());
+     } else {
+        mParameters.setPreviewFrameRate(DEFAULT_FIXED_FPS_VALUE);
+        mParameters.set(
+            QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+            DEFAULT_FIXED_FPS_VALUE);
+     }
+    mParameters.setPreviewFrameRateMode("frame-rate-auto");
+    mParameters.setPreviewFormat("yuv420sp"); // informative
+    mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+    if(mIs3DModeOn){
+      mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH_3D, DEFAULT_PICTURE_HEIGHT_3D);
+    } else{
+      mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH, DEFAULT_PICTURE_HEIGHT);
+    }
+    mParameters.setPictureFormat("jpeg"); // informative
+
+    mParameters.set(QCameraParameters::KEY_VIDEO_FRAME_FORMAT, "yuv420sp");
+
+    mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, "85"); // max quality
+
+    mParameters.set("power-mode-supported", "false");
+
+    mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+                    THUMBNAIL_WIDTH_STR); // informative
+    mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+                    THUMBNAIL_HEIGHT_STR); // informative
+    mDimension.ui_thumbnail_width =
+            thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+    mDimension.ui_thumbnail_height =
+            thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+    mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "90");
+
+    String8 valuesStr = create_sizes_str(jpeg_thumbnail_sizes, JPEG_THUMBNAIL_SIZE_COUNT);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
+                valuesStr.string());
+
+    // Define CAMERA_SMOOTH_ZOOM in Android.mk file , to enable smoothzoom
+#ifdef CAMERA_SMOOTH_ZOOM
+    mParameters.set(QCameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
+#endif
+
+    if(zoomSupported){
+        mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "true");
+        ALOGV("max zoom is %d", mMaxZoom-1);
+        /* mMaxZoom value that the query interface returns is the size
+         * of zoom table. So the actual max zoom value will be one
+         * less than that value.
+         */
+        mParameters.set("max-zoom",mMaxZoom-1);
+        mParameters.set(QCameraParameters::KEY_ZOOM_RATIOS,
+                            zoom_ratio_values);
+    } else {
+        mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "false");
+    }
+    /* Enable zoom support for video application if VPE enabled */
+    if(zoomSupported && mVpeEnabled) {
+        mParameters.set("video-zoom-support", "true");
+    } else {
+        mParameters.set("video-zoom-support", "false");
+    }
+
+    mParameters.set(QCameraParameters::KEY_CAMERA_MODE,0);
+
+    mParameters.set(QCameraParameters::KEY_ANTIBANDING,
+                    QCameraParameters::ANTIBANDING_OFF);
+    mParameters.set(QCameraParameters::KEY_EFFECT,
+                    QCameraParameters::EFFECT_NONE);
+    mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE,
+                    QCameraParameters::AUTO_EXPOSURE_FRAME_AVG);
+    mParameters.set(QCameraParameters::KEY_WHITE_BALANCE,
+                    QCameraParameters::WHITE_BALANCE_AUTO);
+    if( (mCurrentTarget != TARGET_MSM7630)
+        && (mCurrentTarget != TARGET_QSD8250)
+        && (mCurrentTarget != TARGET_MSM8660)
+        && (mCurrentTarget != TARGET_MSM7627A)) {
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+                    "yuv420sp");
+    } else if(mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627) {
+        preview_format_values = create_values_str(
+            preview_formats1, sizeof(preview_formats1) / sizeof(str_map));
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+                preview_format_values.string());
+    } else {
+        preview_format_values = create_values_str(
+            preview_formats, sizeof(preview_formats) / sizeof(str_map));
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+                preview_format_values.string());
+    }
+
+    frame_rate_mode_values = create_values_str(
+            frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map));
+    if( mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS_MODE)){
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES,
+                    frame_rate_mode_values.string());
+    }
+
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+                    preview_size_values.string());
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+                    picture_size_values.string());
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_ANTIBANDING,
+                    antibanding_values);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_EFFECTS, effect_values);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE, autoexposure_values);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_WHITE_BALANCE,
+                    whitebalance_values);
+
+    if(mHasAutoFocusSupport){
+       mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                    focus_mode_values);
+       mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                    QCameraParameters::FOCUS_MODE_AUTO);
+    } else {
+       mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                   QCameraParameters::FOCUS_MODE_INFINITY);
+       mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                   QCameraParameters::FOCUS_MODE_INFINITY);
+    }
+
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
+                    picture_format_values);
+
+    if(mCfgControl.mm_camera_is_supported(CAMERA_PARM_LED_MODE)) {
+        mParameters.set(QCameraParameters::KEY_FLASH_MODE,
+                        QCameraParameters::FLASH_MODE_OFF);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_FLASH_MODES,
+                        flash_values);
+    }
+
+    mParameters.set(QCameraParameters::KEY_MAX_SHARPNESS,
+            CAMERA_MAX_SHARPNESS);
+    mParameters.set(QCameraParameters::KEY_MAX_CONTRAST,
+            CAMERA_MAX_CONTRAST);
+    mParameters.set(QCameraParameters::KEY_MAX_SATURATION,
+            CAMERA_MAX_SATURATION);
+
+    mParameters.set(
+            QCameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,
+            EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR);
+    mParameters.set(
+            QCameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
+            EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR);
+    mParameters.set(
+            QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+            EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR);
+    mParameters.setFloat(
+            QCameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,
+            EXPOSURE_COMPENSATION_STEP);
+
+    mParameters.set("luma-adaptation", "3");
+    mParameters.set("skinToneEnhancement", "0");
+    mParameters.set("zoom-supported", "true");
+    mParameters.set("zoom", 0);
+    mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT,
+                    QCameraParameters::PIXEL_FORMAT_JPEG);
+
+    mParameters.set(QCameraParameters::KEY_SHARPNESS,
+                    CAMERA_DEF_SHARPNESS);
+    mParameters.set(QCameraParameters::KEY_CONTRAST,
+                    CAMERA_DEF_CONTRAST);
+    mParameters.set(QCameraParameters::KEY_SATURATION,
+                    CAMERA_DEF_SATURATION);
+
+    mParameters.set(QCameraParameters::KEY_ISO_MODE,
+                    QCameraParameters::ISO_AUTO);
+    mParameters.set(QCameraParameters::KEY_LENSSHADE,
+                    QCameraParameters::LENSSHADE_ENABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_ISO_MODES,
+                    iso_values);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_LENSSHADE_MODES,
+                    lensshade_values);
+    mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT,
+                    QCameraParameters::MCE_ENABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES,
+                    mce_values);
+    if(mCfgControl.mm_camera_is_supported(CAMERA_PARM_HFR) && !(mIs3DModeOn)) {
+        mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE,
+                    QCameraParameters::VIDEO_HFR_OFF);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,
+                    hfr_size_values.string());
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES,
+                    hfr_values);
+    } else
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,"");
+
+    mParameters.set(QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING,
+                    QCameraParameters::MCE_DISABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_HDR_IMAGING_MODES,
+                    hdr_values);
+    mParameters.set(QCameraParameters::KEY_HISTOGRAM,
+                    QCameraParameters::HISTOGRAM_DISABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES,
+                    histogram_values);
+    mParameters.set(QCameraParameters::KEY_SKIN_TONE_ENHANCEMENT,
+                    QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES,
+                    skinToneEnhancement_values);
+    mParameters.set(QCameraParameters::KEY_SCENE_MODE,
+                    QCameraParameters::SCENE_MODE_AUTO);
+    mParameters.set("strtextures", "OFF");
+
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_MODES,
+                    scenemode_values);
+    mParameters.set(QCameraParameters::KEY_DENOISE,
+                    QCameraParameters::DENOISE_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_DENOISE,
+                    denoise_values);
+
+    //touch af/aec parameters
+    mParameters.set(QCameraParameters::KEY_TOUCH_AF_AEC,
+                    QCameraParameters::TOUCH_AF_AEC_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC,
+                    touchafaec_values);
+    mParameters.set("touchAfAec-dx","100");
+    mParameters.set("touchAfAec-dy","100");
+    mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+    mParameters.set(QCameraParameters::KEY_MAX_NUM_METERING_AREAS, "1");
+
+    mParameters.set(QCameraParameters::KEY_SCENE_DETECT,
+                    QCameraParameters::SCENE_DETECT_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_DETECT,
+                    scenedetect_values);
+    mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF,
+                    QCameraParameters::SELECTABLE_ZONE_AF_AUTO);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF,
+                    selectable_zone_af_values);
+    mParameters.set(QCameraParameters::KEY_FACE_DETECTION,
+                    QCameraParameters::FACE_DETECTION_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_FACE_DETECTION,
+                    facedetection_values);
+    mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION,
+                    QCameraParameters::REDEYE_REDUCTION_DISABLE);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION,
+                    redeye_reduction_values);
+    mParameters.set(QCameraParameters::KEY_ZSL,
+                    QCameraParameters::ZSL_OFF);
+    mParameters.set(QCameraParameters::KEY_SUPPORTED_ZSL_MODES,
+                    zsl_values);
+
+    float focalLength = 0.0f;
+    float horizontalViewAngle = 0.0f;
+    float verticalViewAngle = 0.0f;
+
+    mCfgControl.mm_camera_get_parm(CAMERA_PARM_FOCAL_LENGTH,
+            (void *)&focalLength);
+    mParameters.setFloat(QCameraParameters::KEY_FOCAL_LENGTH,
+                    focalLength);
+    mCfgControl.mm_camera_get_parm(CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+            (void *)&horizontalViewAngle);
+    mParameters.setFloat(QCameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,
+                    horizontalViewAngle);
+    mCfgControl.mm_camera_get_parm(CAMERA_PARM_VERTICAL_VIEW_ANGLE,
+            (void *)&verticalViewAngle);
+    mParameters.setFloat(QCameraParameters::KEY_VERTICAL_VIEW_ANGLE,
+                    verticalViewAngle);
+    numCapture = 1;
+    if(mZslEnable) {
+        int maxSnapshot = MAX_SNAPSHOT_BUFFERS - 2;
+        char value[5];
+        property_get("persist.camera.hal.capture", value, "1");
+        numCapture = atoi(value);
+        if(numCapture > maxSnapshot)
+            numCapture = maxSnapshot;
+        else if(numCapture < 1)
+            numCapture = 1;
+        mParameters.set("capture-burst-captures-values", maxSnapshot);
+        mParameters.set("capture-burst-interval-supported", "false");
+    }
+    mParameters.set("num-snaps-per-shutter", numCapture);
+    ALOGI("%s: setting num-snaps-per-shutter to %d", __FUNCTION__, numCapture);
+    if(mIs3DModeOn)
+        mParameters.set("3d-frame-format", "left-right");
+
+    switch(mCurrentTarget){
+        case TARGET_MSM7627:
+        case TARGET_QSD8250:
+        case TARGET_MSM7630:
+           mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "800x480");
+           break;
+        case TARGET_MSM7627A:
+            mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "864x480");
+            break;
+        case TARGET_MSM8660:
+            mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "1920x1088");
+            break;
+        default:
+            mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "640x480");
+            break;
+    }
+    if (setParameters(mParameters) != NO_ERROR) {
+        ALOGE("Failed to set default parameters?!");
+    }
+
+    /* Initialize the camframe_timeout_flag*/
+    Mutex::Autolock l(&mCamframeTimeoutLock);
+    camframe_timeout_flag = FALSE;
+    mPostviewHeap = NULL;
+    mDisplayHeap = NULL;
+    mLastPreviewFrameHeap = NULL;
+    mThumbnailHeap = NULL;
+
+    mInitialized = true;
+    strTexturesOn = false;
+
+    ALOGI("initDefaultParameters X");
+}
+
+
+#define ROUND_TO_PAGE(x)  (((x)+0xfff)&~0xfff)
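+// For example, ROUND_TO_PAGE(0x1234) == 0x2000: sizes are rounded up to the
+// next 4 KB (0x1000-byte) page boundary; page-aligned sizes are unchanged.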
+
+bool QualcommCameraHardware::startCamera()
+{
+    ALOGV("startCamera E");
+    if( mCurrentTarget == TARGET_MAX ) {
+        ALOGE(" Unable to determine the target type. Camera will not work ");
+        return false;
+    }
+#if DLOPEN_LIBMMCAMERA
+
+    ALOGV("loading liboemcamera at %p", libmmcamera);
+    if (!libmmcamera) {
+        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+        return false;
+    }
+
+    *(void **)&LINK_cam_frame =
+        ::dlsym(libmmcamera, "cam_frame");
+    *(void **)&LINK_wait_cam_frame_thread_ready =
+        ::dlsym(libmmcamera, "wait_cam_frame_thread_ready");
+    *(void **)&LINK_cam_frame_set_exit_flag =
+        ::dlsym(libmmcamera, "cam_frame_set_exit_flag");
+    *(void **)&LINK_camframe_terminate =
+        ::dlsym(libmmcamera, "camframe_terminate");
+
+    *(void **)&LINK_jpeg_encoder_init =
+        ::dlsym(libmmcamera, "jpeg_encoder_init");
+
+    *(void **)&LINK_jpeg_encoder_encode =
+        ::dlsym(libmmcamera, "jpeg_encoder_encode");
+
+    *(void **)&LINK_jpeg_encoder_join =
+        ::dlsym(libmmcamera, "jpeg_encoder_join");
+
+    mCamNotify.preview_frame_cb = &receive_camframe_callback;
+
+    mCamNotify.camstats_cb = &receive_camstats_callback;
+
+    mCamNotify.on_event =  &receive_event_callback;
+
+    mCamNotify.on_error_event = &receive_camframe_error_callback;
+
+    // 720p new recording functions
+    mCamNotify.video_frame_cb = &receive_camframe_video_callback;
+
+    *(void **)&LINK_camframe_add_frame = ::dlsym(libmmcamera, "camframe_add_frame");
+
+    *(void **)&LINK_camframe_release_all_frames = ::dlsym(libmmcamera, "camframe_release_all_frames");
+
+    *(void **)&LINK_mmcamera_shutter_callback =
+        ::dlsym(libmmcamera, "mmcamera_shutter_callback");
+
+    *LINK_mmcamera_shutter_callback = receive_shutter_callback;
+
+    *(void**)&LINK_jpeg_encoder_setMainImageQuality =
+        ::dlsym(libmmcamera, "jpeg_encoder_setMainImageQuality");
+
+    *(void**)&LINK_jpeg_encoder_setThumbnailQuality =
+        ::dlsym(libmmcamera, "jpeg_encoder_setThumbnailQuality");
+
+    *(void**)&LINK_jpeg_encoder_setRotation =
+        ::dlsym(libmmcamera, "jpeg_encoder_setRotation");
+
+    *(void**)&LINK_jpeg_encoder_get_buffer_offset =
+        ::dlsym(libmmcamera, "jpeg_encoder_get_buffer_offset");
+
+    *(void**)&LINK_jpeg_encoder_set_3D_info =
+        ::dlsym(libmmcamera, "jpeg_encoder_set_3D_info");
+
+/* Disabling until support is available.
+    *(void**)&LINK_jpeg_encoder_setLocation =
+        ::dlsym(libmmcamera, "jpeg_encoder_setLocation");
+*/
+    *(void **)&LINK_cam_conf =
+        ::dlsym(libmmcamera, "cam_conf");
+
+/* Disabling until support is available.
+    *(void **)&LINK_default_sensor_get_snapshot_sizes =
+        ::dlsym(libmmcamera, "default_sensor_get_snapshot_sizes");
+*/
+    *(void **)&LINK_launch_cam_conf_thread =
+        ::dlsym(libmmcamera, "launch_cam_conf_thread");
+
+    *(void **)&LINK_release_cam_conf_thread =
+        ::dlsym(libmmcamera, "release_cam_conf_thread");
+
+    mCamNotify.on_liveshot_event = &receive_liveshot_callback;
+
+    *(void **)&LINK_cancel_liveshot =
+        ::dlsym(libmmcamera, "cancel_liveshot");
+
+    *(void **)&LINK_set_liveshot_params =
+        ::dlsym(libmmcamera, "set_liveshot_params");
+
+    *(void **)&LINK_set_liveshot_frame =
+        ::dlsym(libmmcamera, "set_liveshot_frame");
+
+    *(void **)&LINK_mm_camera_destroy =
+        ::dlsym(libmmcamera, "mm_camera_destroy");
+
+    *(void **)&LINK_yuv_convert_ycrcb420sp_to_yv12_inplace =
+        ::dlsym(libmmcamera, "yuv_convert_ycrcb420sp_to_yv12");
+
+    *(void **)&LINK_yuv_convert_ycrcb420sp_to_yv12 =
+        ::dlsym(libmmcamera, "yuv_convert_ycrcb420sp_to_yv12_ver2");
+
+    /* Disabling until support is available.*/
+    *(void **)&LINK_zoom_crop_upscale =
+        ::dlsym(libmmcamera, "zoom_crop_upscale");
+
+
+#else
+    mCamNotify.preview_frame_cb = &receive_camframe_callback;
+    mCamNotify.camstats_cb = &receive_camstats_callback;
+    mCamNotify.on_event =  &receive_event_callback;
+
+    mmcamera_shutter_callback = receive_shutter_callback;
+    mCamNotify.on_liveshot_event = &receive_liveshot_callback;
+    mCamNotify.video_frame_cb = &receive_camframe_video_callback;
+
+#endif // DLOPEN_LIBMMCAMERA
+#if 0 //commenting this for now as not getting graphics permission
+    if((mCurrentTarget != TARGET_MSM7630) && (mCurrentTarget != TARGET_MSM8660)){
+        fb_fd = open("/dev/graphics/fb0", O_RDWR);
+        if (fb_fd < 0) {
+            ALOGE("startCamera: fb0 open failed: %s!", strerror(errno));
+            return FALSE;
+        }
+    }
+#endif
+    void *ret_val;
+    if (pthread_join(mDeviceOpenThread, &ret_val) != 0) {
+         ALOGE("openCamera thread exit failed");
+         return false;
+    }
+
+    if (!mCameraOpen) {
+        ALOGE("openCamera() failed");
+        return false;
+    }
+
+
+    mCfgControl.mm_camera_query_parms(CAMERA_PARM_PICT_SIZE, (void **)&picture_sizes, &PICTURE_SIZE_COUNT);
+    if ((picture_sizes == NULL) || (!PICTURE_SIZE_COUNT)) {
+        ALOGE("startCamera X: could not get snapshot sizes");
+        return false;
+    }
+     ALOGV("startCamera picture_sizes %p PICTURE_SIZE_COUNT %d", picture_sizes, PICTURE_SIZE_COUNT);
+    mCfgControl.mm_camera_query_parms(CAMERA_PARM_PREVIEW_SIZE, (void **)&preview_sizes, &PREVIEW_SIZE_COUNT);
+    if ((preview_sizes == NULL) || (!PREVIEW_SIZE_COUNT)) {
+        ALOGE("startCamera X: could not get preview sizes");
+        return false;
+    }
+    ALOGV("startCamera preview_sizes %p previewSizeCount %d", preview_sizes, PREVIEW_SIZE_COUNT);
+
+    mCfgControl.mm_camera_query_parms(CAMERA_PARM_HFR_SIZE, (void **)&hfr_sizes, &HFR_SIZE_COUNT);
+    if ((hfr_sizes == NULL) || (!HFR_SIZE_COUNT)) {
+        ALOGE("startCamera X: could not get hfr sizes");
+        return false;
+    }
+    ALOGV("startCamera hfr_sizes %p hfrSizeCount %d", hfr_sizes, HFR_SIZE_COUNT);
+
+
+    ALOGV("startCamera X");
+    return true;
+}
+
+status_t QualcommCameraHardware::dump(int fd,
+                                      const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+#if 0
+    // Dump internal primitives.
+    result.append("QualcommCameraHardware::dump");
+    snprintf(buffer, 255, "mMsgEnabled (%d)\n", mMsgEnabled);
+    result.append(buffer);
+    int width, height;
+    mParameters.getPreviewSize(&width, &height);
+    snprintf(buffer, 255, "preview width(%d) x height (%d)\n", width, height);
+    result.append(buffer);
+    mParameters.getPictureSize(&width, &height);
+    snprintf(buffer, 255, "raw width(%d) x height (%d)\n", width, height);
+    result.append(buffer);
+    snprintf(buffer, 255,
+             "preview frame size(%d), raw size (%d), jpeg size (%d) "
+             "and jpeg max size (%d)\n", mPreviewFrameSize, mRawSize,
+             mJpegSize, mJpegMaxSize);
+    result.append(buffer);
+    write(fd, result.string(), result.size());
+
+    // Dump internal objects.
+    if (mPreviewHeap[0] != 0) {
+        mPreviewHeap[0]->dump(fd, args);
+    }
+    if (mRawHeap != 0) {
+        mRawHeap->dump(fd, args);
+    }
+    if (mJpegHeap != 0) {
+        mJpegHeap->dump(fd, args);
+    }
+    mParameters.dump(fd, args);
+#endif
+    return NO_ERROR;
+}
+
+/* Issue ioctl calls related to starting Camera Operations*/
+static bool native_start_ops(mm_camera_ops_type_t type, void* value)
+{
+    if(mCamOps.mm_camera_start(type, value,NULL) != MM_CAMERA_SUCCESS) {
+        ALOGE("native_start_ops: type %d error %s",
+            type,strerror(errno));
+        return false;
+    }
+    return true;
+}
+
+/* Issue ioctl calls related to stopping Camera Operations*/
+static bool native_stop_ops(mm_camera_ops_type_t type, void* value)
+{
+    if(mCamOps.mm_camera_stop(type, value, NULL) != MM_CAMERA_SUCCESS) {
+        ALOGE("native_stop_ops: type %d error %s",
+            type,strerror(errno));
+        return false;
+    }
+    return true;
+}
+/*==========================================================================*/
+
+
+#define GPS_PROCESSING_METHOD_SIZE  101
+#define FOCAL_LENGTH_DECIMAL_PRECISON 100
+
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };
+#define EXIF_ASCII_PREFIX_SIZE (sizeof(ExifAsciiPrefix))
+
+static rat_t latitude[3];
+static rat_t longitude[3];
+static char lonref[2];
+static char latref[2];
+static rat_t altitude;
+static rat_t gpsTimestamp[3];
+static char gpsDatestamp[20];
+static char dateTime[20];
+static rat_t focalLength;
+static uint16_t flashMode;
+static int iso_arr[] = {0,1,100,200,400,800,1600};
+static uint16_t isoMode;
+static char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+static void addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+                        uint32_t count, uint8_t copy, void *data) {
+
+    if(exif_table_numEntries == MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("Number of entries exceeded limit");
+        return;
+    }
+
+    int index = exif_table_numEntries;
+    exif_data[index].tag_id = tagid;
+    exif_data[index].tag_entry.type = type;
+    exif_data[index].tag_entry.count = count;
+    exif_data[index].tag_entry.copy = copy;
+    if((type == EXIF_RATIONAL) && (count > 1))
+        exif_data[index].tag_entry.data._rats = (rat_t *)data;
+    else if((type == EXIF_RATIONAL) && (count == 1))
+        exif_data[index].tag_entry.data._rat = *(rat_t *)data;
+    else if(type == EXIF_ASCII)
+        exif_data[index].tag_entry.data._ascii = (char *)data;
+    else if(type == EXIF_BYTE)
+        exif_data[index].tag_entry.data._byte = *(uint8_t *)data;
+    else if((type == EXIF_SHORT) && (count > 1))
+        exif_data[index].tag_entry.data._shorts = (uint16_t *)data;
+    else if((type == EXIF_SHORT) && (count == 1))
+        exif_data[index].tag_entry.data._short = *(uint16_t *)data;
+    // Increase number of entries
+    exif_table_numEntries++;
+}
+
+static void parseLatLong(const char *latlonString, int *pDegrees,
+                           int *pMinutes, int *pSeconds ) {
+
+    double value = atof(latlonString);
+    value = fabs(value);
+    int degrees = (int) value;
+
+    double remainder = value - degrees;
+    int minutes = (int) (remainder * 60);
+    int seconds = (int) (((remainder * 60) - minutes) * 60 * 1000);
+
+    *pDegrees = degrees;
+    *pMinutes = minutes;
+    *pSeconds = seconds;
+}
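+
+/* For example, parseLatLong("-122.0312186", ...) yields degrees = 122,
+ * minutes = 1 and seconds of roughly 52386 (i.e. about 52.386 s once the
+ * caller stores it over a denominator of 1000); the sign is carried
+ * separately by the corresponding GPS *_REF tag. */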
+
+static void setLatLon(exif_tag_id_t tag, const char *latlonString) {
+
+    int degrees, minutes, seconds;
+
+    parseLatLong(latlonString, &degrees, &minutes, &seconds);
+
+    rat_t value[3] = { {degrees, 1},
+                       {minutes, 1},
+                       {seconds, 1000} };
+
+    if(tag == EXIFTAGID_GPS_LATITUDE) {
+        memcpy(latitude, value, sizeof(latitude));
+        addExifTag(EXIFTAGID_GPS_LATITUDE, EXIF_RATIONAL, 3,
+                    1, (void *)latitude);
+    } else {
+        memcpy(longitude, value, sizeof(longitude));
+        addExifTag(EXIFTAGID_GPS_LONGITUDE, EXIF_RATIONAL, 3,
+                    1, (void *)longitude);
+    }
+}
+
+void QualcommCameraHardware::setGpsParameters() {
+    const char *str = NULL;
+
+    str = mParameters.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+
+    if(str!=NULL ){
+       memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+       strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str,
+           GPS_PROCESSING_METHOD_SIZE - 1);
+       gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE-1] = '\0';
+       addExifTag(EXIFTAGID_GPS_PROCESSINGMETHOD, EXIF_ASCII,
+           EXIF_ASCII_PREFIX_SIZE + strlen(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE) + 1,
+           1, (void *)gpsProcessingMethod);
+    }
+
+    str = NULL;
+
+    //Set Latitude
+    str = mParameters.get(QCameraParameters::KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        setLatLon(EXIFTAGID_GPS_LATITUDE, str);
+        //set Latitude Ref
+        float latitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LATITUDE);
+        latref[0] = 'N';
+        if(latitudeValue < 0 ){
+            latref[0] = 'S';
+        }
+        latref[1] = '\0';
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF, latref);
+        addExifTag(EXIFTAGID_GPS_LATITUDE_REF, EXIF_ASCII, 2,
+                                1, (void *)latref);
+    }
+
+    //set Longitude
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        setLatLon(EXIFTAGID_GPS_LONGITUDE, str);
+        //set Longitude Ref
+        float longitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LONGITUDE);
+        lonref[0] = 'E';
+        if(longitudeValue < 0){
+            lonref[0] = 'W';
+        }
+        lonref[1] = '\0';
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, lonref);
+        addExifTag(EXIFTAGID_GPS_LONGITUDE_REF, EXIF_ASCII, 2,
+                                1, (void *)lonref);
+    }
+
+    //set Altitude
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        int ref = 0;
+        if(value < 0){
+            ref = 1;
+            value = -value;
+        }
+        uint32_t value_meter = value * 1000;
+        rat_t alt_value = {value_meter, 1000};
+        memcpy(&altitude, &alt_value, sizeof(altitude));
+        addExifTag(EXIFTAGID_GPS_ALTITUDE, EXIF_RATIONAL, 1,
+                    1, (void *)&altitude);
+        //set AltitudeRef
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, ref);
+        addExifTag(EXIFTAGID_GPS_ALTITUDE_REF, EXIF_BYTE, 1,
+                    1, (void *)&ref);
+    }
+
+    //set Gps TimeStamp
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+
+      long value = atol(str);
+      time_t unixTime;
+      struct tm *UTCTimestamp;
+
+      unixTime = (time_t)value;
+      UTCTimestamp = gmtime(&unixTime);
+
+      strftime(gpsDatestamp, sizeof(gpsDatestamp), "%Y:%m:%d", UTCTimestamp);
+      addExifTag(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
+                          strlen(gpsDatestamp)+1 , 1, (void *)&gpsDatestamp);
+
+      rat_t time_value[3] = { {UTCTimestamp->tm_hour, 1},
+                              {UTCTimestamp->tm_min, 1},
+                              {UTCTimestamp->tm_sec, 1} };
+
+
+      memcpy(&gpsTimestamp, &time_value, sizeof(gpsTimestamp));
+      addExifTag(EXIFTAGID_GPS_TIMESTAMP, EXIF_RATIONAL,
+                  3, 1, (void *)&gpsTimestamp);
+    }
+
+}
+
+
+bool QualcommCameraHardware::initZslParameter(void)
+{
+    ALOGV("%s: E", __FUNCTION__);
+    mParameters.getPictureSize(&mPictureWidth, &mPictureHeight);
+    ALOGV("initZslParameter: picture size=%dx%d", mPictureWidth, mPictureHeight);
+    if (updatePictureDimension(mParameters, mPictureWidth, mPictureHeight)) {
+        mDimension.picture_width = mPictureWidth;
+        mDimension.picture_height = mPictureHeight;
+    }
+
+    /* use the default thumbnail sizes */
+    mZslParms.picture_width = mPictureWidth;
+    mZslParms.picture_height = mPictureHeight;
+    mZslParms.preview_width = mDimension.display_width;
+    mZslParms.preview_height = mDimension.display_height;
+    mZslParms.useExternalBuffers = TRUE;
+    /* fill main image size, thumbnail size, postview size into capture_params_t */
+    memset(&mZslCaptureParms, 0, sizeof(zsl_capture_params_t));
+    mZslCaptureParms.thumbnail_height = mPostviewHeight;
+    mZslCaptureParms.thumbnail_width = mPostviewWidth;
+    ALOGV("Number of snapshots to capture: %d", numCapture);
+    mZslCaptureParms.num_captures = numCapture;
+
+    return true;
+}
+
+
+bool QualcommCameraHardware::initImageEncodeParameters(int size)
+{
+    ALOGV("%s: E", __FUNCTION__);
+    memset(&mImageEncodeParms, 0, sizeof(encode_params_t));
+    int jpeg_quality = mParameters.getInt("jpeg-quality");
+    bool ret;
+    if (jpeg_quality >= 0) {
+        ALOGV("initJpegParameters, current jpeg main img quality =%d",
+             jpeg_quality);
+        //The application can pass a quality of zero when there is
+        //no back sensor connected. Since a JPEG quality of zero is
+        //not accepted by the camera stack, fall back to the default value.
+        if(jpeg_quality == 0) jpeg_quality = 85;
+        mImageEncodeParms.quality = jpeg_quality;
+        ret = native_set_parms(CAMERA_PARM_JPEG_MAINIMG_QUALITY, sizeof(int), &jpeg_quality);
+        if(!ret){
+          ALOGE("initJpegParametersX: failed to set main image quality");
+          return false;
+        }
+    }
+
+    int thumbnail_quality = mParameters.getInt("jpeg-thumbnail-quality");
+    if (thumbnail_quality >= 0) {
+        //The application can pass a quality of zero when there is
+        //no back sensor connected. Since a quality of zero is not
+        //accepted by the camera stack, fall back to the default value.
+        if(thumbnail_quality == 0) thumbnail_quality = 85;
+        ALOGV("initJpegParameters, current jpeg thumbnail quality =%d",
+             thumbnail_quality);
+        /* TODO: check with mm-camera? */
+        mImageEncodeParms.quality = thumbnail_quality;
+        ret = native_set_parms(CAMERA_PARM_JPEG_THUMB_QUALITY, sizeof(int), &thumbnail_quality);
+        if(!ret){
+          ALOGE("initJpegParameters X: failed to set thumbnail quality");
+          return false;
+        }
+    }
+
+    int rotation = mParameters.getInt("rotation");
+    char mDeviceName[PROPERTY_VALUE_MAX];
+    property_get("ro.hw_plat", mDeviceName, "");
+    if(!strcmp(mDeviceName,"7x25A"))
+        rotation = (rotation + 90)%360;
+
+    if (mIs3DModeOn)
+        rotation = 0;
+    if (rotation >= 0) {
+        ALOGV("initJpegParameters, rotation = %d", rotation);
+        mImageEncodeParms.rotation = rotation;
+    }
+
+    jpeg_set_location();
+
+    //set TimeStamp
+    const char *str = mParameters.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if(str != NULL) {
+      strncpy(dateTime, str, 19);
+      dateTime[19] = '\0';
+      addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                  20, 1, (void *)dateTime);
+    }
+
+    int focalLengthValue = (int) (mParameters.getFloat(
+                QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISON);
+    rat_t focalLengthRational = {focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISON};
+    memcpy(&focalLength, &focalLengthRational, sizeof(focalLengthRational));
+    addExifTag(EXIFTAGID_FOCAL_LENGTH, EXIF_RATIONAL, 1,
+                1, (void *)&focalLength);
+    //Adding ExifTag for ISOSpeedRating
+    const char *iso_str = mParameters.get(QCameraParameters::KEY_ISO_MODE);
+    int iso_value = attr_lookup(iso, sizeof(iso) / sizeof(str_map), iso_str);
+    isoMode = iso_arr[iso_value];
+    addExifTag(EXIFTAGID_ISO_SPEED_RATING,EXIF_SHORT,1,1,(void *)&isoMode);
+
+    if (mUseJpegDownScaling) {
+      ALOGV("initImageEncodeParameters: update main image", __func__);
+      mImageEncodeParms.output_picture_width = mActualPictWidth;
+      mImageEncodeParms.output_picture_height = mActualPictHeight;
+    }
+    mImageEncodeParms.cbcr_offset = mCbCrOffsetRaw;
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO)
+        mImageEncodeParms.cbcr_offset = mCbCrOffsetRaw;
+    /* TODO: check this */
+    mImageEncodeParms.y_offset = 0;
+    for(int i = 0; i < size; i++){
+        memset(&mEncodeOutputBuffer[i], 0, sizeof(mm_camera_buffer_t));
+        mEncodeOutputBuffer[i].ptr = (uint8_t *)mJpegMapped[i]->data;
+        mEncodeOutputBuffer[i].filled_size = mJpegMaxSize;
+        mEncodeOutputBuffer[i].size = mJpegMaxSize;
+        mEncodeOutputBuffer[i].fd = mJpegfd[i];
+        mEncodeOutputBuffer[i].offset = 0;
+    }
+    mImageEncodeParms.p_output_buffer = mEncodeOutputBuffer;
+    mImageEncodeParms.exif_data = exif_data;
+    mImageEncodeParms.exif_numEntries = exif_table_numEntries;
+
+    mImageEncodeParms.format3d = mIs3DModeOn;
+    return true;
+}
+
+bool QualcommCameraHardware::native_set_parms(
+    camera_parm_type_t type, uint16_t length, void *value)
+{
+    if(mCfgControl.mm_camera_set_parm(type,value) != MM_CAMERA_SUCCESS) {
+        ALOGE("native_set_parms failed: type %d length %d error %s",
+            type, length, strerror(errno));
+        return false;
+    }
+    return true;
+}
+
+bool QualcommCameraHardware::native_set_parms(
+    camera_parm_type_t type, uint16_t length, void *value, int *result)
+{
+    mm_camera_status_t status;
+    status = mCfgControl.mm_camera_set_parm(type,value);
+    ALOGV("native_set_parms status = %d", status);
+    if (status == MM_CAMERA_SUCCESS || status == MM_CAMERA_ERR_INVALID_OPERATION) {
+        *result = status;
+        return true;
+    }
+    ALOGE("%s: type %d length %d error %s, status %d", __FUNCTION__,
+          type, length, strerror(errno), status);
+    *result = status;
+    return false;
+}
+
+void QualcommCameraHardware::jpeg_set_location()
+{
+    bool encode_location = true;
+    camera_position_type pt;
+
+#define PARSE_LOCATION(what,type,fmt,desc) do {                                \
+        pt.what = 0;                                                           \
+        const char *what##_str = mParameters.get("gps-"#what);                 \
+        ALOGV("GPS PARM %s --> [%s]", "gps-"#what, what##_str);                 \
+        if (what##_str) {                                                      \
+            type what = 0;                                                     \
+            if (sscanf(what##_str, fmt, &what) == 1)                           \
+                pt.what = what;                                                \
+            else {                                                             \
+                ALOGE("GPS " #what " %s could not"                              \
+                     " be parsed as a " #desc, what##_str);                    \
+                encode_location = false;                                       \
+            }                                                                  \
+        }                                                                      \
+        else {                                                                 \
+            ALOGV("GPS " #what " not specified: "                               \
+                 "defaulting to zero in EXIF header.");                        \
+            encode_location = false;                                           \
+       }                                                                       \
+    } while(0)
+
+    PARSE_LOCATION(timestamp, long, "%ld", "long");
+    if (!pt.timestamp) pt.timestamp = time(NULL);
+    PARSE_LOCATION(altitude, short, "%hd", "short");
+    PARSE_LOCATION(latitude, double, "%lf", "double float");
+    PARSE_LOCATION(longitude, double, "%lf", "double float");
+
+#undef PARSE_LOCATION
+
+    if (encode_location) {
+        ALOGD("setting image location ALT %d LAT %lf LON %lf",
+             pt.altitude, pt.latitude, pt.longitude);
+
+        setGpsParameters();
+        /* Disabling until support is available.
+        if (!LINK_jpeg_encoder_setLocation(&pt)) {
+            ALOGE("jpeg_set_location: LINK_jpeg_encoder_setLocation failed.");
+        }
+        */
+    }
+    else ALOGV("not setting image location");
+}
+
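+/* register_buf: fills an msm_pmem_info descriptor (fd, offset, length, vaddr
+ * and per-plane offsets) for the given buffer and passes it to the camera
+ * driver via CAMERA_OPS_REGISTER_BUFFER / CAMERA_OPS_UNREGISTER_BUFFER.
+ * Returns false if the native_start_ops() call fails. */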
+static bool register_buf(int size,
+                         int frame_size,
+                         int cbcr_offset,
+                         int yoffset,
+                         int pmempreviewfd,
+                         uint32_t offset,
+                         uint8_t *buf,
+                         int pmem_type,
+                         bool vfe_can_write,
+                         bool register_buffer,
+                         bool use_all_chnls)
+{
+    struct msm_pmem_info pmemBuf;
+    CAMERA_HAL_UNUSED(frame_size);
+
+    memset(&pmemBuf, 0, sizeof(struct msm_pmem_info));
+    pmemBuf.type     = pmem_type;
+    pmemBuf.fd       = pmempreviewfd;
+    pmemBuf.offset   = offset;
+    pmemBuf.len      = size;
+    pmemBuf.vaddr    = buf;
+    pmemBuf.planar0_off = yoffset;
+    if(!use_all_chnls) {
+        ALOGV("use_all_chnls = %d\n", use_all_chnls);
+        pmemBuf.planar1_off = cbcr_offset;
+        pmemBuf.planar2_off = yoffset;
+    } else {
+        pmemBuf.planar1_off = myv12_params.CbOffset;
+        pmemBuf.planar2_off = myv12_params.CrOffset;
+    }
+    ALOGV("register_buf: CbOff = 0x%x CrOff = 0x%x",
+          pmemBuf.planar1_off, pmemBuf.planar2_off);
+
+    pmemBuf.active   = vfe_can_write;
+
+    ALOGV("register_buf:  reg = %d buffer = %p",
+         !register_buffer, buf);
+    if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+        CAMERA_OPS_UNREGISTER_BUFFER, (void *)&pmemBuf) < 0) {
+        ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+              strerror(errno));
+        return false;
+    }
+
+    return true;
+}
+
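+/* Re-declaration of register_buf that supplies default arguments, so callers
+ * further down can omit register_buffer (defaults to registering) and
+ * use_all_chnls (defaults to the two-plane cbcr/y offsets). */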
+static bool register_buf(int size,
+                         int frame_size,
+                         int cbcr_offset,
+                         int yoffset,
+                         int pmempreviewfd,
+                         uint32_t offset,
+                         uint8_t *buf,
+                         int pmem_type,
+                         bool vfe_can_write,
+                         bool register_buffer = true,
+                         bool use_all_chnls = false);
+
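+/* runFrameThread: body of the detached frame thread. It blocks inside
+ * LINK_cam_frame() until the frame loop exits, waits for the preview thread
+ * to finish, returns the preview buffers to the display, flushes the busy and
+ * free queues, and unregisters the preview (and, on dual-VFE targets, record)
+ * buffers before signalling mFrameThreadWait. */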
+void QualcommCameraHardware::runFrameThread(void *data)
+{
+    ALOGV("runFrameThread E");
+    int type;
+    int CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+
+    if(libmmcamera)
+    {
+        LINK_cam_frame(data);
+    }
+    //waiting for preview thread to complete before clearing of the buffers
+    mPreviewThreadWaitLock.lock();
+    while (mPreviewThreadRunning) {
+        ALOGI("runframethread: waiting for preview  thread to complete.");
+        mPreviewThreadWait.wait(mPreviewThreadWaitLock);
+        ALOGI("initPreview: old preview thread completed.");
+    }
+    mPreviewThreadWaitLock.unlock();
+
+    // Cancel the preview buffers and return them to the display before stopping preview.
+    // This ensures that all preview buffers are available for dequeuing when
+    // startPreview is called again with the same ANativeWindow object (snapshot case). If the
+    // ANativeWindow is a new one (camera-camcorder switch case) because the app passed a new
+    // surface, then the buffers will be re-allocated and not returned from the old pool.
+    relinquishBuffers();
+    mPreviewBusyQueue.flush();
+    /* Flush the Free Q */
+    LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+
+    if(mIs3DModeOn != true) {
+#if 0
+        if(mInHFRThread == false)
+        {
+             mPmemWaitLock.lock();
+             //mPreviewHeap.clear();
+// TODO do properly
+             mPrevHeapDeallocRunning = true;
+             mPmemWait.signal();
+             mPmemWaitLock.unlock();
+
+            if(( mPreviewFormat == CAMERA_YUV_420_YV12 )&&
+                ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627) &&
+                previewWidth%32 != 0 )
+                mYV12Heap.clear();
+
+        }
+        else
+#endif
+        {
+           int mBufferSize = previewWidth * previewHeight * 3/2;
+           int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+           ALOGE("unregistering all preview buffers");
+            //unregister preview buffers. we are not deallocating here.
+            for (int cnt = 0; cnt < mTotalPreviewBufferCount; ++cnt) {
+                register_buf(mBufferSize,
+                         mBufferSize,
+                         mCbCrOffset,
+                         0,
+                         frames[cnt].fd,
+                         0,
+                         (uint8_t *)frames[cnt].buffer,
+                         MSM_PMEM_PREVIEW,
+                         false,
+                         false,
+                         true);
+            //mPreviewHeap[cnt].clear();
+             // TODO : clean properly
+            }
+        }
+    }
+    if(!mZslEnable) {
+    if(( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)){
+        if(mHFRMode != true) {
+#if 0
+            mRecordHeap.clear();
+            mRecordHeap = NULL;
+#endif
+        } else {
+            ALOGI("%s: unregister record buffers with camera driver", __FUNCTION__);
+            register_record_buffers(false);
+        }
+        int CbCrOffset = PAD_TO_2K(mDimension.video_width  * mDimension.video_height);
+        for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+#if 0
+            if (mRecordfd[cnt] > 0) {
+                ALOGE("Unregistering buffer %d with kernel", cnt);
+                register_buf(mRecordFrameSize,
+                    mRecordFrameSize, CbCrOffset, 0,
+                    mRecordfd[cnt],
+                    0,
+                    (uint8_t *)recordframes[cnt].buffer,
+                    MSM_PMEM_VIDEO,
+                    false, false);
+                ALOGE("Came back from register call to kernel");
+            }
+#endif
+             type = MSM_PMEM_VIDEO;
+             ALOGE("%s: unregister record buffers[%d] with camera driver", __FUNCTION__, cnt);
+             if(recordframes) {
+               register_buf(mRecordFrameSize,
+                  mRecordFrameSize, CbCrOffset, 0,
+                  recordframes[cnt].fd,
+                  0,
+                  (uint8_t *)recordframes[cnt].buffer,
+                  type,
+                  false,false);
+               if(mRecordMapped[cnt]) {
+                   mRecordMapped[cnt]->release(mRecordMapped[cnt]);
+                   mRecordMapped[cnt] = NULL;
+                   close(mRecordfd[cnt]);
+                   if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+                       struct encoder_media_buffer_type * packet =
+                               (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+                       native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+                       metadata_memory[cnt]->release(metadata_memory[cnt]);
+                       metadata_memory[cnt] = NULL;
+                   }
+#ifdef USE_ION
+                   deallocate_ion_memory(&record_main_ion_fd[cnt], &record_ion_info_fd[cnt]);
+#endif
+               }
+            }
+        }
+    }
+    }
+
+    mFrameThreadWaitLock.lock();
+    mFrameThreadRunning = false;
+    mFrameThreadWait.signal();
+    mFrameThreadWaitLock.unlock();
+
+    ALOGV("runFrameThread X");
+}
+
+
+void QualcommCameraHardware::runPreviewThread(void *data)
+{
+    static int hfr_count = 0;
+    msm_frame* frame = NULL;
+    status_t retVal = NO_ERROR;
+    CAMERA_HAL_UNUSED(data);
+    android_native_buffer_t *buffer;
+	buffer_handle_t *handle = NULL;
+    int bufferIndex = 0;
+
+    while((frame = mPreviewBusyQueue.get()) != NULL) {
+        if (UNLIKELY(mDebugFps)) {
+            debugShowPreviewFPS();
+        }
+        mCallbackLock.lock();
+        int msgEnabled = mMsgEnabled;
+        camera_data_callback pcb = mDataCallback;
+        void *pdata = mCallbackCookie;
+        camera_data_timestamp_callback rcb = mDataCallbackTimestamp;
+        void *rdata = mCallbackCookie;
+        camera_data_callback mcb = mDataCallback;
+        void *mdata = mCallbackCookie;
+        mCallbackLock.unlock();
+
+        // signal smooth zoom thread , that a new preview frame is available
+        mSmoothzoomThreadWaitLock.lock();
+        if(mSmoothzoomThreadRunning) {
+            mSmoothzoomThreadWait.signal();
+        }
+        mSmoothzoomThreadWaitLock.unlock();
+
+        // Find the offset within the heap of the current buffer.
+        ssize_t offset_addr = 0; // TODO , use proper value
+      //      (ssize_t)frame->buffer - (ssize_t)mPreviewHeap->mHeap->base();
+     //   ssize_t offset = offset_addr / mPreviewHeap->mAlignedBufferSize;
+        common_crop_t *crop = (common_crop_t *) (frame->cropinfo);
+#ifdef DUMP_PREVIEW_FRAMES
+        static int frameCnt = 0;
+        int written;
+                if (frameCnt >= 0 && frameCnt <= 10 ) {
+                    char buf[128];
+                    snprintf(buf, sizeof(buf), "/data/%d_preview.yuv", frameCnt);
+                    int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+                    ALOGV("dumping preview frame %d", frameCnt);
+                    if (file_fd < 0) {
+                        ALOGE("cannot open file\n");
+                    }
+                    else
+                    {
+                        ALOGV("dumping data");
+                        written = write(file_fd, (uint8_t *)frame->buffer,
+                            mPreviewFrameSize );
+                        if(written < 0)
+                          ALOGE("error in data write");
+                    }
+                    close(file_fd);
+              }
+              frameCnt++;
+#endif
+        mInPreviewCallback = true;
+         if (crop->in1_w != 0 && crop->in1_h != 0) {
+             zoomCropInfo.left = (crop->out1_w - crop->in1_w + 1) / 2 - 1;
+             zoomCropInfo.top = (crop->out1_h - crop->in1_h + 1) / 2 - 1;
+             /* There can be scenarios where the in1_wXin1_h and
+              * out1_wXout1_h are same. In those cases, reset the
+              * x and y to zero instead of negative for proper zooming
+              */
+             if(zoomCropInfo.left < 0) zoomCropInfo.left = 0;
+             if(zoomCropInfo.top < 0) zoomCropInfo.top = 0;
+             zoomCropInfo.right = zoomCropInfo.left + crop->in1_w;
+             zoomCropInfo.bottom = zoomCropInfo.top + crop->in1_h;
+             mPreviewWindow-> set_crop (mPreviewWindow,
+                                       zoomCropInfo.left,
+                                       zoomCropInfo.top,
+                                       zoomCropInfo.right,
+                                       zoomCropInfo.bottom);
+             /* Set mResetOverlayCrop to true, so that when there is
+              * no crop information, setCrop will be called
+              * with zero crop values.
+              */
+             mResetWindowCrop = true;
+
+         } else {
+             // Reset zoomCropInfo variables. This will ensure that
+             // stale values won't be used for postview
+             zoomCropInfo.left = 0;
+             zoomCropInfo.top = 0;
+             zoomCropInfo.right = crop->in1_w;
+             zoomCropInfo.bottom = crop->in1_h;
+             /* This reset is required, if not, overlay driver continues
+              * to use the old crop information for these preview
+              * frames which is not the correct behavior. To avoid
+              * multiple calls, reset once.
+              */
+             if(mResetWindowCrop == true){
+                mPreviewWindow-> set_crop (mPreviewWindow,
+                                      zoomCropInfo.left,
+                                      zoomCropInfo.top,
+                                      zoomCropInfo.right,
+                                      zoomCropInfo.bottom);
+                 mResetWindowCrop = false;
+             }
+         }
+         /* To overcome a timing case where we could have the overlay refer to a deallocated
+            mDisplayHeap (and show corruption), the mDisplayHeap is not deallocated until the
+            first preview frame is queued to the overlay on 8660. Also, checking whether a
+            snapshot is currently in progress ensures that the resources being used by the
+            snapshot thread are not incorrectly deallocated by the preview thread. */
+         if ((mCurrentTarget == TARGET_MSM8660)&&(mFirstFrame == true)) {
+             ALOGD(" receivePreviewFrame : first frame queued, display heap being deallocated");
+              mThumbnailHeap.clear();
+              mDisplayHeap.clear();
+              if(!mZslEnable){
+                 mDisplayHeap.clear();
+                 mPostviewHeap.clear();
+             }
+             mFirstFrame = false;
+         }
+         mLastQueuedFrame = (void *)frame->buffer;
+         bufferIndex = mapBuffer(frame);
+
+         // if 7x27A && yv12 is set as preview format use convert routines to
+         // convert from YUV420sp to YV12
+         yuv_image_type in_buf, out_buf;
+         int conversion_result = 0;
+
+         if(( mPreviewFormat == CAMERA_YUV_420_YV12 ) &&
+           ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627 )){
+            // If the width is not a multiple of 32 we cannot do an in-place
+            // conversion, as the sizes of 420sp and YV12 frames differ
+            if(previewWidth%32){
+#if 0 //TODO :
+               ALOGE("YV12::Doing not inplace conversion from 420sp to yv12");
+               in_buf.imgPtr = (unsigned char*)mPreviewMapped[bufferIndex]->data;
+               in_buf.dx = out_buf.dx = previewWidth;
+               in_buf.dy = in_buf.dy = previewHeight;
+               conversion_result = LINK_yuv_convert_ycrcb420sp_to_yv12(&in_buf, &out_buf);
+#endif
+            } else {
+               ALOGE("Doing inplace conversion from 420sp to yv12");
+               in_buf.imgPtr = (unsigned char *)mPreviewMapped[bufferIndex]->data;
+               in_buf.dx  = previewWidth;
+               in_buf.dy  = previewHeight;
+               conversion_result = LINK_yuv_convert_ycrcb420sp_to_yv12_inplace(&in_buf);
+            }
+         }
+
+         if(bufferIndex >= 0) {
+           //Need to encapsulate this in IMemory object and send
+
+         if (pcb != NULL && (msgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
+             int previewBufSize;
+             /* For CTS: force the preview memory buffer length to be
+                'previewWidth * previewHeight * 3/2'. Needed when gralloc allocated extra memory. */
+             if( mPreviewFormat == CAMERA_YUV_420_NV21 || mPreviewFormat == CAMERA_YUV_420_YV12) {
+               previewBufSize = previewWidth * previewHeight * 3/2;
+               camera_memory_t *previewMem = mGetMemory(frames[bufferIndex].fd, previewBufSize,
+                                                        1, mCallbackCookie);
+               if (!previewMem || !previewMem->data) {
+                 ALOGE("%s: mGetMemory failed.\n", __func__);
+               } else {
+                   pcb(CAMERA_MSG_PREVIEW_FRAME,previewMem,0,NULL,pdata);
+                   previewMem->release(previewMem);
+               }
+             } else
+                 pcb(CAMERA_MSG_PREVIEW_FRAME,(camera_memory_t *) mPreviewMapped[bufferIndex],0,NULL,pdata);
+         }
+
+           // TODO : may have to return the proper frame via pcb
+           mDisplayLock.lock();
+           if( mPreviewWindow != NULL) {
+                if (BUFFER_LOCKED == frame_buffer[bufferIndex].lockState) {
+                    if (GENLOCK_FAILURE == genlock_unlock_buffer(
+                           (native_handle_t*)(*(frame_buffer[bufferIndex].buffer)))) {
+                       ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                       mDisplayLock.unlock();
+                    } else {
+                       frame_buffer[bufferIndex].lockState = BUFFER_UNLOCKED;
+                    }
+                } else {
+                    ALOGE("%s: buffer to be enqueued is unlocked", __FUNCTION__);
+                    mDisplayLock.unlock();
+                }
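+             /* HFR decimation: when high-frame-rate recording is enabled,
+              * only every 2nd/3rd/4th preview frame is enqueued to the
+              * display; the frames skipped for display are cancelled back to
+              * the preview window instead. */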
+             const char *str = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+             if(str != NULL){
+                 int is_hfr_off = 0;
+                 hfr_count++;
+                 if(!strcmp(str, QCameraParameters::VIDEO_HFR_OFF)) {
+                    is_hfr_off = 1;
+                    retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+                                               frame_buffer[bufferIndex].buffer);
+                 } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_2X)) {
+                    hfr_count %= 2;
+                 } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_3X)) {
+                    hfr_count %= 3;
+                 } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_4X)) {
+                    hfr_count %= 4;
+                 }
+                 if(hfr_count == 0)
+                     retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+                                                frame_buffer[bufferIndex].buffer);
+                 else if(!is_hfr_off)
+                     retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                                frame_buffer[bufferIndex].buffer);
+             } else
+                   retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+                                              frame_buffer[bufferIndex].buffer);
+             if( retVal != NO_ERROR)
+               ALOGE("%s: Failed while queueing buffer %d for display."
+                         " Error = %d", __FUNCTION__,
+                         frames[bufferIndex].fd, retVal);
+             int stride;
+             retVal = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+                                                &(handle),&(stride));
+             if( retVal != NO_ERROR) {
+               ALOGE("%s: Failed while dequeueing buffer from display."
+                        " Error = %d", __FUNCTION__, retVal);
+             } else {
+               retVal = mPreviewWindow->lock_buffer(mPreviewWindow,handle);
+               //yyan todo use handle to find out buffer
+                 if(retVal != NO_ERROR)
+                   ALOGE("%s: Failed while dequeueing buffer from"
+                      "display. Error = %d", __FUNCTION__, retVal);
+             }
+           }
+           mDisplayLock.unlock();
+         } else
+           ALOGE("Could not find the buffer");
+
+        // If output is NOT enabled (targets other than 7x30, 8x50 and 8x60 currently)
+
+        nsecs_t timeStamp = nsecs_t(frame->ts.tv_sec)*1000000000LL + frame->ts.tv_nsec;
+
+        if( (mCurrentTarget != TARGET_MSM7630 ) &&  (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660)) {
+            int flagwait = 1;
+            if(rcb != NULL && (msgEnabled & CAMERA_MSG_VIDEO_FRAME) && (record_flag)) {
+                if(mStoreMetaDataInFrame){
+                    flagwait = 1;
+                    if(metadata_memory[bufferIndex]!= NULL)
+                        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, metadata_memory[bufferIndex],0,rdata);
+                    else flagwait = 0;
+                } else {
+                    rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, mPreviewMapped[bufferIndex],0, rdata);
+                }
+                if(flagwait){
+                    Mutex::Autolock rLock(&mRecordFrameLock);
+                        if (mReleasedRecordingFrame != true) {
+                            mRecordWait.wait(mRecordFrameLock);
+                        }
+                        mReleasedRecordingFrame = false;
+                }
+            }
+        }
+
+        if ( mCurrentTarget == TARGET_MSM8660 ) {
+            mMetaDataWaitLock.lock();
+            if (mFaceDetectOn == true && mSendMetaData == true) {
+                mSendMetaData = false;
+                fd_roi_t *roi = (fd_roi_t *)(frame->roi_info.info);
+
+                switch (roi->type) {
+                case FD_ROI_TYPE_HEADER:
+                    {
+                        mNumFDRcvd = 0;
+                        memset(mFaceArray, -1, sizeof(mFaceArray));
+                        mFaceArray[0] = 0; //faces_detected * 4;
+
+                        mFacesDetected = roi->d.hdr.num_face_detected;
+                        if(mFacesDetected > MAX_ROI)
+                          mFacesDetected = MAX_ROI;
+                    }
+                    break;
+                case FD_ROI_TYPE_DATA:
+                    {
+                        int idx = roi->d.data.idx;
+                        if (idx < mFacesDetected) {
+                            mFaceArray[idx*4+1] = roi->d.data.face.face_boundary.x;
+                            mFaceArray[idx*4+2] = roi->d.data.face.face_boundary.y;
+                            mFaceArray[idx*4+3] = roi->d.data.face.face_boundary.dx;
+                            mFaceArray[idx*4+4] = roi->d.data.face.face_boundary.dy;
+                            mNumFDRcvd++;
+                            if (mNumFDRcvd == mFacesDetected) {
+                                mFaceArray[0] = mFacesDetected * 4;
+                                if(mMetaDataHeap != NULL){
+                                    ALOGV("runPreviewThread mMetaDataHEap is non-NULL");
+                                    memcpy((uint32_t *)mMetaDataHeap->mHeap->base(), (uint32_t *)mFaceArray, sizeof(mFaceArray));
+                                }
+                            }
+                        }
+                    }
+                    break;
+                }
+            }
+            mMetaDataWaitLock.unlock();
+        }
+        bufferIndex = mapFrame(handle);
+        if(bufferIndex >= 0) {
+           LINK_camframe_add_frame(CAM_PREVIEW_FRAME, &frames[bufferIndex]);
+           private_handle_t *bhandle = (private_handle_t *)(*handle);
+           if (GENLOCK_NO_ERROR != genlock_lock_buffer(bhandle, GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+                ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+                frame_buffer[bufferIndex].lockState = BUFFER_UNLOCKED;
+           } else {
+                frame_buffer[bufferIndex].lockState = BUFFER_LOCKED;
+           }
+        } else {
+          ALOGE("Could not find the Frame");
+
+          // Special case: stopPreview is issued, which causes the thumbnail buffer
+          // to be cancelled while the frame thread has still not exited. In the preview
+          // thread, dequeue then returns an incorrect buffer id (the previously cancelled
+          // thumbnail buffer) and we hit the "Could not find the Frame" error. We need to
+          // cancel the incorrectly dequeued buffer here to ensure that all buffers are
+          // available for the next startPreview call.
+
+          mDisplayLock.lock();
+          ALOGV(" error Cancelling preview buffers  ");
+	    retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+		          handle);
+          if(retVal != NO_ERROR)
+              ALOGE("%s:  cancelBuffer failed for buffer", __FUNCTION__);
+          mDisplayLock.unlock();
+        }
+      }
+    mPreviewThreadWaitLock.lock();
+    mPreviewThreadRunning = false;
+    mPreviewThreadWait.signal();
+    mPreviewThreadWaitLock.unlock();
+}
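+/* The map*Buffer helpers below translate a frame or buffer returned by the
+ * driver into an index in the corresponding HAL-side array by doing a linear
+ * search over the known buffers; they return -1 when no match is found. */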
+int QualcommCameraHardware::mapBuffer(struct msm_frame *frame) {
+  int ret = -1;
+  for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+     if (frame_buffer[cnt].frame->buffer == frame->buffer) {
+        ret = cnt;
+        break;
+       }
+    }
+  return ret;
+}
+int QualcommCameraHardware::mapvideoBuffer(struct msm_frame *frame)
+{
+  int ret = -1;
+  for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+     if ((unsigned int)mRecordMapped[cnt]->data == (unsigned int)frame->buffer) {
+       ret = cnt;
+       ALOGE("found match returning %d", ret);
+       break;
+     }
+  }
+  return ret;
+
+}
+int QualcommCameraHardware::mapRawBuffer(struct msm_frame *frame)
+{
+  int ret = -1;
+  for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+     if ((unsigned int)mRawMapped[cnt]->data == (unsigned int)frame->buffer) {
+       ret = cnt;
+       ALOGE("found match returning %d", ret);
+       break;
+     }
+  }
+  return ret;
+}
+int QualcommCameraHardware::mapThumbnailBuffer(struct msm_frame *frame)
+{
+  int ret = -1;
+  for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+     if ((unsigned int)(uint8_t *)mThumbnailMapped[cnt] == (unsigned int)frame->buffer) {
+       ret = cnt;
+       ALOGE("found match returning %d", ret);
+       break;
+     }
+  }
+  if(ret < 0) ALOGE("mapThumbnailBuffer, could not find match");
+  return ret;
+}
+int QualcommCameraHardware::mapJpegBuffer(mm_camera_buffer_t *encode_buffer)
+{
+  int ret = -1;
+  for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+     if ((unsigned int)mJpegMapped[cnt]->data == (unsigned int)encode_buffer->ptr) {
+       ret = cnt;
+       ALOGE("found match returning %d", ret);
+       break;
+     }
+  }
+  return ret;
+}
+int QualcommCameraHardware::mapFrame(buffer_handle_t *buffer) {
+  int ret = -1;
+  for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+     if (frame_buffer[cnt].buffer == buffer) {
+       ret = cnt;
+       break;
+     }
+  }
+  return ret;
+}
+
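+/* pthread entry points: each one looks up the QualcommCameraHardware
+ * singleton and dispatches to the matching run*Thread() method, bailing out
+ * if the object has already gone away. */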
+void *preview_thread(void *user)
+{
+    ALOGI("preview_thread E");
+    QualcommCameraHardware  *obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runPreviewThread(user);
+    }
+    else ALOGE("not starting preview thread: the object went away!");
+    ALOGI("preview_thread X");
+    return NULL;
+}
+
+void *hfr_thread(void *user)
+{
+    ALOGI("hfr_thread E");
+    QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runHFRThread(user);
+    }
+    else ALOGE("not starting hfr thread: the object went away!");
+    ALOGI("hfr_thread X");
+    return NULL;
+}
+
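+/* runHFRThread: restarts preview so that a high-frame-rate change takes
+ * effect. It stops preview, cancels and unregisters the thumbnail buffers
+ * held by the preview window, re-applies the current parameters and then
+ * starts preview again. */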
+void QualcommCameraHardware::runHFRThread(void *data)
+{
+    ALOGD("runHFRThread E");
+    mInHFRThread = true;
+    CAMERA_HAL_UNUSED(data);
+    ALOGI("%s: stopping Preview", __FUNCTION__);
+    stopPreviewInternal();
+
+    // Release thumbnail Buffers
+    if( mPreviewWindow != NULL ) {
+        private_handle_t *handle;
+        for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+            if(mPreviewWindow != NULL && mThumbnailBuffer[cnt] != NULL) {
+                handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+                ALOGV("%s:  Cancelling postview buffer %d ", __FUNCTION__, handle->fd);
+                ALOGV("runHfrThread : display lock");
+                mDisplayLock.lock();
+                if (BUFFER_LOCKED == mThumbnailLockState[cnt]) {
+                    if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+                       ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                       mDisplayLock.unlock();
+                       continue;
+                    } else {
+                       mThumbnailLockState[cnt] = BUFFER_UNLOCKED;
+                    }
+                }
+                status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                                              mThumbnailBuffer[cnt]);
+                if(retVal != NO_ERROR)
+                    ALOGE("%s: cancelBuffer failed for postview buffer %d",
+                                                     __FUNCTION__, handle->fd);
+                // unregister , unmap and release as well
+                int mBufferSize = previewWidth * previewHeight * 3/2;
+                int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+                if(mThumbnailMapped[cnt] && (mSnapshotFormat == PICTURE_FORMAT_JPEG)) {
+                    ALOGE("%s:  Unregistering Thumbnail Buffer %d ", __FUNCTION__, handle->fd);
+                    register_buf(mBufferSize,
+                        mBufferSize, mCbCrOffset, 0,
+                        handle->fd,
+                        0,
+                        (uint8_t *)mThumbnailMapped[cnt],
+                        MSM_PMEM_THUMBNAIL,
+                        false, false);
+                    if (munmap((void *)(mThumbnailMapped[cnt]), handle->size) == -1) {
+                      ALOGE("StopPreview : Error un-mmapping the thumbnail buffer %d", cnt);
+                    }
+                    mThumbnailBuffer[cnt] = NULL;
+                    mThumbnailMapped[cnt] = NULL;
+                }
+                ALOGV("runHfrThread : display unlock");
+                mDisplayLock.unlock();
+          }
+       }
+    }
+
+    ALOGV("%s: setting parameters", __FUNCTION__);
+    setParameters(mParameters);
+    ALOGV("%s: starting Preview", __FUNCTION__);
+    if( mPreviewWindow == NULL)
+    {
+        startPreviewInternal();
+    }
+    else {
+        getBuffersAndStartPreview();
+    }
+
+    mHFRMode = false;
+    mInHFRThread = false;
+}
+
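+/* runVideoThread: loops on the busy frame queue, waiting for video frames
+ * from the driver and handing each one (either the mapped record buffer or
+ * its metadata handle, depending on mStoreMetaDataInFrame) to the recording
+ * callback; it exits when mVideoThreadExit is set by stop recording. */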
+void QualcommCameraHardware::runVideoThread(void *data)
+{
+    ALOGD("runVideoThread E");
+    msm_frame* vframe = NULL;
+    CAMERA_HAL_UNUSED(data);
+
+    while(true) {
+        pthread_mutex_lock(&(g_busy_frame_queue.mut));
+
+        // Exit the thread in case of stop recording.
+        mVideoThreadWaitLock.lock();
+        if(mVideoThreadExit){
+            ALOGV("Exiting video thread..");
+            mVideoThreadWaitLock.unlock();
+            pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+            break;
+        }
+        mVideoThreadWaitLock.unlock();
+
+        ALOGV("in video_thread : wait for video frame ");
+        // check if any frames are available in busyQ and give callback to
+        // services/video encoder
+        cam_frame_wait_video();
+        ALOGV("video_thread, wait over..");
+
+        // Exit the thread in case of stop recording.
+        mVideoThreadWaitLock.lock();
+        if(mVideoThreadExit){
+            ALOGV("Exiting video thread..");
+            mVideoThreadWaitLock.unlock();
+            pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+            break;
+        }
+        mVideoThreadWaitLock.unlock();
+
+        // Get the video frame to be encoded
+        vframe = cam_frame_get_video ();
+        pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+        ALOGE("in video_thread : got video frame %x",vframe);
+
+        /*if (UNLIKELY(mDebugFps)) {
+            debugShowVideoFPS();
+        }*/
+
+        if(vframe != NULL) {
+            // Find the offset within the heap of the current buffer.
+            //ALOGV("Got video frame :  buffer %d base %d ", vframe->buffer,
+              //(unsigned long int)mRecordHeap->mHeap->base());
+            //ssize_t offset =
+            //    (ssize_t)vframe->buffer - (ssize_t)mRecordHeap->mHeap->base();
+            //ALOGV("offset = %d , alignsize = %d , new_offset = %d", (int)offset, mRecordHeap->mAlignedBufferSize,
+             // (int)(offset / mRecordHeap->mAlignedBufferSize));
+
+            //offset /= mRecordHeap->mAlignedBufferSize;
+
+            //set the track flag to true for this video buffer
+            //record_buffers_tracking_flag[offset] = true;
+
+            /* Extract the timestamp of this frame */
+            nsecs_t timeStamp = nsecs_t(vframe->ts.tv_sec)*1000000000LL + vframe->ts.tv_nsec;
+
+            // dump frames for test purpose
+#if 0
+            static int frameCnt = 0;
+            if (frameCnt >= 11 && frameCnt <= 13 ) {
+                char buf[128];
+                sprintf(buf,  "/data/%d_v.yuv", frameCnt);
+                int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+                ALOGV("dumping video frame %d", frameCnt);
+                if (file_fd < 0) {
+                    ALOGE("cannot open file\n");
+                }
+                else
+                {
+                    write(file_fd, (const void *)vframe->buffer,
+                        vframe->cbcr_off * 3 / 2);
+                }
+                close(file_fd);
+          }
+          frameCnt++;
+#endif
+#if 0
+          if(mIs3DModeOn ) {
+              /* VPE will be taking care of zoom, so no need to
+               * use overlay's setCrop interface for zoom
+               * functionality.
+               */
+              /* get the offset of current video buffer for rendering */
+              ssize_t offset_addr = (ssize_t)vframe->buffer -
+                                      (ssize_t)mRecordHeap->mHeap->base();
+              /* To overcome a timing case where we could be having the overlay refer to deallocated
+                 mDisplayHeap(and showing corruption), the mDisplayHeap is not deallocated untill the
+                 first preview frame is queued to the overlay in 8660 */
+              if ((mCurrentTarget == TARGET_MSM8660)&&(mFirstFrame == true)) {
+                  ALOGD(" receivePreviewFrame : first frame queued, display heap being deallocated");
+                  mThumbnailHeap.clear();
+                  mDisplayHeap.clear();
+                  mFirstFrame = false;
+                  mPostviewHeap.clear();
+              }
+              mLastQueuedFrame = (void *)vframe->buffer;
+          }
+#endif
+            // Enable IF block to give frames to encoder , ELSE block for just simulation
+#if 1
+            ALOGV("in video_thread : got video frame, before if check giving frame to services/encoder");
+            mCallbackLock.lock();
+            int msgEnabled = mMsgEnabled;
+            camera_data_timestamp_callback rcb = mDataCallbackTimestamp;
+            void *rdata = mCallbackCookie;
+            mCallbackLock.unlock();
+
+            /* When 3D mode is ON, the video thread will be ON even in preview
+             * mode. We need to distinguish when recording is started. So, when
+             * 3D mode is ON, check for the recordingState (which will be set
+             * with start recording and reset in stop recording), before
+             * calling rcb.
+             */
+            int index = mapvideoBuffer(vframe);
+            if(!mIs3DModeOn) {
+                record_buffers_tracking_flag[index] = true;
+                if(rcb != NULL && (msgEnabled & CAMERA_MSG_VIDEO_FRAME) ) {
+                    ALOGV("in video_thread : got video frame, giving frame to services/encoder index = %d", index);
+                    if(mStoreMetaDataInFrame){
+                        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, metadata_memory[index],0,rdata);
+                    } else {
+                        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, mRecordMapped[index],0,rdata);
+                    }
+                }
+            }
+#if 0
+            else {
+                mCallbackLock.lock();
+                msgEnabled = mMsgEnabled;
+                data_callback pcb = mDataCallback;
+                void *pdata = mCallbackCookie;
+                mCallbackLock.unlock();
+                if (pcb != NULL) {
+                    ALOGE("pcb is not null");
+                    static int count = 0;
+                    //if(msgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+                    if (!count) {
+                        ALOGE("Giving first frame to app");
+                        pcb(CAMERA_MSG_PREVIEW_FRAME, mRecordHeap->mBuffers[offset],
+                                pdata);
+                        count++;
+                    }
+                }
+                if(mRecordingState == 1) {
+                    if(rcb != NULL && (msgEnabled & CAMERA_MSG_VIDEO_FRAME) ) {
+                        ALOGV("in video_thread 3D mode : got video frame, giving frame to services/encoder");
+                        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, mRecordHeap->mBuffers[offset], rdata);
+                    }
+                } else {
+                    /* When in preview mode, put the video buffer back into
+                     * free Q, for next availability.
+                     */
+                    ALOGV("in video_thread 3D mode : got video frame, putting frame to Free Q");
+                    record_buffers_tracking_flag[offset] = false;
+                    LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+                }
+            }
+#endif
+#else
+            // 720p output2  : simulate release frame here:
+            ALOGE("in video_thread simulation , releasing the video frame");
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+#endif
+
+        } else ALOGE("in video_thread get frame returned null");
+
+
+    } // end of while loop
+
+    mVideoThreadWaitLock.lock();
+    mVideoThreadRunning = false;
+    mVideoThreadWait.signal();
+    mVideoThreadWaitLock.unlock();
+
+    ALOGV("runVideoThread X");
+}
+
+void *video_thread(void *user)
+{
+    ALOGV("video_thread E");
+    CAMERA_HAL_UNUSED(user);
+
+    QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runVideoThread(user);
+    }
+    else ALOGE("not starting video thread: the object went away!");
+    ALOGV("video_thread X");
+    return NULL;
+}
+
+void *frame_thread(void *user)
+{
+    ALOGD("frame_thread E");
+    CAMERA_HAL_UNUSED(user);
+    QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runFrameThread(user);
+    }
+    else ALOGW("not starting frame thread: the object went away!");
+    ALOGD("frame_thread X");
+    return NULL;
+}
+
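+/* parse_size: parses a "<width>x<height>" string (e.g. "1280x720") into the
+ * width and height out-parameters; returns 0 on success and -1 if the 'x'
+ * separator is missing. */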
+static int parse_size(const char *str, int &width, int &height)
+{
+    // Find the width.
+    char *end;
+    int w = (int)strtol(str, &end, 10);
+    // If an 'x' or 'X' does not immediately follow, give up.
+    if ( (*end != 'x') && (*end != 'X') )
+        return -1;
+
+    // Find the height, immediately after the 'x'.
+    int h = (int)strtol(end+1, 0, 10);
+
+    width = w;
+    height = h;
+
+    return 0;
+}
+QualcommCameraHardware* hardware;
+
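+/* allocate_ion_memory: opens /dev/ion, rounds the requested size up to a 4K
+ * page, allocates with a heap mask of the requested ion_type heap plus
+ * ION_IOMMU_HEAP_ID, and shares the handle to obtain a buffer fd in *memfd.
+ * On failure it unwinds through the goto labels, freeing the handle and
+ * closing the ion device. */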
+int QualcommCameraHardware::allocate_ion_memory(int *main_ion_fd, struct ion_allocation_data* alloc,
+     struct ion_fd_data* ion_info_fd, int ion_type, int size, int *memfd)
+{
+    int rc = 0;
+    struct ion_handle_data handle_data;
+
+    *main_ion_fd = open("/dev/ion", O_RDONLY | O_SYNC);
+    if (*main_ion_fd < 0) {
+      ALOGE("Ion dev open failed\n");
+      ALOGE("Error is %s\n", strerror(errno));
+      goto ION_OPEN_FAILED;
+    }
+    alloc->len = size;
+    /* to make it page size aligned */
+    alloc->len = (alloc->len + 4095) & (~4095);
+    alloc->align = 4096;
+    alloc->flags = (0x1 << ion_type | 0x1 << ION_IOMMU_HEAP_ID);
+
+    rc = ioctl(*main_ion_fd, ION_IOC_ALLOC, alloc);
+    if (rc < 0) {
+      ALOGE("ION allocation failed\n");
+      goto ION_ALLOC_FAILED;
+    }
+
+    ion_info_fd->handle = alloc->handle;
+    rc = ioctl(*main_ion_fd, ION_IOC_SHARE, ion_info_fd);
+    if (rc < 0) {
+      ALOGE("ION map failed %s\n", strerror(errno));
+      goto ION_MAP_FAILED;
+    }
+    *memfd = ion_info_fd->fd;
+    return 0;
+
+ION_MAP_FAILED:
+    handle_data.handle = ion_info_fd->handle;
+    ioctl(*main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(*main_ion_fd);
+ION_OPEN_FAILED:
+    return -1;
+}
+int QualcommCameraHardware::deallocate_ion_memory(int *main_ion_fd, struct ion_fd_data* ion_info_fd)
+{
+    struct ion_handle_data handle_data;
+    int rc = 0;
+
+    handle_data.handle = ion_info_fd->handle;
+    ioctl(*main_ion_fd, ION_IOC_FREE, &handle_data);
+    close(*main_ion_fd);
+    return rc;
+}
+
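+/* initPreview: derives the preview/video dimensions from the parameters,
+ * waits for any previous frame thread and pending snapshot to finish,
+ * programs CAMERA_PARM_DIMENSION, allocates record buffers on dual-VFE
+ * targets (7x30/8250/8660), and spawns the detached preview and frame
+ * threads. */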
+bool QualcommCameraHardware::initPreview()
+{
+    const char * pmem_region;
+    int CbCrOffset = 0;
+    int ion_heap;
+    mParameters.getPreviewSize(&previewWidth, &previewHeight);
+    const char *recordSize = NULL;
+    recordSize = mParameters.get(QCameraParameters::KEY_VIDEO_SIZE);
+ALOGE("%s Got preview dimension as %d x %d ", __func__, previewWidth, previewHeight);
+    if(!recordSize) {
+         //If application didn't set this parameter string, use the values from
+         //getPreviewSize() as video dimensions.
+         ALOGV("No Record Size requested, use the preview dimensions");
+         videoWidth = previewWidth;
+         videoHeight = previewHeight;
+     } else {
+         //Extract the record width and height that the application requested.
+         if(!parse_size(recordSize, videoWidth, videoHeight)) {
+             //VFE output1 shouldn't be greater than VFE output2.
+             if( (previewWidth > videoWidth) || (previewHeight > videoHeight)) {
+                 //Set preview sizes as record sizes.
+                 ALOGI("Preview size %dx%d is greater than record size %dx%d,\
+                    resetting preview size to record size",previewWidth,\
+                      previewHeight, videoWidth, videoHeight);
+                 previewWidth = videoWidth;
+                 previewHeight = videoHeight;
+                 mParameters.setPreviewSize(previewWidth, previewHeight);
+             }
+             if( (mCurrentTarget != TARGET_MSM7630)
+                 && (mCurrentTarget != TARGET_QSD8250)
+                  && (mCurrentTarget != TARGET_MSM8660) ) {
+                 //For Single VFE output targets, use record dimensions as preview dimensions.
+                 previewWidth = videoWidth;
+                 previewHeight = videoHeight;
+                 mParameters.setPreviewSize(previewWidth, previewHeight);
+             }
+         } else {
+             ALOGE("initPreview X: failed to parse parameter record-size (%s)", recordSize);
+             return false;
+         }
+     }
+
+     mDimension.display_width = previewWidth;
+     mDimension.display_height= previewHeight;
+     mDimension.ui_thumbnail_width =
+             thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+     mDimension.ui_thumbnail_height =
+             thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+
+    ALOGV("initPreview E: preview size=%dx%d videosize = %d x %d", previewWidth, previewHeight, videoWidth, videoHeight );
+
+    if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+        mDimension.video_width = CEILING16(videoWidth);
+        /* Back up the video dimensions, as the video dimensions in mDimension
+         * will be modified when DIS is supported. Need the actual values
+         * to pass as part of the VPE config.
+         */
+        videoWidth = mDimension.video_width;
+        mDimension.video_height = videoHeight;
+        ALOGV("initPreview : preview size=%dx%d videosize = %d x %d", previewWidth, previewHeight,
+          mDimension.video_width, mDimension.video_height);
+    }
+
+    // See comments in deinitPreview() for why we have to wait for the frame
+    // thread here, and why we can't use pthread_join().
+    mFrameThreadWaitLock.lock();
+    while (mFrameThreadRunning) {
+        ALOGI("initPreview: waiting for old frame thread to complete.");
+        mFrameThreadWait.wait(mFrameThreadWaitLock);
+        ALOGI("initPreview: old frame thread completed.");
+    }
+    mFrameThreadWaitLock.unlock();
+
+    mInSnapshotModeWaitLock.lock();
+    while (mInSnapshotMode) {
+        ALOGI("initPreview: waiting for snapshot mode to complete.");
+        mInSnapshotModeWait.wait(mInSnapshotModeWaitLock);
+        ALOGI("initPreview: snapshot mode completed.");
+    }
+    mInSnapshotModeWaitLock.unlock();
+
+    pmem_region = "/dev/pmem_adsp";
+    ion_heap = ION_CAMERA_HEAP_ID;
+
+    int cnt = 0;
+
+    memset(&myv12_params, 0, sizeof(yv12_format_parms_t));
+    mPreviewFrameSize = previewWidth * previewHeight * 3/2;
+    ALOGE("Width = %d Height = %d \n", previewWidth, previewHeight);
+    if(mPreviewFormat == CAMERA_YUV_420_YV12) {
+       myv12_params.CbOffset = PAD_TO_WORD(previewWidth * previewHeight);
+       myv12_params.CrOffset = myv12_params.CbOffset + PAD_TO_WORD((previewWidth * previewHeight)/4);
+       mDimension.prev_format = CAMERA_YUV_420_YV12;
+       ALOGE("CbOffset = 0x%x CrOffset = 0x%x \n",myv12_params.CbOffset, myv12_params.CrOffset);
+    } else {
+      CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+      }
+
+    //Pass the yuv formats, display dimensions,
+    //so that vfe will be initialized accordingly.
+    mDimension.display_luma_width = previewWidth;
+    mDimension.display_luma_height = previewHeight;
+    mDimension.display_chroma_width = previewWidth;
+    mDimension.display_chroma_height = previewHeight;
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+        mPreviewFrameSize = PAD_TO_4K(CEILING32(previewWidth) * CEILING32(previewHeight)) +
+                                     2 * (CEILING32(previewWidth/2) * CEILING32(previewHeight/2));
+        CbCrOffset = PAD_TO_4K(CEILING32(previewWidth) * CEILING32(previewHeight));
+        mDimension.prev_format = CAMERA_YUV_420_NV21_ADRENO;
+        mDimension.display_luma_width = CEILING32(previewWidth);
+        mDimension.display_luma_height = CEILING32(previewHeight);
+        mDimension.display_chroma_width = 2 * CEILING32(previewWidth/2);
+        //Chroma Height is not needed as of now. Just sending with other dimensions.
+        mDimension.display_chroma_height = CEILING32(previewHeight/2);
+    }
+    ALOGV("mDimension.prev_format = %d", mDimension.prev_format);
+    ALOGV("mDimension.display_luma_width = %d", mDimension.display_luma_width);
+    ALOGV("mDimension.display_luma_height = %d", mDimension.display_luma_height);
+    ALOGV("mDimension.display_chroma_width = %d", mDimension.display_chroma_width);
+    ALOGV("mDimension.display_chroma_height = %d", mDimension.display_chroma_height);
+
+    dstOffset = 0;
+    //set DIS value to get the updated video width and height to calculate
+    //the required record buffer size
+    if(mVpeEnabled) {
+        bool status = setDIS();
+        if(status) {
+            ALOGE("Failed to set DIS");
+            return false;
+        }
+    }
+
+    //Pass the original video width and height and get the required width
+    //and height for record buffer allocation
+    mDimension.orig_video_width = videoWidth;
+    mDimension.orig_video_height = videoHeight;
+    if(mZslEnable){
+        //Limitation of ZSL  where the thumbnail and display dimensions should be the same
+        mDimension.ui_thumbnail_width = mDimension.display_width;
+        mDimension.ui_thumbnail_height = mDimension.display_height;
+        mParameters.getPictureSize(&mPictureWidth, &mPictureHeight);
+        if (updatePictureDimension(mParameters, mPictureWidth,
+          mPictureHeight)) {
+          mDimension.picture_width = mPictureWidth;
+          mDimension.picture_height = mPictureHeight;
+        }
+    }
+    // mDimension will be filled with thumbnail_width, thumbnail_height,
+    // orig_picture_dx, and orig_picture_dy after this function call. We need to
+    // keep it for jpeg_encoder_encode.
+    bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+                               sizeof(cam_ctrl_dimension_t), &mDimension);
+#if 0
+    if(mIs3DModeOn != true) {
+      if(mInHFRThread == false)
+      {
+        mPrevHeapDeallocRunning = false;
+#ifdef USE_ION
+        mPreviewHeap = new IonPool(ion_heap,
+                                MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+                                mPreviewFrameSize,
+                                kPreviewBufferCountActual,
+                                mPreviewFrameSize,
+                                CbCrOffset,
+                                0,
+                                "preview");
+#else
+        mPreviewHeap = new PmemPool(pmem_region,
+                                MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+                                mPreviewFrameSize,
+                                kPreviewBufferCountActual,
+                                mPreviewFrameSize,
+                                CbCrOffset,
+                                0,
+                                "preview");
+#endif
+        if (!mPreviewHeap->initialized()) {
+          mPreviewHeap.clear();
+          ALOGE("initPreview X: could not initialize Camera preview heap.");
+          return false;
+        }
+      }
+      else
+      {
+          for (int cnt = 0; cnt < kPreviewBufferCountActual; ++cnt) {
+              bool status;
+              int active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+              status = register_buf(mPreviewFrameSize,
+                       mPreviewFrameSize,
+                       CbCrOffset,
+                       0,
+                       mPreviewHeap->mHeap->getHeapID(),
+                       mPreviewHeap->mAlignedBufferSize * cnt,
+                       (uint8_t *)mPreviewHeap->mHeap->base() + mPreviewHeap->mAlignedBufferSize * cnt,
+                       MSM_PMEM_PREVIEW,
+                       active,
+                       true);
+              if(status == false){
+                  ALOGE("Registring Preview Buffers failed for HFR mode");
+                  return false;
+              }
+          }
+      }
+      // If the target is 7x27A and YV12 is set as the preview format, and the width
+      // is not 32-aligned, we need a separate buffer to hold the YV12 data
+    yv12framesize = (previewWidth*previewHeight)
+          + 2* ( CEILING16(previewWidth/2) * (previewHeight/2)) ;
+    if(( mPreviewFormat == CAMERA_YUV_420_YV12 ) &&
+        ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627 ) &&
+        previewWidth%32 != 0 ){
+        ALOGE("initpreview : creating YV12 heap as previewwidth %d not 32 aligned", previewWidth);
+#ifdef USE_ION
+        mYV12Heap = new IonPool(ion_heap,
+                                MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_PREVIEW,
+                                yv12framesize,
+                                NUM_YV12_FRAMES,
+                                yv12framesize,
+                                CbCrOffset,
+                                0,
+                                "postview");
+#else
+        mYV12Heap = new PmemPool(pmem_region,
+                                MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_PREVIEW,
+                                yv12framesize,
+                                NUM_YV12_FRAMES,
+                                yv12framesize,
+                                CbCrOffset,
+                                0,
+                                "postview");
+#endif
+            if (!mYV12Heap->initialized()) {
+                mYV12Heap.clear();
+                ALOGE("initPreview X: could not initialize YV12 Camera preview heap.");
+                return false;
+            }
+        }
+    }
+#endif
+
+    if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+
+        // Allocate video buffers after allocating preview buffers.
+        bool status = initRecord();
+        if(status != true) {
+            ALOGE("Failed to allocate video bufers");
+            return false;
+        }
+    }
+
+    if (ret) {
+        if(mIs3DModeOn != true) {
+            for (cnt = 0; cnt < kPreviewBufferCount; cnt++) {
+#if 0
+                frames[cnt].fd = mPreviewHeap->mHeap->getHeapID();
+                frames[cnt].buffer =
+                    (uint32_t)mPreviewHeap->mHeap->base() + mPreviewHeap->mAlignedBufferSize * cnt;
+                frames[cnt].y_off = 0;
+                frames[cnt].cbcr_off = CbCrOffset;
+                frames[cnt].path = OUTPUT_TYPE_P; // MSM_FRAME_ENC;
+#endif
+            }
+
+            mPreviewBusyQueue.init();
+            LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+            for(int i=ACTIVE_PREVIEW_BUFFERS ;i <kPreviewBufferCount; i++)
+                LINK_camframe_add_frame(CAM_PREVIEW_FRAME,&frames[i]);
+
+            mPreviewThreadWaitLock.lock();
+            pthread_attr_t pattr;
+            pthread_attr_init(&pattr);
+            pthread_attr_setdetachstate(&pattr, PTHREAD_CREATE_DETACHED);
+
+            mPreviewThreadRunning = !pthread_create(&mPreviewThread,
+                                      &pattr,
+                                      preview_thread,
+                                      (void*)NULL);
+            ret = mPreviewThreadRunning;
+            mPreviewThreadWaitLock.unlock();
+
+            if(ret == false)
+                return ret;
+        }
+
+
+        mFrameThreadWaitLock.lock();
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+        camframeParams.cammode = CAMERA_MODE_2D;
+
+        if (mIs3DModeOn) {
+            camframeParams.cammode = CAMERA_MODE_3D;
+        } else {
+            camframeParams.cammode = CAMERA_MODE_2D;
+        }
+        LINK_cam_frame_set_exit_flag(0);
+
+        mFrameThreadRunning = !pthread_create(&mFrameThread,
+                                              &attr,
+                                              frame_thread,
+                                              &camframeParams);
+        ret = mFrameThreadRunning;
+        mFrameThreadWaitLock.unlock();
+        LINK_wait_cam_frame_thread_ready();
+    }
+    mFirstFrame = true;
+
+    ALOGV("initPreview X: %d", ret);
+    return ret;
+}
+
+void QualcommCameraHardware::deinitPreview(void)
+{
+    ALOGI("deinitPreview E");
+
+    mPreviewBusyQueue.deinit();
+
+    // When we call deinitPreview(), we signal to the frame thread that it
+    // needs to exit, but we DO NOT WAIT for it to complete here.  The problem
+    // is that deinitPreview is sometimes called from the frame-thread's
+    // callback, when the refcount on the Camera client reaches zero.  If we
+    // called pthread_join(), we would deadlock.  So, we just call
+    // LINK_camframe_terminate() in deinitPreview(), which makes sure that
+    // after the preview callback returns, the camframe thread will exit.  We
+    // could call pthread_join() in initPreview() to join the last frame
+    // thread.  However, we would also have to call pthread_join() in release
+    // as well, shortly before we destroy the object; this would cause the same
+    // deadlock, since release(), like deinitPreview(), may also be called from
+    // the frame-thread's callback. Thus we have to make the frame thread
+    // detached, and use a separate mechanism to wait for it to complete.
+
+    LINK_camframe_terminate();
+    ALOGI("deinitPreview X");
+}
+
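+/* initRawSnapshot: pushes the current dimensions to the driver, sizes the
+ * raw capture from raw_picture_width x raw_picture_height, and allocates the
+ * snapshot memory for a single raw capture. */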
+bool QualcommCameraHardware::initRawSnapshot()
+{
+    ALOGV("initRawSnapshot E");
+    const char * pmem_region;
+
+    //get width and height from Dimension Object
+    bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+                               sizeof(cam_ctrl_dimension_t), &mDimension);
+
+
+    if(!ret){
+        ALOGE("initRawSnapshot X: failed to set dimension");
+        return false;
+    }
+    int rawSnapshotSize = mDimension.raw_picture_height *
+                           mDimension.raw_picture_width;
+
+    ALOGV("raw_snapshot_buffer_size = %d, raw_picture_height = %d, "\
+         "raw_picture_width = %d",
+          rawSnapshotSize, mDimension.raw_picture_height,
+          mDimension.raw_picture_width);
+
+    // Create Memory for Raw Snapshot
+    if( createSnapshotMemory(numCapture, numCapture, false, PICTURE_FORMAT_RAW) == false ) // TODO : check if the numbers are correct
+    {
+        ALOGE("ERROR :  initRawSnapshot , createSnapshotMemory failed");
+        return false;
+    }
+
+    mRawCaptureParms.num_captures = 1;
+    mRawCaptureParms.raw_picture_width = mDimension.raw_picture_width;
+    mRawCaptureParms.raw_picture_height = mDimension.raw_picture_height;
+
+    ALOGV("initRawSnapshot X");
+    return true;
+
+}
+bool QualcommCameraHardware::initZslBuffers(bool initJpegHeap){
+    ALOGE("Init ZSL buffers E");
+    const char * pmem_region;
+    int ion_heap = ION_CP_MM_HEAP_ID;
+    int postViewBufferSize;
+
+    mPostviewWidth = mDimension.display_width;
+    mPostviewHeight =  mDimension.display_height;
+
+    //postview buffer initialization
+    postViewBufferSize  = mPostviewWidth * mPostviewHeight * 3 / 2;
+    int CbCrOffsetPostview = PAD_TO_WORD(mPostviewWidth * mPostviewHeight);
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+        postViewBufferSize  = PAD_TO_4K(CEILING32(mPostviewWidth) * CEILING32(mPostviewHeight)) +
+                                  2 * (CEILING32(mPostviewWidth/2) * CEILING32(mPostviewHeight/2));
+        CbCrOffsetPostview = PAD_TO_4K(CEILING32(mPostviewWidth) * CEILING32(mPostviewHeight));
+    }
+
+    //Snapshot buffer initialization
+    mRawSize = mPictureWidth * mPictureHeight * 3 / 2;
+    mCbCrOffsetRaw = PAD_TO_WORD(mPictureWidth * mPictureHeight);
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+        mRawSize = PAD_TO_4K(CEILING32(mPictureWidth) * CEILING32(mPictureHeight)) +
+                            2 * (CEILING32(mPictureWidth/2) * CEILING32(mPictureHeight/2));
+        mCbCrOffsetRaw = PAD_TO_4K(CEILING32(mPictureWidth) * CEILING32(mPictureHeight));
+    }
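+    /* Worked example of the padded sizes above (illustrative only, on the
+     * assumption that CEILING32() rounds up to a multiple of 32 and
+     * PAD_TO_4K() rounds up to a multiple of 4096): for a hypothetical
+     * 1000x750 picture in the Adreno NV21 layout,
+     *   luma   = PAD_TO_4K(CEILING32(1000) * CEILING32(750))
+     *          = PAD_TO_4K(1024 * 768)  = 786432
+     *   chroma = 2 * (CEILING32(500) * CEILING32(375))
+     *          = 2 * (512 * 384)        = 393216
+     *   mRawSize = 786432 + 393216      = 1179648 bytes
+     * versus the unpadded YUV420 size of 1000 * 750 * 3 / 2 = 1125000 bytes.
+     */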
+
+    //Jpeg buffer initialization
+    if( mCurrentTarget == TARGET_MSM7627 ||
+       (mCurrentTarget == TARGET_MSM7625A ||
+        mCurrentTarget == TARGET_MSM7627A))
+        mJpegMaxSize = CEILING16(mPictureWidth) * CEILING16(mPictureHeight) * 3 / 2;
+    else {
+        mJpegMaxSize = mPictureWidth * mPictureHeight * 3 / 2;
+        if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO){
+            mJpegMaxSize =
+               PAD_TO_4K(CEILING32(mPictureWidth) * CEILING32(mPictureHeight)) +
+                    2 * (CEILING32(mPictureWidth/2) * CEILING32(mPictureHeight/2));
+        }
+    }
+
+    cam_buf_info_t buf_info;
+    int yOffset = 0;
+    buf_info.resolution.width = mPictureWidth;
+    buf_info.resolution.height = mPictureHeight;
+    if(mPreviewFormat != CAMERA_YUV_420_NV21_ADRENO) {
+        mCfgControl.mm_camera_get_parm(CAMERA_PARM_BUFFER_INFO, (void *)&buf_info);
+        mRawSize = buf_info.size;
+        mJpegMaxSize = mRawSize;
+        mCbCrOffsetRaw = buf_info.cbcr_offset;
+        yOffset = buf_info.yoffset;
+    }
+
+    ALOGV("initZslBuffer: initializing mRawHeap.");
+    if(mCurrentTarget == TARGET_MSM8660) {
+       pmem_region = "/dev/pmem_smipool";
+    } else {
+       pmem_region = "/dev/pmem_adsp";
+    }
+    //Main Raw Image
+    #if 0
+#ifdef USE_ION
+    mRawHeap =
+        new IonPool( ion_heap,
+                     MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                     MSM_PMEM_MAINIMG,
+                     mJpegMaxSize,
+                     MAX_SNAPSHOT_BUFFERS,
+                     mRawSize,
+                     mCbCrOffsetRaw,
+                     yOffset,
+                     "snapshot camera");
+#else
+    mRawHeap =
+        new PmemPool(pmem_region,
+                     MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                     MSM_PMEM_MAINIMG,
+                     mJpegMaxSize,
+                     MAX_SNAPSHOT_BUFFERS,
+                     mRawSize,
+                     mCbCrOffsetRaw,
+                     yOffset,
+                     "snapshot camera");
+#endif
+    if (!mRawHeap->initialized()) {
+       ALOGE("initZslBuffer X failed ");
+       mRawHeap.clear();
+       ALOGE("initRaw X: error initializing mRawHeap");
+       return false;
+    }
+
+
+    // Jpeg
+    if (initJpegHeap) {
+        ALOGV("initZslRaw: initializing mJpegHeap.");
+        mJpegHeap =
+            new AshmemPool(mJpegMaxSize,
+                           (MAX_SNAPSHOT_BUFFERS - 2),  // It is the max number of snapshot supported.
+                           0, // we do not know how big the picture will be
+                           "jpeg");
+
+        if (!mJpegHeap->initialized()) {
+            mJpegHeap.clear();
+            mRawHeap.clear();
+            ALOGE("initZslRaw X failed: error initializing mJpegHeap.");
+            return false;
+        }
+    }
+
+    //PostView
+    pmem_region = "/dev/pmem_adsp";
+    ion_heap = ION_HEAP_ADSP_ID;
+#ifdef USE_ION
+    mPostviewHeap =
+            new IonPool(ion_heap,
+                        MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                        MSM_PMEM_THUMBNAIL,
+                        postViewBufferSize,
+                        MAX_SNAPSHOT_BUFFERS,
+                        postViewBufferSize,
+                        CbCrOffsetPostview,
+                        0,
+                        "thumbnail");
+#else
+    mPostviewHeap =
+            new PmemPool(pmem_region,
+                         MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                         MSM_PMEM_THUMBNAIL,
+                         postViewBufferSize,
+                         MAX_SNAPSHOT_BUFFERS,
+                         postViewBufferSize,
+                         CbCrOffsetPostview,
+                         0,
+                         "thumbnail");
+#endif
+
+    if (!mPostviewHeap->initialized()) {
+        mPostviewHeap.clear();
+        mJpegHeap.clear();
+        mRawHeap.clear();
+        ALOGE("initZslBuffer X failed: error initializing mPostviewHeap.");
+        return false;
+    }
+#endif
+    if( createSnapshotMemory(MAX_SNAPSHOT_BUFFERS, MAX_SNAPSHOT_BUFFERS, initJpegHeap) == false ) // TODO : check if the numbers are correct
+    {
+        ALOGE("ERROR :  initZslraw , createSnapshotMemory failed");
+        return false;
+    }
+    /* frame all the exif and encode information into encode_params_t */
+    initImageEncodeParameters(MAX_SNAPSHOT_BUFFERS);
+
+    ALOGV("initZslRaw X");
+    return true;
+}
+
+bool QualcommCameraHardware::deinitZslBuffers()
+{   ALOGE("deinitZslBuffers E");
+    for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+       if(NULL != mRawMapped[cnt]) {
+         ALOGE("Unregister MAIN_IMG");
+         register_buf(mJpegMaxSize,
+                  mRawSize,mCbCrOffsetRaw,0,
+                  mRawfd[cnt],0,
+                  (uint8_t *)mRawMapped[cnt]->data,
+                  MSM_PMEM_MAINIMG,
+                  0, 0);
+            mRawMapped[cnt]->release(mRawMapped[cnt]);
+            mRawMapped[cnt] = NULL;
+            close(mRawfd[cnt]);
+#ifdef USE_ION
+            deallocate_ion_memory(&raw_main_ion_fd[cnt], &raw_ion_info_fd[cnt]);
+#endif
+        }
+    }
+    for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS) : numCapture); cnt++) {
+        if(mJpegMapped[cnt]) {
+            mJpegMapped[cnt]->release(mJpegMapped[cnt]);
+            mJpegMapped[cnt] = NULL;
+        }
+    }
+    ALOGE("deinitZslBuffers X");
+    return true;
+}
+
+bool QualcommCameraHardware::createSnapshotMemory (int numberOfRawBuffers, int numberOfJpegBuffers,
+                                                   bool initJpegHeap, int snapshotFormat)
+{
+    char * pmem_region;
+    int ret;
+    int ion_heap = ION_CP_MM_HEAP_ID;
+    if(mCurrentTarget == TARGET_MSM8660) {
+       pmem_region = "/dev/pmem_smipool";
+    } else {
+       pmem_region = "/dev/pmem_adsp";
+    }
+    if( snapshotFormat == PICTURE_FORMAT_JPEG) {
+        // Create Raw memory for snapshot
+        for(int cnt = 0; cnt < numberOfRawBuffers; cnt++)
+        {
+        #ifdef USE_ION
+            if (allocate_ion_memory(&raw_main_ion_fd[cnt], &raw_alloc[cnt], &raw_ion_info_fd[cnt],
+                                    ion_heap, mJpegMaxSize, &mRawfd[cnt]) < 0){
+              ALOGE("do_mmap: Open device %s failed!\n",pmem_region);
+              return NULL;
+            }
+        #else
+            mRawfd[cnt] = open(pmem_region, O_RDWR|O_SYNC);
+            if (mRawfd[cnt] <= 0) {
+                ALOGE("%s: Open device %s failed!\n",__func__, pmem_region);
+                    return false;
+            }
+        #endif
+            ALOGE("%s  Raw memory index: %d , fd is %d ", __func__, cnt, mRawfd[cnt]);
+            mRawMapped[cnt]=mGetMemory(mRawfd[cnt], mJpegMaxSize,1,mCallbackCookie);
+            if(mRawMapped[cnt] == NULL) {
+                ALOGE("Failed to get camera memory for mRawMapped heap index: %d", cnt);
+                return false;
+            }else{
+               ALOGE("Received following info for raw mapped data:%p,handle:%p, size:%d,release:%p",
+               mRawMapped[cnt]->data ,mRawMapped[cnt]->handle, mRawMapped[cnt]->size, mRawMapped[cnt]->release);
+            }
+            // Register Raw frames
+            ALOGE("Registering buffer %d with fd :%d with kernel",cnt,mRawfd[cnt]);
+            int active = (cnt < ACTIVE_ZSL_BUFFERS);  // TODO check ?
+            register_buf(mJpegMaxSize,
+                mRawSize,
+                mCbCrOffsetRaw,
+                mYOffset,
+                mRawfd[cnt],0,
+                (uint8_t *)mRawMapped[cnt]->data,
+                MSM_PMEM_MAINIMG,
+                active);
+        }
+        // Create Jpeg memory for snapshot
+        if (initJpegHeap)
+        {
+            for(int cnt = 0; cnt < numberOfJpegBuffers; cnt++)
+            {
+                ALOGE("%s  Jpeg memory index: %d , fd is %d ", __func__, cnt, mJpegfd[cnt]);
+                mJpegMapped[cnt]=mGetMemory(-1, mJpegMaxSize,1,mCallbackCookie);
+                if(mJpegMapped[cnt] == NULL) {
+                    ALOGE("Failed to get camera memory for mJpegMapped heap index: %d", cnt);
+                    return false;
+                }else{
+                   ALOGE("Received following info for jpeg mapped data:%p,handle:%p, size:%d,release:%p",
+                   mJpegMapped[cnt]->data ,mJpegMapped[cnt]->handle, mJpegMapped[cnt]->size, mJpegMapped[cnt]->release);
+                }
+            }
+        }
+        // Lock Thumbnail buffers, and register them
+        ALOGE("Locking and registering Thumbnail buffer(s)");
+        for(int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+            // TODO : change , lock all thumbnail buffers
+            if((mPreviewWindow != NULL) && (mThumbnailBuffer[cnt] != NULL)) {
+                ALOGE("createsnapshotbuffers : display lock");
+                mDisplayLock.lock();
+                /* Lock the postview buffer before use */
+                ALOGV(" Locking thumbnail/postview buffer %d", cnt);
+                if( (ret = mPreviewWindow->lock_buffer(mPreviewWindow,
+                                 mThumbnailBuffer[cnt])) != NO_ERROR) {
+                    ALOGE(" Error locking postview buffer. Error = %d ", ret);
+                    ALOGE("createsnapshotbuffers : display unlock error");
+                    mDisplayLock.unlock();
+                    return false;
+                }
+                if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t*)(*mThumbnailBuffer[cnt]),
+                                                           GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+                    ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+                    mDisplayLock.unlock();
+                    return false;
+                } else {
+                    mThumbnailLockState[cnt] = BUFFER_LOCKED;
+                }
+                mDisplayLock.unlock();
+                ALOGE("createsnapshotbuffers : display unlock");
+            }
+
+            private_handle_t *thumbnailHandle;
+            int mBufferSize = previewWidth * previewHeight * 3/2;
+            int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+
+            if(mThumbnailBuffer[cnt]) {
+                thumbnailHandle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+                ALOGV("fd thumbnailhandle fd %d size %d", thumbnailHandle->fd, thumbnailHandle->size);
+                mThumbnailMapped [cnt]= (unsigned int) mmap(0, thumbnailHandle->size, PROT_READ|PROT_WRITE,
+                MAP_SHARED, thumbnailHandle->fd, 0);
+                if((void *)mThumbnailMapped[cnt] == MAP_FAILED){
+                    ALOGE(" Couldnt map Thumbnail buffer %d", errno);
+                    return false;
+                }
+                register_buf(mBufferSize,
+                    mBufferSize, mCbCrOffset, 0,
+                    thumbnailHandle->fd,
+                    0,
+                    (uint8_t *)mThumbnailMapped[cnt],
+                    MSM_PMEM_THUMBNAIL,
+                    (cnt < ACTIVE_ZSL_BUFFERS));
+            }
+        } // for loop locking and registering thumbnail buffers
+    }  else { // End if Format is Jpeg , start if format is RAW
+         if(numberOfRawBuffers ==1) {
+             int rawSnapshotSize = mDimension.raw_picture_height * mDimension.raw_picture_width;
+#ifdef USE_ION
+            if (allocate_ion_memory(&raw_snapshot_main_ion_fd, &raw_snapshot_alloc, &raw_snapshot_ion_info_fd,
+                                    ion_heap, rawSnapshotSize, &mRawSnapshotfd) < 0){
+              ALOGE("do_mmap: Open device %s failed!\n",pmem_region);
+              return false;
+            }
+#else
+            mRawSnapshotfd = open(pmem_region, O_RDWR|O_SYNC);
+            if (mRawSnapshotfd <= 0) {
+                ALOGE("%s: Open device %s failed for rawnspashot!\n",__func__, pmem_region);
+                return false;
+            }
+#endif
+            ALOGE("%s  Raw snapshot memory , fd is %d ", __func__, mRawSnapshotfd);
+            mRawSnapshotMapped=mGetMemory(mRawSnapshotfd,
+                                          rawSnapshotSize,
+                                          1,
+                                          mCallbackCookie);
+            if(mRawSnapshotMapped == NULL) {
+                ALOGE("Failed to get camera memory for mRawSnapshotMapped ");
+                return false;
+            }else{
+               ALOGE("Received following info for raw mapped data:%p,handle:%p, size:%d,release:%p",
+               mRawSnapshotMapped->data ,mRawSnapshotMapped->handle, mRawSnapshotMapped->size, mRawSnapshotMapped->release);
+            }
+            // Register raw snapshot frame
+            ALOGE("Registering RawSnapshot buffer with fd :%d with kernel",mRawSnapshotfd);
+            int active = 1;  // TODO check ?
+            register_buf(rawSnapshotSize,
+                         rawSnapshotSize,
+                         0,
+                         0,
+                         mRawSnapshotfd,
+                         0,
+                         (uint8_t *)mRawSnapshotMapped->data,
+                         MSM_PMEM_RAW_MAINIMG,
+                         active);
+         } else {
+             ALOGE("Multiple raw snapshot capture not supported for now....");
+             return false;
+         }
+    } // end else , if RAW format
+    return true;
+}
+bool QualcommCameraHardware::initRaw(bool initJpegHeap)
+{
+    const char * pmem_region;
+    int ion_heap;
+    int postViewBufferSize;
+    uint32_t pictureAspectRatio;
+    uint32_t i;
+    mParameters.getPictureSize(&mPictureWidth, &mPictureHeight);
+    mActualPictWidth = mPictureWidth;
+    mActualPictHeight = mPictureHeight;
+    if (updatePictureDimension(mParameters, mPictureWidth, mPictureHeight)) {
+        mDimension.picture_width = mPictureWidth;
+        mDimension.picture_height = mPictureHeight;
+    }
+    ALOGV("initRaw E: picture size=%dx%d", mPictureWidth, mPictureHeight);
+    int w_scale_factor = (mIs3DModeOn && mSnapshot3DFormat == SIDE_BY_SIDE_FULL) ? 2 : 1;
+
+    /* use the default thumbnail sizes */
+    mThumbnailHeight = thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+    mThumbnailWidth = (mThumbnailHeight * mPictureWidth)/ mPictureHeight;
+    /* see if we can get better thumbnail sizes (not mandatory?) */
+    pictureAspectRatio = (uint32_t)((mPictureWidth * Q12) / mPictureHeight);
+    for(i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ){
+        if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio)
+        {
+            mThumbnailWidth = thumbnail_sizes[i].width;
+            mThumbnailHeight = thumbnail_sizes[i].height;
+            break;
+        }
+    }
+    /* calculate thumbnail aspect ratio */
+    if(mCurrentTarget == TARGET_MSM7627 ) {
+        int thumbnail_aspect_ratio =
+        (uint32_t)((mThumbnailWidth * Q12) / mThumbnailHeight);
+
+       if (thumbnail_aspect_ratio < pictureAspectRatio) {
+
+          /* If the thumbnail is narrower than the main image (i.e., a wide-mode
+           * snapshot), adjust the height of the thumbnail to match the main
+           * image aspect ratio. */
+           mThumbnailHeight =
+           (mThumbnailWidth * Q12) / pictureAspectRatio;
+       } else if (thumbnail_aspect_ratio != pictureAspectRatio) {
+
+          /* If the thumbnail is wider than the main image, adjust the width of
+           * the thumbnail to match the main image aspect ratio. */
+           mThumbnailWidth  =
+           (mThumbnailHeight * pictureAspectRatio) / Q12;
+       }
+       /* make the dimensions multiple of 16 - JPEG requirement */
+       mThumbnailWidth = FLOOR16(mThumbnailWidth);
+       mThumbnailHeight = FLOOR16(mThumbnailHeight);
+       ALOGV("the thumbnail sizes are %dx%d",mThumbnailWidth,mThumbnailHeight);
+    }
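+    /* Worked example of the aspect-ratio math above (illustrative only,
+     * assuming Q12 == 4096 and FLOOR16() rounds down to a multiple of 16):
+     * for a hypothetical 1280x720 picture and an initial 512x384 thumbnail,
+     *   pictureAspectRatio     = (1280 * 4096) / 720 = 7281
+     *   thumbnail_aspect_ratio = (512  * 4096) / 384 = 5461
+     * The thumbnail is narrower than the picture, so its height is adjusted:
+     *   mThumbnailHeight = (512 * 4096) / 7281 = 288, and FLOOR16(288) = 288,
+     * giving a 512x288 thumbnail that matches the 16:9 picture.
+     */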
+
+    /* calculate postView size */
+    mPostviewWidth = mThumbnailWidth;
+    mPostviewHeight = mThumbnailHeight;
+    /* Try to keep the postview dimensions near to preview for better
+     * performance and user experience. If the postview and preview dimensions
+     * are same, then we can try to use the same overlay of preview for
+     * postview also. If not, we need to reset the overlay for postview.
+     * we will be getting the same dimensions for preview and postview
+     * in most of the cases. The only exception is for applications
+     * which won't use optimalPreviewSize based on picture size.
+    */
+    if((mPictureHeight >= previewHeight) &&
+       (mCurrentTarget != TARGET_MSM7627) && !mIs3DModeOn) {
+        mPostviewHeight = previewHeight;
+        mPostviewWidth = (previewHeight * mPictureWidth) / mPictureHeight;
+    }else if(mActualPictHeight < mThumbnailHeight){
+        mPostviewHeight = THUMBNAIL_SMALL_HEIGHT;
+        mPostviewWidth = (THUMBNAIL_SMALL_HEIGHT * mActualPictWidth)/ mActualPictHeight;
+        mThumbnailWidth = mPostviewWidth;
+        mThumbnailHeight = mPostviewHeight;
+    }
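+    /* Worked example of the postview sizing above (illustrative only): with a
+     * hypothetical 800x480 preview and a 2048x1536 picture, the first branch
+     * applies and
+     *   mPostviewHeight = previewHeight = 480
+     *   mPostviewWidth  = (480 * 2048) / 1536 = 640
+     * so the postview stays close to the preview dimensions, as the comment
+     * above intends.
+     */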
+
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO){
+        mDimension.main_img_format = CAMERA_YUV_420_NV21_ADRENO;
+        mDimension.thumb_format = CAMERA_YUV_420_NV21_ADRENO;
+    }
+
+    mDimension.ui_thumbnail_width = mPostviewWidth;
+    mDimension.ui_thumbnail_height = mPostviewHeight;
+
+    // mDimension will be filled with thumbnail_width, thumbnail_height,
+    // orig_picture_dx, and orig_picture_dy after this function call. We need to
+    // keep it for jpeg_encoder_encode.
+    bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+                               sizeof(cam_ctrl_dimension_t), &mDimension);
+
+    if(!ret) {
+        ALOGE("initRaw X: failed to set dimension");
+        return false;
+    }
+#if 0
+    if (mJpegHeap != NULL) {
+        ALOGV("initRaw: clearing old mJpegHeap.");
+        mJpegHeap.clear();
+    }
+#endif
+    //postview buffer initialization
+    postViewBufferSize  = mPostviewWidth * w_scale_factor * mPostviewHeight * 3 / 2;
+    int CbCrOffsetPostview = PAD_TO_WORD(mPostviewWidth * w_scale_factor * mPostviewHeight);
+
+    //Snapshot buffer initialization
+    mRawSize = mPictureWidth * w_scale_factor * mPictureHeight * 3 / 2;
+    mCbCrOffsetRaw = PAD_TO_WORD(mPictureWidth * w_scale_factor * mPictureHeight);
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+        mRawSize = PAD_TO_4K(CEILING32(mPictureWidth * w_scale_factor) * CEILING32(mPictureHeight)) +
+                            2 * (CEILING32(mPictureWidth * w_scale_factor/2) * CEILING32(mPictureHeight/2));
+        mCbCrOffsetRaw = PAD_TO_4K(CEILING32(mPictureWidth * w_scale_factor) * CEILING32(mPictureHeight));
+    }
+
+    //Jpeg buffer initialization
+    if( mCurrentTarget == TARGET_MSM7627 ||
+       (mCurrentTarget == TARGET_MSM7625A ||
+        mCurrentTarget == TARGET_MSM7627A))
+        mJpegMaxSize = CEILING16(mPictureWidth * w_scale_factor) * CEILING16(mPictureHeight) * 3 / 2;
+    else {
+        mJpegMaxSize = mPictureWidth * w_scale_factor * mPictureHeight * 3 / 2;
+        if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO){
+            mJpegMaxSize =
+               PAD_TO_4K(CEILING32(mPictureWidth * w_scale_factor) * CEILING32(mPictureHeight)) +
+                    2 * (CEILING32(mPictureWidth * w_scale_factor/2) * CEILING32(mPictureHeight/2));
+        }
+    }
+
+    int rotation = mParameters.getInt("rotation");
+    char mDeviceName[PROPERTY_VALUE_MAX];
+    property_get("ro.hw_plat", mDeviceName, "");
+    if(!strcmp(mDeviceName,"7x25A"))
+        rotation = (rotation + 90)%360;
+
+    if (mIs3DModeOn)
+        rotation = 0;
+    ret = native_set_parms(CAMERA_PARM_JPEG_ROTATION, sizeof(int), &rotation);
+    if(!ret){
+        ALOGE("setting camera id failed");
+        return false;
+    }
+    cam_buf_info_t buf_info;
+    if(mIs3DModeOn == false)
+    {
+        buf_info.resolution.width = mPictureWidth * w_scale_factor;
+        buf_info.resolution.height = mPictureHeight;
+        mCfgControl.mm_camera_get_parm(CAMERA_PARM_BUFFER_INFO, (void *)&buf_info);
+        mRawSize = buf_info.size;
+        mJpegMaxSize = mRawSize;
+        mCbCrOffsetRaw = buf_info.cbcr_offset;
+        mYOffset = buf_info.yoffset;
+    }
+    int mBufferSize;
+    int CbCrOffset;
+    if(mCurrentTarget != TARGET_MSM7627 && mCurrentTarget != TARGET_MSM7627A){
+        mParameters.getPreviewSize(&previewWidth, &previewHeight);
+        mBufferSize = previewWidth * previewHeight * 3/2;
+        CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+    }
+    else {
+        mBufferSize = mPostviewWidth * mPostviewHeight * 3/2;
+        CbCrOffset = PAD_TO_WORD(mPostviewWidth * mPostviewHeight);
+    }
+
+    ALOGV("initRaw: initializing mRawHeap.");
+
+    //PostView
+    pmem_region = "/dev/pmem_adsp";
+    ion_heap = ION_CAMERA_HEAP_ID;
+    // Create memory for Raw YUV frames and Jpeg images
+    if( createSnapshotMemory(numCapture, numCapture, initJpegHeap) == false )
+    {
+        ALOGE("ERROR :  initraw , createSnapshotMemory failed");
+        return false;
+    }
+    /* frame all the exif and encode information into encode_params_t */
+
+    initImageEncodeParameters(numCapture);
+    /* fill main image size, thumbnail size, postview size into capture_params_t*/
+    memset(&mImageCaptureParms, 0, sizeof(capture_params_t));
+    mImageCaptureParms.num_captures = numCapture;
+    mImageCaptureParms.picture_width = mPictureWidth;
+    mImageCaptureParms.picture_height = mPictureHeight;
+    mImageCaptureParms.postview_width = mPostviewWidth;
+    mImageCaptureParms.postview_height = mPostviewHeight;
+
+    int width = mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    int height = mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+    if((width != 0) && (height != 0)) {
+        mImageCaptureParms.thumbnail_width = mThumbnailWidth;
+        mImageCaptureParms.thumbnail_height = mThumbnailHeight;
+    } else {
+        mImageCaptureParms.thumbnail_width = 0;
+        mImageCaptureParms.thumbnail_height = 0;
+    }
+
+    ALOGI("%s: picture size=%dx%d",__FUNCTION__,
+        mImageCaptureParms.picture_width, mImageCaptureParms.picture_height);
+    ALOGI("%s: postview size=%dx%d",__FUNCTION__,
+        mImageCaptureParms.postview_width, mImageCaptureParms.postview_height);
+    ALOGI("%s: thumbnail size=%dx%d",__FUNCTION__,
+        mImageCaptureParms.thumbnail_width, mImageCaptureParms.thumbnail_height);
+
+    ALOGV("initRaw X");
+    return true;
+}
+
+
+void QualcommCameraHardware::deinitRawSnapshot()
+{
+    ALOGV("deinitRawSnapshot E");
+
+    int rawSnapshotSize = mDimension.raw_picture_height * mDimension.raw_picture_width;
+    // Unregister and deallocate memory for the raw snapshot
+    if(mRawSnapshotMapped) {
+        register_buf(rawSnapshotSize,
+                     rawSnapshotSize,
+                     0,
+                     0,
+                     mRawSnapshotfd,
+                     0,
+                     (uint8_t *)mRawSnapshotMapped->data,
+                     MSM_PMEM_RAW_MAINIMG,
+                     false,
+                     false);
+        mRawSnapshotMapped->release(mRawSnapshotMapped);
+        mRawSnapshotMapped = NULL;
+        close(mRawSnapshotfd);
+#ifdef USE_ION
+        deallocate_ion_memory(&raw_snapshot_main_ion_fd, &raw_snapshot_ion_info_fd);
+#endif
+    }
+    ALOGV("deinitRawSnapshot X");
+}
+
+void QualcommCameraHardware::deinitRaw()
+{
+    ALOGV("deinitRaw E");
+    ALOGV("deinitRaw , clearing raw memory and jpeg memory");
+    for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+       if(NULL != mRawMapped[cnt]) {
+         ALOGE("Unregister MAIN_IMG");
+         register_buf(mJpegMaxSize,
+                  mRawSize,mCbCrOffsetRaw,0,
+                  mRawfd[cnt],0,
+                  (uint8_t *)mRawMapped[cnt]->data,
+                  MSM_PMEM_MAINIMG,
+                  0, 0);
+            mRawMapped[cnt]->release(mRawMapped[cnt]);
+            mRawMapped[cnt] = NULL;
+            close(mRawfd[cnt]);
+#ifdef USE_ION
+            deallocate_ion_memory(&raw_main_ion_fd[cnt], &raw_ion_info_fd[cnt]);
+#endif
+        }
+    }
+    for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS) : numCapture); cnt++) {
+        if(NULL != mJpegMapped[cnt]) {
+            mJpegMapped[cnt]->release(mJpegMapped[cnt]);
+            mJpegMapped[cnt] = NULL;
+        }
+    }
+    if( mPreviewWindow != NULL ) {
+        ALOGE("deinitRaw , clearing/cancelling thumbnail buffers:");
+        private_handle_t *handle;
+        for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+            if(mPreviewWindow != NULL && mThumbnailBuffer[cnt] != NULL) {
+                handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+                ALOGE("%s:  Cancelling postview buffer %d ", __FUNCTION__, handle->fd);
+                ALOGE("deinitraw : display lock");
+                mDisplayLock.lock();
+                if (BUFFER_LOCKED == mThumbnailLockState[cnt]) {
+                    if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+                       ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                    } else {
+                       mThumbnailLockState[cnt] = BUFFER_UNLOCKED;
+                    }
+                }
+                status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                                              mThumbnailBuffer[cnt]);
+                if(retVal != NO_ERROR)
+                    ALOGE("%s: cancelBuffer failed for postview buffer %d",
+                                                     __FUNCTION__, handle->fd);
+                if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+                    struct encoder_media_buffer_type * packet =
+                            (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+                    native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+                    metadata_memory[cnt]->release(metadata_memory[cnt]);
+                    metadata_memory[cnt] = NULL;
+                }
+                // unregister , unmap and release as well
+
+                int mBufferSize = previewWidth * previewHeight * 3/2;
+                int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+                if(mThumbnailMapped[cnt]) {
+                    ALOGE("%s:  Unregistering Thumbnail Buffer %d ", __FUNCTION__, handle->fd);
+                    register_buf(mBufferSize,
+                        mBufferSize, mCbCrOffset, 0,
+                        handle->fd,
+                        0,
+                        (uint8_t *)mThumbnailMapped[cnt],
+                        MSM_PMEM_THUMBNAIL,
+                        false, false);
+                     if (munmap((void *)(mThumbnailMapped[cnt]), handle->size) == -1) {
+                       ALOGE("deinitraw : Error un-mmapping the thumbnail buffer %d", cnt);
+                     }
+                     mThumbnailBuffer[cnt] = NULL;
+                     mThumbnailMapped[cnt] = NULL;
+                }
+                ALOGE("deinitraw : display unlock");
+                mDisplayLock.unlock();
+            }
+        }
+    }
+    ALOGV("deinitRaw X");
+}
+
+void QualcommCameraHardware::relinquishBuffers()
+{
+    status_t retVal;
+    ALOGV("%s: E ", __FUNCTION__);
+    mDisplayLock.lock();
+    if( mPreviewWindow != NULL) {
+      for(int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+         if (BUFFER_LOCKED == frame_buffer[cnt].lockState) {
+            ALOGE(" Cancelling preview buffers %d ",frames[cnt].fd);
+            if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+                                              (*(frame_buffer[cnt].buffer)))) {
+                ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+            } else {
+                frame_buffer[cnt].lockState = BUFFER_UNLOCKED;
+            }
+         }
+         retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+	                         frame_buffer[cnt].buffer);
+         mPreviewMapped[cnt]->release(mPreviewMapped[cnt]);
+         if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+             struct encoder_media_buffer_type * packet =
+                  (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+             native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+             metadata_memory[cnt]->release(metadata_memory[cnt]);
+             metadata_memory[cnt] = NULL;
+         }
+         ALOGE("release preview buffers");
+         if(retVal != NO_ERROR)
+           ALOGE("%s: cancelBuffer failed for preview buffer %d ",
+             __FUNCTION__, frames[cnt].fd);
+      }
+    } else {
+      ALOGV(" PreviewWindow is null, will not cancelBuffers ");
+    }
+    mDisplayLock.unlock();
+    ALOGV("%s: X ", __FUNCTION__);
+}
+status_t QualcommCameraHardware::set_PreviewWindow(void* param)
+{
+  ALOGE(": set_preview_window");
+  preview_stream_ops_t* window = (preview_stream_ops_t*)param;
+  return setPreviewWindow(window);
+}
+
+status_t QualcommCameraHardware::setPreviewWindow(preview_stream_ops_t* window)
+{
+    status_t retVal = NO_ERROR;
+    ALOGV(" %s: E ", __FUNCTION__);
+    if( window == NULL) {
+        ALOGW(" Setting NULL preview window ");
+        /* Current preview window will be invalidated.
+         * Release all the buffers back */
+        //@TODO: We may need to do this to avoid a leak
+       /*if(mPreviewWindow!=NULL)
+         relinquishBuffers();*/
+    }
+    ALOGE("Set preview window:: ");
+    mDisplayLock.lock();
+    mPreviewWindow = window;
+    mDisplayLock.unlock();
+
+    if( (mPreviewWindow != NULL) && mCameraRunning) {
+        /* Initial preview in progress. Stop it and start
+         * the actual preview */
+         stopInitialPreview();
+         retVal = getBuffersAndStartPreview();
+    }
+    ALOGV(" %s : X ", __FUNCTION__ );
+    return retVal;
+}
+
+status_t QualcommCameraHardware::getBuffersAndStartPreview() {
+    status_t retVal = NO_ERROR;
+	int stride;
+    bool all_chnls = false;
+    ALOGI(" %s : E ", __FUNCTION__);
+    mFrameThreadWaitLock.lock();
+    while (mFrameThreadRunning) {
+        ALOGV("%s: waiting for old frame thread to complete.", __FUNCTION__);
+        mFrameThreadWait.wait(mFrameThreadWaitLock);
+        ALOGV("%s: old frame thread completed.",__FUNCTION__);
+    }
+    mFrameThreadWaitLock.unlock();
+
+    if( mPreviewWindow!= NULL) {
+        ALOGV("%s: Calling native_window_set_buffer", __FUNCTION__);
+
+        android_native_buffer_t *mPreviewBuffer;
+        int32_t previewFormat;
+        const char *str = mParameters.getPreviewFormat();
+        int numMinUndequeuedBufs = 0;
+
+        int err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow,
+	    &numMinUndequeuedBufs);
+
+        if (err != 0) {
+            ALOGW("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
+                    strerror(-err), -err);
+            return err;
+        }
+        mTotalPreviewBufferCount = kPreviewBufferCount + numMinUndequeuedBufs;
+
+        previewFormat = attr_lookup(app_preview_formats,
+        sizeof(app_preview_formats) / sizeof(str_map), str);
+        if (previewFormat ==  NOT_FOUND) {
+          previewFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        }
+
+        retVal = mPreviewWindow->set_buffer_count(mPreviewWindow,
+                         mTotalPreviewBufferCount +
+                             (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture)); //1);
+
+        if(retVal != NO_ERROR) {
+            ALOGE("%s: Error while setting buffer count to %d ", __FUNCTION__, kPreviewBufferCount + 1);
+            return retVal;
+        }
+        mParameters.getPreviewSize(&previewWidth, &previewHeight);
+
+        retVal = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+	              previewWidth, previewHeight, previewFormat);
+
+        if(retVal != NO_ERROR) {
+            ALOGE("%s: Error while setting buffer geometry ", __FUNCTION__);
+            return retVal;
+        }
+
+#ifdef USE_ION
+        mPreviewWindow->set_usage (mPreviewWindow,
+            GRALLOC_USAGE_PRIVATE_CAMERA_HEAP |
+            GRALLOC_USAGE_PRIVATE_UNCACHED);
+#else
+        mPreviewWindow->set_usage (mPreviewWindow,
+            GRALLOC_USAGE_PRIVATE_ADSP_HEAP |
+            GRALLOC_USAGE_PRIVATE_UNCACHED);
+#endif
+        int CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+        int cnt = 0, active = 1;
+        int mBufferSize = previewWidth * previewHeight * 3/2;
+        for (cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+            //const native_handle *nh = (native_handle *)malloc (sizeof(native_handle));
+            buffer_handle_t *bhandle = NULL;// &nh; ;
+            //buffer_handle_t *bh_handle=&handle;
+            retVal = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+                                                    &(bhandle),
+                                                    &(stride));
+
+            if (retVal == NO_ERROR) {
+                /* Acquire lock on the buffer if it was successfully
+                 * dequeued from gralloc */
+                ALOGV(" Locking buffer %d ", cnt);
+                retVal = mPreviewWindow->lock_buffer(mPreviewWindow,
+                                            bhandle);
+                // lock the buffer using genlock
+                if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*bhandle),
+                                                      GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+                    ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+                    return -EINVAL;
+                }
+                ALOGE(" Locked buffer %d successfully", cnt);
+	//yyan todo use handle to find out mPreviewBuffer
+
+            } else {
+                ALOGE("%s: dequeueBuffer failed for preview buffer. Error = %d",
+                      __FUNCTION__, retVal);
+                return retVal;
+            }
+            if (retVal == NO_ERROR) {
+                private_handle_t *handle = (private_handle_t *)(*bhandle);//(private_handle_t *)mPreviewBuffer->handle;
+                ALOGE("Handle %p, Fd passed:%d, Base:%p, Size %p",
+                handle,handle->fd,handle->base,handle->size);
+
+                if(handle) {
+
+                  //thumbnailHandle = (private_handle_t *)mThumbnailBuffer->handle;
+                  ALOGV("fd mmap fd %d size %d", handle->fd, handle->size/*thumbnailHandle->size*/);
+                  mPreviewMapped[cnt]= mGetMemory(handle->fd,handle->size,1,mCallbackCookie);
+
+                  if(mPreviewMapped[cnt] == NULL) {
+                      ALOGE(" Failed to get camera memory for Preview buffer %d ",cnt);
+                      return UNKNOWN_ERROR;
+                  }else{
+                      ALOGE(" Mapped Preview buffer %d", cnt);
+                  }
+                  ALOGE("Got the following from get_mem data: %p, handle :%d, release : %p, size: %d",
+                       mPreviewMapped[cnt]->data,
+                       mPreviewMapped[cnt]->handle,
+                       mPreviewMapped[cnt]->release,
+                       mPreviewMapped[cnt]->size);
+                  ALOGE(" getbuffersandrestartpreview deQ %d", handle->fd);
+                  frames[cnt].fd = handle->fd;
+                  frames[cnt].buffer = (unsigned int)mPreviewMapped[cnt]->data;//(unsigned int)mPreviewHeap[cnt]->mHeap->base();
+                  if(((void *)frames[cnt].buffer == MAP_FAILED)
+                     || (frames[cnt].buffer == 0)) {
+                      ALOGE("%s: Couldnt map preview buffers", __FUNCTION__);
+                      return UNKNOWN_ERROR;
+                  }
+
+                  if(mPreviewFormat == CAMERA_YUV_420_YV12 && mCurrentTarget != TARGET_MSM7627A) {
+                    myv12_params.CbOffset = PAD_TO_WORD(previewWidth * previewHeight);
+                    myv12_params.CrOffset = myv12_params.CbOffset + PAD_TO_WORD((previewWidth * previewHeight)/4);
+                    ALOGE("CbOffset = 0x%x CrOffset = 0x%x \n",myv12_params.CbOffset, myv12_params.CrOffset);
+                    frames[cnt].planar0_off = 0;
+                    frames[cnt].planar1_off = myv12_params.CbOffset;
+                    frames[cnt].planar2_off = myv12_params.CrOffset;
+                    frames[cnt].path = OUTPUT_TYPE_P; // MSM_FRAME_ENC;
+                    all_chnls = true;
+                  }else{
+                    frames[cnt].planar0_off = 0;
+                    frames[cnt].planar1_off= CbCrOffset;
+                    frames[cnt].planar2_off = 0;
+                    frames[cnt].path = OUTPUT_TYPE_P; // MSM_FRAME_ENC;
+                  }
+                  frame_buffer[cnt].frame = &frames[cnt];
+                  frame_buffer[cnt].buffer = bhandle;
+                  frame_buffer[cnt].size = handle->size;
+                  frame_buffer[cnt].lockState = BUFFER_LOCKED;
+                  active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+
+                  ALOGE("Registering buffer %d with fd :%d with kernel",cnt,handle->fd);
+                  register_buf(mBufferSize,
+                             mBufferSize, CbCrOffset, 0,
+                             handle->fd,
+                             0,
+                             (uint8_t *)frames[cnt].buffer/*(uint8_t *)mThumbnailMapped*/,
+                             MSM_PMEM_PREVIEW,
+                             active,true,all_chnls);
+                  ALOGE("Came back from register call to kernel");
+                } else
+                    ALOGE("%s: setPreviewWindow: Could not get buffer handle", __FUNCTION__);
+            } else {
+                ALOGE("%s: lockBuffer failed for preview buffer. Error = %d",
+                         __FUNCTION__, retVal);
+                return retVal;
+            }
+        }
+
+
+        // Dequeue thumbnail/postview buffers here; consider ZSL/multishot cases
+        for (cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+
+            retVal = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+                                     &mThumbnailBuffer[cnt], &(stride));
+            if(retVal != NO_ERROR) {
+                ALOGE("%s: dequeueBuffer failed for postview buffer. Error = %d ",
+                                                            __FUNCTION__, retVal);
+                return retVal;
+            }
+            private_handle_t* handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+            ALOGE(" : dequeuing thumbnail buffer fd %d", handle->fd);
+        }
+
+        // Cancel minUndequeuedBufs.
+        for (cnt = kPreviewBufferCount; cnt < mTotalPreviewBufferCount; cnt++) {
+            if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)(*(frame_buffer[cnt].buffer)))) {
+                ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                return -EINVAL;
+            }
+            frame_buffer[cnt].lockState = BUFFER_UNLOCKED;
+            status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                frame_buffer[cnt].buffer);
+            ALOGE(" Cancelling preview buffers %d ",frame_buffer[cnt].frame->fd);
+        }
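+        /* Illustrative accounting for the loop above (hypothetical numbers):
+         * if kPreviewBufferCount were 4 and the window reported 2 minimum
+         * undequeued buffers, mTotalPreviewBufferCount would be 6; buffers
+         * 0..3 stay with the camera, while buffers 4 and 5 are unlocked and
+         * cancelled back here so gralloc always keeps its required minimum.
+         */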
+    } else {
+        ALOGE("%s: Could not get Buffer from Surface", __FUNCTION__);
+        return UNKNOWN_ERROR;
+    }
+    mPreviewBusyQueue.init();
+    LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+    for(int i=ACTIVE_PREVIEW_BUFFERS ;i < kPreviewBufferCount; i++)
+        LINK_camframe_add_frame(CAM_PREVIEW_FRAME,&frames[i]);
+
+    mBuffersInitialized = true;
+
+    //Starting preview now as the preview buffers are allocated
+ //   if(!mPreviewInitialized && !mCameraRunning) {   // TODO just for testing
+        ALOGE("setPreviewWindow: Starting preview after buffer allocation");
+        startPreviewInternal();
+ //   }
+    ALOGI(" %s : X ",__FUNCTION__);
+    return NO_ERROR;
+}
+void QualcommCameraHardware::release()
+{
+    ALOGI("release E");
+    Mutex::Autolock l(&mLock);
+#if 0
+    {
+        Mutex::Autolock checkLock(&singleton_lock);
+        if(singleton_releasing){
+            ALOGE("ERROR: multiple release!");
+            return;
+        }
+    }
+#endif
+    ALOGI("release: mCameraRunning = %d", mCameraRunning);
+    if (mCameraRunning) {
+        if(mDataCallbackTimestamp && (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+            mRecordFrameLock.lock();
+            mReleasedRecordingFrame = true;
+            mRecordWait.signal();
+            mRecordFrameLock.unlock();
+        }
+        stopPreviewInternal();
+        ALOGI("release: stopPreviewInternal done.");
+    }
+    LINK_jpeg_encoder_join();
+    mm_camera_ops_type_t current_ops_type = (mSnapshotFormat
+            == PICTURE_FORMAT_JPEG) ? CAMERA_OPS_CAPTURE_AND_ENCODE
+            : CAMERA_OPS_RAW_CAPTURE;
+    mCamOps.mm_camera_deinit(current_ops_type, NULL, NULL);
+
+    //Signal the snapshot thread
+    mJpegThreadWaitLock.lock();
+    mJpegThreadRunning = false;
+    mJpegThreadWait.signal();
+    mJpegThreadWaitLock.unlock();
+
+    // Wait for snapshot thread to complete before clearing the
+    // resources.
+    mSnapshotThreadWaitLock.lock();
+    while (mSnapshotThreadRunning) {
+        ALOGV("release: waiting for old snapshot thread to complete.");
+        mSnapshotThreadWait.wait(mSnapshotThreadWaitLock);
+        ALOGV("release: old snapshot thread completed.");
+    }
+    mSnapshotThreadWaitLock.unlock();
+
+    {
+        Mutex::Autolock l (&mRawPictureHeapLock);
+        deinitRaw();
+    }
+
+    deinitRawSnapshot();
+    ALOGI("release: clearing resources done.");
+    if(mCurrentTarget == TARGET_MSM8660) {
+       ALOGV("release : Clearing the mThumbnailHeap and mDisplayHeap");
+       mLastPreviewFrameHeap.clear();
+       mLastPreviewFrameHeap = NULL;
+       mThumbnailHeap.clear();
+       mThumbnailHeap = NULL;
+       mPostviewHeap.clear();
+       mPostviewHeap = NULL;
+       mDisplayHeap.clear();
+       mDisplayHeap = NULL;
+    }
+    LINK_mm_camera_deinit();
+    if(fb_fd >= 0) {
+        close(fb_fd);
+        fb_fd = -1;
+    }
+    singleton_lock.lock();
+    singleton_releasing = true;
+    singleton_releasing_start_time = systemTime();
+    singleton_lock.unlock();
+
+    ALOGI("release X: mCameraRunning = %d, mFrameThreadRunning = %d", mCameraRunning, mFrameThreadRunning);
+    ALOGI("mVideoThreadRunning = %d, mSnapshotThreadRunning = %d, mJpegThreadRunning = %d", mVideoThreadRunning, mSnapshotThreadRunning, mJpegThreadRunning);
+    ALOGI("camframe_timeout_flag = %d, mAutoFocusThreadRunning = %d", camframe_timeout_flag, mAutoFocusThreadRunning);
+    mFrameThreadWaitLock.lock();
+    while (mFrameThreadRunning) {
+        ALOGV("release: waiting for old frame thread to complete.");
+        mFrameThreadWait.wait(mFrameThreadWaitLock);
+        ALOGV("release: old frame thread completed.");
+    }
+    mFrameThreadWaitLock.unlock();
+
+}
+
+QualcommCameraHardware::~QualcommCameraHardware()
+{
+    ALOGI("~QualcommCameraHardware E");
+
+    //singleton_lock.lock();
+    if( mCurrentTarget == TARGET_MSM7630 || mCurrentTarget == TARGET_QSD8250 || mCurrentTarget == TARGET_MSM8660 ) {
+        delete [] recordframes;
+        recordframes = NULL;
+        delete [] record_buffers_tracking_flag;
+    }
+    mMMCameraDLRef.clear();
+    //singleton.clear();
+    //singleton_releasing = false;
+    //singleton_releasing_start_time = 0;
+    //singleton_wait.signal();
+    //singleton_lock.unlock();
+    ALOGI("~QualcommCameraHardware X");
+}
+#if 0
+IMemoryHeap* QualcommCameraHardware::getRawHeap() const
+{
+#if 0
+    ALOGV("getRawHeap");
+    return mDisplayHeap != NULL ? mDisplayHeap->mHeap : NULL;
+#endif
+}
+
+IMemoryHeap* QualcommCameraHardware::getPreviewHeap() const
+{
+#if 0
+    ALOGV("getPreviewHeap");
+    return mPreviewHeap[0] != NULL ? mPreviewHeap[0]->mHeap : NULL;
+    if(mIs3DModeOn != true) {
+        if(( mPreviewFormat == CAMERA_YUV_420_YV12 ) &&
+            ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627 ) &&
+            previewWidth%32 != 0 )
+            return mYV12Heap->mHeap;
+
+        return mPreviewHeap != NULL ? mPreviewHeap->mHeap : NULL;
+    } else
+        return mRecordHeap != NULL ? mRecordHeap->mHeap : NULL;
+
+#endif
+}
+#endif
+#if 0
+status_t QualcommCameraHardware::startInitialPreview() {
+   ALOGV(" %s : E", __FUNCTION__);
+   const char * pmem_region = "/dev/pmem_smipool";
+   int initialPreviewWidth = INITIAL_PREVIEW_WIDTH;
+   int initialPreviewHeight = INITIAL_PREVIEW_HEIGHT;
+   int previewFrameSize = initialPreviewWidth * initialPreviewHeight * 3/2;
+   int CbCrOffset = PAD_TO_WORD(initialPreviewWidth * initialPreviewHeight);
+   mFrameThreadWaitLock.lock();
+    while (mFrameThreadRunning) {
+        ALOGV("%s: waiting for old frame thread to complete.", __FUNCTION__);
+        mFrameThreadWait.wait(mFrameThreadWaitLock);
+        ALOGV("%s: old frame thread completed.",__FUNCTION__);
+    }
+    mFrameThreadWaitLock.unlock();
+
+    mInitialPreviewHeap = new PmemPool(pmem_region,
+                               MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_PREVIEW,
+                                previewFrameSize,
+                                kPreviewBufferCount,
+                                previewFrameSize,
+                                CbCrOffset,
+                                0,
+                                "initial preview");
+
+    mDimension.display_width  = initialPreviewWidth;
+    mDimension.display_height = initialPreviewHeight;
+    mDimension.video_width  = initialPreviewWidth;
+    mDimension.video_height = initialPreviewHeight;
+    mDimension.display_luma_width = initialPreviewWidth;
+    mDimension.display_luma_height = initialPreviewHeight;
+    mDimension.display_chroma_width = initialPreviewWidth;
+    mDimension.display_chroma_height = initialPreviewHeight;
+    mDimension.orig_video_width = initialPreviewWidth;
+    mDimension.orig_video_height = initialPreviewHeight;
+    ALOGV("mDimension.prev_format = %d", mDimension.prev_format);
+    ALOGV("mDimension.display_luma_width = %d", mDimension.display_luma_width);
+    ALOGV("mDimension.display_luma_height = %d", mDimension.display_luma_height);
+    ALOGV("mDimension.display_chroma_width = %d", mDimension.display_chroma_width);
+    ALOGV("mDimension.display_chroma_height = %d", mDimension.display_chroma_height);
+
+    native_set_parms(CAMERA_PARM_DIMENSION,
+              sizeof(cam_ctrl_dimension_t), &mDimension);
+    ALOGV(" %s : mDimension.video_width = %d mDimension.video_height = %d", __FUNCTION__,
+             mDimension.video_width, mDimension.video_height);
+    mRecordFrameSize = previewFrameSize;
+    ALOGV("mRecordFrameSize = %d", mRecordFrameSize);
+
+    mRecordHeap = new PmemPool(pmem_region,
+                               MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                               MSM_PMEM_VIDEO,
+                               previewFrameSize,
+                               kRecordBufferCount,
+                               previewFrameSize,
+                               CbCrOffset,
+                               0,
+                               "initial record");
+
+    if (!mRecordHeap->initialized()) {
+        mRecordHeap.clear();
+        ALOGE("%s X: could not initialize record heap.", __FUNCTION__);
+        return false;
+    }
+    {
+        Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
+        mCameraRunning = native_start_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
+    }
+
+    ALOGV(" %s : X", __FUNCTION__);
+    return NO_ERROR;
+}
+#endif
+status_t QualcommCameraHardware::startPreviewInternal()
+{
+   ALOGV("in startPreviewInternal : E");
+   if (!mBuffersInitialized) {
+     ALOGE("startPreviewInternal: Buffers not allocated. Cannot start preview");
+     return NO_ERROR;
+   }
+   mPreviewStopping = false;
+#if 0
+   if(mZslEnable && !mZslPanorama){
+       ALOGE("start zsl Preview called");
+       mCamOps.mm_camera_start(CAMERA_OPS_ZSL_STREAMING_CB,NULL, NULL);
+       if (mCurrentTarget == TARGET_MSM8660) {
+           if(mLastPreviewFrameHeap != NULL)
+           mLastPreviewFrameHeap.clear();
+    }
+    }
+#endif
+    if(mCameraRunning) {
+        ALOGV("startPreview X: preview already running.");
+        return NO_ERROR;
+    }
+    if(mZslEnable){
+         //call init
+         ALOGI("ZSL Enable called");
+         uint8_t is_zsl = 1;
+          mm_camera_status_t status;
+          if(MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_ZSL_ENABLE,
+                     (void *)&is_zsl)){
+              ALOGE("ZSL Enable failed");
+              return UNKNOWN_ERROR;
+          }
+    }
+
+    if (!mPreviewInitialized) {
+        mLastQueuedFrame = NULL;
+        mPreviewInitialized = initPreview();
+        if (!mPreviewInitialized) {
+            ALOGE("startPreview X initPreview failed.  Not starting preview.");
+            mPreviewBusyQueue.deinit();
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    /* For 3D mode, start the video output, as this need to be
+     * used for display also.
+     */
+    if(mIs3DModeOn) {
+        startRecordingInternal();
+        if(!mVideoThreadRunning) {
+            ALOGE("startPreview X startRecording failed.  Not starting preview.");
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    {
+        Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
+        if(( mCurrentTarget != TARGET_MSM7630 ) &&
+                (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660))
+            mCameraRunning = native_start_ops(CAMERA_OPS_STREAMING_PREVIEW, NULL);
+        else {
+            if(!mZslEnable){
+                ALOGE("Calling CAMERA_OPS_STREAMING_VIDEO");
+                mCameraRunning = native_start_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
+                ALOGE(": Calling CAMERA_OPS_STREAMING_VIDEO %d", mCameraRunning);
+            } else {
+                initZslParameter();
+                 mCameraRunning = false;
+                 if (MM_CAMERA_SUCCESS == mCamOps.mm_camera_init(CAMERA_OPS_STREAMING_ZSL,
+                        (void *)&mZslParms, NULL)) {
+                        //register buffers for ZSL
+                        bool status = initZslBuffers(true);
+                        if(status != true) {
+                             ALOGE("Failed to allocate ZSL buffers");
+                             return false;
+                        }
+                        if(MM_CAMERA_SUCCESS == mCamOps.mm_camera_start(CAMERA_OPS_STREAMING_ZSL,NULL, NULL)){
+                            mCameraRunning = true;
+                        }
+                }
+                if(mCameraRunning == false)
+                    ALOGE("Starting  ZSL CAMERA_OPS_STREAMING_ZSL failed!!!");
+            }
+        }
+    }
+
+    if(!mCameraRunning) {
+        deinitPreview();
+        if(mZslEnable){
+            //deinit
+            ALOGI("ZSL DISABLE called");
+           uint8_t is_zsl = 0;
+            mm_camera_status_t status;
+            if( MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_ZSL_ENABLE,
+                     (void *)&is_zsl)){
+                ALOGE("ZSL_Disable failed!!");
+                return UNKNOWN_ERROR;
+            }
+        }
+        /* Flush the Busy Q */
+        cam_frame_flush_video();
+        /* Need to flush the free Qs as these are initialized in initPreview. */
+        LINK_camframe_release_all_frames(CAM_VIDEO_FRAME);
+        LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+        mPreviewInitialized = false;
+        mOverlayLock.lock();
+        //mOverlay = NULL;
+        mOverlayLock.unlock();
+        ALOGE("startPreview X: native_start_ops: CAMERA_OPS_STREAMING_PREVIEW ioctl failed!");
+        return UNKNOWN_ERROR;
+    }
+
+    //Reset the Gps Information
+    exif_table_numEntries = 0;
+    previewWidthToNativeZoom = previewWidth;
+    previewHeightToNativeZoom = previewHeight;
+
+    ALOGV("startPreviewInternal X");
+    return NO_ERROR;
+}
+status_t QualcommCameraHardware::startInitialPreview() {
+   mCameraRunning = DUMMY_CAMERA_STARTED;
+   return NO_ERROR;
+}
+status_t QualcommCameraHardware::startPreview()
+{
+  status_t result;
+  ALOGV("startPreview E");
+  Mutex::Autolock l(&mLock);
+  if( mPreviewWindow == NULL) {
+    /* startPreview has been called before setting the preview
+     * window. Start the camera with initial buffers because the
+     * CameraService expects the preview to be enabled while
+     * setting a valid preview window */
+    ALOGV(" %s : Starting preview with initial buffers ", __FUNCTION__);
+    result = startInitialPreview();
+  } else {
+      /* startPreview has been issued after a valid preview window
+       * is set. Get the preview buffers from gralloc and start
+       * preview normally */
+    ALOGV(" %s : Starting normal preview ", __FUNCTION__);
+    result = getBuffersAndStartPreview();
+  }
+  ALOGV("startPreview X");
+  return result;
+}
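+
+/* Editor's illustrative sketch (not code in this file): the two call orders
+ * that startPreview() above has to handle. The framework actions in angle
+ * brackets are assumptions about the caller, not functions defined here.
+ *
+ *   Path A (preview window set first):
+ *     <set_preview_window(w)> -> setPreviewWindow() just stores the window
+ *     <start_preview()>       -> startPreview() -> getBuffersAndStartPreview()
+ *
+ *   Path B (preview started first):
+ *     <start_preview()>       -> startPreview() -> startInitialPreview()
+ *     <set_preview_window(w)> -> setPreviewWindow() -> stopInitialPreview()
+ *                                                   -> getBuffersAndStartPreview()
+ */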
+
+void QualcommCameraHardware::stopInitialPreview() {
+   mCameraRunning = 0;//!native_stop_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
+#if 0
+    ALOGV(" %s : E ", __FUNCTION__);
+    if (mCameraRunning) {
+        ALOGV(" %s : Camera was running. Stopping ", __FUNCTION__);
+        {
+            Mutex::Autolock l(&mCamframeTimeoutLock);
+           {
+      Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
+      if(!camframe_timeout_flag) {
+                    mCameraRunning = !native_stop_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
+                }
+       }
+    }
+    mInitialPreviewHeap.clear();
+    mRecordHeap.clear();
+  }
+  ALOGV(" %s : X ", __FUNCTION__);
+#endif
+}
+
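+/* stopPreviewInternal:
+ * Stops the active stream (preview, video or ZSL depending on target and mode),
+ * cancels any pending auto focus and smooth zoom, waits for the video thread in
+ * 3D mode, and finally tears down the preview via deinitPreview(). */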
+void QualcommCameraHardware::stopPreviewInternal()
+{
+    ALOGI("stopPreviewInternal E: %d", mCameraRunning);
+    mPreviewStopping = true;
+    if (mCameraRunning && mPreviewWindow!=NULL) {
+        /* For 3D mode, we need to exit the video thread.*/
+        if(mIs3DModeOn) {
+            mRecordingState = 0;
+            mVideoThreadWaitLock.lock();
+            ALOGI("%s: 3D mode, exit video thread", __FUNCTION__);
+            mVideoThreadExit = 1;
+            mVideoThreadWaitLock.unlock();
+
+            pthread_mutex_lock(&(g_busy_frame_queue.mut));
+            pthread_cond_signal(&(g_busy_frame_queue.wait));
+            pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+        }
+
+        // Cancel auto focus.
+        {
+            if (mNotifyCallback && (mMsgEnabled & CAMERA_MSG_FOCUS)) {
+                cancelAutoFocusInternal();
+            }
+        }
+
+        // make mSmoothzoomThreadExit true
+        mSmoothzoomThreadLock.lock();
+        mSmoothzoomThreadExit = true;
+        mSmoothzoomThreadLock.unlock();
+        // signal the smooth zoom thread so that it can exit gracefully
+        mSmoothzoomThreadWaitLock.lock();
+        if(mSmoothzoomThreadRunning)
+            mSmoothzoomThreadWait.signal();
+
+        mSmoothzoomThreadWaitLock.unlock();
+
+        Mutex::Autolock l(&mCamframeTimeoutLock);
+        {
+            Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
+            if(!camframe_timeout_flag) {
+                if (( mCurrentTarget != TARGET_MSM7630 ) &&
+                        (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660))
+                         mCameraRunning = !native_stop_ops(CAMERA_OPS_STREAMING_PREVIEW, NULL);
+                else{
+                    if(!mZslEnable){
+                        ALOGE("%s ops_streaming mCameraRunning before = %d",__FUNCTION__, mCameraRunning);
+                        mCameraRunning = !native_stop_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
+                        ALOGE("%s ops_streaming mCameraRunning after = %d",__FUNCTION__, mCameraRunning);
+                    }else {
+                        mCameraRunning = true;
+                        if(MM_CAMERA_SUCCESS == mCamOps.mm_camera_stop(CAMERA_OPS_STREAMING_ZSL,NULL, NULL)){
+                            deinitZslBuffers();
+                            if (MM_CAMERA_SUCCESS == mCamOps.mm_camera_deinit(CAMERA_OPS_STREAMING_ZSL,
+                                    (void *)&mZslParms, NULL)) {
+                                mCameraRunning = false;
+                            }
+                        }
+                        if(mCameraRunning == true)
+                            ALOGE("Stopping ZSL CAMERA_OPS_STREAMING_ZSL failed!");
+                    }
+                }
+            } else {
+                /* This means that the camframetimeout was issued.
+                 * But we did not issue native_stop_preview(), so we
+                 * need to update mCameraRunning to indicate that
+                 * Camera is no longer running. */
+                ALOGE("%s: camframe timeout, setting mCameraRunning to false", __FUNCTION__);
+                mCameraRunning = 0;
+            }
+        }
+    }
+    /* in 3D mode, wait for the video thread before clearing resources.*/
+    if(mIs3DModeOn) {
+        mVideoThreadWaitLock.lock();
+        while (mVideoThreadRunning) {
+            ALOGI("%s: waiting for video thread to complete.", __FUNCTION__);
+            mVideoThreadWait.wait(mVideoThreadWaitLock);
+            ALOGI("%s : video thread completed.", __FUNCTION__);
+        }
+        mVideoThreadWaitLock.unlock();
+    }
+    ALOGE("%s, J_mCameraRunning = %d", __FUNCTION__, mCameraRunning);
+    if (!mCameraRunning) {
+        ALOGE("%s, before calling deinitpre mPreviewInitialized = %d", __FUNCTION__, mPreviewInitialized);
+        if(mPreviewInitialized) {
+            ALOGE("before calling deinitpreview");
+            deinitPreview();
+            if( ( mCurrentTarget == TARGET_MSM7630 ) ||
+                (mCurrentTarget == TARGET_QSD8250) ||
+                (mCurrentTarget == TARGET_MSM8660)) {
+                mVideoThreadWaitLock.lock();
+                ALOGV("in stopPreviewInternal: making mVideoThreadExit 1");
+                mVideoThreadExit = 1;
+                mVideoThreadWaitLock.unlock();
+                //720p : signal the video thread , and check in video thread
+                //if stop is called, if so exit video thread.
+                pthread_mutex_lock(&(g_busy_frame_queue.mut));
+                pthread_cond_signal(&(g_busy_frame_queue.wait));
+                pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+
+                ALOGE(" flush video and release all frames");
+                /* Flush the Busy Q */
+                cam_frame_flush_video();
+                /* Flush the Free Q */
+                LINK_camframe_release_all_frames(CAM_VIDEO_FRAME);
+            }
+            mPreviewInitialized = false;
+        }
+    }
+    else ALOGI("stopPreviewInternal: Preview is stopped already");
+
+    ALOGI("stopPreviewInternal X: %d", mCameraRunning);
+}
+
+void QualcommCameraHardware::stopPreview()
+{
+    ALOGV("stopPreview: E");
+    Mutex::Autolock l(&mLock);
+    {
+        if (mDataCallbackTimestamp && (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+            return;
+    }
+    if( mSnapshotThreadRunning ) {
+        ALOGV("In stopPreview during snapshot");
+        return;
+    }
+    if( mPreviewWindow != NULL ) {
+        private_handle_t *handle;
+        for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+            if(mPreviewWindow != NULL && mThumbnailBuffer[cnt] != NULL) {
+                handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+                ALOGE("%s:  Cancelling postview buffer %d ", __FUNCTION__, handle->fd);
+                ALOGE("stoppreview : display lock");
+                mDisplayLock.lock();
+                if (BUFFER_LOCKED == mThumbnailLockState[cnt]) {
+                    if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+                       ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                       mDisplayLock.unlock();
+                       continue;
+                    } else {
+                       mThumbnailLockState[cnt] = BUFFER_UNLOCKED;
+                    }
+                }
+                status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                                              mThumbnailBuffer[cnt]);
+                ALOGE("stopPreview : after cancelling thumbnail buffer");
+                if(retVal != NO_ERROR)
+                    ALOGE("%s: cancelBuffer failed for postview buffer %d",
+                                                     __FUNCTION__, handle->fd);
+                // unregister , unmap and release as well
+                int mBufferSize = previewWidth * previewHeight * 3/2;
+                int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+                if(mThumbnailMapped[cnt] && ((mSnapshotFormat == PICTURE_FORMAT_JPEG)
+                          || mZslEnable)) {
+                    ALOGE("%s:  Unregistering Thumbnail Buffer %d ", __FUNCTION__, handle->fd);
+                    register_buf(mBufferSize,
+                        mBufferSize, mCbCrOffset, 0,
+                        handle->fd,
+                        0,
+                        (uint8_t *)mThumbnailMapped[cnt],
+                        MSM_PMEM_THUMBNAIL,
+                        false, false);
+                    if (munmap((void *)(mThumbnailMapped[cnt]),handle->size ) == -1) {
+                      ALOGE("StopPreview : Error un-mmapping the thumbnail buffer %d", cnt);
+                    }
+                    mThumbnailMapped[cnt] = NULL;
+                 }
+                mThumbnailBuffer[cnt] = NULL;
+                ALOGE("stoppreview : display unlock");
+                mDisplayLock.unlock();
+          }
+       }
+    }
+    stopPreviewInternal();
+    ALOGV("stopPreview: X");
+}
+
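+/* runAutoFocus:
+ * Body of the detached auto-focus thread. Skips AF for infinity and
+ * continuous-video focus modes, issues CAMERA_OPS_FOCUS only while the preview
+ * is running, updates focus distances, and reports the result through the
+ * CAMERA_MSG_FOCUS notify callback. */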
+void QualcommCameraHardware::runAutoFocus()
+{
+    bool status = true;
+    void *libhandle = NULL;
+    isp3a_af_mode_t afMode = AF_MODE_AUTO;
+
+    mAutoFocusThreadLock.lock();
+    // Skip autofocus if focus mode is infinity.
+
+    const char * focusMode = mParameters.get(QCameraParameters::KEY_FOCUS_MODE);
+    if ((mParameters.get(QCameraParameters::KEY_FOCUS_MODE) == 0)
+           || (strcmp(focusMode, QCameraParameters::FOCUS_MODE_INFINITY) == 0)
+           || (strcmp(focusMode, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0)) {
+        goto done;
+    }
+
+    if(!libmmcamera){
+        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+        mAutoFocusThreadRunning = false;
+        mAutoFocusThreadLock.unlock();
+        return;
+    }
+
+    afMode = (isp3a_af_mode_t)attr_lookup(focus_modes,
+                                sizeof(focus_modes) / sizeof(str_map),
+                                mParameters.get(QCameraParameters::KEY_FOCUS_MODE));
+
+    /* This will block until either AF completes or is cancelled. */
+    ALOGV("af start (mode %d)", afMode);
+    status_t err;
+    err = mAfLock.tryLock();
+    if(err == NO_ERROR) {
+        {
+            Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
+            if(mCameraRunning){
+                ALOGV("Start AF");
+                status =  native_start_ops(CAMERA_OPS_FOCUS ,(void *)&afMode);
+            }else{
+                ALOGV("As Camera preview is not running, AF not issued");
+                status = false;
+            }
+        }
+        mAfLock.unlock();
+    }
+    else{
+        //AF Cancel would have acquired the lock,
+        //so, no need to perform any AF
+        ALOGV("As Cancel auto focus is in progress, auto focus request "
+                "is ignored");
+        status = FALSE;
+    }
+    {
+        Mutex::Autolock pl(&mParametersLock);
+        if(mHasAutoFocusSupport && (updateFocusDistances(focusMode) != NO_ERROR)) {
+            ALOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, focusMode);
+        }
+    }
+
+    ALOGV("af done: %d", (int)status);
+
+done:
+    mAutoFocusThreadRunning = false;
+    mAutoFocusThreadLock.unlock();
+
+    mCallbackLock.lock();
+    bool autoFocusEnabled = mNotifyCallback && (mMsgEnabled & CAMERA_MSG_FOCUS);
+    camera_notify_callback cb = mNotifyCallback;
+    void *data = mCallbackCookie;
+    mCallbackLock.unlock();
+    if (autoFocusEnabled)
+        cb(CAMERA_MSG_FOCUS, status, 0, data);
+
+}
+
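+/* cancelAutoFocusInternal:
+ * Polls mAfLock to detect whether an auto-focus request is in flight; if it is,
+ * issues native_stop_ops(CAMERA_OPS_FOCUS) and waits for the AF thread to
+ * release mAutoFocusThreadLock before returning. */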
+status_t QualcommCameraHardware::cancelAutoFocusInternal()
+{
+    ALOGV("cancelAutoFocusInternal E");
+    bool afRunning = true;
+
+    if(!mHasAutoFocusSupport){
+        ALOGV("cancelAutoFocusInternal X");
+        return NO_ERROR;
+    }
+
+    status_t rc = NO_ERROR;
+    status_t err;
+
+    do {
+      err = mAfLock.tryLock();
+      if(err == NO_ERROR) {
+          //Got Lock, means either AF hasn't started or
+          // AF is done. So no need to cancel it, just change the state
+          ALOGV("Auto Focus is not in progress, Cancel Auto Focus is ignored");
+          mAfLock.unlock();
+
+          mAutoFocusThreadLock.lock();
+          afRunning = mAutoFocusThreadRunning;
+          mAutoFocusThreadLock.unlock();
+          if(afRunning) {
+            usleep( 5000 );
+          }
+      }
+    } while ( err == NO_ERROR && afRunning );
+    if(afRunning) {
+        //AF is in progress, so cancel it
+        ALOGV("Lock busy...cancel AF");
+        rc = native_stop_ops(CAMERA_OPS_FOCUS, NULL) ?
+          NO_ERROR : UNKNOWN_ERROR;
+
+        /*now just wait for auto focus thread to be finished*/
+        mAutoFocusThreadLock.lock();
+        mAutoFocusThreadLock.unlock();
+    }
+    ALOGV("cancelAutoFocusInternal X: %d", rc);
+    return rc;
+}
+
+void *auto_focus_thread(void *user)
+{
+    ALOGV("auto_focus_thread E");
+    CAMERA_HAL_UNUSED(user);
+    QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runAutoFocus();
+    }
+    else ALOGW("not starting autofocus: the object went away!");
+    ALOGV("auto_focus_thread X");
+    return NULL;
+}
+
+status_t QualcommCameraHardware::autoFocus()
+{
+    ALOGV("autoFocus E");
+    Mutex::Autolock l(&mLock);
+
+    if(!mHasAutoFocusSupport){
+       /*
+        * If autofocus is not supported HAL defaults
+        * focus mode to infinity and supported mode to
+        * infinity also. In this mode and fixed mode app
+        * should not call auto focus.
+        */
+        ALOGE("Auto Focus not supported");
+        ALOGV("autoFocus X");
+        return INVALID_OPERATION;
+    }
+    {
+        mAutoFocusThreadLock.lock();
+        if (!mAutoFocusThreadRunning) {
+
+            // Create a detached thread here so that we don't have to wait
+            // for it when we cancel AF.
+            pthread_t thr;
+            pthread_attr_t attr;
+            pthread_attr_init(&attr);
+            pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+            mAutoFocusThreadRunning =
+                !pthread_create(&thr, &attr,
+                                auto_focus_thread, NULL);
+            if (!mAutoFocusThreadRunning) {
+                ALOGE("failed to start autofocus thread");
+                mAutoFocusThreadLock.unlock();
+                return UNKNOWN_ERROR;
+            }
+        }
+        mAutoFocusThreadLock.unlock();
+    }
+
+    ALOGV("autoFocus X");
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::cancelAutoFocus()
+{
+    ALOGV("cancelAutoFocus E");
+    Mutex::Autolock l(&mLock);
+
+    int rc = NO_ERROR;
+    if (mCameraRunning && mNotifyCallback && (mMsgEnabled & CAMERA_MSG_FOCUS)) {
+        rc = cancelAutoFocusInternal();
+    }
+
+    ALOGV("cancelAutoFocus X");
+    return rc;
+}
+
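+/* runSnapshotThread:
+ * Body of the detached snapshot thread. Honors a pending cancelPicture(),
+ * starts capture-and-encode, raw capture or plain capture depending on the
+ * picture format and strTexturesOn, waits for the JPEG callback, and then
+ * releases the snapshot resources. */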
+void QualcommCameraHardware::runSnapshotThread(void *data)
+{
+    bool ret = true;
+    CAMERA_HAL_UNUSED(data);
+    ALOGI("runSnapshotThread E");
+
+    if(!libmmcamera){
+        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+    }
+    mSnapshotCancelLock.lock();
+    if(mSnapshotCancel == true) {
+        mSnapshotCancel = false;
+        mSnapshotCancelLock.unlock();
+        ALOGI("%s: cancelPicture has been called, so abort taking snapshot", __FUNCTION__);
+        deinitRaw();
+        mInSnapshotModeWaitLock.lock();
+        mInSnapshotMode = false;
+        mInSnapshotModeWait.signal();
+        mInSnapshotModeWaitLock.unlock();
+        mSnapshotThreadWaitLock.lock();
+        mSnapshotThreadRunning = false;
+        mSnapshotThreadWait.signal();
+        mSnapshotThreadWaitLock.unlock();
+        return;
+    }
+    mSnapshotCancelLock.unlock();
+
+    mJpegThreadWaitLock.lock();
+    mJpegThreadRunning = true;
+    mJpegThreadWait.signal();
+    mJpegThreadWaitLock.unlock();
+    mm_camera_ops_type_t current_ops_type = (mSnapshotFormat == PICTURE_FORMAT_JPEG) ?
+                                             CAMERA_OPS_CAPTURE_AND_ENCODE :
+                                              CAMERA_OPS_RAW_CAPTURE;
+    if(strTexturesOn == true) {
+        current_ops_type = CAMERA_OPS_CAPTURE;
+        mCamOps.mm_camera_start(current_ops_type,(void *)&mImageCaptureParms,
+                         NULL);
+    } else if(mSnapshotFormat == PICTURE_FORMAT_JPEG){
+        if(!mZslEnable || mZslFlashEnable){
+            mCamOps.mm_camera_start(current_ops_type,(void *)&mImageCaptureParms,
+                 (void *)&mImageEncodeParms);
+        }else{
+            notifyShutter(TRUE);
+            initZslParameter();
+            ALOGE("snapshot mZslCapture.thumbnail %d %d %d",mZslCaptureParms.thumbnail_width,
+                                 mZslCaptureParms.thumbnail_height,mZslCaptureParms.num_captures);
+            mCamOps.mm_camera_start(current_ops_type,(void *)&mZslCaptureParms,
+                  (void *)&mImageEncodeParms);
+        }
+        mJpegThreadWaitLock.lock();
+        while (mJpegThreadRunning) {
+            ALOGV("%s: waiting for jpeg callback.", __FUNCTION__);
+            mJpegThreadWait.wait(mJpegThreadWaitLock);
+            ALOGV("%s: jpeg callback received.", __FUNCTION__);
+        }
+        mJpegThreadWaitLock.unlock();
+
+        //cleanup
+       if(!mZslEnable || mZslFlashEnable)
+            deinitRaw();
+    }else if(mSnapshotFormat == PICTURE_FORMAT_RAW){
+        notifyShutter(TRUE);
+        mCamOps.mm_camera_start(current_ops_type,(void *)&mRawCaptureParms,
+                                 NULL);
+        // Waiting for callback to come
+        ALOGV("runSnapshotThread : waiting for callback to come");
+        mJpegThreadWaitLock.lock();
+        while (mJpegThreadRunning) {
+            ALOGV("%s: waiting for jpeg callback.", __FUNCTION__);
+            mJpegThreadWait.wait(mJpegThreadWaitLock);
+            ALOGV("%s: jpeg callback received.", __FUNCTION__);
+        }
+        mJpegThreadWaitLock.unlock();
+        ALOGV("runSnapshotThread : calling deinitRawSnapshot");
+        deinitRawSnapshot();
+
+    }
+
+    if(!mZslEnable || mZslFlashEnable)
+        mCamOps.mm_camera_deinit(current_ops_type, NULL, NULL);
+    mZslFlashEnable  = false;
+    mSnapshotThreadWaitLock.lock();
+    mSnapshotThreadRunning = false;
+    mSnapshotThreadWait.signal();
+    mSnapshotThreadWaitLock.unlock();
+    ALOGI("runSnapshotThread X");
+}
+
+void *snapshot_thread(void *user)
+{
+    ALOGD("snapshot_thread E");
+    CAMERA_HAL_UNUSED(user);
+    QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runSnapshotThread(user);
+    }
+    else ALOGW("not starting snapshot thread: the object went away!");
+    ALOGD("snapshot_thread X");
+    return NULL;
+}
+
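+/* takePicture:
+ * Takes a live snapshot while recording; otherwise waits for any previous
+ * snapshot thread, decides between ZSL and normal capture (flash forces the
+ * normal path), adds the flash EXIF tag, stops the preview for non-ZSL
+ * captures, and spawns snapshot_thread to do the actual capture. */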
+status_t QualcommCameraHardware::takePicture()
+{
+    ALOGE("takePicture(%d)", mMsgEnabled);
+    Mutex::Autolock l(&mLock);
+    if(mRecordingState ) {
+      return takeLiveSnapshotInternal( );
+    }
+
+    if(strTexturesOn == true){
+        mEncodePendingWaitLock.lock();
+        while(mEncodePending) {
+            ALOGE("takePicture: Frame given to application, waiting for encode call");
+            mEncodePendingWait.wait(mEncodePendingWaitLock);
+            ALOGE("takePicture: Encode of the application data is done");
+        }
+        mEncodePendingWaitLock.unlock();
+    }
+
+    // Wait for old snapshot thread to complete.
+    mSnapshotThreadWaitLock.lock();
+    while (mSnapshotThreadRunning) {
+        ALOGV("takePicture: waiting for old snapshot thread to complete.");
+        mSnapshotThreadWait.wait(mSnapshotThreadWaitLock);
+        ALOGV("takePicture: old snapshot thread completed.");
+    }
+    // If flash is enabled, run the snapshot in normal mode rather than ZSL mode.
+    // The app should expect only one callback, as multi-snapshot is not supported in normal mode.
+    mZslFlashEnable = false;
+    if(mZslEnable){
+        int is_flash_needed = 0;
+        mm_camera_status_t status;
+        status = mCfgControl.mm_camera_get_parm(CAMERA_PARM_QUERY_FALSH4SNAP,
+                      (void *)&is_flash_needed);
+        if(is_flash_needed) {
+            mZslFlashEnable = true;
+        }
+    }
+    //Adding ExifTag for Flash
+    const char *flash_str = mParameters.get(QCameraParameters::KEY_FLASH_MODE);
+    if(flash_str){
+        int is_flash_fired = 0;
+        if(mCfgControl.mm_camera_get_parm(CAMERA_PARM_QUERY_FALSH4SNAP,
+                      (void *)&is_flash_fired) != MM_CAMERA_SUCCESS){
+            flashMode = FLASH_SNAP; //for no flash support, bit 5 will be 1
+        } else {
+            if(!strcmp(flash_str,"on"))
+                flashMode = 1;
+
+            if(!strcmp(flash_str,"off"))
+                flashMode = 0;
+
+            if(!strcmp(flash_str,"auto")){
+                //for AUTO bits 3 and 4 will be 1
+                //for flash fired bit 0 will be 1, else 0
+                flashMode  = FLASH_AUTO;
+                if(is_flash_fired)
+                   flashMode = (is_flash_fired>>1) | flashMode ;
+            }
+        }
+        addExifTag(EXIFTAGID_FLASH,EXIF_SHORT,1,1,(void *)&flashMode);
+    }
+
+    if(mParameters.getPictureFormat() != 0 &&
+            !strcmp(mParameters.getPictureFormat(),
+                    QCameraParameters::PIXEL_FORMAT_RAW)){
+        mSnapshotFormat = PICTURE_FORMAT_RAW;
+      {
+       // HACK: Raw ZSL capture is not supported yet
+        mZslFlashEnable = true;
+      }
+    }
+    else
+        mSnapshotFormat = PICTURE_FORMAT_JPEG;
+
+    if(!mZslEnable || mZslFlashEnable){
+        if((mSnapshotFormat == PICTURE_FORMAT_JPEG)){
+            if(!native_start_ops(CAMERA_OPS_PREPARE_SNAPSHOT, NULL)) {
+                mSnapshotThreadWaitLock.unlock();
+                ALOGE("PREPARE SNAPSHOT: CAMERA_OPS_PREPARE_SNAPSHOT ioctl Failed");
+                return UNKNOWN_ERROR;
+            }
+        }
+    }
+    else {
+        int rotation = mParameters.getInt("rotation");
+        native_set_parms(CAMERA_PARM_JPEG_ROTATION, sizeof(int), &rotation);
+    }
+#if 0    // TODO for ICS
+    if(mCurrentTarget == TARGET_MSM8660) {
+       /* Store the last frame queued for preview. This
+        * shall be used as postview */
+        if (!(storePreviewFrameForPostview()))
+        return UNKNOWN_ERROR;
+    }
+#endif
+    if(!mZslEnable || mZslFlashEnable)
+        stopPreviewInternal();
+#if 0
+    else if(mZslEnable && !mZslPanorama) {
+        /* Dont stop preview if ZSL Panorama is enabled for
+         * Continuous viewfinder support*/
+        ALOGE("Calling stop preview");
+        mCamOps.mm_camera_stop(CAMERA_OPS_ZSL_STREAMING_CB,NULL, NULL);
+    }
+#endif
+
+
+    mFrameThreadWaitLock.unlock();
+
+    mm_camera_ops_type_t current_ops_type = (mSnapshotFormat == PICTURE_FORMAT_JPEG) ?
+                                             CAMERA_OPS_CAPTURE_AND_ENCODE :
+                                              CAMERA_OPS_RAW_CAPTURE;
+    if(strTexturesOn == true)
+        current_ops_type = CAMERA_OPS_CAPTURE;
+
+    if( !mZslEnable || mZslFlashEnable)
+        mCamOps.mm_camera_init(current_ops_type, NULL, NULL);
+
+    if(mSnapshotFormat == PICTURE_FORMAT_JPEG){
+      if(!mZslEnable || mZslFlashEnable) {
+        if (!initRaw(mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE))) {
+          ALOGE("initRaw failed.  Not taking picture.");
+          mSnapshotThreadWaitLock.unlock();
+          return UNKNOWN_ERROR;
+        }
+      }
+    } else if(mSnapshotFormat == PICTURE_FORMAT_RAW ){
+        if(!initRawSnapshot()){
+            ALOGE("initRawSnapshot failed. Not taking picture.");
+            mSnapshotThreadWaitLock.unlock();
+            return UNKNOWN_ERROR;
+        }
+    }
+
+    mShutterLock.lock();
+    mShutterPending = true;
+    mShutterLock.unlock();
+
+    mSnapshotCancelLock.lock();
+    mSnapshotCancel = false;
+    mSnapshotCancelLock.unlock();
+
+    numJpegReceived = 0;
+    pthread_attr_t attr;
+    pthread_attr_init(&attr);
+    pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+    mSnapshotThreadRunning = !pthread_create(&mSnapshotThread,
+                                             &attr,
+                                             snapshot_thread,
+                                             NULL);
+    mSnapshotThreadWaitLock.unlock();
+
+    mInSnapshotModeWaitLock.lock();
+    mInSnapshotMode = true;
+    mInSnapshotModeWaitLock.unlock();
+
+    ALOGE("takePicture: X");
+    return mSnapshotThreadRunning ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+void QualcommCameraHardware::set_liveshot_exifinfo()
+{
+
+    setGpsParameters();
+    //set TimeStamp
+    const char *str = mParameters.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if(str != NULL) {
+        strncpy(dateTime, str, 19);
+        dateTime[19] = '\0';
+        addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                   20, 1, (void *)dateTime);
+    }
+}
+
+
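+/* takeLiveSnapshotInternal:
+ * Allocates a JPEG buffer of videoWidth * videoHeight * 1.5 bytes, fills in the
+ * EXIF info, passes the buffer to the lower layer through
+ * LINK_set_liveshot_params() and, on 7630/8660, arms CAMERA_OPS_LIVESHOT. */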
+status_t QualcommCameraHardware::takeLiveSnapshotInternal()
+{
+    ALOGV("takeLiveSnapshotInternal : E");
+    if(liveshot_state == LIVESHOT_IN_PROGRESS || !mRecordingState) {
+        return NO_ERROR;
+    }
+
+    if( (mCurrentTarget != TARGET_MSM7630) && (mCurrentTarget != TARGET_MSM8660) && (mCurrentTarget != TARGET_MSM7627A)) {
+        ALOGI("LiveSnapshot not supported on this target");
+        liveshot_state = LIVESHOT_STOPPED;
+        return NO_ERROR;
+    }
+
+    liveshot_state = LIVESHOT_IN_PROGRESS;
+
+    if (!initLiveSnapshot(videoWidth, videoHeight)) {
+        ALOGE("takeLiveSnapshot: Jpeg Heap Memory allocation failed.  Not taking Live Snapshot.");
+        liveshot_state = LIVESHOT_STOPPED;
+        return UNKNOWN_ERROR;
+    }
+    uint32_t maxjpegsize = videoWidth * videoHeight *1.5;
+    set_liveshot_exifinfo();
+    if(!LINK_set_liveshot_params(videoWidth, videoHeight,
+                                exif_data, exif_table_numEntries,
+      (uint8_t *)mJpegLiveSnapMapped->data, maxjpegsize)) {
+        ALOGE("Link_set_liveshot_params failed.");
+        if (NULL != mJpegLiveSnapMapped) {
+              ALOGV("takeLiveSnapshotInternal: releasing mJpegLiveSnapMapped.");
+              mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+              mJpegLiveSnapMapped = NULL;
+        }
+        return NO_ERROR;
+    }
+      if((mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_MSM8660)) {
+          if(!native_start_ops(CAMERA_OPS_LIVESHOT, NULL)) {
+            ALOGE("start_liveshot ioctl failed");
+            liveshot_state = LIVESHOT_STOPPED;
+            if (NULL != mJpegLiveSnapMapped) {
+              ALOGV("takeLiveSnapshotInternal: releasing mJpegLiveSnapMapped.");
+              mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+              mJpegLiveSnapMapped = NULL;
+            }
+            return UNKNOWN_ERROR;
+          }
+      }
+
+    ALOGV("takeLiveSnapshotInternal: X");
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::takeLiveSnapshot()
+{
+  ALOGV("takeLiveSnapshot: E ");
+  Mutex::Autolock l(&mLock);
+  ALOGV("takeLiveSnapshot: X ");
+  return takeLiveSnapshotInternal( );
+}
+
+bool QualcommCameraHardware::initLiveSnapshot(int videowidth, int videoheight)
+{
+    ALOGV("initLiveSnapshot E");
+
+    if (NULL != mJpegLiveSnapMapped) {
+        ALOGV("initLiveSnapshot: clearing old mJpegHeap.");
+        mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+        mJpegLiveSnapMapped = NULL;
+    }
+
+    mJpegMaxSize = videowidth * videoheight * 1.5;
+    ALOGV("initLiveSnapshot: initializing mJpegHeap.");
+    mJpegLiveSnapMapped = mGetMemory(-1, mJpegMaxSize,1,mCallbackCookie);
+    if(mJpegLiveSnapMapped == NULL) {
+        ALOGE("Failed to get camera memory for mJpegLiveSnapMapped");
+        return false;
+    }
+    ALOGV("initLiveSnapshot X");
+    return true;
+}
+
+
+status_t QualcommCameraHardware::cancelPicture()
+{
+    status_t rc;
+    ALOGI("cancelPicture: E");
+
+    mSnapshotCancelLock.lock();
+    ALOGI("%s: setting mSnapshotCancel to true", __FUNCTION__);
+    mSnapshotCancel = true;
+    mSnapshotCancelLock.unlock();
+
+    if (mCurrentTarget == TARGET_MSM7627 ||
+       (mCurrentTarget == TARGET_MSM7625A ||
+        mCurrentTarget == TARGET_MSM7627A)) {
+        mSnapshotDone = TRUE;
+        mSnapshotThreadWaitLock.lock();
+        while (mSnapshotThreadRunning) {
+            ALOGV("cancelPicture: waiting for snapshot thread to complete.");
+            mSnapshotThreadWait.wait(mSnapshotThreadWaitLock);
+            ALOGV("cancelPicture: snapshot thread completed.");
+        }
+        mSnapshotThreadWaitLock.unlock();
+    }
+    rc = native_stop_ops(CAMERA_OPS_CAPTURE, NULL) ? NO_ERROR : UNKNOWN_ERROR;
+    mSnapshotDone = FALSE;
+    ALOGI("cancelPicture: X: %d", rc);
+    return rc;
+}
+
+status_t QualcommCameraHardware::setParameters(const QCameraParameters& params)
+{
+    ALOGV("setParameters: E params = %p", &params);
+
+    Mutex::Autolock l(&mLock);
+    Mutex::Autolock pl(&mParametersLock);
+    status_t rc, final_rc = NO_ERROR;
+    if (mSnapshotThreadRunning) {
+        if ((rc = setCameraMode(params)))  final_rc = rc;
+        if ((rc = setPreviewSize(params)))  final_rc = rc;
+        if ((rc = setRecordSize(params)))  final_rc = rc;
+        if ((rc = setPictureSize(params)))  final_rc = rc;
+        if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+        if ((rc = setJpegQuality(params)))  final_rc = rc;
+        return final_rc;
+    }
+    if ((rc = setCameraMode(params)))  final_rc = rc;
+    if ((rc = setPreviewSize(params)))  final_rc = rc;
+    if ((rc = setRecordSize(params)))  final_rc = rc;
+    if ((rc = setPictureSize(params)))  final_rc = rc;
+    if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+    if ((rc = setJpegQuality(params)))  final_rc = rc;
+    if ((rc = setPictureFormat(params))) final_rc = rc;
+    if ((rc = setRecordSize(params)))  final_rc = rc;
+    if ((rc = setPreviewFormat(params)))   final_rc = rc;
+    if ((rc = setEffect(params)))       final_rc = rc;
+    if ((rc = setGpsLocation(params)))  final_rc = rc;
+    if ((rc = setRotation(params)))     final_rc = rc;
+    if ((rc = setZoom(params)))         final_rc = rc;
+    if ((rc = setOrientation(params)))  final_rc = rc;
+    if ((rc = setLensshadeValue(params)))  final_rc = rc;
+    if ((rc = setMCEValue(params)))  final_rc = rc;
+    //if ((rc = setHDRImaging(params)))  final_rc = rc;
+    if ((rc = setExpBracketing(params)))  final_rc = rc;
+    if ((rc = setPictureFormat(params))) final_rc = rc;
+    if ((rc = setSharpness(params)))    final_rc = rc;
+    if ((rc = setSaturation(params)))   final_rc = rc;
+    if ((rc = setTouchAfAec(params)))   final_rc = rc;
+    if ((rc = setSceneMode(params)))    final_rc = rc;
+    if ((rc = setContrast(params)))     final_rc = rc;
+    if ((rc = setRecordSize(params)))  final_rc = rc;
+    if ((rc = setSceneDetect(params)))  final_rc = rc;
+    if ((rc = setStrTextures(params)))   final_rc = rc;
+    if ((rc = setPreviewFormat(params)))   final_rc = rc;
+    if ((rc = setSkinToneEnhancement(params)))   final_rc = rc;
+    if ((rc = setAntibanding(params)))  final_rc = rc;
+    if ((rc = setRedeyeReduction(params)))  final_rc = rc;
+    if ((rc = setDenoise(params)))  final_rc = rc;
+    if ((rc = setPreviewFpsRange(params)))  final_rc = rc;
+    if ((rc = setZslParam(params)))  final_rc = rc;
+    if ((rc = setSnapshotCount(params)))  final_rc = rc;
+    if((rc = setRecordingHint(params)))   final_rc = rc;
+    const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+    int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+    if((value != NOT_FOUND) && (value == CAMERA_BESTSHOT_OFF)) {
+        if ((rc = setPreviewFrameRate(params))) final_rc = rc;
+    //    if ((rc = setPreviewFrameRateMode(params))) final_rc = rc;
+        if ((rc = setAutoExposure(params))) final_rc = rc;
+        if ((rc = setExposureCompensation(params))) final_rc = rc;
+        if ((rc = setWhiteBalance(params))) final_rc = rc;
+        if ((rc = setFlash(params)))        final_rc = rc;
+        if ((rc = setFocusMode(params)))    final_rc = rc;
+        if ((rc = setBrightness(params)))   final_rc = rc;
+        if ((rc = setISOValue(params)))  final_rc = rc;
+        if ((rc = setFocusAreas(params)))  final_rc = rc;
+        if ((rc = setMeteringAreas(params)))  final_rc = rc;
+    }
+    //selectableZoneAF needs to be invoked after continuous AF
+    if ((rc = setSelectableZoneAf(params)))   final_rc = rc;
+    // setHighFrameRate needs to be done at end, as there can
+    // be a preview restart, and need to use the updated parameters
+    if ((rc = setHighFrameRate(params)))  final_rc = rc;
+
+    ALOGV("setParameters: X");
+    return final_rc;
+}
+
+QCameraParameters QualcommCameraHardware::getParameters() const
+{
+    ALOGV("getParameters: EX");
+    return mParameters;
+}
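+/* setHistogramOn:
+ * Allocates three histogram stats buffers through mGetMemory and enables
+ * CAMERA_PARM_HISTOGRAM in the driver; buffers are delivered to the client via
+ * CAMERA_MSG_STATS_DATA from receiveCameraStats(). */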
+status_t QualcommCameraHardware::setHistogramOn()
+{
+    ALOGV("setHistogramOn: EX");
+    mStatsWaitLock.lock();
+    mSendData = true;
+    if(mStatsOn == CAMERA_HISTOGRAM_ENABLE) {
+        mStatsWaitLock.unlock();
+        return NO_ERROR;
+     }
+#if 0
+    if (mStatHeap != NULL) {
+        ALOGV("setHistogram on: clearing old mStatHeap.");
+        mStatHeap.clear();
+    }
+#endif
+
+    mStatSize = sizeof(uint32_t)* HISTOGRAM_STATS_SIZE;
+    mCurrent = -1;
+    /* Currently Ashmem multiplies the buffer size by the total number of
+     * buffers and page-aligns the result. This causes a crash in JNI, as each
+     * buffer is individually expected to be page aligned. */
+    int page_size_minus_1 = getpagesize() - 1;
+    int32_t mAlignedStatSize = ((mStatSize + page_size_minus_1) & (~page_size_minus_1));
+#if 0
+    mStatHeap =
+            new AshmemPool(mAlignedStatSize,
+                           3,
+                           mStatSize,
+                           "stat");
+      if (!mStatHeap->initialized()) {
+          ALOGE("Stat Heap X failed ");
+          mStatHeap.clear();
+          ALOGE("setHistogramOn X: error initializing mStatHeap");
+          mStatsWaitLock.unlock();
+          return UNKNOWN_ERROR;
+      }
+#endif
+    for(int cnt = 0; cnt<3; cnt++) {
+            mStatsMapped[cnt]=mGetMemory(-1, mStatSize,1,mCallbackCookie);
+            if(mStatsMapped[cnt] == NULL) {
+                ALOGE("Failed to get camera memory for stats heap index: %d", cnt);
+                mStatsWaitLock.unlock();
+                return UNKNOWN_ERROR;
+            }else{
+               ALOGV("Received following info for stats mapped data:%p,handle:%p, size:%d,release:%p",
+               mStatsMapped[cnt]->data ,mStatsMapped[cnt]->handle, mStatsMapped[cnt]->size, mStatsMapped[cnt]->release);
+            }
+    }
+    mStatsOn = CAMERA_HISTOGRAM_ENABLE;
+    mStatsWaitLock.unlock();
+    mCfgControl.mm_camera_set_parm(CAMERA_PARM_HISTOGRAM, &mStatsOn);
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setHistogramOff()
+{
+    ALOGV("setHistogramOff: EX");
+    mStatsWaitLock.lock();
+    if(mStatsOn == CAMERA_HISTOGRAM_DISABLE) {
+    mStatsWaitLock.unlock();
+        return NO_ERROR;
+     }
+    mStatsOn = CAMERA_HISTOGRAM_DISABLE;
+    mStatsWaitLock.unlock();
+
+    mCfgControl.mm_camera_set_parm(CAMERA_PARM_HISTOGRAM, &mStatsOn);
+
+    mStatsWaitLock.lock();
+//    mStatHeap.clear();
+    for(int i=0; i<3; i++){
+        if(mStatsMapped[i] != NULL){
+            mStatsMapped[i]->release(mStatsMapped[i]);
+            mStatsMapped[i] = NULL;
+        }
+    }
+
+    mStatsWaitLock.unlock();
+    return NO_ERROR;
+}
+
+
+status_t QualcommCameraHardware::runFaceDetection()
+{
+    bool ret = true;
+#if 0
+    const char *str = mParameters.get(QCameraParameters::KEY_FACE_DETECTION);
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                sizeof(facedetection) / sizeof(str_map), str);
+
+        mMetaDataWaitLock.lock();
+        if (value == true) {
+            if(mMetaDataHeap != NULL)
+                mMetaDataHeap.clear();
+
+            mMetaDataHeap =
+                new AshmemPool((sizeof(int)*(MAX_ROI*4+1)),
+                        1,
+                        (sizeof(int)*(MAX_ROI*4+1)),
+                        "metadata");
+            if (!mMetaDataHeap->initialized()) {
+                ALOGE("Meta Data Heap allocation failed ");
+                mMetaDataHeap.clear();
+                ALOGE("runFaceDetection X: error initializing mMetaDataHeap");
+                mMetaDataWaitLock.unlock();
+                return UNKNOWN_ERROR;
+            }
+            mSendMetaData = true;
+        } else {
+            if(mMetaDataHeap != NULL)
+                mMetaDataHeap.clear();
+        }
+        mMetaDataWaitLock.unlock();
+        ret = native_set_parms(CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+ #endif
+    return BAD_VALUE;
+}
+
+void* smoothzoom_thread(void* user)
+{
+    // call runsmoothzoomthread
+    ALOGV("smoothzoom_thread E");
+    CAMERA_HAL_UNUSED(user);
+
+    QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->runSmoothzoomThread(user);
+    }
+    else ALOGE("not starting smooth zoom thread: the object went away!");
+    ALOGV("Smoothzoom_thread X");
+    return NULL;
+}
+
+status_t QualcommCameraHardware::sendCommand(int32_t command, int32_t arg1,
+                                             int32_t arg2)
+{
+    ALOGV("sendCommand: EX");
+    CAMERA_HAL_UNUSED(arg1);
+    CAMERA_HAL_UNUSED(arg2);
+    Mutex::Autolock l(&mLock);
+
+    switch(command)  {
+      case CAMERA_CMD_HISTOGRAM_ON:
+                                   ALOGV("histogram set to on");
+                                   return setHistogramOn();
+      case CAMERA_CMD_HISTOGRAM_OFF:
+                                   ALOGV("histogram set to off");
+                                   return setHistogramOff();
+      case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+                                   mStatsWaitLock.lock();
+                                   if(mStatsOn == CAMERA_HISTOGRAM_ENABLE)
+                                       mSendData = true;
+                                   mStatsWaitLock.unlock();
+                                   return NO_ERROR;
+#if 0
+      case CAMERA_CMD_FACE_DETECTION_ON:
+                                   if(supportsFaceDetection() == false){
+                                        ALOGI("face detection support is not available");
+                                        return NO_ERROR;
+                                   }
+
+                                   setFaceDetection("on");
+                                   return runFaceDetection();
+      case CAMERA_CMD_FACE_DETECTION_OFF:
+                                   if(supportsFaceDetection() == false){
+                                        ALOGI("face detection support is not available");
+                                        return NO_ERROR;
+                                   }
+                                   setFaceDetection("off");
+                                   return runFaceDetection();
+      case CAMERA_CMD_SEND_META_DATA:
+                                   mMetaDataWaitLock.lock();
+                                   if(mFaceDetectOn == true) {
+                                       mSendMetaData = true;
+                                   }
+                                   mMetaDataWaitLock.unlock();
+                                   return NO_ERROR;
+      case CAMERA_CMD_START_SMOOTH_ZOOM :
+             ALOGV("HAL sendcmd start smooth zoom %d %d", arg1 , arg2);
+             mTargetSmoothZoom = arg1;
+             if(!mPreviewStopping) {
+                 // create smooth zoom thread
+                 mSmoothzoomThreadLock.lock();
+                 mSmoothzoomThreadExit = false;
+                 pthread_attr_t attr;
+                 pthread_attr_init(&attr);
+                 pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+                 pthread_create(&mSmoothzoomThread,
+                                    &attr,
+                                    smoothzoom_thread,
+                                    NULL);
+                 mSmoothzoomThreadLock.unlock();
+             } else
+                 ALOGV(" Not creating smooth zoom thread "
+                      " since preview is stopping ");
+             mTargetSmoothZoom = arg1;
+             return NO_ERROR;
+
+      case CAMERA_CMD_STOP_SMOOTH_ZOOM :
+             mSmoothzoomThreadLock.lock();
+             mSmoothzoomThreadExit = true;
+             mSmoothzoomThreadLock.unlock();
+             ALOGV("HAL sendcmd stop smooth zoom");
+             return NO_ERROR;
+#endif
+   }
+   return BAD_VALUE;
+}
+
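+/* runSmoothzoomThread:
+ * Steps the zoom one level per preview frame towards mTargetSmoothZoom, issuing
+ * a CAMERA_MSG_ZOOM callback for every step (with the "stopped" flag set on the
+ * last one), and exits early if smooth zoom is stopped or the preview is
+ * stopping. */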
+void QualcommCameraHardware::runSmoothzoomThread(void * data) {
+
+    ALOGV("runSmoothzoomThread: Current zoom %d - "
+          "Target %d", mParameters.getInt("zoom"), mTargetSmoothZoom);
+    int current_zoom = mParameters.getInt("zoom");
+    int step = (current_zoom > mTargetSmoothZoom)? -1: 1;
+
+    if(current_zoom == mTargetSmoothZoom) {
+        ALOGV("Smoothzoom target zoom value is same as "
+             "current zoom value, return...");
+        if(!mPreviewStopping)
+            mNotifyCallback(CAMERA_MSG_ZOOM,
+                current_zoom, 1, mCallbackCookie);
+        else
+            ALOGV("Not issuing callback since preview is stopping");
+        return;
+    }
+
+    QCameraParameters p = getParameters();
+
+    mSmoothzoomThreadWaitLock.lock();
+    mSmoothzoomThreadRunning = true;
+    mSmoothzoomThreadWaitLock.unlock();
+
+    int i = current_zoom;
+    while(1) {  // Thread loop
+        mSmoothzoomThreadLock.lock();
+        if(mSmoothzoomThreadExit) {
+            ALOGV("Exiting smoothzoom thread, as stop smoothzoom called");
+            mSmoothzoomThreadLock.unlock();
+            break;
+        }
+        mSmoothzoomThreadLock.unlock();
+
+        if((i < 0) || (i > mMaxZoom)) {
+            ALOGE(" ERROR : beyond supported zoom values, break..");
+            break;
+        }
+        // update zoom
+        p.set("zoom", i);
+        setZoom(p);
+        if(!mPreviewStopping) {
+            // give call back to zoom listener in app
+            mNotifyCallback(CAMERA_MSG_ZOOM, i, (mTargetSmoothZoom-i == 0)?1:0,
+                    mCallbackCookie);
+        } else {
+            ALOGV("Preview is stopping. Breaking out of smooth zoom loop");
+            break;
+        }
+        if(i == mTargetSmoothZoom)
+            break;
+
+        i+=step;
+
+        /* wait on signal, which will be signalled on
+         * receiving the next preview frame */
+        mSmoothzoomThreadWaitLock.lock();
+        mSmoothzoomThreadWait.wait(mSmoothzoomThreadWaitLock);
+        mSmoothzoomThreadWaitLock.unlock();
+    } // while loop over, exiting thread
+
+    mSmoothzoomThreadWaitLock.lock();
+    mSmoothzoomThreadRunning = false;
+    mSmoothzoomThreadWaitLock.unlock();
+    ALOGV("Exiting Smooth Zoom Thread");
+}
+
+extern "C" QualcommCameraHardware* HAL_openCameraHardware(int cameraId)
+{
+    int i;
+    ALOGI("openCameraHardware: call createInstance");
+    for(i = 0; i < HAL_numOfCameras; i++) {
+        if(i == cameraId) {
+            ALOGI("openCameraHardware:Valid camera ID %d", cameraId);
+            parameter_string_initialized = false;
+            HAL_currentCameraId = cameraId;
+            /* The least significant two bits of mode parameter indicates the sensor mode
+               of 2D or 3D. The next two bits indicates the snapshot mode of
+               ZSL or NONZSL
+               */
+#if 0
+            int sensorModeMask = 0x03 & mode;
+            if(sensorModeMask & HAL_cameraInfo[i].modes_supported){
+                HAL_currentCameraMode = sensorModeMask;
+            }else{
+                ALOGE("openCameraHardware:Invalid camera mode (%d) requested", mode);
+                return NULL;
+            }
+#endif
+            HAL_currentCameraMode = CAMERA_MODE_2D;
+            HAL_currentSnapshotMode = CAMERA_SNAPSHOT_NONZSL;
+            //Remove values set by app other than  supported values
+            //mode = mode & HAL_cameraInfo[cameraId].modes_supported;
+            //if((mode & CAMERA_SNAPSHOT_ZSL) == CAMERA_SNAPSHOT_ZSL)
+              //  HAL_currentSnapshotMode = CAMERA_SNAPSHOT_ZSL;
+            ALOGI("%s: HAL_currentSnapshotMode = %d HAL_currentCameraMode = %d", __FUNCTION__, HAL_currentSnapshotMode,
+                 HAL_currentCameraMode);
+            return QualcommCameraHardware::createInstance();
+        }
+    }
+    ALOGE("openCameraHardware:Invalid camera ID %d", cameraId);
+    return NULL;
+}
+
+//wp<QualcommCameraHardware> QualcommCameraHardware::singleton;
+
+// If the hardware already exists, return a strong pointer to the current
+// object. If not, create a new hardware object, put it in the singleton,
+// and return it.
+QualcommCameraHardware* QualcommCameraHardware::createInstance()
+{
+    ALOGI("createInstance: E");
+#if 0
+    singleton_lock.lock();
+
+    // Wait until the previous release is done.
+    while (singleton_releasing) {
+        if((singleton_releasing_start_time != 0) &&
+                (systemTime() - singleton_releasing_start_time) > SINGLETON_RELEASING_WAIT_TIME){
+            ALOGV("in createinstance system time is %lld %lld %lld ",
+                    systemTime(), singleton_releasing_start_time, SINGLETON_RELEASING_WAIT_TIME);
+            singleton_lock.unlock();
+            ALOGE("Previous singleton is busy and time out exceeded. Returning null");
+            return NULL;
+        }
+        ALOGI("Wait for previous release.");
+        singleton_wait.waitRelative(singleton_lock, SINGLETON_RELEASING_RECHECK_TIMEOUT);
+        ALOGI("out of Wait for previous release.");
+    }
+
+    if (singleton != 0) {
+        sp<CameraHardwareInterface> hardware = singleton.promote();
+        if (hardware != 0) {
+            ALOGD("createInstance: X return existing hardware=%p", &(*hardware));
+            singleton_lock.unlock();
+            return hardware;
+        }
+    }
+#endif
+    {
+        struct stat st;
+        int rc = stat("/dev/oncrpc", &st);
+        if (rc < 0) {
+            ALOGD("createInstance: X failed to create hardware: %s", strerror(errno));
+            //singleton_lock.unlock();
+            return NULL;
+        }
+    }
+
+    QualcommCameraHardware *cam = new QualcommCameraHardware();
+    hardware=cam;
+
+
+    ALOGI("createInstance: created hardware=%p", cam);
+    if (!cam->startCamera()) {
+        ALOGE("%s: startCamera failed!", __FUNCTION__);
+        //singleton_lock.unlock();
+        delete cam;
+        return NULL;
+    }
+
+    cam->initDefaultParameters();
+    //singleton_lock.unlock();
+    ALOGI("createInstance: X");
+    return cam;
+}
+
+// For internal use only, hence the strong pointer to the derived type.
+QualcommCameraHardware* QualcommCameraHardware::getInstance()
+{
+    //QualcommCameraHardware* hardware = singleton.promote();
+    if (hardware != 0) {
+        //    ALOGV("getInstance: X old instance of hardware");
+      //  return sp<QualcommCameraHardware>(static_cast<QualcommCameraHardware*>(hardware.get()));
+        return hardware;
+    } else {
+        ALOGV("getInstance: X new instance of hardware");
+        return new QualcommCameraHardware();
+    }
+}
+void QualcommCameraHardware::receiveRecordingFrame(struct msm_frame *frame)
+{
+    ALOGV("receiveRecordingFrame E");
+    // post busy frame
+    if (frame)
+    {
+        cam_frame_post_video (frame);
+    }
+    else ALOGE("in  receiveRecordingFrame frame is NULL");
+    ALOGV("receiveRecordingFrame X");
+}
+
+
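+/* native_zoom_image:
+ * Uses the framebuffer MSMFB_BLIT ioctl to scale the cropped region reported in
+ * common_crop_t back up to the full preview size, which is how digital zoom is
+ * applied on the preview path. */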
+bool QualcommCameraHardware::native_zoom_image(int fd, int srcOffset, int dstOffSet, common_crop_t *crop)
+{
+    int result = 0;
+    struct mdp_blit_req *e;
+
+    /* Initialize yuv structure */
+    zoomImage.list.count = 1;
+
+    e = &zoomImage.list.req[0];
+
+    e->src.width = previewWidth;
+    e->src.height = previewHeight;
+    e->src.format = MDP_Y_CBCR_H2V2;
+    e->src.offset = srcOffset;
+    e->src.memory_id = fd;
+
+    e->dst.width = previewWidth;
+    e->dst.height = previewHeight;
+    e->dst.format = MDP_Y_CBCR_H2V2;
+    e->dst.offset = dstOffSet;
+    e->dst.memory_id = fd;
+
+    e->transp_mask = 0xffffffff;
+    e->flags = 0;
+    e->alpha = 0xff;
+    if (crop->in1_w != 0 && crop->in1_h != 0) {
+        e->src_rect.x = (crop->out1_w - crop->in1_w + 1) / 2 - 1;
+        e->src_rect.y = (crop->out1_h - crop->in1_h + 1) / 2 - 1;
+        e->src_rect.w = crop->in1_w;
+        e->src_rect.h = crop->in1_h;
+    } else {
+        e->src_rect.x = 0;
+        e->src_rect.y = 0;
+        e->src_rect.w = previewWidth;
+        e->src_rect.h = previewHeight;
+    }
+    //ALOGV(" native_zoom : SRC_RECT : x,y = %d,%d \t w,h = %d, %d",
+    //        e->src_rect.x, e->src_rect.y, e->src_rect.w, e->src_rect.h);
+
+    e->dst_rect.x = 0;
+    e->dst_rect.y = 0;
+    e->dst_rect.w = previewWidth;
+    e->dst_rect.h = previewHeight;
+
+    result = ioctl(fb_fd, MSMFB_BLIT, &zoomImage.list);
+    if (result < 0) {
+        ALOGE("MSMFB_BLIT ioctl failed! line=%d\n", __LINE__);
+        return FALSE;
+    }
+    return TRUE;
+}
+
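+/* Debug helper: logs the measured preview frame rate roughly every 250 ms. */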
+void QualcommCameraHardware::debugShowPreviewFPS() const
+{
+    static int mFrameCount;
+    static int mLastFrameCount = 0;
+    static nsecs_t mLastFpsTime = 0;
+    static float mFps = 0;
+    mFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - mLastFpsTime;
+    if (diff > ms2ns(250)) {
+        mFps =  ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+        ALOGI("Preview Frames Per Second: %.4f", mFps);
+        mLastFpsTime = now;
+        mLastFrameCount = mFrameCount;
+    }
+}
+
+void QualcommCameraHardware::debugShowVideoFPS() const
+{
+    static int mFrameCount;
+    static int mLastFrameCount = 0;
+    static nsecs_t mLastFpsTime = 0;
+    static float mFps = 0;
+    mFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - mLastFpsTime;
+    if (diff > ms2ns(250)) {
+        mFps =  ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+        ALOGI("Video Frames Per Second: %.4f", mFps);
+        mLastFpsTime = now;
+        mLastFrameCount = mFrameCount;
+    }
+}
+
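+/* receiveLiveSnapshot:
+ * Called when a live-shot JPEG is ready; optionally dumps it to
+ * /data/LiveSnapshot.jpg and delivers it to the client through the
+ * CAMERA_MSG_COMPRESSED_IMAGE data callback. */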
+void QualcommCameraHardware::receiveLiveSnapshot(uint32_t jpeg_size)
+{
+    ALOGV("receiveLiveSnapshot E");
+#if DUMP_LIVESHOT_JPEG_FILE
+    int file_fd = open("/data/LiveSnapshot.jpg", O_RDWR | O_CREAT, 0777);
+    ALOGV("dumping live shot image in /data/LiveSnapshot.jpg");
+    if (file_fd < 0) {
+        ALOGE("cannot open file\n");
+    }
+    else
+    {
+        write(file_fd, (uint8_t *)mJpegLiveSnapMapped->data,jpeg_size);
+    }
+    close(file_fd);
+#endif
+    Mutex::Autolock cbLock(&mCallbackLock);
+    if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+          mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mJpegLiveSnapMapped ,data_counter,
+                          NULL, mCallbackCookie);
+
+    }
+    else ALOGV("JPEG callback was cancelled--not delivering image.");
+
+    //Reset the Gps Information & relieve memory
+    exif_table_numEntries = 0;
+    mJpegHeap.clear();
+
+    liveshot_state = LIVESHOT_DONE;
+
+    ALOGV("receiveLiveSnapshot X");
+}
+void QualcommCameraHardware::receivePreviewFrame(struct msm_frame *frame)
+{
+    ALOGI("receivePreviewFrame E");
+    if (!mCameraRunning) {
+        ALOGE("ignoring preview callback--camera has been stopped");
+        LINK_camframe_add_frame(CAM_PREVIEW_FRAME,frame);
+        return;
+    }
+    if((mCurrentTarget == TARGET_MSM7627A) && ( liveshot_state == LIVESHOT_IN_PROGRESS)) {
+        LINK_set_liveshot_frame(frame);
+    }
+    if(mPreviewBusyQueue.add(frame) == false)
+        LINK_camframe_add_frame(CAM_PREVIEW_FRAME,frame);
+
+
+    ALOGI("receivePreviewFrame X");
+}
+void QualcommCameraHardware::receiveCameraStats(camstats_type stype, camera_preview_histogram_info* histinfo)
+{
+  //  ALOGV("receiveCameraStats E");
+    CAMERA_HAL_UNUSED(stype);
+
+    if (!mCameraRunning) {
+        ALOGE("ignoring stats callback--camera has been stopped");
+        return;
+    }
+
+    mCallbackLock.lock();
+    int msgEnabled = mMsgEnabled;
+    camera_data_callback scb = mDataCallback;
+    void *sdata = mCallbackCookie;
+    mCallbackLock.unlock();
+    mStatsWaitLock.lock();
+    if(mStatsOn == CAMERA_HISTOGRAM_DISABLE) {
+      mStatsWaitLock.unlock();
+      return;
+    }
+    if(!mSendData) {
+        mStatsWaitLock.unlock();
+     } else {
+        mSendData = false;
+        mCurrent = (mCurrent+1)%3;
+    // The first element of the array will contain the maximum hist value provided by driver.
+    //    *(uint32_t *)((unsigned int)mStatHeap->mHeap->base()+ (mStatHeap->mBufferSize * mCurrent)) = histinfo->max_value;
+    //    memcpy((uint32_t *)((unsigned int)mStatHeap->mHeap->base()+ (mStatHeap->mBufferSize * mCurrent)+ sizeof(int32_t)), (uint32_t *)histinfo->buffer,(sizeof(int32_t) * 256));
+        *(uint32_t *)((unsigned int)(mStatsMapped[mCurrent]->data)) = histinfo->max_value;
+        memcpy((uint32_t *)((unsigned int)mStatsMapped[mCurrent]->data + sizeof(int32_t)), (uint32_t *)histinfo->buffer,(sizeof(int32_t) * 256));
+
+        mStatsWaitLock.unlock();
+
+        if (scb != NULL && (msgEnabled & CAMERA_MSG_STATS_DATA))
+            scb(CAMERA_MSG_STATS_DATA, mStatsMapped[mCurrent], data_counter, NULL,sdata);
+
+     }
+  //  ALOGV("receiveCameraStats X");
+}
+/*===========================================================================
+ * FUNCTION    - mm_camera_do_mmap -
+ *
+ * DESCRIPTION: returns the mmapped virtual address
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd)
+{
+    void *ret; /* returned virtual address */
+    int pmem_fd;
+
+    if(mCurrentTarget == TARGET_MSM8660)
+        pmem_fd = open("/dev/pmem_smipool", O_RDWR|O_SYNC);
+    else
+        pmem_fd = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+    if (pmem_fd <= 0) {
+        ALOGE("do_mmap: Open pmem device failed!\n");
+        return NULL;
+    }
+    /* to make it page size aligned */
+    size = (size + 4095) & (~4095);
+  ret = mmap(NULL,
+    size,
+    PROT_READ  | PROT_WRITE,
+    MAP_SHARED,
+    pmem_fd,
+    0);
+    if (ret == MAP_FAILED) {
+        ALOGE("do_mmap: pmem mmap() failed: %s (%d)\n", strerror(errno), errno);
+        close(pmem_fd);
+        return NULL;
+    }
+    ALOGE("do_mmap: pmem mmap fd %d ptr %p len %u\n", pmem_fd, ret, size);
+    *pmemFd = pmem_fd;
+    return(uint8_t *)ret;
+}
+
+
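+/* initRecord:
+ * Allocates kRecordBufferCount video buffers (ION when USE_ION is defined,
+ * otherwise pmem), wraps each one with mGetMemory and registers it with the
+ * kernel; on 8x60 the CbCr plane offset is padded to 2K for the encoder. */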
+bool QualcommCameraHardware::initRecord()
+{
+    const char *pmem_region;
+    int ion_heap = ION_CP_MM_HEAP_ID;
+    int CbCrOffset;
+    int recordBufferSize;
+    int active, type = 0;
+
+    ALOGV("initRecord E");
+    if(mZslEnable){
+       ALOGV("initRecord X.. Not initializing Record buffers in ZSL mode");
+       return true;
+    }
+
+    if(mCurrentTarget == TARGET_MSM8660) {
+        pmem_region = "/dev/pmem_smipool";
+    } else {
+        pmem_region = "/dev/pmem_adsp";
+    }
+
+    ALOGI("initRecord: mDimension.video_width = %d mDimension.video_height = %d",
+             mDimension.video_width, mDimension.video_height);
+    // For 8x60, the encoder expects the CbCr offset to be aligned to 2K.
+    if(mCurrentTarget == TARGET_MSM8660) {
+        CbCrOffset = PAD_TO_2K(mDimension.video_width  * mDimension.video_height);
+        recordBufferSize = CbCrOffset + PAD_TO_2K((mDimension.video_width * mDimension.video_height)/2);
+    } else {
+        CbCrOffset = PAD_TO_WORD(mDimension.video_width  * mDimension.video_height);
+        recordBufferSize = (mDimension.video_width  * mDimension.video_height *3)/2;
+    }
+
+    /* Buffersize and frameSize will be different when DIS is ON.
+     * We need to pass the actual framesize with video heap, as the same
+     * is used at camera MIO when negotiating with encoder.
+     */
+    mRecordFrameSize = PAD_TO_4K(recordBufferSize);
+    bool dis_disable = 0;
+    const char *str = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if((str != NULL) && (strcmp(str, QCameraParameters::VIDEO_HFR_OFF))) {
+        ALOGI("%s: HFR is ON, DIS has to be OFF", __FUNCTION__);
+        dis_disable = 1;
+    }
+    if((mVpeEnabled && mDisEnabled && (!dis_disable))|| mIs3DModeOn){
+        mRecordFrameSize = videoWidth * videoHeight * 3 / 2;
+        if(mCurrentTarget == TARGET_MSM8660){
+            mRecordFrameSize = PAD_TO_4K(PAD_TO_2K(videoWidth * videoHeight)
+                                + PAD_TO_2K((videoWidth * videoHeight)/2));
+        }
+    }
+    ALOGV("mRecordFrameSize = %d", mRecordFrameSize);
+    //if(mRecordHeap == NULL) {
+    #if 0
+#ifdef USE_ION
+        mRecordHeap = new IonPool(ion_heap,
+                                MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_VIDEO,
+                                recordBufferSize,
+                                kRecordBufferCount,
+                                mRecordFrameSize,
+                                CbCrOffset,
+                                0,
+                                "record");
+#endif
+
+        mRecordHeap = new PmemPool(pmem_region,
+                               MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                                MSM_PMEM_VIDEO,
+                                recordBufferSize,
+                                kRecordBufferCount,
+                                mRecordFrameSize,
+                                CbCrOffset,
+                                0,
+                                "record");
+
+        if (!mRecordHeap->initialized()) {
+            mRecordHeap.clear();
+            mRecordHeap = NULL;
+            ALOGE("initRecord X: could not initialize record heap.");
+            return false;
+        }
+
+    } else {
+        if(mHFRMode == true) {
+            ALOGI("%s: register record buffers with camera driver", __FUNCTION__);
+            register_record_buffers(true);
+            mHFRMode = false;
+        }
+    }
+#endif
+
+    for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+#if 0
+       //recordframes[cnt].fd = mRecordHeap->mHeap->getHeapID();
+       recordframes[cnt].buffer = (unsigned long)mm_camera_do_mmap(mRecordFrameSize, &(recordframes[cnt].fd));
+           //(uint32_t)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+       if(!recordframes[cnt].buffer)
+       {
+         ALOGE("Buffer allocation for record fram %d failed",cnt);
+         return false;
+       }
+#endif
+#ifdef USE_ION
+        if (allocate_ion_memory(&record_main_ion_fd[cnt], &record_alloc[cnt], &record_ion_info_fd[cnt],
+                                ion_heap, mRecordFrameSize, &mRecordfd[cnt]) < 0){
+            ALOGE("%s: allocate_ion_memory for record buffer %d failed!\n", __func__, cnt);
+            return false;
+        }
+#else
+        mRecordfd[cnt] = open(pmem_region, O_RDWR|O_SYNC);
+        if (mRecordfd[cnt] <= 0) {
+            ALOGE("%s: Open device %s failed!\n", __func__, pmem_region);
+            return false;
+        }
+#endif
+        ALOGE("%s: Record fd is %d", __func__, mRecordfd[cnt]);
+        mRecordMapped[cnt] = mGetMemory(mRecordfd[cnt], mRecordFrameSize, 1, mCallbackCookie);
+        if(mRecordMapped[cnt] == NULL) {
+            ALOGE("Failed to get camera memory for mRecordMapped heap");
+            return false;
+        }
+        ALOGE("Received following info for record mapped data:%p, handle:%p, size:%d, release:%p",
+            mRecordMapped[cnt]->data, mRecordMapped[cnt]->handle, mRecordMapped[cnt]->size, mRecordMapped[cnt]->release);
+        recordframes[cnt].buffer = (unsigned int)mRecordMapped[cnt]->data;
+        recordframes[cnt].fd = mRecordfd[cnt];
+        recordframes[cnt].planar0_off = 0;
+        recordframes[cnt].planar1_off = CbCrOffset;
+        recordframes[cnt].planar2_off = 0;
+        recordframes[cnt].path = OUTPUT_TYPE_V;
+        record_buffers_tracking_flag[cnt] = false;
+        ALOGV ("initRecord :  record heap , video buffers  buffer=%lu fd=%d y_off=%d cbcr_off=%d \n",
+          (unsigned long)recordframes[cnt].buffer, recordframes[cnt].fd, recordframes[cnt].planar0_off,
+          recordframes[cnt].planar1_off);
+        active=(cnt<ACTIVE_VIDEO_BUFFERS);
+        type = MSM_PMEM_VIDEO;
+        if((mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+            type = MSM_PMEM_VIDEO_VPE;
+            active = 1;
+        }
+        ALOGE("Registering buffer %d with kernel",cnt);
+                  register_buf(mRecordFrameSize,
+                             mRecordFrameSize, CbCrOffset, 0,
+                             recordframes[cnt].fd,
+                             0,
+                             (uint8_t *)recordframes[cnt].buffer,
+                             type,
+                             active);
+                  ALOGE("Came back from register call to kernel");
+    }
+
+    // initial setup : buffers 1,2,3 with kernel , 4 with camframe , 5,6,7,8 in free Q
+    // flush the busy Q
+    cam_frame_flush_video();
+
+    mVideoThreadWaitLock.lock();
+    while (mVideoThreadRunning) {
+        ALOGV("initRecord: waiting for old video thread to complete.");
+        mVideoThreadWait.wait(mVideoThreadWaitLock);
+        ALOGV("initRecord : old video thread completed.");
+    }
+    mVideoThreadWaitLock.unlock();
+
+    // flush free queue and add 5,6,7,8 buffers.
+    LINK_camframe_release_all_frames(CAM_VIDEO_FRAME);
+    if(mVpeEnabled) {
+        //If VPE is enabled, the VPE buffer shouldn't be added to the free Q initially.
+        for(int i=ACTIVE_VIDEO_BUFFERS;i <kRecordBufferCount-1; i++)
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[i]);
+    } else {
+        for(int i=ACTIVE_VIDEO_BUFFERS;i <kRecordBufferCount; i++)
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[i]);
+    }
+    ALOGV("initREcord X");
+
+    return true;
+}
+
+
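+/* setDIS: pushes the digital image stabilization setting to the driver via
+ * CAMERA_PARM_VIDEO_DIS. The CbCr offset sent with it follows the same
+ * alignment rule as the record buffers (2K on 8x60, word-aligned otherwise),
+ * and DIS is forced off whenever HFR is enabled.
+ */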
+status_t QualcommCameraHardware::setDIS() {
+    ALOGV("setDIS E");
+
+    video_dis_param_ctrl_t disCtrl;
+    bool ret = true;
+    ALOGV("mDisEnabled = %d", mDisEnabled);
+
+    int video_frame_cbcroffset;
+    video_frame_cbcroffset = PAD_TO_WORD(videoWidth * videoHeight);
+    if(mCurrentTarget == TARGET_MSM8660)
+        video_frame_cbcroffset = PAD_TO_2K(videoWidth * videoHeight);
+
+    disCtrl.dis_enable = mDisEnabled;
+    const char *str = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if((str != NULL) && (strcmp(str, QCameraParameters::VIDEO_HFR_OFF))) {
+        ALOGI("%s: HFR is ON, setting DIS as OFF", __FUNCTION__);
+        disCtrl.dis_enable = 0;
+    }
+    disCtrl.video_rec_width = videoWidth;
+    disCtrl.video_rec_height = videoHeight;
+    disCtrl.output_cbcr_offset = video_frame_cbcroffset;
+
+    ret = native_set_parms( CAMERA_PARM_VIDEO_DIS,
+                       sizeof(disCtrl), &disCtrl);
+
+    ALOGV("setDIS X (%d)", ret);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
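+/* setVpeParameters: programs the VPE rotation via CAMERA_PARM_VIDEO_ROT.
+ * The effective rotation is (mRotation + sensor_rotation) % 360, mapped to
+ * ROT_NONE/90/180/270, with a temporary exception for 1280x720 and 800x480
+ * at 90/270 degrees (see the comment in the body).
+ */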
+status_t QualcommCameraHardware::setVpeParameters()
+{
+    ALOGV("setVpeParameters E");
+
+    video_rotation_param_ctrl_t rotCtrl;
+    bool ret = true;
+    ALOGV("videoWidth = %d, videoHeight = %d", videoWidth, videoHeight);
+    int rotation = (mRotation + sensor_rotation)%360;
+    rotCtrl.rotation = (rotation == 0) ? ROT_NONE :
+                       ((rotation == 90) ? ROT_CLOCKWISE_90 :
+                  ((rotation == 180) ? ROT_CLOCKWISE_180 : ROT_CLOCKWISE_270));
+
+    if( ((videoWidth == 1280 && videoHeight == 720) || (videoWidth == 800 && videoHeight == 480))
+        && (rotation == 90 || rotation == 270) ){
+        /* Due to a limitation at video core to support heights greater than 720, adding this check.
+         * This is a temporary hack, need to be removed once video core support is available
+         */
+        ALOGI("video resolution (%dx%d) with rotation (%d) is not supported, setting rotation to NONE",
+            videoWidth, videoHeight, rotation);
+        rotCtrl.rotation = ROT_NONE;
+    }
+    ALOGV("rotCtrl.rotation = %d", rotCtrl.rotation);
+
+    ret = native_set_parms(CAMERA_PARM_VIDEO_ROT,
+                           sizeof(rotCtrl), &rotCtrl);
+
+    ALOGV("setVpeParameters X (%d)", ret);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
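+/* startRecording: starts preview if needed, applies VPE parameters, and on
+ * targets with a second VFE output (7x30/QSD8250/8660) issues
+ * CAMERA_OPS_VIDEO_RECORDING, flushes stale frames from the busy queue, and
+ * spawns video_thread. When metadata mode is enabled, a native_handle
+ * carrying {fd, offset, size} is prepared per record buffer for
+ * kMetadataBufferTypeCameraSource.
+ */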
+status_t QualcommCameraHardware::startRecording()
+{
+    ALOGV("startRecording E");
+    int ret;
+    Mutex::Autolock l(&mLock);
+    mReleasedRecordingFrame = false;
+    if( (ret=startPreviewInternal())== NO_ERROR){
+      if(mVpeEnabled){
+        ALOGI("startRecording: VPE enabled, setting vpe parameters");
+        status_t status = setVpeParameters();
+        if(status != NO_ERROR) {
+          ALOGE("Failed to set VPE parameters");
+          return status;
+        }
+      }
+      if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) ||
+        (mCurrentTarget == TARGET_MSM8660))  {
+        for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(mStoreMetaDataInFrame)
+            {
+                ALOGE("startRecording : meta data mode enabled");
+                metadata_memory[cnt] = mGetMemory(-1,  sizeof(struct encoder_media_buffer_type), 1, mCallbackCookie);
+                struct encoder_media_buffer_type * packet =
+                                  (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+                packet->meta_handle = native_handle_create(1, 2); //1 fd, 1 offset and 1 size
+                packet->buffer_type = kMetadataBufferTypeCameraSource;
+                native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+                nh->data[0] = mRecordfd[cnt];
+                nh->data[1] = 0;
+                nh->data[2] = mRecordFrameSize;
+            }
+        }
+        ALOGV(" in startREcording : calling start_recording");
+        native_start_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+        mRecordingState = 1;
+        // Remove the leftover frames in the busy Q and add them to the free Q.
+        // This should be done before starting video_thread so that
+        // frames from the previous recording are flushed out.
+        ALOGV("frames in busy Q = %d", g_busy_frame_queue.num_of_frames);
+        while((g_busy_frame_queue.num_of_frames) >0){
+          msm_frame* vframe = cam_frame_get_video ();
+          LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+        }
+        ALOGV("frames in busy Q = %d after deQueing", g_busy_frame_queue.num_of_frames);
+        //Clear the dangling buffers and put them in free queue
+         for(int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(record_buffers_tracking_flag[cnt] == true) {
+              ALOGI("Dangling buffer: offset = %d, buffer = %d", cnt,
+                (unsigned int)recordframes[cnt].buffer);
+              LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[cnt]);
+              record_buffers_tracking_flag[cnt] = false;
+            }
+         }
+          mVideoThreadWaitLock.lock();
+          mVideoThreadExit = 0;
+          pthread_attr_t attr;
+          pthread_attr_init(&attr);
+          pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+          mVideoThreadRunning = !pthread_create(&mVideoThread,
+                &attr,
+                video_thread,
+                NULL);
+          mVideoThreadWaitLock.unlock();
+      } else if ( mCurrentTarget == TARGET_MSM7627A ) {
+        for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+            if(mStoreMetaDataInFrame
+                && (metadata_memory[cnt] == NULL))
+            {
+                ALOGE("startRecording : meta data mode enabled filling metadata memory ");
+                metadata_memory[cnt] = mGetMemory(-1,  sizeof(struct encoder_media_buffer_type), 1, mCallbackCookie);
+                struct encoder_media_buffer_type * packet =
+                                  (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+                packet->meta_handle = native_handle_create(1, 3); //1 fd and 3 ints: offset, size and vaddr
+                packet->buffer_type = kMetadataBufferTypeCameraSource;
+                native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+                nh->data[0] = frames[cnt].fd;
+                nh->data[1] = 0;
+                nh->data[2] = previewWidth * previewHeight * 3/2;
+                nh->data[3] = (unsigned int)mPreviewMapped[cnt]->data;
+            }
+        }
+      }
+      record_flag = 1;
+    }
+    return ret;
+}
+
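+/* startRecordingInternal: in 3D mode the video thread has to be brought up
+ * as part of preview itself (see the comment in the body), so this performs
+ * the same busy-queue flush and video_thread creation as startRecording, but
+ * skips the CAMERA_OPS_VIDEO_RECORDING call when 3D mode is on.
+ */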
+status_t QualcommCameraHardware::startRecordingInternal()
+{
+    ALOGI("%s: E", __FUNCTION__);
+    mReleasedRecordingFrame = false;
+
+    /* In 3D mode, the video thread has to be started as part
+     * of preview itself, because video buffers and video callback
+     * need to be used for both display and encoding.
+     * startRecordingInternal() will be called as part of startPreview().
+     * This check is needed to support both 3D and non-3D mode.
+     */
+    if(mVideoThreadRunning) {
+        ALOGI("Video Thread is in progress");
+        return NO_ERROR;
+    }
+
+    if(mVpeEnabled){
+        ALOGI("startRecording: VPE enabled, setting vpe parameters");
+        status_t status = setVpeParameters();
+        if(status != NO_ERROR) {
+            ALOGE("Failed to set VPE parameters");
+            return status;
+        }
+    }
+    if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660))  {
+        // Remove the leftover frames in the busy Q and add them to the free Q.
+        // This should be done before starting video_thread so that
+        // frames from the previous recording are flushed out.
+        ALOGV("frames in busy Q = %d", g_busy_frame_queue.num_of_frames);
+        while((g_busy_frame_queue.num_of_frames) >0){
+            msm_frame* vframe = cam_frame_get_video ();
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+        }
+        ALOGV("frames in busy Q = %d after deQueing", g_busy_frame_queue.num_of_frames);
+
+        //Clear the dangling buffers and put them in free queue
+        for(int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(record_buffers_tracking_flag[cnt] == true) {
+                ALOGI("Dangling buffer: offset = %d, buffer = %d", cnt, (unsigned int)recordframes[cnt].buffer);
+                LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[cnt]);
+                record_buffers_tracking_flag[cnt] = false;
+            }
+        }
+
+        ALOGE(" in startREcording : calling start_recording");
+        if(!mIs3DModeOn)
+            native_start_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+
+        // Start video thread and wait for busy frames to be encoded, this thread
+        // should be closed in stopRecording
+        mVideoThreadWaitLock.lock();
+        mVideoThreadExit = 0;
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+        mVideoThreadRunning = !pthread_create(&mVideoThread,
+                                              &attr,
+                                              video_thread,
+                                              NULL);
+        mVideoThreadWaitLock.unlock();
+    }
+    ALOGV("%s: E", __FUNCTION__);
+    return NO_ERROR;
+}
+
+void QualcommCameraHardware::stopRecording()
+{
+    ALOGV("stopRecording: E");
+    record_flag = 0;
+    Mutex::Autolock l(&mLock);
+    {
+        mRecordFrameLock.lock();
+        mReleasedRecordingFrame = true;
+        mRecordWait.signal();
+        mRecordFrameLock.unlock();
+
+        if(mDataCallback && !(mCurrentTarget == TARGET_QSD8250) &&
+                         (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
+            ALOGV("stopRecording: X, preview still in progress");
+            return;
+        }
+    }
+    if (NULL != mJpegLiveSnapMapped) {
+        ALOGI("initLiveSnapshot: clearing old mJpegHeap.");
+        mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+        mJpegLiveSnapMapped = NULL;
+    }
+
+    // If output2 enabled, exit video thread, invoke stop recording ioctl
+    if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660))  {
+        /* when 3D mode is ON, don't exit the video thread, as
+         * we need to support the preview mode. Just set the recordingState
+         * to zero, so that there won't be any rcb callbacks. video thread
+         * will be terminated as part of stop preview.
+         */
+        if(mIs3DModeOn) {
+            ALOGV("%s: 3D mode on, so don't exit video thread", __FUNCTION__);
+            mRecordingState = 0;
+            return;
+        }
+
+        mVideoThreadWaitLock.lock();
+        mVideoThreadExit = 1;
+        mVideoThreadWaitLock.unlock();
+        native_stop_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+
+        pthread_mutex_lock(&(g_busy_frame_queue.mut));
+        pthread_cond_signal(&(g_busy_frame_queue.wait));
+        pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+      for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+        if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+          struct encoder_media_buffer_type * packet =
+              (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+          native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+          metadata_memory[cnt]->release(metadata_memory[cnt]);
+          metadata_memory[cnt] = NULL;
+        }
+      }
+    }
+    else if(mCurrentTarget == TARGET_MSM7627A) {
+       for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+          if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+            struct encoder_media_buffer_type * packet =
+                (struct encoder_media_buffer_type  *)metadata_memory[cnt]->data;
+            native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+            metadata_memory[cnt]->release(metadata_memory[cnt]);
+            metadata_memory[cnt] = NULL;
+          }
+        }
+    }
+#if 0
+    else  // for other targets where output2 is not enabled
+        stopPreviewInternal();
+    if (mJpegHeap != NULL) {
+        ALOGV("stopRecording: clearing old mJpegHeap.");
+        mJpegHeap.clear();
+    }
+#endif
+    mRecordingState = 0; // recording not started
+    ALOGV("stopRecording: X");
+}
+
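+/* releaseRecordingFrame: called when an upper layer returns a video frame.
+ * The frame is matched back to recordframes[] either by its metadata buffer
+ * (metadata mode) or by its virtual address; its tracking flag is cleared and
+ * it is added back to the free camframe queue while the frame thread is
+ * running.
+ */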
+void QualcommCameraHardware::releaseRecordingFrame(const void *opaque)
+{
+    ALOGE("%s : BEGIN, opaque = 0x%p",__func__, opaque);
+    Mutex::Autolock rLock(&mRecordFrameLock);
+    mReleasedRecordingFrame = true;
+    mRecordWait.signal();
+
+    // For 7x30: add the frame to the free camframe queue
+    if( (mCurrentTarget == TARGET_MSM7630 )  || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+        ssize_t offset;
+        size_t size;
+        //sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+        msm_frame* releaseframe = NULL;
+        int cnt;
+        for (cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(mStoreMetaDataInFrame){
+                if(metadata_memory[cnt] && metadata_memory[cnt]->data == opaque){
+                    ALOGV("in release recording frame(meta) found match , releasing buffer %d", (unsigned int)recordframes[cnt].buffer);
+                    releaseframe = &recordframes[cnt];
+                    break;
+                }
+            }else {
+                if(recordframes[cnt].buffer && ((unsigned long)opaque == recordframes[cnt].buffer) ){
+                    ALOGV("in release recording frame found match , releasing buffer %d", (unsigned int)recordframes[cnt].buffer);
+                    releaseframe = &recordframes[cnt];
+                    break;
+                }
+            }
+        }
+        if(cnt < kRecordBufferCount) {
+            // do this only if frame thread is running
+            mFrameThreadWaitLock.lock();
+            if(mFrameThreadRunning ) {
+                //Reset the track flag for this frame buffer
+                record_buffers_tracking_flag[cnt] = false;
+                LINK_camframe_add_frame(CAM_VIDEO_FRAME,releaseframe);
+            }
+
+            mFrameThreadWaitLock.unlock();
+        } else {
+            ALOGE("in release recordingframe XXXXX error , buffer not found");
+            for (int i=0; i< kRecordBufferCount; i++) {
+                 ALOGE(" recordframes[%d].buffer = %d", i, (unsigned int)recordframes[i].buffer);
+            }
+        }
+    }
+
+    ALOGV("releaseRecordingFrame X");
+}
+
+bool QualcommCameraHardware::recordingEnabled()
+{
+    return mCameraRunning && mDataCallbackTimestamp && (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME);
+}
+
+void QualcommCameraHardware::notifyShutter(bool mPlayShutterSoundOnly)
+{
+    private_handle_t *thumbnailHandle;
+    if(mThumbnailBuffer) {
+        thumbnailHandle = (private_handle_t *) (*mThumbnailBuffer);
+    }
+    mShutterLock.lock();
+    //image_rect_type size;
+
+    if(mPlayShutterSoundOnly) {
+        /* At this point, invoke Notify Callback to play shutter sound only.
+         * We want to call notify callback again when we have the
+         * yuv picture ready. This is to reduce blanking at the time
+         * of displaying postview frame. Using ext2 to indicate whether
+         * to play shutter sound only or register the postview buffers.
+         */
+        mNotifyCallback(CAMERA_MSG_SHUTTER, 0, mPlayShutterSoundOnly,
+                            mCallbackCookie);
+        mShutterLock.unlock();
+        return;
+    }
+
+    if (mShutterPending && mNotifyCallback && (mMsgEnabled & CAMERA_MSG_SHUTTER)) {
+        //mDisplayHeap = mThumbnailHeap;
+#if 0
+        if (crop != NULL && (crop->in1_w != 0 && crop->in1_h != 0)) {
+            size.width = crop->in1_w;
+            size.height = crop->in1_h;
+        }
+        else {
+            size.width = mPostviewWidth;
+            size.height = mPostviewHeight;
+        }
+#endif
+/*
+        if(strTexturesOn == true) {
+            mDisplayHeap = mRawHeap;
+            size.width = mPictureWidth;
+            size.height = mPictureHeight;
+        }
+*/
+        /* Now, invoke Notify Callback to unregister preview buffer
+         * and register postview buffer with surface flinger. Set ext2
+         * as 0 to indicate not to play shutter sound.
+         */
+        mNotifyCallback(CAMERA_MSG_SHUTTER, 0, 0,
+                        mCallbackCookie);
+        mShutterPending = false;
+    }
+    mShutterLock.unlock();
+}
+
+static void receive_shutter_callback(common_crop_t *crop)
+{
+    ALOGV("receive_shutter_callback: E");
+    QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        /* Just play shutter sound at this time */
+        obj->notifyShutter(TRUE);
+    }
+    ALOGV("receive_shutter_callback: X");
+}
+
+// Crop the picture in place.
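+// The cropped region is centered; x and y are rounded down to even values so
+// that the chroma (CbCr) plane stays sample-aligned with the luma plane.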
+static void crop_yuv420(uint32_t width, uint32_t height,
+                 uint32_t cropped_width, uint32_t cropped_height,
+                 uint8_t *image, const char *name)
+{
+    uint32_t i;
+    uint32_t x, y;
+    uint8_t* chroma_src, *chroma_dst;
+    int yOffsetSrc, yOffsetDst, CbCrOffsetSrc, CbCrOffsetDst;
+    int mSrcSize, mDstSize;
+
+    //TODO: check whether all fields (e.g. size) are needed and how to set the y offset.
+    //The condition below is for 7x27; verify whether it is also needed for 7x30.
+
+    LINK_jpeg_encoder_get_buffer_offset(width, height, (uint32_t *)&yOffsetSrc,
+                                       (uint32_t *)&CbCrOffsetSrc, (uint32_t *)&mSrcSize);
+
+    LINK_jpeg_encoder_get_buffer_offset(cropped_width, cropped_height, (uint32_t *)&yOffsetDst,
+                                       (uint32_t *)&CbCrOffsetDst, (uint32_t *)&mDstSize);
+
+    // Calculate the start position of the cropped area.
+    x = (width - cropped_width) / 2;
+    y = (height - cropped_height) / 2;
+    x &= ~1;
+    y &= ~1;
+
+    if((mCurrentTarget == TARGET_MSM7627)
+       || (mCurrentTarget == TARGET_MSM7625A)
+       || (mCurrentTarget == TARGET_MSM7627A)
+       || (mCurrentTarget == TARGET_MSM7630)
+       || (mCurrentTarget == TARGET_MSM8660)) {
+        if (!strcmp("snapshot camera", name)) {
+            chroma_src = image + CbCrOffsetSrc;
+            chroma_dst = image + CbCrOffsetDst;
+        } else {
+            chroma_src = image + width * height;
+            chroma_dst = image + cropped_width * cropped_height;
+            yOffsetSrc = 0;
+            yOffsetDst = 0;
+            CbCrOffsetSrc = width * height;
+            CbCrOffsetDst = cropped_width * cropped_height;
+        }
+    } else {
+       chroma_src = image + CbCrOffsetSrc;
+       chroma_dst = image + CbCrOffsetDst;
+    }
+
+    int32_t bufDst = yOffsetDst;
+    int32_t bufSrc = yOffsetSrc + (width * y) + x;
+
+    if( bufDst > bufSrc ){
+        ALOGV("crop yuv Y destination position follows source position");
+        /*
+         * If buffer destination follows buffer source, memcpy
+         * of lines will lead to overwriting subsequent lines. In order
+         * to prevent this, reverse copying of lines is performed
+         * for the set of lines where destination follows source and
+         * forward copying of lines is performed for lines where source
+         * follows destination. To calculate the position to switch,
+         * the initial difference between source and destination is taken
+         * and divided by difference between width and cropped width. For
+         * every line copied the difference between source destination
+         * drops by width - cropped width
+         */
+        //calculating inversion
+        int position = ( bufDst - bufSrc ) / (width - cropped_width);
+        // Copy luma component.
+        for(i=position+1; i < cropped_height; i++){
+            memmove(image + yOffsetDst + i * cropped_width,
+                    image + yOffsetSrc + width * (y + i) + x,
+                    cropped_width);
+        }
+        for(int j=position; j>=0; j--){
+            memmove(image + yOffsetDst + j * cropped_width,
+                    image + yOffsetSrc + width * (y + j) + x,
+                    cropped_width);
+        }
+    } else {
+        // Copy luma component.
+        for(i = 0; i < cropped_height; i++)
+            memcpy(image + yOffsetDst + i * cropped_width,
+                   image + yOffsetSrc + width * (y + i) + x,
+                   cropped_width);
+    }
+
+    // Copy chroma components.
+    cropped_height /= 2;
+    y /= 2;
+
+    bufDst = CbCrOffsetDst;
+    bufSrc = CbCrOffsetSrc + (width * y) + x;
+
+    if( bufDst > bufSrc ) {
+        ALOGV("crop yuv Chroma destination position follows source position");
+        /*
+         * Similar to y
+         */
+        int position = ( bufDst - bufSrc ) / (width - cropped_width);
+        for(i=position+1; i < cropped_height; i++){
+            memmove(chroma_dst + i * cropped_width,
+                    chroma_src + width * (y + i) + x,
+                    cropped_width);
+        }
+        for(int j=position; j >=0; j--){
+            memmove(chroma_dst + j * cropped_width,
+                    chroma_src + width * (y + j) + x,
+                    cropped_width);
+        }
+    } else {
+        for(i = 0; i < cropped_height; i++)
+            memcpy(chroma_dst + i * cropped_width,
+                   chroma_src + width * (y + i) + x,
+                   cropped_width);
+    }
+}
+// ReceiveRawPicture for ICS
+void QualcommCameraHardware::receiveRawPicture(status_t status,struct msm_frame *postviewframe, struct msm_frame *mainframe)
+{
+    ALOGE("%s: E", __FUNCTION__);
+
+    void* cropp;
+    mSnapshotThreadWaitLock.lock();
+    if(mSnapshotThreadRunning == false) {
+        ALOGE("%s called in wrong state, ignore", __FUNCTION__);
+        mSnapshotThreadWaitLock.unlock();
+        return;
+    }
+    mSnapshotThreadWaitLock.unlock();
+
+    if(status != NO_ERROR){
+        ALOGE("%s: Failed to get Snapshot Image", __FUNCTION__);
+        if(mDataCallback &&
+            (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+            /* get picture failed. Give jpeg callback with NULL data
+             * to the application to restore to preview mode
+             */
+            ALOGE("get picture failed, giving jpeg callback with NULL data");
+            mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, NULL, data_counter, NULL, mCallbackCookie);
+        }
+        mShutterLock.lock();
+        mShutterPending = false;
+        mShutterLock.unlock();
+        mJpegThreadWaitLock.lock();
+        mJpegThreadRunning = false;
+        mJpegThreadWait.signal();
+        mJpegThreadWaitLock.unlock();
+        mInSnapshotModeWaitLock.lock();
+        mInSnapshotMode = false;
+        mInSnapshotModeWait.signal();
+        mInSnapshotModeWaitLock.unlock();
+        return;
+    }
+    /* Call notifyShutter to configure the surface and overlay
+     * for postview rendering.
+     * It's necessary to issue another notifyShutter here with
+     * mPlayShutterSoundOnly as FALSE, since that is when the
+     * preview buffers are unregistered with the surface flinger.
+     * That is necessary, otherwise the preview memory won't be
+     * deallocated.
+     */
+    cropp = postviewframe->cropinfo;
+    notifyShutter(FALSE);
+
+    if(mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+        if(cropp != NULL){
+            common_crop_t *crop = (common_crop_t *)cropp;
+            if (crop->in1_w != 0 && crop->in1_h != 0) {
+                zoomCropInfo.left = (crop->out1_w - crop->in1_w + 1) / 2 - 1;
+                zoomCropInfo.top = (crop->out1_h - crop->in1_h + 1) / 2 - 1;
+                if(zoomCropInfo.left < 0) zoomCropInfo.left = 0;
+                if(zoomCropInfo.top < 0) zoomCropInfo.top = 0;
+                zoomCropInfo.right = zoomCropInfo.left + crop->in1_w;
+                zoomCropInfo.bottom = zoomCropInfo.top + crop->in1_h;
+                mPreviewWindow->set_crop(mPreviewWindow,
+                            zoomCropInfo.left,
+                            zoomCropInfo.top,
+                            zoomCropInfo.right,
+                            zoomCropInfo.bottom);
+                mResetWindowCrop = true;
+            } else {
+                zoomCropInfo.left = 0;
+                zoomCropInfo.top = 0;
+                zoomCropInfo.right = mPostviewWidth;
+                zoomCropInfo.bottom = mPostviewHeight;
+                mPreviewWindow->set_crop(mPreviewWindow,
+                                 zoomCropInfo.left,
+                                 zoomCropInfo.top,
+                                 zoomCropInfo.right,
+                                 zoomCropInfo.bottom);
+            }
+        }
+        ALOGE("receiverawpicture : display lock");
+        mDisplayLock.lock();
+        int index = mapThumbnailBuffer(postviewframe);
+        ALOGE("receiveRawPicture : mapThumbnailBuffer returned %d", index);
+        private_handle_t *handle;
+        if(mThumbnailBuffer[index] != NULL && mZslEnable == false) {
+            handle = (private_handle_t *)(*mThumbnailBuffer[index]);
+            ALOGV("%s: Queueing postview buffer for display %d",
+                                           __FUNCTION__,handle->fd);
+            if (BUFFER_LOCKED == mThumbnailLockState[index]) {
+                if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+                    ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                    mDisplayLock.unlock();
+                    return;
+                } else {
+                     mThumbnailLockState[index] = BUFFER_UNLOCKED;
+                }
+            }
+            status_t retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+                                                         mThumbnailBuffer[index]);
+            ALOGE(" enQ thumbnailbuffer");
+            if( retVal != NO_ERROR) {
+                ALOGE("%s: Queuebuffer failed for postview buffer", __FUNCTION__);
+            }
+
+        }
+        mDisplayLock.unlock();
+        ALOGE("receiverawpicture : display unlock");
+        /* Give the main Image as raw to upper layers */
+        //Either CAMERA_MSG_RAW_IMAGE or CAMERA_MSG_RAW_IMAGE_NOTIFY will be set not both
+        if (mDataCallback && (mMsgEnabled & CAMERA_MSG_RAW_IMAGE))
+            mDataCallback(CAMERA_MSG_RAW_IMAGE, mRawMapped[index],data_counter,
+                          NULL, mCallbackCookie);
+        else if (mNotifyCallback && (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY))
+            mNotifyCallback(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0,
+                            mCallbackCookie);
+
+        if(strTexturesOn == true) {
+            ALOGI("Raw Data given to app for processing...will wait for jpeg encode call");
+            mEncodePending = true;
+            mEncodePendingWaitLock.unlock();
+            mJpegThreadWaitLock.lock();
+            mJpegThreadWait.signal();
+            mJpegThreadWaitLock.unlock();
+        }
+    } else {  // Not a JPEG snapshot, it is a raw snapshot; handled here
+            ALOGV("receiveRawPicture: raw snapshot, not JPEG, sending callback up");
+             if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE))
+                mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
+                                       mRawSnapshotMapped,
+                                             data_counter,
+                                                     NULL,
+                                         mCallbackCookie);
+
+              // TEMP
+             ALOGE("receiveRawPicture : gave raw frame to app, giving signal");
+              mJpegThreadWaitLock.lock();
+              mJpegThreadRunning = false;
+              mJpegThreadWait.signal();
+              mJpegThreadWaitLock.unlock();
+
+    }
+    /* can start preview at this stage? early preview? */
+    mInSnapshotModeWaitLock.lock();
+    mInSnapshotMode = false;
+    mInSnapshotModeWait.signal();
+    mInSnapshotModeWaitLock.unlock();
+
+    ALOGV("%s: X", __FUNCTION__);
+
+}
+
+
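+/* receiveJpegPicture: delivers one encoded JPEG image to the application.
+ * The encoded buffer is mapped via mapJpegBuffer, copied into a freshly
+ * allocated camera memory of exactly filled_size bytes and passed up through
+ * CAMERA_MSG_COMPRESSED_IMAGE; the JPEG thread is signalled once all
+ * numCapture images have been received, or immediately on error.
+ */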
+void QualcommCameraHardware::receiveJpegPicture(status_t status, mm_camera_buffer_t *encoded_buffer)
+{
+    Mutex::Autolock cbLock(&mCallbackLock);
+    numJpegReceived++;
+    uint32_t offset ;
+    int32_t index = -1;
+    int32_t buffer_size = 0;
+    if(encoded_buffer && status == NO_ERROR) {
+      buffer_size = encoded_buffer->filled_size;
+      ALOGV("receiveJpegPicture: E buffer_size %d mJpegMaxSize = %d",buffer_size, mJpegMaxSize);
+
+        index = mapJpegBuffer(encoded_buffer);
+        ALOGE("receiveJpegPicutre : mapJpegBuffer index : %d", index);
+    }
+    if((index < 0) || (index >= (MAX_SNAPSHOT_BUFFERS-2))){
+        ALOGE("Jpeg index is not valid or fails. ");
+        if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+          mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, NULL, data_counter, NULL, mCallbackCookie);
+        }
+        mJpegThreadWaitLock.lock();
+        mJpegThreadRunning = false;
+        mJpegThreadWait.signal();
+        mJpegThreadWaitLock.unlock();
+    } else {
+      ALOGV("receiveJpegPicture: Index of Jpeg is %d",index);
+
+      if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+          if(status == NO_ERROR) {
+            ALOGE("receiveJpegPicture : giving jpeg image callback to services");
+            mJpegCopyMapped = mGetMemory(-1, encoded_buffer->filled_size, 1, mCallbackCookie);
+            if(!mJpegCopyMapped){
+              ALOGE("%s: mGetMemory failed.\n", __func__);
+            } else {
+              memcpy(mJpegCopyMapped->data, mJpegMapped[index]->data, encoded_buffer->filled_size);
+              mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mJpegCopyMapped, data_counter, NULL, mCallbackCookie);
+              mJpegCopyMapped->release(mJpegCopyMapped);
+              mJpegCopyMapped = NULL;
+            }
+          }
+      } else {
+        ALOGE("JPEG callback was cancelled--not delivering image.");
+      }
+      if(numJpegReceived == numCapture){
+          mJpegThreadWaitLock.lock();
+          mJpegThreadRunning = false;
+          mJpegThreadWait.signal();
+          mJpegThreadWaitLock.unlock();
+      }
+    }
+
+    ALOGV("receiveJpegPicture: X callback done.");
+}
+bool QualcommCameraHardware::previewEnabled()
+{
+    /* If overlay is used the message CAMERA_MSG_PREVIEW_FRAME would
+     * be disabled at CameraService layer. Hence previewEnabled would
+     * return FALSE even though preview is running. Hence check for
+     * mOverlay not being NULL to ensure that previewEnabled returns
+     * accurate information.
+     */
+
+//    return mCameraRunning && mDataCallback &&
+//           ((mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) || (mOverlay != NULL));
+    ALOGE(" : mCameraRunning : %d mPreviewWindow = %x",mCameraRunning,mPreviewWindow);
+    return mCameraRunning;// || (mPreviewWindow != NULL);
+}
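+/* setRecordSize: picks the video (record) dimensions from the video-size
+ * parameter, falling back to the preview dimensions when it is not set.
+ * Preview is never allowed to be larger than video (VFE output1 must not
+ * exceed output2), single-VFE targets reuse the record size for preview,
+ * and in 3D mode the preview and video sizes are forced to match.
+ */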
+status_t QualcommCameraHardware::setRecordSize(const QCameraParameters& params)
+{
+    const char *recordSize = NULL;
+    recordSize = params.get(QCameraParameters::KEY_VIDEO_SIZE);
+    if(!recordSize) {
+        mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+        //If application didn't set this parameter string, use the values from
+        //getPreviewSize() as video dimensions.
+        ALOGV("No Record Size requested, use the preview dimensions");
+        videoWidth = previewWidth;
+        videoHeight = previewHeight;
+    } else {
+        //Extract the record width and height that the application requested.
+        ALOGI("%s: requested record size %s", __FUNCTION__, recordSize);
+        if(!parse_size(recordSize, videoWidth, videoHeight)) {
+            mParameters.set(QCameraParameters::KEY_VIDEO_SIZE , recordSize);
+            //VFE output1 shouldn't be greater than VFE output2.
+            if( (previewWidth > videoWidth) || (previewHeight > videoHeight)) {
+                //Set preview sizes as record sizes.
+                ALOGI("Preview size %dx%d is greater than record size %dx%d,\
+                   resetting preview size to record size",previewWidth,\
+                     previewHeight, videoWidth, videoHeight);
+                previewWidth = videoWidth;
+                previewHeight = videoHeight;
+                mParameters.setPreviewSize(previewWidth, previewHeight);
+            }
+            if( (mCurrentTarget != TARGET_MSM7630)
+                && (mCurrentTarget != TARGET_QSD8250)
+                 && (mCurrentTarget != TARGET_MSM8660) ) {
+                //For Single VFE output targets, use record dimensions as preview dimensions.
+                previewWidth = videoWidth;
+                previewHeight = videoHeight;
+                mParameters.setPreviewSize(previewWidth, previewHeight);
+            }
+            if(mIs3DModeOn == true) {
+                /* As preview and video frames are the same in 3D mode,
+                 * the preview size should be the same as the video size. This
+                 * change is needed to take care of video resolutions
+                 * like 720p and 1080p, where the application can
+                 * request different preview sizes like 768x432.
+                 */
+                previewWidth = videoWidth;
+                previewHeight = videoHeight;
+                mParameters.setPreviewSize(previewWidth, previewHeight);
+            }
+        } else {
+            mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+            ALOGE("initPreview X: failed to parse parameter record-size (%s)", recordSize);
+            return BAD_VALUE;
+        }
+    }
+    ALOGI("%s: preview dimensions: %dx%d", __FUNCTION__, previewWidth, previewHeight);
+    ALOGI("%s: video dimensions: %dx%d", __FUNCTION__, videoWidth, videoHeight);
+    mDimension.display_width = previewWidth;
+    mDimension.display_height= previewHeight;
+    return NO_ERROR;
+}
+
+status_t  QualcommCameraHardware::setCameraMode(const QCameraParameters& params) {
+    int32_t value = params.getInt(QCameraParameters::KEY_CAMERA_MODE);
+    mParameters.set(QCameraParameters::KEY_CAMERA_MODE,value);
+
+    ALOGI("ZSL is enabled  %d", value);
+    if( value != mZslEnable) {
+        mFrameThreadWaitLock.lock();
+        while (mFrameThreadRunning) {
+          ALOGI("initPreview: waiting for old frame thread to complete.");
+          mFrameThreadWait.wait(mFrameThreadWaitLock);
+          ALOGI("initPreview: old frame thread completed.");
+        }
+        mFrameThreadWaitLock.unlock();
+    }
+    if(value == 1) {
+        mZslEnable = true;
+       /* mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                       QCameraParameters::FOCUS_MODE_INFINITY);
+        mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                       QCameraParameters::FOCUS_MODE_INFINITY);*/
+    }else{
+        mZslEnable = false;
+        /*mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                    focus_mode_values);
+        mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                    QCameraParameters::FOCUS_MODE_AUTO);*/
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setPreviewSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPreviewSize(&width, &height);
+    ALOGV("requested preview size %d x %d", width, height);
+
+    // Validate the preview size
+    for (size_t i = 0; i <  PREVIEW_SIZE_COUNT; ++i) {
+        if (width ==  preview_sizes[i].width
+           && height ==  preview_sizes[i].height) {
+            mParameters.setPreviewSize(width, height);
+            //previewWidth = width;
+            //previewHeight = height;
+            mDimension.display_width = width;
+            mDimension.display_height= height;
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid preview size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+status_t QualcommCameraHardware::setPreviewFpsRange(const QCameraParameters& params)
+{
+    int minFps,maxFps;
+    params.getPreviewFpsRange(&minFps,&maxFps);
+    ALOGE("FPS Range Values: %dx%d", minFps, maxFps);
+
+    for(size_t i=0;i<FPS_RANGES_SUPPORTED_COUNT;i++)
+    {
+        if(minFps==FpsRangesSupported[i].minFPS && maxFps == FpsRangesSupported[i].maxFPS){
+            mParameters.setPreviewFpsRange(minFps,maxFps);
+            return NO_ERROR;
+        }
+    }
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setPreviewFrameRate(const QCameraParameters& params)
+{
+    if( !mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS)){
+         ALOGI("Set fps is not supported for this sensor");
+        return NO_ERROR;
+    }
+    uint16_t previousFps = (uint16_t)mParameters.getPreviewFrameRate();
+    uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+    ALOGV("requested preview frame rate  is %u", fps);
+
+    if(mInitialized && (fps == previousFps)){
+        ALOGV("fps same as previous fps");
+        return NO_ERROR;
+    }
+
+    if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+        mParameters.setPreviewFrameRate(fps);
+        bool ret = native_set_parms(CAMERA_PARM_FPS,
+                sizeof(fps), (void *)&fps);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setPreviewFrameRateMode(const QCameraParameters& params) {
+    if( !mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS_MODE) &&  !mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS)){
+         ALOGI("set fps mode is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *previousMode = mParameters.getPreviewFrameRateMode();
+    const char *str = params.getPreviewFrameRateMode();
+    if( mInitialized && !strcmp(previousMode, str)) {
+        ALOGV("frame rate mode same as previous mode %s", previousMode);
+        return NO_ERROR;
+    }
+    int32_t frameRateMode = attr_lookup(frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map),str);
+    if(frameRateMode != NOT_FOUND) {
+        ALOGV("setPreviewFrameRateMode: %s ", str);
+        mParameters.setPreviewFrameRateMode(str);
+        bool ret = native_set_parms(CAMERA_PARM_FPS_MODE, sizeof(frameRateMode), (void *)&frameRateMode);
+        if(!ret) return ret;
+        //set the fps value when changing modes
+        uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+        if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+            mParameters.setPreviewFrameRate(fps);
+            ret = native_set_parms(CAMERA_PARM_FPS,
+                                        sizeof(fps), (void *)&fps);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        ALOGE("Invalid preview frame rate value: %d", fps);
+        return BAD_VALUE;
+    }
+    ALOGE("Invalid preview frame rate mode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setJpegThumbnailSize(const QCameraParameters& params){
+    int width = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    int height = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+    ALOGV("requested jpeg thumbnail size %d x %d", width, height);
+
+    // Validate the picture size
+    for (unsigned int i = 0; i < JPEG_THUMBNAIL_SIZE_COUNT; ++i) {
+       if (width == jpeg_thumbnail_sizes[i].width
+         && height == jpeg_thumbnail_sizes[i].height) {
+           mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+           mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+           return NO_ERROR;
+       }
+    }
+    return BAD_VALUE;
+}
+
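+/* updatePictureDimension: when the requested picture size is smaller than
+ * the preview size, the snapshot is captured at a larger intermediate size
+ * and JPEG downscaling is used to reach the requested size
+ * (mUseJpegDownScaling). The intermediate size is scaled up whenever the
+ * requested width is below previewWidth/8, since downscaling beyond that
+ * ratio is not supported (per the comment in the body).
+ */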
+bool QualcommCameraHardware::updatePictureDimension(const QCameraParameters& params, int& width, int& height)
+{
+    bool retval = false;
+    int previewWidth, previewHeight;
+    params.getPreviewSize(&previewWidth, &previewHeight);
+    ALOGV("updatePictureDimension: %dx%d <- %dx%d", width, height,
+      previewWidth, previewHeight);
+    if ((width < previewWidth) && (height < previewHeight)) {
+    /* As we do not support jpeg downscaling for picture dimension < preview dimension/8,
+       add support for the same for CTS testcases */
+      mActualPictWidth = width;
+      mActualPictHeight = height;
+      if((previewWidth / 8) > width) {
+        int ratio = previewWidth / width;
+        int i;
+        for(i = 0; i < ratio; i++) {
+          if((ratio >> i) < 8)
+            break;
+        }
+        width = width * i * 2;
+        height = height * i * 2;
+      } else {
+        width = previewWidth;
+        height = previewHeight;
+      }
+      mUseJpegDownScaling = true;
+      retval = true;
+    } else
+        mUseJpegDownScaling = false;
+    return retval;
+}
+
+status_t QualcommCameraHardware::setPictureSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPictureSize(&width, &height);
+    ALOGV("requested picture size %d x %d", width, height);
+
+    // Validate the picture size
+    for (int i = 0; i < supportedPictureSizesCount; ++i) {
+        if (width == picture_sizes_ptr[i].width
+          && height == picture_sizes_ptr[i].height) {
+            mParameters.setPictureSize(width, height);
+            mDimension.picture_width = width;
+            mDimension.picture_height = height;
+            return NO_ERROR;
+        }
+    }
+    /* Dimension not among the ones in the list. Check if
+     * its a valid dimension, if it is, then configure the
+     * camera accordingly. else reject it.
+     */
+    if( isValidDimension(width, height) ) {
+        mParameters.setPictureSize(width, height);
+        mDimension.picture_width = width;
+        mDimension.picture_height = height;
+        return NO_ERROR;
+    } else
+        ALOGE("Invalid picture size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setJpegQuality(const QCameraParameters& params) {
+    status_t rc = NO_ERROR;
+    int quality = params.getInt(QCameraParameters::KEY_JPEG_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, quality);
+    } else {
+        ALOGE("Invalid jpeg quality=%d", quality);
+        rc = BAD_VALUE;
+    }
+
+    quality = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, quality);
+    } else {
+        ALOGE("Invalid jpeg thumbnail quality=%d", quality);
+        rc = BAD_VALUE;
+    }
+    return rc;
+}
+
+status_t QualcommCameraHardware::setEffect(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_EFFECT);
+    int result;
+
+    if (str != NULL) {
+        int32_t value = attr_lookup(effects, sizeof(effects) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+           if( !mCfgControl.mm_camera_is_parm_supported(CAMERA_PARM_EFFECT, (void *) &value)){
+               ALOGE("Camera Effect - %s mode is not supported for this sensor",str);
+               return NO_ERROR;
+           }else {
+               mParameters.set(QCameraParameters::KEY_EFFECT, str);
+               bool ret = native_set_parms(CAMERA_PARM_EFFECT, sizeof(value),
+                                           (void *)&value,(int *)&result);
+                if(result == MM_CAMERA_ERR_INVALID_OPERATION) {
+                    ALOGI("Camera Effect: %s is not set as the selected value is not supported ", str);
+                }
+               return ret ? NO_ERROR : UNKNOWN_ERROR;
+          }
+        }
+    }
+    ALOGE("Invalid effect value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setRecordingHint(const QCameraParameters& params)
+{
+
+  const char * str = params.get(QCameraParameters::KEY_RECORDING_HINT);
+
+  if(str != NULL){
+      int32_t value = attr_lookup(recording_Hints,
+                                  sizeof(recording_Hints) / sizeof(str_map), str);
+      if(value != NOT_FOUND){
+
+        native_set_parms(CAMERA_PARM_RECORDING_HINT, sizeof(value),
+                                               (void *)&value);
+        /*native_set_parms(CAMERA_PARM_CAF_ENABLE, sizeof(value),
+                                               (void *)&value);*/
+        mParameters.set(QCameraParameters::KEY_RECORDING_HINT, str);
+      } else {
+          ALOGE("Invalid Picture Format value: %s", str);
+          return BAD_VALUE;
+      }
+  }
+  return NO_ERROR;
+}
+
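+/* setExposureCompensation: the EV value is sent to the driver as a packed
+ * 32-bit word, numerator in the high 16 bits and the fixed denominator in
+ * the low 16 bits, i.e. value = (numerator16 << 16) | denominator16. For
+ * example, a numerator of 3 with a denominator of 6 would pack to 0x00030006.
+ */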
+status_t QualcommCameraHardware::setExposureCompensation(
+        const QCameraParameters & params){
+    ALOGE("DEBBUG: %s E",__FUNCTION__);
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_EXPOSURE_COMPENSATION)) {
+        ALOGI("Exposure Compensation is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    int numerator = params.getInt(QCameraParameters::KEY_EXPOSURE_COMPENSATION);
+    if(EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR <= numerator &&
+            numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+        int16_t  numerator16 = (int16_t)(numerator & 0x0000ffff);
+        uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+        uint32_t  value = 0;
+        value = numerator16 << 16 | denominator16;
+
+        mParameters.set(QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+                            numerator);
+       bool ret = native_set_parms(CAMERA_PARM_EXPOSURE_COMPENSATION,
+                                    sizeof(value), (void *)&value);
+       ALOGE("DEBBUG: %s ret = %d X",__FUNCTION__, ret);
+       return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGE("Invalid Exposure Compensation");
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setAutoExposure(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_EXPOSURE)) {
+        ALOGI("Auto Exposure not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_AUTO_EXPOSURE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(autoexposure, sizeof(autoexposure) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE, str);
+            bool ret = native_set_parms(CAMERA_PARM_EXPOSURE, sizeof(value),
+                                       (void *)&value);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid auto exposure value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setSharpness(const QCameraParameters& params)
+{
+     if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_SHARPNESS)) {
+        ALOGI("Sharpness not supported for this sensor");
+        return NO_ERROR;
+    }
+    int sharpness = params.getInt(QCameraParameters::KEY_SHARPNESS);
+    if((sharpness < CAMERA_MIN_SHARPNESS
+            || sharpness > CAMERA_MAX_SHARPNESS))
+        return UNKNOWN_ERROR;
+
+    ALOGV("setting sharpness %d", sharpness);
+    mParameters.set(QCameraParameters::KEY_SHARPNESS, sharpness);
+    bool ret = native_set_parms(CAMERA_PARM_SHARPNESS, sizeof(sharpness),
+                               (void *)&sharpness);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+status_t QualcommCameraHardware::setContrast(const QCameraParameters& params)
+{
+     if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_CONTRAST)) {
+        ALOGI("Contrast not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+    int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+    if(value == CAMERA_BESTSHOT_OFF) {
+        int contrast = params.getInt(QCameraParameters::KEY_CONTRAST);
+        if((contrast < CAMERA_MIN_CONTRAST)
+                || (contrast > CAMERA_MAX_CONTRAST))
+            return UNKNOWN_ERROR;
+
+        ALOGV("setting contrast %d", contrast);
+        mParameters.set(QCameraParameters::KEY_CONTRAST, contrast);
+        bool ret = native_set_parms(CAMERA_PARM_CONTRAST, sizeof(contrast),
+                                   (void *)&contrast);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    } else {
+        ALOGI("Contrast value will not be set " \
+              "when the scenemode selected is %s", str);
+        return NO_ERROR;
+    }
+}
+
+status_t QualcommCameraHardware::setSaturation(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_SATURATION)) {
+        ALOGI("Saturation not supported for this sensor");
+        return NO_ERROR;
+    }
+    int result;
+    int saturation = params.getInt(QCameraParameters::KEY_SATURATION);
+
+    if((saturation < CAMERA_MIN_SATURATION)
+        || (saturation > CAMERA_MAX_SATURATION))
+        return UNKNOWN_ERROR;
+
+    ALOGV("Setting saturation %d", saturation);
+    mParameters.set(QCameraParameters::KEY_SATURATION, saturation);
+    bool ret = native_set_parms(CAMERA_PARM_SATURATION, sizeof(saturation),
+        (void *)&saturation, (int *)&result);
+    if(result == MM_CAMERA_ERR_INVALID_OPERATION)
+        ALOGI("Saturation Value: %d is not set as the selected value is not supported", saturation);
+
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+status_t QualcommCameraHardware::setPreviewFormat(const QCameraParameters& params) {
+    const char *str = params.getPreviewFormat();
+    int32_t previewFormat = attr_lookup(preview_formats, sizeof(preview_formats) / sizeof(str_map), str);
+    if(previewFormat != NOT_FOUND) {
+        mParameters.set(QCameraParameters::KEY_PREVIEW_FORMAT, str);
+        mPreviewFormat = previewFormat;
+        if(HAL_currentCameraMode != CAMERA_MODE_3D) {
+            ALOGI("Setting preview format to native");
+            bool ret = native_set_parms(CAMERA_PARM_PREVIEW_FORMAT, sizeof(previewFormat),
+                                       (void *)&previewFormat);
+        }else{
+            ALOGI("Skipping set preview format call to native");
+        }
+        return NO_ERROR;
+    }
+    ALOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setStrTextures(const QCameraParameters& params) {
+    const char *str = params.get("strtextures");
+    if(str != NULL) {
+        ALOGV("strtextures = %s", str);
+        mParameters.set("strtextures", str);
+        if(!strncmp(str, "on", 2) || !strncmp(str, "ON", 2)) {
+            strTexturesOn = true;
+        } else if (!strncmp(str, "off", 3) || !strncmp(str, "OFF", 3)) {
+            strTexturesOn = false;
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setBrightness(const QCameraParameters& params) {
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_BRIGHTNESS)) {
+        ALOGI("Set Brightness not supported for this sensor");
+        return NO_ERROR;
+    }
+    int brightness = params.getInt("luma-adaptation");
+    if (mBrightness !=  brightness) {
+        ALOGV(" new brightness value : %d ", brightness);
+        mBrightness =  brightness;
+        mParameters.set("luma-adaptation", brightness);
+    bool ret = native_set_parms(CAMERA_PARM_BRIGHTNESS, sizeof(mBrightness),
+                                   (void *)&mBrightness);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setSkinToneEnhancement(const QCameraParameters& params) {
+     if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_SCE_FACTOR)) {
+        ALOGI("SkinToneEnhancement not supported for this sensor");
+        return NO_ERROR;
+     }
+     int skinToneValue = params.getInt("skinToneEnhancement");
+     if (mSkinToneEnhancement != skinToneValue) {
+          ALOGV(" new skinTone correction value : %d ", skinToneValue);
+          mSkinToneEnhancement = skinToneValue;
+          mParameters.set("skinToneEnhancement", skinToneValue);
+          bool ret = native_set_parms(CAMERA_PARM_SCE_FACTOR, sizeof(mSkinToneEnhancement),
+                        (void *)&mSkinToneEnhancement);
+          return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setWhiteBalance(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_WHITE_BALANCE)) {
+        ALOGI("WhiteBalance not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    int result;
+
+    const char *str = params.get(QCameraParameters::KEY_WHITE_BALANCE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(whitebalance, sizeof(whitebalance) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_WHITE_BALANCE, str);
+            bool ret = native_set_parms(CAMERA_PARM_WHITE_BALANCE, sizeof(value),
+                                       (void *)&value, (int *)&result);
+            if(result == MM_CAMERA_ERR_INVALID_OPERATION) {
+                ALOGI("WhiteBalance Value: %s is not set as the selected value is not supported ", str);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+        ALOGE("Invalid whitebalance value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+
+}
+
+status_t QualcommCameraHardware::setFlash(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_LED_MODE)) {
+        ALOGI("%s: flash not supported", __FUNCTION__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_FLASH_MODE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(flash, sizeof(flash) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_FLASH_MODE, str);
+            bool ret = native_set_parms(CAMERA_PARM_LED_MODE,
+                                       sizeof(value), (void *)&value);
+            if(mZslEnable && (value != LED_MODE_OFF)){
+                    mParameters.set("num-snaps-per-shutter", "1");
+                    ALOGI("%s Setting num-snaps-per-shutter to 1", __FUNCTION__);
+                    numCapture = 1;
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid flash mode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setAntibanding(const QCameraParameters& params)
+{
+    int result;
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ANTIBANDING)) {
+        ALOGI("Parameter AntiBanding is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_ANTIBANDING);
+    if (str != NULL) {
+        int value = (camera_antibanding_type)attr_lookup(
+          antibanding, sizeof(antibanding) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            camera_antibanding_type temp = (camera_antibanding_type) value;
+            mParameters.set(QCameraParameters::KEY_ANTIBANDING, str);
+            bool ret = native_set_parms(CAMERA_PARM_ANTIBANDING,
+                       sizeof(camera_antibanding_type), (void *)&temp ,(int *)&result);
+            if(result == MM_CAMERA_ERR_INVALID_OPERATION) {
+                ALOGI("AntiBanding Value: %s is not supported for the given BestShot Mode", str);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid antibanding value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setMCEValue(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_MCE)) {
+        ALOGI("Parameter MCE is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT);
+    if (str != NULL) {
+        int value = attr_lookup(mce, sizeof(mce) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            ALOGI("%s: setting MCE value of %s", __FUNCTION__, str);
+            mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT, str);
+
+            native_set_parms(CAMERA_PARM_MCE, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid MCE value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setHighFrameRate(const QCameraParameters& params)
+{
+    if((!mCfgControl.mm_camera_is_supported(CAMERA_PARM_HFR)) || (mIs3DModeOn)) {
+        ALOGI("Parameter HFR is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if (str != NULL) {
+        int value = attr_lookup(hfr, sizeof(hfr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int32_t temp = (int32_t)value;
+            ALOGI("%s: setting HFR value of %s(%d)", __FUNCTION__, str, temp);
+            //Check for change in HFR value
+            const char *oldHfr = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+            if((oldHfr == NULL) || strcmp(oldHfr, str)){
+                ALOGI("%s: old HFR: %s, new HFR %s", __FUNCTION__, oldHfr, str);
+                mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE, str);
+                mHFRMode = true;
+                if(mCameraRunning == true) {
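+                    // Preview is already running: the new HFR setting is handed to a
+                    // detached helper thread (hfr_thread) rather than being applied
+                    // inline with native_set_parms here.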
+                    mHFRThreadWaitLock.lock();
+                    pthread_attr_t pattr;
+                    pthread_attr_init(&pattr);
+                    pthread_attr_setdetachstate(&pattr, PTHREAD_CREATE_DETACHED);
+                    mHFRThreadRunning = !pthread_create(&mHFRThread,
+                                      &pattr,
+                                      hfr_thread,
+                                      (void*)NULL);
+                    mHFRThreadWaitLock.unlock();
+                    return NO_ERROR;
+                }
+            }
+            native_set_parms(CAMERA_PARM_HFR, sizeof(int32_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid HFR value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setHDRImaging(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_HDR) && mZslEnable) {
+        ALOGI("Parameter HDR is not supported for this sensor/ ZSL mode");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING);
+    if (str != NULL) {
+        int value = attr_lookup(hdr, sizeof(hdr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            exp_bracketing_t temp;
+            memset(&temp, 0, sizeof(temp));
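+            // HDR is programmed through the exposure-bracketing interface: a
+            // 3-frame bracket in HDR_MODE, with numCapture following
+            // total_hal_frames while HDR is enabled.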
+            temp.hdr_enable= (int32_t)value;
+            temp.mode = HDR_MODE;
+            temp.total_frames = 3;
+            temp.total_hal_frames = HDR_HAL_FRAME;
+            mHdrMode = temp.hdr_enable;
+            ALOGI("%s: setting HDR value of %s", __FUNCTION__, str);
+            mParameters.set(QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING, str);
+            if(mHdrMode){
+                numCapture = temp.total_hal_frames;
+            } else
+                numCapture = 1;
+            native_set_parms(CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid HDR value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setExpBracketing(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_HDR) && mZslEnable) {
+        ALOGI("Parameter Exposure Bracketing is not supported for this sensor/ZSL mode");
+        return NO_ERROR;
+    }
+    const char *str = params.get("capture-burst-exposures");
+    if ((str != NULL) && (!mHdrMode)) {
+        char  exp_val[MAX_EXP_BRACKETING_LENGTH];
+        exp_bracketing_t temp;
+        memset(&temp, 0, sizeof(temp));
+
+        mExpBracketMode = true;
+        temp.mode = EXP_BRACKETING_MODE;
+        temp.hdr_enable = true;
+        /* The app passes exposure values separated by commas,
+           so the total number of snapshots to capture is strlen(str)/2,
+           e.g. "-1,1,2" */
+        strlcpy(exp_val, str, sizeof(exp_val));
+        temp.total_frames = (strlen(exp_val) >  MAX_SNAPSHOT_BUFFERS -2) ?
+            MAX_SNAPSHOT_BUFFERS -2 : strlen(exp_val);
+        temp.total_hal_frames = temp.total_frames;
+        strlcpy(temp.values, exp_val, MAX_EXP_BRACKETING_LENGTH);
+        ALOGI("%s: setting Exposure Bracketing value of %s", __FUNCTION__, temp.values);
+        mParameters.set("capture-burst-exposures", str);
+        if(!mZslEnable){
+            numCapture = temp.total_frames;
+        }
+        native_set_parms(CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+        return NO_ERROR;
+    } else
+        mExpBracketMode = false;
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setLensshadeValue(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ROLLOFF)) {
+        ALOGI("Parameter Rolloff is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_LENSSHADE);
+    if (str != NULL) {
+        int value = attr_lookup(lensshade,
+                                    sizeof(lensshade) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            mParameters.set(QCameraParameters::KEY_LENSSHADE, str);
+
+            native_set_parms(CAMERA_PARM_ROLLOFF, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid lensShade value: %s", (str == NULL) ? "NULL" : str);
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setSelectableZoneAf(const QCameraParameters& params)
+{
+    if(mHasAutoFocusSupport && supportsSelectableZoneAf()) {
+        const char *str = params.get(QCameraParameters::KEY_SELECTABLE_ZONE_AF);
+        if (str != NULL) {
+            int32_t value = attr_lookup(selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map), str);
+            if (value != NOT_FOUND) {
+                mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF, str);
+                bool ret = native_set_parms(CAMERA_PARM_FOCUS_RECT, sizeof(value),
+                        (void *)&value);
+                return ret ? NO_ERROR : UNKNOWN_ERROR;
+            }
+        }
+        ALOGE("Invalid selectable zone af value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setTouchAfAec(const QCameraParameters& params)
+{
+    ALOGE("%s",__func__);
+    if(mHasAutoFocusSupport){
+        int xAec, yAec, xAf, yAf;
+        int cx, cy;
+        int width, height;
+        params.getMeteringAreaCenter(&cx, &cy);
+        mParameters.getPreviewSize(&width, &height);
+
+        // @Punit
+        // The coordinates sent from the upper layer are in the range
+        // (-1000, -1000) to (1000, 1000), so they are transformed here to the
+        // range (0, 0) to (previewWidth, previewHeight).
+        cx = cx + 1000;
+        cy = cy + 1000;
+        cx = cx * (width / 2000.0f);
+        cy = cy * (height / 2000.0f);
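+        // For example, cx = -1000 maps to 0, cx = 0 maps to width/2, and
+        // cx = 1000 maps to width (and likewise for cy against height).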
+
+        //Negative values are invalid and do not update anything
+        ALOGE("Touch Area Center (cx, cy) = (%d, %d)", cx, cy);
+
+        //Currently using same values for AF and AEC
+        xAec = cx; yAec = cy;
+        xAf = cx; yAf = cy;
+
+        const char *str = params.get(QCameraParameters::KEY_TOUCH_AF_AEC);
+        if (str != NULL) {
+            int value = attr_lookup(touchafaec,
+                    sizeof(touchafaec) / sizeof(str_map), str);
+            if (value != NOT_FOUND) {
+
+                //Dx,Dy are the same as defined in res/layout/camera.xml and are
+                //passed down to the HAL as a key/value pair.
+                int FOCUS_RECTANGLE_DX = params.getInt("touchAfAec-dx");
+                int FOCUS_RECTANGLE_DY = params.getInt("touchAfAec-dy");
+                mParameters.set(QCameraParameters::KEY_TOUCH_AF_AEC, str);
+                mParameters.setTouchIndexAec(xAec, yAec);
+                mParameters.setTouchIndexAf(xAf, yAf);
+
+                cam_set_aec_roi_t aec_roi_value;
+                roi_info_t af_roi_value;
+
+                memset(&af_roi_value, 0, sizeof(roi_info_t));
+
+                //If touch AF/AEC is enabled and a touch event has occurred,
+                //call the ioctl with valid values.
+                if (value == true
+                        && (xAec >= 0 && yAec >= 0)
+                        && (xAf >= 0 && yAf >= 0)) {
+                    //Set Touch AEC params (Pass the center co-ordinate)
+                    aec_roi_value.aec_roi_enable = AEC_ROI_ON;
+                    aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+                    aec_roi_value.aec_roi_position.coordinate.x = xAec;
+                    aec_roi_value.aec_roi_position.coordinate.y = yAec;
+
+                    //Set Touch AF params (Pass the top left co-ordinate)
+                    af_roi_value.num_roi = 1;
+                    if ((xAf-(FOCUS_RECTANGLE_DX/2)) < 0)
+                        af_roi_value.roi[0].x = 1;
+                    else
+                        af_roi_value.roi[0].x = xAf - (FOCUS_RECTANGLE_DX/2);
+
+                    if ((yAf-(FOCUS_RECTANGLE_DY/2)) < 0)
+                        af_roi_value.roi[0].y = 1;
+                    else
+                        af_roi_value.roi[0].y = yAf - (FOCUS_RECTANGLE_DY/2);
+
+                    af_roi_value.roi[0].dx = FOCUS_RECTANGLE_DX;
+                    af_roi_value.roi[0].dy = FOCUS_RECTANGLE_DY;
+                    af_roi_value.is_multiwindow = mMultiTouch;
+                    native_set_parms(CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value);
+                    native_set_parms(CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value);
+                }
+                else if(value == false) {
+                    //Set Touch AEC params
+                    aec_roi_value.aec_roi_enable = AEC_ROI_OFF;
+                    aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+                    aec_roi_value.aec_roi_position.coordinate.x = DONT_CARE_COORDINATE;
+                    aec_roi_value.aec_roi_position.coordinate.y = DONT_CARE_COORDINATE;
+
+                    //Set Touch AF params
+                    af_roi_value.num_roi = 0;
+                    native_set_parms(CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value);
+                    native_set_parms(CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value);
+                }
+                //@Punit: If the values are negative, we don't send anything to the lower layer
+            }
+            return NO_ERROR;
+        }
+        ALOGE("Invalid Touch AF/AEC value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setFaceDetection(const char *str)
+{
+    if(supportsFaceDetection() == false){
+        ALOGI("Face detection is not enabled");
+        return NO_ERROR;
+    }
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                                    sizeof(facedetection) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mMetaDataWaitLock.lock();
+            mFaceDetectOn = value;
+            mMetaDataWaitLock.unlock();
+            mParameters.set(QCameraParameters::KEY_FACE_DETECTION, str);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setRedeyeReduction(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_REDEYE_REDUCTION)) {
+        ALOGI("Parameter Redeye Reduction is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_REDEYE_REDUCTION);
+    if (str != NULL) {
+        int value = attr_lookup(redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            ALOGI("%s: setting Redeye Reduction value of %s", __FUNCTION__, str);
+            mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION, str);
+
+            native_set_parms(CAMERA_PARM_REDEYE_REDUCTION, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Redeye Reduction value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setISOValue(const QCameraParameters& params) {
+    int8_t temp_hjr;
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ISO)) {
+        ALOGE("Parameter ISO Value is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_ISO_MODE);
+    if (str != NULL) {
+        int value = (camera_iso_mode_type)attr_lookup(
+          iso, sizeof(iso) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            camera_iso_mode_type temp = (camera_iso_mode_type) value;
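+            // The "deblur" ISO setting drives hand-jitter reduction (HJR):
+            // CAMERA_PARM_HJR is enabled when entering deblur and disabled
+            // again when leaving it.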
+            if (value == CAMERA_ISO_DEBLUR) {
+               temp_hjr = true;
+               native_set_parms(CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+               mHJR = value;
+            }
+            else {
+               if (mHJR == CAMERA_ISO_DEBLUR) {
+                   temp_hjr = false;
+                   native_set_parms(CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+                   mHJR = value;
+               }
+            }
+
+            mParameters.set(QCameraParameters::KEY_ISO_MODE, str);
+            native_set_parms(CAMERA_PARM_ISO, sizeof(camera_iso_mode_type), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Iso value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setSceneDetect(const QCameraParameters& params)
+{
+    bool retParm1, retParm2;
+    if (supportsSceneDetection()) {
+        if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_BL_DETECTION) && !mCfgControl.mm_camera_is_supported(CAMERA_PARM_SNOW_DETECTION)) {
+            ALOGE("Parameter Auto Scene Detection is not supported for this sensor");
+            return NO_ERROR;
+        }
+        const char *str = params.get(QCameraParameters::KEY_SCENE_DETECT);
+        if (str != NULL) {
+            int32_t value = attr_lookup(scenedetect, sizeof(scenedetect) / sizeof(str_map), str);
+            if (value != NOT_FOUND) {
+                mParameters.set(QCameraParameters::KEY_SCENE_DETECT, str);
+
+                retParm1 = native_set_parms(CAMERA_PARM_BL_DETECTION, sizeof(value),
+                                           (void *)&value);
+
+                retParm2 = native_set_parms(CAMERA_PARM_SNOW_DETECTION, sizeof(value),
+                                           (void *)&value);
+
+                //All Auto Scene detection modes should be all ON or all OFF.
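+                //If either call fails, flip the value and re-apply both so that
+                //backlight and snow detection never end up out of sync.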
+                if(retParm1 == false || retParm2 == false) {
+                    value = !value;
+                    retParm1 = native_set_parms(CAMERA_PARM_BL_DETECTION, sizeof(value),
+                                               (void *)&value);
+
+                    retParm2 = native_set_parms(CAMERA_PARM_SNOW_DETECTION, sizeof(value),
+                                               (void *)&value);
+                }
+                return (retParm1 && retParm2) ? NO_ERROR : UNKNOWN_ERROR;
+            }
+        }
+    ALOGE("Invalid auto scene detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setSceneMode(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_BESTSHOT_MODE)) {
+        ALOGE("Parameter Scenemode is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+
+    if (str != NULL) {
+        int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+        int32_t asd_val;
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_SCENE_MODE, str);
+            bool ret = native_set_parms(CAMERA_PARM_BESTSHOT_MODE, sizeof(value),
+                                       (void *)&value);
+
+            if (ret) {
+              int retParm1, retParm2;
+              /*if value is auto, set ASD on, else set ASD off*/
+              if (value == CAMERA_BESTSHOT_AUTO ) {
+                asd_val = TRUE;
+              } else {
+                asd_val = FALSE;
+              }
+
+              /*note: we need to simplify this logic by using a single ctrl as in 8960*/
+              retParm1 = native_set_parms(CAMERA_PARM_BL_DETECTION, sizeof(asd_val),
+                                         (void *)&asd_val);
+              retParm2 = native_set_parms(CAMERA_PARM_SNOW_DETECTION, sizeof(asd_val),
+                                         (void *)&asd_val);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid scenemode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+status_t QualcommCameraHardware::setGpsLocation(const QCameraParameters& params)
+{
+    const char *method = params.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+    if (method) {
+        mParameters.set(QCameraParameters::KEY_GPS_PROCESSING_METHOD, method);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+    }
+
+    const char *latitude = params.get(QCameraParameters::KEY_GPS_LATITUDE);
+    if (latitude) {
+        ALOGE("latitude %s",latitude);
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE, latitude);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LATITUDE);
+    }
+
+    const char *latitudeRef = params.get(QCameraParameters::KEY_GPS_LATITUDE_REF);
+    if (latitudeRef) {
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF, latitudeRef);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LATITUDE_REF);
+    }
+
+    const char *longitude = params.get(QCameraParameters::KEY_GPS_LONGITUDE);
+    if (longitude) {
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE, longitude);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LONGITUDE);
+    }
+
+    const char *longitudeRef = params.get(QCameraParameters::KEY_GPS_LONGITUDE_REF);
+    if (longitudeRef) {
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, longitudeRef);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_LONGITUDE_REF);
+    }
+
+    const char *altitudeRef = params.get(QCameraParameters::KEY_GPS_ALTITUDE_REF);
+    if (altitudeRef) {
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, altitudeRef);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_ALTITUDE_REF);
+    }
+
+    const char *altitude = params.get(QCameraParameters::KEY_GPS_ALTITUDE);
+    if (altitude) {
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE, altitude);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_ALTITUDE);
+    }
+
+    const char *status = params.get(QCameraParameters::KEY_GPS_STATUS);
+    if (status) {
+        mParameters.set(QCameraParameters::KEY_GPS_STATUS, status);
+    }
+
+    const char *dateTime = params.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if (dateTime) {
+        mParameters.set(QCameraParameters::KEY_EXIF_DATETIME, dateTime);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_EXIF_DATETIME);
+    }
+
+    const char *timestamp = params.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+    if (timestamp) {
+        mParameters.set(QCameraParameters::KEY_GPS_TIMESTAMP, timestamp);
+    }else {
+         mParameters.remove(QCameraParameters::KEY_GPS_TIMESTAMP);
+    }
+
+    return NO_ERROR;
+
+}
+
+status_t QualcommCameraHardware::setRotation(const QCameraParameters& params)
+{
+    status_t rc = NO_ERROR;
+    int sensor_mount_angle = HAL_cameraInfo[HAL_currentCameraId].sensor_mount_angle;
+    int rotation = params.getInt(QCameraParameters::KEY_ROTATION);
+    if (rotation != NOT_FOUND) {
+        if (rotation == 0 || rotation == 90 || rotation == 180
+            || rotation == 270) {
+          rotation = (rotation + sensor_mount_angle)%360;
+          mParameters.set(QCameraParameters::KEY_ROTATION, rotation);
+          mRotation = rotation;
+        } else {
+            ALOGE("Invalid rotation value: %d", rotation);
+            rc = BAD_VALUE;
+        }
+    }
+    return rc;
+}
+
+status_t QualcommCameraHardware::setZoom(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ZOOM)) {
+        ALOGE("Parameter setZoom is not supported for this sensor");
+        return NO_ERROR;
+    }
+    status_t rc = NO_ERROR;
+    // No matter how many different zoom values the driver can provide, the HAL
+    // provides applications the same number of zoom levels. The maximum driver
+    // zoom value depends on sensor output (VFE input) and preview size (VFE
+    // output) because the VFE can only crop and cannot upscale. If the preview
+    // size is bigger, the maximum zoom ratio is smaller. However, we want the
+    // zoom ratio of each zoom level to always be the same regardless of the
+    // preview size. Ex: zoom level 1 is always 1.2x, zoom level 2 is 1.44x,
+    // etc. So we need a fixed maximum zoom value, which we read from the
+    // driver.
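+    // For example, with a per-level ratio of 1.2x, level 1 is 1.2x, level 2 is
+    // 1.44x (1.2^2), level 3 is about 1.73x, and so on, independent of the
+    // preview size.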
+    static const int ZOOM_STEP = 1;
+    int32_t zoom_level = params.getInt("zoom");
+    if(zoom_level >= 0 && zoom_level <= mMaxZoom-1) {
+        mParameters.set("zoom", zoom_level);
+        int32_t zoom_value = ZOOM_STEP * zoom_level;
+        bool ret = native_set_parms(CAMERA_PARM_ZOOM,
+            sizeof(zoom_value), (void *)&zoom_value);
+        rc = ret ? NO_ERROR : UNKNOWN_ERROR;
+    } else {
+        rc = BAD_VALUE;
+    }
+
+    return rc;
+}
+
+status_t QualcommCameraHardware::setDenoise(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_WAVELET_DENOISE)) {
+        ALOGE("Wavelet Denoise is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_DENOISE);
+    if (str != NULL) {
+        int value = attr_lookup(denoise,
+                                sizeof(denoise) / sizeof(str_map), str);
+        if ((value != NOT_FOUND) && (mDenoiseValue != value)) {
+            mDenoiseValue = value;
+            mParameters.set(QCameraParameters::KEY_DENOISE, str);
+            bool ret = native_set_parms(CAMERA_PARM_WAVELET_DENOISE, sizeof(value),
+                                        (void *)&value);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        return NO_ERROR;
+    }
+    ALOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setZslParam(const QCameraParameters& params)
+{
+    if(!mZslEnable) {
+        ALOGV("Zsl is not enabled");
+        return NO_ERROR;
+    }
+    /* This ensures that a preview restart does not happen when taking a
+     * snapshot for the continuous viewfinder */
+    const char *str = params.get("continuous-temporal-bracketing");
+    if(str !=NULL) {
+        if(!strncmp(str, "enable", 8))
+            mZslPanorama = true;
+        else
+            mZslPanorama = false;
+        return NO_ERROR;
+    }
+    mZslPanorama = false;
+    return NO_ERROR;
+
+}
+
+status_t QualcommCameraHardware::setSnapshotCount(const QCameraParameters& params)
+{
+    int value;
+    char snapshotCount[5];
+    if(!mZslEnable){
+        value = numCapture;
+    } else {
+        /* ZSL case: Get value from App */
+        const char *str = params.get("num-snaps-per-shutter");
+        if (str != NULL) {
+            value = atoi(str);
+        } else
+            value = 1;
+    }
+    /* Sanity check */
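+    /* Clamp the requested count to the range [1, MAX_SNAPSHOT_BUFFERS - 2]. */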
+    if(value > MAX_SNAPSHOT_BUFFERS -2)
+        value = MAX_SNAPSHOT_BUFFERS -2;
+    else if(value < 1)
+        value = 1;
+    snprintf(snapshotCount, sizeof(snapshotCount),"%d",value);
+    numCapture = value;
+    mParameters.set("num-snaps-per-shutter", snapshotCount);
+    ALOGI("%s setting num-snaps-per-shutter to %s", __FUNCTION__, snapshotCount);
+    return NO_ERROR;
+
+}
+
+status_t QualcommCameraHardware::updateFocusDistances(const char *focusmode)
+{
+    ALOGV("%s: IN", __FUNCTION__);
+    focus_distances_info_t focusDistances;
+    if( mCfgControl.mm_camera_get_parm(CAMERA_PARM_FOCUS_DISTANCES,
+        (void *)&focusDistances) == MM_CAMERA_SUCCESS) {
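+        // Build KEY_FOCUS_DISTANCES as a comma-separated triple (the Android
+        // near,optimal,far convention); the last value becomes the literal
+        // string "Infinity" when the focus mode is infinity.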
+        String8 str;
+        char buffer[32];
+        snprintf(buffer, sizeof(buffer), "%f", focusDistances.focus_distance[0]);
+        str.append(buffer);
+        snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[1]);
+        str.append(buffer);
+        if(strcmp(focusmode, QCameraParameters::FOCUS_MODE_INFINITY) == 0)
+            snprintf(buffer, sizeof(buffer), ",%s", "Infinity");
+        else
+            snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[2]);
+        str.append(buffer);
+        ALOGI("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+        mParameters.set(QCameraParameters::KEY_FOCUS_DISTANCES, str.string());
+        return NO_ERROR;
+    }
+    ALOGE("%s: get CAMERA_PARM_FOCUS_DISTANCES failed!!!", __FUNCTION__);
+    return BAD_VALUE;
+}
+
+status_t QualcommCameraHardware::setMeteringAreas(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_METERING_AREAS);
+    if (str == NULL || (strcmp(str, "0") == 0)) {
+        ALOGE("%s: Parameter string is null", __FUNCTION__);
+    }
+    else {
+        // handling default string
+        if ((strcmp("(-2000,-2000,-2000,-2000,0)", str) == 0) ||
+            (strcmp("(0,0,0,0,0)", str) == 0)){
+          mParameters.set(QCameraParameters::KEY_METERING_AREAS, NULL);
+          return NO_ERROR;
+        }
+        if(checkAreaParameters(str) != 0) {
+          ALOGE("%s: Failed to parse the input string '%s'", __FUNCTION__, str);
+          return BAD_VALUE;
+        }
+        mParameters.set(QCameraParameters::KEY_METERING_AREAS, str);
+    }
+
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setFocusAreas(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_FOCUS_AREAS);
+
+    if (str == NULL || (strcmp(str, "0") == 0)) {
+        ALOGE("%s: Parameter string is null", __FUNCTION__);
+    }
+    else {
+        // handling default string
+        if ((strcmp("(-2000,-2000,-2000,-2000,0)", str) == 0) ||
+            (strcmp("(0,0,0,0,0)", str) == 0)) {
+          mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, NULL);
+          return NO_ERROR;
+        }
+
+        if(checkAreaParameters(str) != 0) {
+          ALOGE("%s: Failed to parse the input string '%s'", __FUNCTION__, str);
+          return BAD_VALUE;
+        }
+
+        mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, str);
+    }
+
+    return NO_ERROR;
+}
+status_t QualcommCameraHardware::setFocusMode(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_FOCUS_MODE);
+    if (str != NULL) {
+      ALOGE("FocusMode =%s", str);
+        int32_t value = attr_lookup(focus_modes,
+                                    sizeof(focus_modes) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_FOCUS_MODE, str);
+
+            if(mHasAutoFocusSupport && (updateFocusDistances(str) != NO_ERROR)) {
+                ALOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, str);
+                return UNKNOWN_ERROR;
+            }
+
+            if(mHasAutoFocusSupport){
+                int cafSupport = FALSE;
+                if(!strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ||
+                   !strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)){
+                    cafSupport = TRUE;
+                }
+                ALOGV("Continuous Auto Focus %d", cafSupport);
+                native_set_parms(CAMERA_PARM_CONTINUOUS_AF, sizeof(int8_t), (void *)&cafSupport);
+            }
+            // Focus step is reset to infinity when preview is started. We do
+            // not need to do anything now.
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid focus mode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+
+}
+QualcommCameraHardware::DispMemPool::DispMemPool(int fd, int buffer_size,
+                                               int num_buffers, int frame_size,
+                                               const char *name) :
+    QualcommCameraHardware::MemPool(buffer_size,
+                                    num_buffers,
+                                    frame_size,
+                                    name),
+    mFD(fd)
+{
+#if 0
+    ALOGV("constructing MemPool %s from gralloc memory: "
+         "%d frames @ %d size "
+         "buffer size %d",
+         mName,
+         num_buffers, frame_size, buffer_size);
+    /* Use the fd given by gralloc and ask MemoryHeapBase to map it
+     * in this process space */
+    mHeap = new MemoryHeapBase(mFD, buffer_size, MemoryHeapBase::NO_CACHING, 0);
+    completeInitialization();
+#endif
+}
+
+QualcommCameraHardware::DispMemPool::~DispMemPool()
+{
+    /* Not much to do in destructor for now */
+    ALOGV(" ~DispMemPool : E ");
+    mFD = -1;
+    ALOGV(" ~DispMemPool : X ");
+}
+status_t QualcommCameraHardware::setOrientation(const QCameraParameters& params)
+{
+    const char *str = params.get("orientation");
+
+    if (str != NULL) {
+        if (strcmp(str, "portrait") == 0 || strcmp(str, "landscape") == 0) {
+            // Camera service needs this to decide if the preview frames and raw
+            // pictures should be rotated.
+            mParameters.set("orientation", str);
+        } else {
+            ALOGE("Invalid orientation value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::setPictureFormat(const QCameraParameters& params)
+{
+    const char * str = params.get(QCameraParameters::KEY_PICTURE_FORMAT);
+
+    if(str != NULL){
+        int32_t value = attr_lookup(picture_formats,
+                                    sizeof(picture_formats) / sizeof(str_map), str);
+        if(value != NOT_FOUND){
+            mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT, str);
+        } else {
+            ALOGE("Invalid Picture Format value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+QualcommCameraHardware::MMCameraDL::MMCameraDL(){
+    ALOGV("MMCameraDL: E");
+    libmmcamera = NULL;
+#if DLOPEN_LIBMMCAMERA
+    libmmcamera = ::dlopen("liboemcamera.so", RTLD_NOW);
+#endif
+    ALOGV("Open MM camera DL libeomcamera loaded at %p ", libmmcamera);
+    ALOGV("MMCameraDL: X");
+}
+
+void * QualcommCameraHardware::MMCameraDL::pointer(){
+    return libmmcamera;
+}
+
+QualcommCameraHardware::MMCameraDL::~MMCameraDL(){
+    ALOGV("~MMCameraDL: E");
+    LINK_mm_camera_destroy();
+    if (libmmcamera != NULL) {
+        ::dlclose(libmmcamera);
+        ALOGV("closed MM Camera DL ");
+    }
+    libmmcamera = NULL;
+    ALOGV("~MMCameraDL: X");
+}
+
+wp<QualcommCameraHardware::MMCameraDL> QualcommCameraHardware::MMCameraDL::instance;
+Mutex QualcommCameraHardware::MMCameraDL::singletonLock;
+
+
+sp<QualcommCameraHardware::MMCameraDL> QualcommCameraHardware::MMCameraDL::getInstance(){
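+    // Weak-reference singleton: promote() returns the live instance if one
+    // still exists; otherwise a new MMCameraDL is created (re-dlopen'ing
+    // liboemcamera.so) and cached in the weak pointer.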
+    Mutex::Autolock instanceLock(singletonLock);
+    sp<MMCameraDL> mmCamera = instance.promote();
+    if(mmCamera == NULL){
+        mmCamera = new MMCameraDL();
+        instance = mmCamera;
+    }
+    return mmCamera;
+}
+
+QualcommCameraHardware::MemPool::MemPool(int buffer_size, int num_buffers,
+                                         int frame_size,
+                                         const char *name) :
+    mBufferSize(buffer_size),
+    mNumBuffers(num_buffers),
+    mFrameSize(frame_size),
+    mBuffers(NULL), mName(name)
+{
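+    // Round each buffer size up to a whole number of pages so that per-buffer
+    // offsets into the shared heap stay page-aligned.
+    // For example, with 4096-byte pages a 5000-byte buffer is padded to 8192.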
+    int page_size_minus_1 = getpagesize() - 1;
+    mAlignedBufferSize = (buffer_size + page_size_minus_1) & (~page_size_minus_1);
+}
+
+void QualcommCameraHardware::MemPool::completeInitialization()
+{
+    // If we do not know how big the frame will be, we wait to allocate
+    // the buffers describing the individual frames until we do know their
+    // size.
+
+    if (mFrameSize > 0) {
+        ALOGE("Before new MemoryBase allocation, #buffers: %d", mNumBuffers);
+        mBuffers = new sp<MemoryBase>[mNumBuffers];
+        for (int i = 0; i < mNumBuffers; i++) {
+            mBuffers[i] = new
+                MemoryBase(mHeap,
+                           i * mAlignedBufferSize,
+                           mFrameSize);
+        }
+    }
+}
+
+QualcommCameraHardware::AshmemPool::AshmemPool(int buffer_size, int num_buffers,
+                                               int frame_size,
+                                               const char *name) :
+    QualcommCameraHardware::MemPool(buffer_size,
+                                    num_buffers,
+                                    frame_size,
+                                    name)
+{
+    ALOGV("constructing MemPool %s backed by ashmem: "
+         "%d frames @ %d bytes, "
+         "buffer size %d",
+         mName,
+         num_buffers, frame_size, buffer_size);
+
+    int page_mask = getpagesize() - 1;
+    int ashmem_size = buffer_size * num_buffers;
+    ashmem_size += page_mask;
+    ashmem_size &= ~page_mask;
+
+    mHeap = new MemoryHeapBase(ashmem_size);
+
+    completeInitialization();
+}
+
+bool QualcommCameraHardware::register_record_buffers(bool register_buffer) {
+    ALOGI("%s: (%d) E", __FUNCTION__, register_buffer);
+    struct msm_pmem_info pmemBuf;
+#if 0
+    for (int cnt = 0; cnt < kRecordBufferCount; ++cnt) {
+        pmemBuf.type     = MSM_PMEM_VIDEO;
+        pmemBuf.fd       = mRecordHeap->mHeap->getHeapID();
+        pmemBuf.offset   = mRecordHeap->mAlignedBufferSize * cnt;
+        pmemBuf.len      = mRecordHeap->mBufferSize;
+        pmemBuf.vaddr    = (uint8_t *)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+        pmemBuf.planar0_off    = 0;
+        pmemBuf.planar1_off = recordframes[0].planar1_off;
+        pmemBuf.planar2_off = 0;
+        if(register_buffer == true) {
+            pmemBuf.active   = (cnt<ACTIVE_VIDEO_BUFFERS);
+            if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+                pmemBuf.type = MSM_PMEM_VIDEO_VPE;
+                pmemBuf.active = 1;
+            }
+        } else {
+            pmemBuf.active   = false;
+        }
+
+        ALOGV("register_buf:  reg = %d buffer = %p", !register_buffer,
+          (void *)pmemBuf.vaddr);
+        if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+                CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+            ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM  error %s",
+                strerror(errno));
+            return false;
+        }
+    }
+#endif
+    return true;
+}
+
+QualcommCameraHardware::PmemPool::PmemPool(const char *pmem_pool,
+                                           int flags,
+                                           int pmem_type,
+                                           int buffer_size, int num_buffers,
+                                           int frame_size, int cbcr_offset,
+                                           int yOffset, const char *name) :
+    QualcommCameraHardware::MemPool(buffer_size,
+                                    num_buffers,
+                                    frame_size,
+                                    name),
+    mPmemType(pmem_type),
+    mCbCrOffset(cbcr_offset),
+    myOffset(yOffset)
+{
+    bool all_chnls = false;
+    ALOGI("constructing MemPool %s backed by pmem pool %s: "
+         "%d frames @ %d bytes, buffer size %d",
+         mName,
+         pmem_pool, num_buffers, frame_size,
+         buffer_size);
+
+    mMMCameraDLRef = QualcommCameraHardware::MMCameraDL::getInstance();
+
+
+    // Make a new mmap'ed heap that can be shared across processes.
+    // mAlignedBufferSize is already 4K-aligned. (Do we need the total size to be a power of 2?)
+    mAlignedSize = mAlignedBufferSize * num_buffers;
+
+    sp<MemoryHeapBase> masterHeap =
+        new MemoryHeapBase(pmem_pool, mAlignedSize, flags);
+
+    if (masterHeap->getHeapID() < 0) {
+        ALOGE("failed to construct master heap for pmem pool %s", pmem_pool);
+        masterHeap.clear();
+        return;
+    }
+
+    sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(masterHeap, flags);
+    if (pmemHeap->getHeapID() >= 0) {
+        pmemHeap->slap();
+        masterHeap.clear();
+        mHeap = pmemHeap;
+        pmemHeap.clear();
+
+        mFd = mHeap->getHeapID();
+        if (::ioctl(mFd, PMEM_GET_SIZE, &mSize)) {
+            ALOGE("pmem pool %s ioctl(PMEM_GET_SIZE) error %s (%d)",
+                 pmem_pool,
+                 ::strerror(errno), errno);
+            mHeap.clear();
+            return;
+        }
+
+        ALOGV("pmem pool %s ioctl(fd = %d, PMEM_GET_SIZE) is %ld",
+             pmem_pool,
+             mFd,
+             mSize.len);
+        ALOGD("mBufferSize=%d, mAlignedBufferSize=%d\n", mBufferSize, mAlignedBufferSize);
+        // Register the buffers with the camera driver.  Allow the VFE to write
+        // to all preview buffers except for the last one.
+        // Only the preview, snapshot and thumbnail buffers are registered with the kernel.
+        if( (strcmp("postview", mName) != 0) ){
+            int num_buf = num_buffers;
+            if(!strcmp("preview", mName)) num_buf = kTotalPreviewBufferCount;
+            ALOGD("num_buffers = %d", num_buf);
+            for (int cnt = 0; cnt < num_buf; ++cnt) {
+                int active = 1;
+                if(pmem_type == MSM_PMEM_VIDEO){
+                     active = (cnt<ACTIVE_VIDEO_BUFFERS);
+                     //When VPE is enabled, set the last record
+                     //buffer as active and pmem type as PMEM_VIDEO_VPE
+                     //as this is a requirement from VPE operation.
+                     //No need to set this pmem type to VIDEO_VPE while unregistering,
+                     //because as per camera stack design: "the VPE AXI is also configured
+                     //when VFE is configured for VIDEO, which is as part of preview
+                     //initialization/start. So during this VPE AXI config camera stack
+                     //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+                     //change its type to PMEM_VIDEO".
+                     if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+                         active = 1;
+                         pmem_type = MSM_PMEM_VIDEO_VPE;
+                     }
+                     ALOGV(" pmempool creating video buffers : active %d ", active);
+                }
+                else if (pmem_type == MSM_PMEM_PREVIEW){
+                    active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+                }
+                else if ((pmem_type == MSM_PMEM_MAINIMG)
+                     || (pmem_type == MSM_PMEM_THUMBNAIL)){
+                    active = (cnt < ACTIVE_ZSL_BUFFERS);
+                }
+                if (pmem_type == MSM_PMEM_PREVIEW &&
+                    mPreviewFormat == CAMERA_YUV_420_YV12 && mCurrentTarget != TARGET_MSM7627A)
+                    all_chnls = true;
+
+                register_buf(mBufferSize,
+                         mFrameSize, mCbCrOffset, myOffset,
+                         mHeap->getHeapID(),
+                         mAlignedBufferSize * cnt,
+                         (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+                         pmem_type,
+                         active,true,
+                         all_chnls);
+            }
+        }
+
+        completeInitialization();
+    }
+    else ALOGE("pmem pool %s error: could not create master heap!",
+              pmem_pool);
+    ALOGI("%s: (%s) X ", __FUNCTION__, mName);
+}
+
+QualcommCameraHardware::PmemPool::~PmemPool()
+{
+    ALOGI("%s: %s E", __FUNCTION__, mName);
+    if (mHeap != NULL) {
+        // Unregister preview buffers with the camera drivers.
+        //  Only Unregister the preview, snapshot and thumbnail
+        //  buffers with the kernel.
+        if( (strcmp("postview", mName) != 0) ){
+            int num_buffers = mNumBuffers;
+            if(!strcmp("preview", mName)) num_buffers = kTotalPreviewBufferCount;
+            for (int cnt = 0; cnt < num_buffers; ++cnt) {
+                register_buf(mBufferSize,
+                         mFrameSize,
+                         mCbCrOffset,
+                         myOffset,
+                         mHeap->getHeapID(),
+                         mAlignedBufferSize * cnt,
+                         (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+                         mPmemType,
+                         false,
+                         false,/* unregister */
+                         false);
+            }
+        }
+    }
+    mMMCameraDLRef.clear();
+    ALOGI("%s: %s X", __FUNCTION__, mName);
+}
+#if 0
+#ifdef USE_ION
+const char QualcommCameraHardware::IonPool::mIonDevName[] = "/dev/ion";
+QualcommCameraHardware::IonPool::IonPool(int ion_heap_id, int flags,
+                                           int ion_type,
+                                           int buffer_size, int num_buffers,
+                                           int frame_size, int cbcr_offset,
+                                           int yOffset, const char *name) :
+    QualcommCameraHardware::MemPool(buffer_size,
+                                    num_buffers,
+                                    frame_size,
+                                    name),
+    mIonType(ion_type),
+    mCbCrOffset(cbcr_offset),
+    myOffset(yOffset)
+{
+    ALOGI("constructing MemPool %s backed by pmem pool %s: "
+         "%d frames @ %d bytes, buffer size %d",
+         mName,
+         mIonDevName, num_buffers, frame_size,
+         buffer_size);
+
+    mMMCameraDLRef = QualcommCameraHardware::MMCameraDL::getInstance();
+
+
+    // Make a new mmap'ed heap that can be shared across processes.
+    // mAlignedBufferSize is already in 4k aligned. (do we need total size necessary to be in power of 2??)
+    mAlignedSize = mAlignedBufferSize * num_buffers;
+    sp<MemoryHeapIon> ionHeap = new MemoryHeapIon(mIonDevName, mAlignedSize,
+                                                  flags, 0x1<<ion_heap_id);
+    if (ionHeap->getHeapID() >= 0) {
+        mHeap = ionHeap;
+        ionHeap.clear();
+
+        mFd = mHeap->getHeapID();
+        ALOGE("ion pool %s fd = %d", mIonDevName, mFd);
+        ALOGE("mBufferSize=%d, mAlignedBufferSize=%d\n",
+                      mBufferSize, mAlignedBufferSize);
+
+        // Unregister preview buffers with the camera drivers.  Allow the VFE to write
+        // to all preview buffers except for the last one.
+        // Only Register the preview, snapshot and thumbnail buffers with the kernel.
+        if( (strcmp("postview", mName) != 0) ){
+            int num_buf = num_buffers;
+            if(!strcmp("preview", mName)) num_buf = kPreviewBufferCount;
+            ALOGD("num_buffers = %d", num_buf);
+            for (int cnt = 0; cnt < num_buf; ++cnt) {
+                int active = 1;
+                if(ion_type == MSM_PMEM_VIDEO){
+                     active = (cnt<ACTIVE_VIDEO_BUFFERS);
+                     //When VPE is enabled, set the last record
+                     //buffer as active and pmem type as PMEM_VIDEO_VPE
+                     //as this is a requirement from VPE operation.
+                     //No need to set this pmem type to VIDEO_VPE while unregistering,
+                     //because as per camera stack design: "the VPE AXI is also configured
+                     //when VFE is configured for VIDEO, which is as part of preview
+                     //initialization/start. So during this VPE AXI config camera stack
+                     //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+                     //change it's type to PMEM_VIDEO".
+                     if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+                         active = 1;
+                         ion_type = MSM_PMEM_VIDEO_VPE;
+                     }
+                     ALOGV(" pmempool creating video buffers : active %d ", active);
+                }
+                else if (ion_type == MSM_PMEM_PREVIEW){
+                    active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+                }
+                else if ((ion_type == MSM_PMEM_MAINIMG)
+                     || (ion_type == MSM_PMEM_THUMBNAIL)){
+                    active = (cnt < ACTIVE_ZSL_BUFFERS);
+                }
+                register_buf(mBufferSize,
+                         mFrameSize, mCbCrOffset, myOffset,
+                         mHeap->getHeapID(),
+                         mAlignedBufferSize * cnt,
+                         (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+                         ion_type,
+                         active);
+            }
+        }
+
+        completeInitialization();
+    }
+    else ALOGE("pmem pool %s error: could not create master heap!",
+              mIonDevName);
+    ALOGI("%s: (%s) X ", __FUNCTION__, mName);
+}
+
+QualcommCameraHardware::IonPool::~IonPool()
+{
+    ALOGI("%s: %s E", __FUNCTION__, mName);
+    if (mHeap != NULL) {
+        // Unregister preview buffers with the camera drivers.
+        //  Only Unregister the preview, snapshot and thumbnail
+        //  buffers with the kernel.
+        if( (strcmp("postview", mName) != 0) ){
+            int num_buffers = mNumBuffers;
+            if(!strcmp("preview", mName)) num_buffers = kPreviewBufferCount;
+            for (int cnt = 0; cnt < num_buffers; ++cnt) {
+                register_buf(mBufferSize,
+                         mFrameSize,
+                         mCbCrOffset,
+                         myOffset,
+                         mHeap->getHeapID(),
+                         mAlignedBufferSize * cnt,
+                         (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+                         mIonType,
+                         false,
+                         false /* unregister */);
+            }
+        }
+    }
+    mMMCameraDLRef.clear();
+    ALOGI("%s: %s X", __FUNCTION__, mName);
+}
+#endif
+#endif
+QualcommCameraHardware::MemPool::~MemPool()
+{
+    ALOGV("destroying MemPool %s", mName);
+    if (mFrameSize > 0)
+        delete [] mBuffers;
+    mHeap.clear();
+    ALOGV("destroying MemPool %s completed", mName);
+}
+
+status_t QualcommCameraHardware::MemPool::dump(int fd, const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+    CAMERA_HAL_UNUSED(args);
+    snprintf(buffer, 255, "QualcommCameraHardware::AshmemPool::dump\n");
+    result.append(buffer);
+    if (mName) {
+        snprintf(buffer, 255, "mem pool name (%s)\n", mName);
+        result.append(buffer);
+    }
+    if (mHeap != 0) {
+        snprintf(buffer, 255, "heap base(%p), size(%d), flags(%d), device(%s)\n",
+                 mHeap->getBase(), mHeap->getSize(),
+                 mHeap->getFlags(), mHeap->getDevice());
+        result.append(buffer);
+    }
+    snprintf(buffer, 255,
+             "buffer size (%d), number of buffers (%d), frame size(%d)",
+             mBufferSize, mNumBuffers, mFrameSize);
+    result.append(buffer);
+    write(fd, result.string(), result.size());
+    return NO_ERROR;
+}
+
+static void receive_camframe_callback(struct msm_frame *frame)
+{
+    QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->receivePreviewFrame(frame);
+    }
+}
+
+static void receive_camstats_callback(camstats_type stype, camera_preview_histogram_info* histinfo)
+{
+    QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        obj->receiveCameraStats(stype,histinfo);
+    }
+}
+
+static void receive_liveshot_callback(liveshot_status status, uint32_t jpeg_size)
+{
+    if(status == LIVESHOT_SUCCESS) {
+        QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+        if (obj != 0) {
+            obj->receiveLiveSnapshot(jpeg_size);
+        }
+    }
+    else
+        ALOGE("Liveshot not succesful");
+}
+
+
+static int8_t receive_event_callback(mm_camera_event* event)
+{
+    ALOGV("%s: E", __FUNCTION__);
+    if(event == NULL) {
+        ALOGE("%s: event is NULL!", __FUNCTION__);
+        return FALSE;
+    }
+    switch(event->event_type) {
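+        // Dispatch mm-camera events to the HAL instance: SNAPSHOT_* events
+        // deliver (or fail) the raw/postview picture, and JPEG_ENC_* events
+        // deliver (or fail) the encoded JPEG.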
+        case SNAPSHOT_DONE:
+        {
+            /* postview buffer is received */
+            QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+            if (obj != 0) {
+
+                obj->receiveRawPicture(NO_ERROR, event->event_data.yuv_frames[0], event->event_data.yuv_frames[0]);
+            }
+        }
+        break;
+        case SNAPSHOT_FAILED:
+        {
+            /* postview buffer is received */
+            QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+            if (obj != 0) {
+
+                obj->receiveRawPicture(UNKNOWN_ERROR, NULL, NULL);
+            }
+        }
+        break;
+        case JPEG_ENC_DONE:
+        {
+            QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+            if (obj != 0) {
+                obj->receiveJpegPicture(NO_ERROR, event->event_data.encoded_frame);
+            }
+        }
+        break;
+        case JPEG_ENC_FAILED:
+        {
+            QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+            if (obj != 0) {
+                obj->receiveJpegPicture(UNKNOWN_ERROR, 0);
+            }
+        }
+        break;
+        default:
+            ALOGE("%s: ignore default case", __FUNCTION__);
+    }
+    ALOGV("%s: X", __FUNCTION__);
+    return TRUE;
+}
+// 720p : video frame calbback from camframe
+static void receive_camframe_video_callback(struct msm_frame *frame)
+{
+    ALOGV("receive_camframe_video_callback E");
+    QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+            obj->receiveRecordingFrame(frame);
+         }
+    ALOGV("receive_camframe_video_callback X");
+}
+
+
+int QualcommCameraHardware::storeMetaDataInBuffers(int enable)
+{
+    /* This is a dummy function for now; fix me later. */
+    ALOGI("in storeMetaDataInBuffers : enable %d", enable);
+    mStoreMetaDataInFrame = enable;
+    return 0;
+}
+
+void QualcommCameraHardware::setCallbacks(camera_notify_callback notify_cb,
+                             camera_data_callback data_cb,
+                             camera_data_timestamp_callback data_cb_timestamp,
+                             camera_request_memory get_memory,
+                             void* user)
+{
+    Mutex::Autolock lock(mLock);
+    mNotifyCallback = notify_cb;
+    mDataCallback = data_cb;
+    mDataCallbackTimestamp = data_cb_timestamp;
+    mGetMemory = get_memory;
+    mCallbackCookie = user;
+}
+int32_t QualcommCameraHardware::getNumberOfVideoBuffers() {
+    ALOGE("getNumOfVideoBuffers: %d", kRecordBufferCount);
+    return kRecordBufferCount;
+}
+
+sp<IMemory> QualcommCameraHardware::getVideoBuffer(int32_t index) {
+   if(index > kRecordBufferCount)
+     return NULL;
+   else
+     return NULL;
+#if 0
+        return  mRecordHeap->mBuffers[index];
+#endif
+}
+void QualcommCameraHardware::enableMsgType(int32_t msgType)
+{
+    Mutex::Autolock lock(mLock);
+    mMsgEnabled |= msgType;
+    if( (mCurrentTarget != TARGET_MSM7630 ) &&  (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660)) {
+      if(mMsgEnabled & CAMERA_MSG_VIDEO_FRAME){
+        native_start_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+        mRecordingState = 1;
+      }
+    }
+}
+
+void QualcommCameraHardware::disableMsgType(int32_t msgType)
+{
+    Mutex::Autolock lock(mLock);
+    if( (mCurrentTarget != TARGET_MSM7630 ) &&  (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660)) {
+      if(mMsgEnabled & CAMERA_MSG_VIDEO_FRAME){
+        native_stop_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+        mRecordingState = 0;
+      }
+    }
+    mMsgEnabled &= ~msgType;
+}
+
+bool QualcommCameraHardware::msgTypeEnabled(int32_t msgType)
+{
+    return (mMsgEnabled & msgType);
+}
+
+
+void QualcommCameraHardware::receive_camframe_error_timeout(void) {
+    ALOGI("receive_camframe_error_timeout: E");
+    Mutex::Autolock l(&mCamframeTimeoutLock);
+    ALOGE(" Camframe timed out. Not receiving any frames from camera driver ");
+    camframe_timeout_flag = TRUE;
+    mNotifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0,
+                    mCallbackCookie);
+    ALOGI("receive_camframe_error_timeout: X");
+}
+
+static void receive_camframe_error_callback(camera_error_type err) {
+    QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+    if (obj != 0) {
+        if ((err == CAMERA_ERROR_TIMEOUT) ||
+            (err == CAMERA_ERROR_ESD)) {
+            /* Handling different error types is dependent on the requirement.
+             * Do the same action by default
+             */
+            obj->receive_camframe_error_timeout();
+        }
+    }
+}
+
+bool QualcommCameraHardware::storePreviewFrameForPostview(void) {
+    ALOGV("storePreviewFrameForPostview : E ");
+
+    /* Since there is restriction on the maximum overlay dimensions
+     * that can be created, we use the last preview frame as postview
+     * for 7x30. */
+    ALOGV("Copying the preview buffer to postview buffer %d  ",
+         mPreviewFrameSize);
+    if(mLastPreviewFrameHeap == NULL) {
+        int CbCrOffset = PAD_TO_WORD(mPreviewFrameSize * 2/3);
+#if 0
+#ifdef USE_ION
+
+        mLastPreviewFrameHeap =
+           new IonPool(ION_HEAP_ADSP_ID,
+           MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+           MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+           mPreviewFrameSize,
+           1,
+           mPreviewFrameSize,
+           CbCrOffset,
+           0,
+           "postview");
+#else
+        mLastPreviewFrameHeap =
+           new PmemPool("/dev/pmem_adsp",
+           MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+           MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+           mPreviewFrameSize,
+           1,
+           mPreviewFrameSize,
+           CbCrOffset,
+           0,
+           "postview");
+#endif
+           if (!mLastPreviewFrameHeap->initialized()) {
+               mLastPreviewFrameHeap.clear();
+               ALOGE(" Failed to initialize Postview Heap");
+               return false;
+            }
+#endif
+    }
+#if 0
+    if( mLastPreviewFrameHeap != NULL && mLastQueuedFrame != NULL) {
+        memcpy(mLastPreviewFrameHeap->mHeap->base(),
+               (uint8_t *)mLastQueuedFrame, mPreviewFrameSize );
+
+        if(mUseOverlay && !mZslPanorama) {
+            //mOverlayLock.lock();
+            //if(mOverlay != NULL){
+                //mOverlay->setFd(mLastPreviewFrameHeap->mHeap->getHeapID());
+                if( zoomCropInfo.w !=0 && zoomCropInfo.h !=0) {
+                    ALOGE("zoomCropInfo non-zero, setting crop ");
+                    ALOGE("setCrop with %dx%d and %dx%d", zoomCropInfo.x, zoomCropInfo.y, zoomCropInfo.w, zoomCropInfo.h);
+                   // mOverlay->setCrop(zoomCropInfo.x, zoomCropInfo.y,
+                               //zoomCropInfo.w, zoomCropInfo.h);
+                }
+                ALOGV("Queueing Postview with last frame till the snapshot is done ");
+                //mOverlay->queueBuffer((void *)0);
+            }
+            //mOverlayLock.unlock();
+        }
+
+    } else
+        ALOGE("Failed to store Preview frame. No Postview ");
+#endif
+    ALOGV("storePreviewFrameForPostview : X ");
+    return true;
+}
+
+bool QualcommCameraHardware::isValidDimension(int width, int height) {
+    bool retVal = FALSE;
+    /* This function checks if a given resolution is valid or not.
+     * A particular resolution is considered valid if it satisfies
+     * the following conditions:
+     * 1. width & height should be multiple of 16.
+     * 2. width & height should be less than/equal to the dimensions
+     *    supported by the camera sensor.
+     * 3. the aspect ratio is a valid aspect ratio and is among the
+     *    commonly used aspect ratio as determined by the thumbnail_sizes
+     *    data structure.
+     */
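+    /* Worked example (sketch; this assumes Q12 is the usual 4096 fixed-point
+     * scale used by the thumbnail_sizes table): for 1280x720 both sides are
+     * multiples of 16, and (1280 * 4096) / 720 = 7281, which matches the 16:9
+     * entry (16 * 4096 / 9 = 7281), so the size is accepted provided it also
+     * fits within maxSnapshotWidth x maxSnapshotHeight.
+     */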
+
+    if( (width == CEILING16(width)) && (height == CEILING16(height))
+        && (width <= maxSnapshotWidth)
+        && (height <= maxSnapshotHeight) )
+    {
+        uint32_t pictureAspectRatio = (uint32_t)((width * Q12)/height);
+        for(uint32_t i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ) {
+            if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio) {
+                retVal = TRUE;
+                break;
+            }
+        }
+    }
+    return retVal;
+}
+status_t QualcommCameraHardware::getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize) {
+    status_t ret;
+    ALOGV(" getBufferInfo : E ");
+    if( (mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660) ) {
+        if( mRecordHeap != NULL) {
+            ALOGV(" Setting valid buffer information ");
+            Frame = mRecordHeap->mBuffers[0];
+            if( alignedSize != NULL) {
+                *alignedSize = mRecordHeap->mAlignedBufferSize;
+                ALOGV(" HAL : alignedSize = %d ", *alignedSize);
+                ret = NO_ERROR;
+            } else {
+                ALOGE(" HAL : alignedSize is NULL. Cannot update alignedSize ");
+                ret = UNKNOWN_ERROR;
+            }
+        } else {
+            ALOGE(" RecordHeap is null. Buffer information won't be updated ");
+            Frame = NULL;
+            ret = UNKNOWN_ERROR;
+        }
+    } else {
+        if(mPreviewHeap != NULL) {
+            ALOGV(" Setting valid buffer information ");
+            // Frame = mPreviewHeap->mBuffers[0];
+            if( alignedSize != NULL) {
+                //*alignedSize = mPreviewHeap->mAlignedBufferSize;
+                ALOGV(" HAL : alignedSize = %d ", *alignedSize);
+                ret = NO_ERROR;
+            } else {
+                ALOGE(" HAL : alignedSize is NULL. Cannot update alignedSize ");
+                ret = UNKNOWN_ERROR;
+            }
+        } else {
+            ALOGE(" PreviewHeap is null. Buffer information won't be updated ");
+            Frame = NULL;
+            ret = UNKNOWN_ERROR;
+        }
+    }
+    ALOGV(" getBufferInfo : X ");
+    return ret;
+}
+
+void QualcommCameraHardware::encodeData() {
+    ALOGV("encodeData: E");
+
+    if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+        mJpegThreadWaitLock.lock();
+        mJpegThreadRunning = true;
+        mJpegThreadWaitLock.unlock();
+        mm_camera_ops_type_t current_ops_type = CAMERA_OPS_ENCODE;
+        mCamOps.mm_camera_start(current_ops_type, (void *)&mImageCaptureParms,
+                                (void *)&mImageEncodeParms);
+        // Wait until jpeg encoding is done and clear the resources.
+        mJpegThreadWaitLock.lock();
+        while (mJpegThreadRunning) {
+            ALOGV("encodeData: waiting for jpeg thread to complete.");
+            mJpegThreadWait.wait(mJpegThreadWaitLock);
+            ALOGV("encodeData: jpeg thread completed.");
+        }
+        mJpegThreadWaitLock.unlock();
+    } else {
+        ALOGV("encodeData: JPEG callback is NULL, not encoding image.");
+    }
+
+    mCamOps.mm_camera_deinit(CAMERA_OPS_CAPTURE, NULL, NULL);
+    //clear the resources
+    deinitRaw();
+    //Encoding is done.
+    mEncodePendingWaitLock.lock();
+    mEncodePending = false;
+    mEncodePendingWait.signal();
+    mEncodePendingWaitLock.unlock();
+
+    ALOGV("encodeData: X");
+}
+
+void QualcommCameraHardware::getCameraInfo()
+{
+    ALOGI("getCameraInfo: IN");
+    mm_camera_status_t status;
+
+#if DLOPEN_LIBMMCAMERA
+    void *libhandle = ::dlopen("liboemcamera.so", RTLD_NOW);
+    ALOGI("getCameraInfo: loading libqcamera at %p", libhandle);
+    if (!libhandle) {
+        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+        return;
+    }
+    *(void **)&LINK_mm_camera_get_camera_info =
+        ::dlsym(libhandle, "mm_camera_get_camera_info");
+#endif
+    storeTargetType();
+    status = LINK_mm_camera_get_camera_info(HAL_cameraInfo, &HAL_numOfCameras);
+    ALOGI("getCameraInfo: numOfCameras = %d", HAL_numOfCameras);
+    for(int i = 0; i < HAL_numOfCameras; i++) {
+        if((HAL_cameraInfo[i].position == BACK_CAMERA )&&
+            mCurrentTarget == TARGET_MSM8660){
+            HAL_cameraInfo[i].modes_supported |= CAMERA_ZSL_MODE;
+        } else{
+            HAL_cameraInfo[i].modes_supported |= CAMERA_NONZSL_MODE;
+        }
+        ALOGI("Camera sensor %d info:", i);
+        ALOGI("camera_id: %d", HAL_cameraInfo[i].camera_id);
+        ALOGI("modes_supported: %x", HAL_cameraInfo[i].modes_supported);
+        ALOGI("position: %d", HAL_cameraInfo[i].position);
+        ALOGI("sensor_mount_angle: %d", HAL_cameraInfo[i].sensor_mount_angle);
+    }
+
+#if DLOPEN_LIBMMCAMERA
+    if (libhandle) {
+        ::dlclose(libhandle);
+        ALOGV("getCameraInfo: dlclose(libqcamera)");
+    }
+#endif
+    ALOGI("getCameraInfo: OUT");
+}
+
+extern "C" int HAL_isIn3DMode()
+{
+    return HAL_currentCameraMode == CAMERA_MODE_3D;
+}
+
+extern "C" int HAL_getNumberOfCameras()
+{
+    QualcommCameraHardware::getCameraInfo();
+    return HAL_numOfCameras;
+}
+
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
+{
+    int i;
+    char mDeviceName[PROPERTY_VALUE_MAX];
+    if(cameraInfo == NULL) {
+        ALOGE("cameraInfo is NULL");
+        return;
+    }
+
+    property_get("ro.board.platform",mDeviceName," ");
+
+    for(i = 0; i < HAL_numOfCameras; i++) {
+        if(i == cameraId) {
+            ALOGI("Found a matching camera info for ID %d", cameraId);
+            cameraInfo->facing = (HAL_cameraInfo[i].position == BACK_CAMERA)?
+                                   CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
+            // App orientation compensation is not needed on the 7x25A/7x27/7x27A
+            // and 8660 targets, or for front cameras: the sensor mount angle is
+            // used directly. Other targets compensate against APP_ORIENTATION.
+            if (cameraInfo->facing == CAMERA_FACING_FRONT ||
+                !strncmp(mDeviceName, "msm7625a", 8) ||
+                !strncmp(mDeviceName, "msm7627a", 8) ||
+                !strncmp(mDeviceName, "msm7627", 7) ||
+                !strncmp(mDeviceName, "msm8660", 7))
+                cameraInfo->orientation = HAL_cameraInfo[i].sensor_mount_angle;
+            else
+                cameraInfo->orientation = ((APP_ORIENTATION - HAL_cameraInfo[i].sensor_mount_angle) + 360) % 360;
+
+            ALOGI("%s: orientation = %d", __FUNCTION__, cameraInfo->orientation);
+            sensor_rotation = HAL_cameraInfo[i].sensor_mount_angle;
+            cameraInfo->mode = 0;
+            if(HAL_cameraInfo[i].modes_supported & CAMERA_MODE_2D)
+                cameraInfo->mode |= CAMERA_SUPPORT_MODE_2D;
+            if(HAL_cameraInfo[i].modes_supported & CAMERA_MODE_3D)
+                cameraInfo->mode |= CAMERA_SUPPORT_MODE_3D;
+            if((HAL_cameraInfo[i].position == BACK_CAMERA )&&
+                !strncmp(mDeviceName, "msm8660", 7)){
+                cameraInfo->mode |= CAMERA_ZSL_MODE;
+            } else{
+                cameraInfo->mode |= CAMERA_NONZSL_MODE;
+            }
+
+            ALOGI("%s: modes supported = %d", __FUNCTION__, cameraInfo->mode);
+
+            return;
+        }
+    }
+//    ALOGE("Unable to find matching camera info for ID %d", cameraId);
+}
+
+}; // namespace android
diff --git a/camera/QualcommCameraHardware.h b/camera/QualcommCameraHardware.h
new file mode 100644
index 0000000..01b8836
--- /dev/null
+++ b/camera/QualcommCameraHardware.h
@@ -0,0 +1,650 @@
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2009-2011, Code Aurora Forum. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_HARDWARE_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_HARDWARE_H
+
+#define ICS
+
+//#include <camera/CameraHardwareInterface.h>
+#include <utils/threads.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <stdint.h>
+#ifdef ICS
+#include <hardware/camera.h>
+#endif
+#include <camera/Camera.h>
+#include "QCameraParameters.h"
+#include <system/window.h>
+#include <system/camera.h>
+#include <hardware/camera.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+#include "QCamera_Intf.h"
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/ion.h>
+#include <mm_camera_interface.h>
+}
+
+struct str_map {
+    const char *const desc;
+    int val;
+};
+
+struct buffer_map {
+    msm_frame *frame;
+    buffer_handle_t * buffer;
+    int size;
+    int lockState;
+};
+
+typedef enum {
+    TARGET_MSM7625,
+    TARGET_MSM7625A,
+    TARGET_MSM7627,
+    TARGET_MSM7627A,
+    TARGET_QSD8250,
+    TARGET_MSM7630,
+    TARGET_MSM8660,
+    TARGET_MAX
+}targetType;
+
+typedef enum {
+    LIVESHOT_DONE,
+    LIVESHOT_IN_PROGRESS,
+    LIVESHOT_STOPPED
+}liveshotState;
+#define MIN_UNDEQUEUD_BUFFER_COUNT 2
+struct target_map {
+    const char *targetStr;
+    targetType targetEnum;
+};
+
+enum {
+    BUFFER_UNLOCKED,
+    BUFFER_LOCKED
+};
+
+struct board_property{
+    targetType target;
+    unsigned int previewSizeMask;
+    bool hasSceneDetect;
+    bool hasSelectableZoneAf;
+    bool hasFaceDetect;
+};
+
+namespace android {
+
+class QualcommCameraHardware : public RefBase{
+public:
+
+    //virtual sp<IMemoryHeap> getPreviewHeap() const;
+    //virtual sp<IMemoryHeap> getRawHeap() const;
+
+    void setCallbacks(camera_notify_callback notify_cb,
+                            camera_data_callback data_cb,
+                            camera_data_timestamp_callback data_cb_timestamp,
+                            camera_request_memory get_memory,
+                            void *user);
+
+    virtual void enableMsgType(int32_t msgType);
+    virtual void disableMsgType(int32_t msgType);
+    virtual bool msgTypeEnabled(int32_t msgType);
+
+    virtual status_t dump(int fd, const Vector<String16>& args) const;
+    virtual status_t startPreview();
+    virtual void stopPreview();
+    virtual bool previewEnabled();
+    virtual status_t startRecording();
+    virtual void stopRecording();
+    virtual bool recordingEnabled();
+    virtual void releaseRecordingFrame(const void *opaque);
+    virtual status_t autoFocus();
+    virtual status_t cancelAutoFocus();
+    virtual status_t takePicture();
+    virtual status_t takeLiveSnapshot();
+    virtual status_t takeLiveSnapshotInternal();
+    void set_liveshot_exifinfo();
+    virtual status_t cancelPicture();
+    virtual status_t setParameters(const QCameraParameters& params);
+    virtual QCameraParameters getParameters() const;
+    virtual status_t sendCommand(int32_t command, int32_t arg1, int32_t arg2);
+    virtual int32_t getNumberOfVideoBuffers();
+    virtual sp<IMemory> getVideoBuffer(int32_t index);
+    virtual status_t getBufferInfo( sp<IMemory>& Frame, size_t *alignedSize);
+    virtual void encodeData( );
+#ifdef ICS
+    virtual status_t set_PreviewWindow(void* param);
+    virtual status_t setPreviewWindow(preview_stream_ops_t* window);
+#endif
+    virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) {return NO_ERROR;};
+    virtual void release();
+
+    static QualcommCameraHardware* createInstance();
+    static QualcommCameraHardware* getInstance();
+
+    void receivePreviewFrame(struct msm_frame *frame);
+    void receiveLiveSnapshot(uint32_t jpeg_size);
+    void receiveCameraStats(camstats_type stype, camera_preview_histogram_info* histinfo);
+    void receiveRecordingFrame(struct msm_frame *frame);
+    void receiveJpegPicture(status_t status, mm_camera_buffer_t *encoded_buffer);
+    void jpeg_set_location();
+    void receiveJpegPictureFragment(uint8_t *buf, uint32_t size);
+    void notifyShutter(bool mPlayShutterSoundOnly);
+    void receive_camframe_error_timeout();
+    static void getCameraInfo();
+    void receiveRawPicture(status_t status,struct msm_frame *postviewframe, struct msm_frame *mainframe);
+    int allocate_ion_memory(int *main_ion_fd, struct ion_allocation_data* alloc,
+    struct ion_fd_data* ion_info_fd, int ion_type, int size, int *memfd);
+    int deallocate_ion_memory(int *main_ion_fd, struct ion_fd_data* ion_info_fd);
+    virtual ~QualcommCameraHardware();
+    int storeMetaDataInBuffers(int enable);
+
+private:
+    QualcommCameraHardware();
+    status_t startPreviewInternal();
+    status_t startRecordingInternal();
+    status_t setHistogramOn();
+    status_t setHistogramOff();
+    status_t runFaceDetection();
+    status_t setFaceDetection(const char *str);
+
+    void stopPreviewInternal();
+    friend void *auto_focus_thread(void *user);
+    void runAutoFocus();
+    status_t cancelAutoFocusInternal();
+    bool native_set_dimension (int camfd);
+    bool native_jpeg_encode (void);
+    bool updatePictureDimension(const QCameraParameters& params, int& width, int& height);
+    bool native_set_parms(camera_parm_type_t type, uint16_t length, void *value);
+    bool native_set_parms(camera_parm_type_t type, uint16_t length, void *value, int *result);
+    bool native_zoom_image(int fd, int srcOffset, int dstOffset, common_crop_t *crop);
+
+    status_t startInitialPreview();
+    void stopInitialPreview();
+    status_t getBuffersAndStartPreview();
+    void relinquishBuffers();
+
+    QualcommCameraHardware * singleton;
+
+    /* These constants reflect the number of buffers that libmmcamera requires
+       for preview and raw, and need to be updated when libmmcamera
+       changes.
+    */
+    static const int kPreviewBufferCount = NUM_PREVIEW_BUFFERS;
+    static const int kRawBufferCount = 1;
+    static const int kJpegBufferCount = 1;
+    static const int kTotalPreviewBufferCount = kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT;
+    int numCapture;
+    int numJpegReceived;
+    int jpegPadding;
+
+    QCameraParameters mParameters;
+    unsigned int frame_size;
+    bool mCameraRunning;
+    Mutex mCameraRunningLock;
+    bool mPreviewInitialized;
+
+
+    class MMCameraDL : public RefBase{
+    private:
+        static wp<MMCameraDL> instance;
+        MMCameraDL();
+        virtual ~MMCameraDL();
+        void *libmmcamera;
+        static Mutex singletonLock;
+    public:
+        static sp<MMCameraDL> getInstance();
+        void * pointer();
+    };
+
+    // This class represents a heap which maintains several contiguous
+    // buffers.  The heap may be backed by pmem (when pmem_pool contains
+    // the name of a /dev/pmem* file), or by ashmem (when pmem_pool == NULL).
+    struct MemPool : public RefBase {
+        MemPool(int buffer_size, int num_buffers,
+                int frame_size,
+                const char *name);
+
+        virtual ~MemPool(); // = 0;
+
+        void completeInitialization();
+        bool initialized() const {
+            return mHeap != NULL && mHeap->base() != MAP_FAILED;
+        }
+
+        virtual status_t dump(int fd, const Vector<String16>& args) const;
+
+        int mBufferSize;
+        int mAlignedBufferSize;
+        int mNumBuffers;
+        int mFrameSize;
+        sp<MemoryHeapBase> mHeap;
+        sp<MemoryBase> *mBuffers;
+
+        const char *mName;
+    };
+      struct DispMemPool : public MemPool {
+          DispMemPool(int fd, int buffer_size,
+          int num_buffers, int frame_size,
+          const char *name);
+          virtual ~DispMemPool();
+          int mFD;
+      };
+      sp<DispMemPool> mPreviewHeap[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+
+    struct AshmemPool : public MemPool {
+        AshmemPool(int buffer_size, int num_buffers,
+                   int frame_size,
+                   const char *name);
+    };
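+    /* Illustrative use (sketch only; the real sizes and names are chosen by
+     * the caller at runtime):
+     *   mJpegHeap = new AshmemPool(mJpegMaxSize, kJpegBufferCount, 0, "jpeg");
+     * AshmemPool is backed by ashmem, while PmemPool/IonPool below are backed
+     * by a /dev/pmem* device or an ION heap respectively.
+     */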
+
+    struct PmemPool : public MemPool {
+        PmemPool(const char *pmem_pool,
+                 int flags, int pmem_type,
+                 int buffer_size, int num_buffers,
+                 int frame_size, int cbcr_offset,
+                 int yoffset, const char *name);
+        virtual ~PmemPool();
+        int mFd;
+        int mPmemType;
+        int mCbCrOffset;
+        int myOffset;
+        int mCameraControlFd;
+        uint32_t mAlignedSize;
+        struct pmem_region mSize;
+        sp<QualcommCameraHardware::MMCameraDL> mMMCameraDLRef;
+    };
+//TODO
+    struct IonPool : public MemPool {
+        IonPool( int ion_heap_id, int flags, int ion_type,
+             int buffer_size, int num_buffers,
+             int frame_size, int cbcr_offset,
+             int yoffset, const char *name);
+    virtual ~IonPool();
+    int mFd;
+    int mIonType;
+    int mCbCrOffset;
+    int myOffset;
+    int mCameraControlFd;
+    uint32_t mAlignedSize;
+    sp<QualcommCameraHardware::MMCameraDL> mMMCameraDLRef;
+    static const char mIonDevName[];
+    };
+#ifdef USE_ION
+//    sp<IonPool> mPreviewHeap;
+    sp<IonPool> mYV12Heap;
+    sp<IonPool> mRecordHeap;
+    sp<IonPool> mThumbnailHeap;
+    sp<IonPool> mRawHeap;
+    sp<IonPool> mDisplayHeap;
+    sp<AshmemPool> mJpegHeap;
+    sp<AshmemPool> mStatHeap;
+    sp<AshmemPool> mMetaDataHeap;
+    sp<IonPool> mRawSnapShotPmemHeap;
+    sp<IonPool> mLastPreviewFrameHeap;
+    sp<IonPool> mPostviewHeap;
+#else
+//    sp<PmemPool> mPreviewHeap;
+    sp<PmemPool> mYV12Heap;
+    sp<PmemPool> mRecordHeap;
+    sp<PmemPool> mThumbnailHeap;
+    sp<PmemPool> mRawHeap;
+    sp<PmemPool> mDisplayHeap;
+    sp<AshmemPool> mJpegHeap;
+    sp<AshmemPool> mStatHeap;
+    sp<AshmemPool> mMetaDataHeap;
+    sp<PmemPool> mRawSnapShotPmemHeap;
+    sp<PmemPool> mLastPreviewFrameHeap;
+    sp<PmemPool> mPostviewHeap;
+    sp<PmemPool> mPostViewHeap;
+    sp<PmemPool> mInitialPreviewHeap;
+#endif
+
+    sp<MMCameraDL> mMMCameraDLRef;
+
+    bool startCamera();
+    bool initPreview();
+    bool initRecord();
+    void deinitPreview();
+    bool initRaw(bool initJpegHeap);
+    bool initZslBuffers(bool initJpegHeap);
+    bool deinitZslBuffers();
+    bool initLiveSnapshot(int videowidth, int videoheight);
+    bool initRawSnapshot();
+    void deinitRaw();
+    void deinitRawSnapshot();
+    bool mPreviewThreadRunning;
+    bool createSnapshotMemory (int numberOfRawBuffers, int numberOfJpegBuffers,
+                                   bool initJpegHeap, int snapshotFormat = 1 /*PICTURE_FORMAT_JPEG*/);
+    Mutex mPreviewThreadWaitLock;
+    Condition mPreviewThreadWait;
+    friend void *preview_thread(void *user);
+    friend void *openCamera(void *data);
+    void runPreviewThread(void *data);
+    friend void *hfr_thread(void *user);
+    void runHFRThread(void *data);
+    bool mHFRThreadRunning;
+    int mapBuffer(msm_frame *frame);
+    int mapRawBuffer(msm_frame *frame);
+    int mapThumbnailBuffer(msm_frame *frame);
+    int mapJpegBuffer(mm_camera_buffer_t* buffer);
+    int mapvideoBuffer(msm_frame *frame);
+    int mapFrame(buffer_handle_t *buffer);
+    Mutex mHFRThreadWaitLock;
+
+    class FrameQueue : public RefBase{
+    private:
+        Mutex mQueueLock;
+        Condition mQueueWait;
+        bool mInitialized;
+
+        Vector<struct msm_frame *> mContainer;
+    public:
+        FrameQueue();
+        virtual ~FrameQueue();
+        bool add(struct msm_frame *element);
+        void flush();
+        struct msm_frame* get();
+        void init();
+        void deinit();
+        bool isInitialized();
+    };
+
+    FrameQueue mPreviewBusyQueue;
+
+    bool mFrameThreadRunning;
+    Mutex mFrameThreadWaitLock;
+    Condition mFrameThreadWait;
+    friend void *frame_thread(void *user);
+    void runFrameThread(void *data);
+
+    //720p recording video thread
+    bool mVideoThreadExit;
+    bool mVideoThreadRunning;
+    Mutex mVideoThreadWaitLock;
+    Condition mVideoThreadWait;
+    friend void *video_thread(void *user);
+    void runVideoThread(void *data);
+
+    // smooth zoom
+    int mTargetSmoothZoom;
+    bool mSmoothzoomThreadExit;
+    bool mSmoothzoomThreadRunning;
+    Mutex mSmoothzoomThreadWaitLock;
+    Mutex mSmoothzoomThreadLock;
+    Condition mSmoothzoomThreadWait;
+    friend void *smoothzoom_thread(void *user);
+    void runSmoothzoomThread(void* data);
+
+    // For Histogram
+    int mStatsOn;
+    int mCurrent;
+    bool mSendData;
+    Mutex mStatsWaitLock;
+    Condition mStatsWait;
+
+    //For Face Detection
+    int mFaceDetectOn;
+    bool mSendMetaData;
+    Mutex mMetaDataWaitLock;
+
+    bool mShutterPending;
+    Mutex mShutterLock;
+
+    bool mSnapshotThreadRunning;
+    Mutex mSnapshotThreadWaitLock;
+    Condition mSnapshotThreadWait;
+    friend void *snapshot_thread(void *user);
+    void runSnapshotThread(void *data);
+    Mutex mRawPictureHeapLock;
+    bool mJpegThreadRunning;
+    Mutex mJpegThreadWaitLock;
+    Condition mJpegThreadWait;
+    bool mInSnapshotMode;
+    Mutex mInSnapshotModeWaitLock;
+    Condition mInSnapshotModeWait;
+    bool mEncodePending;
+    Mutex mEncodePendingWaitLock;
+    Condition mEncodePendingWait;
+    bool mBuffersInitialized;
+
+    void debugShowPreviewFPS() const;
+    void debugShowVideoFPS() const;
+
+    int mSnapshotFormat;
+    bool mFirstFrame;
+    void hasAutoFocusSupport();
+    void filterPictureSizes();
+    void filterPreviewSizes();
+    static void storeTargetType();
+    bool supportsSceneDetection();
+    bool supportsSelectableZoneAf();
+    bool supportsFaceDetection();
+
+    void initDefaultParameters();
+    bool initImageEncodeParameters(int size);
+    bool initZslParameter(void);
+    status_t setCameraMode(const QCameraParameters& params);
+    status_t setPreviewSize(const QCameraParameters& params);
+    status_t setJpegThumbnailSize(const QCameraParameters& params);
+    status_t setPreviewFpsRange(const QCameraParameters& params);
+    status_t setPreviewFrameRate(const QCameraParameters& params);
+    status_t setPreviewFrameRateMode(const QCameraParameters& params);
+    status_t setRecordSize(const QCameraParameters& params);
+    status_t setPictureSize(const QCameraParameters& params);
+    status_t setJpegQuality(const QCameraParameters& params);
+    status_t setAntibanding(const QCameraParameters& params);
+    status_t setEffect(const QCameraParameters& params);
+    status_t setRecordingHint(const QCameraParameters& params);
+    status_t setExposureCompensation(const QCameraParameters &params);
+    status_t setAutoExposure(const QCameraParameters& params);
+    status_t setWhiteBalance(const QCameraParameters& params);
+    status_t setFlash(const QCameraParameters& params);
+    status_t setGpsLocation(const QCameraParameters& params);
+    status_t setRotation(const QCameraParameters& params);
+    status_t setZoom(const QCameraParameters& params);
+    status_t setFocusMode(const QCameraParameters& params);
+    status_t setFocusAreas(const QCameraParameters& params);
+    status_t setMeteringAreas(const QCameraParameters& params);
+    status_t setBrightness(const QCameraParameters& params);
+    status_t setSkinToneEnhancement(const QCameraParameters& params);
+    status_t setOrientation(const QCameraParameters& params);
+    status_t setLensshadeValue(const QCameraParameters& params);
+    status_t setMCEValue(const QCameraParameters& params);
+    status_t setHDRImaging(const QCameraParameters& params);
+    status_t setExpBracketing(const QCameraParameters& params);
+    status_t setISOValue(const QCameraParameters& params);
+    status_t setPictureFormat(const QCameraParameters& params);
+    status_t setSharpness(const QCameraParameters& params);
+    status_t setContrast(const QCameraParameters& params);
+    status_t setSaturation(const QCameraParameters& params);
+    status_t setSceneMode(const QCameraParameters& params);
+    status_t setContinuousAf(const QCameraParameters& params);
+    status_t setTouchAfAec(const QCameraParameters& params);
+    status_t setSceneDetect(const QCameraParameters& params);
+    status_t setStrTextures(const QCameraParameters& params);
+    status_t setPreviewFormat(const QCameraParameters& params);
+    status_t setSelectableZoneAf(const QCameraParameters& params);
+    status_t setHighFrameRate(const QCameraParameters& params);
+    bool register_record_buffers(bool register_buffer);
+    status_t setRedeyeReduction(const QCameraParameters& params);
+    status_t setDenoise(const QCameraParameters& params);
+    status_t setZslParam(const QCameraParameters& params);
+    status_t setSnapshotCount(const QCameraParameters& params);
+    void setGpsParameters();
+    bool storePreviewFrameForPostview();
+    bool isValidDimension(int w, int h);
+    status_t updateFocusDistances(const char *focusmode);
+    int mStoreMetaDataInFrame;
+
+    Mutex mLock;
+    Mutex mDisplayLock;
+    Mutex mCamframeTimeoutLock;
+    bool camframe_timeout_flag;
+    bool mReleasedRecordingFrame;
+
+    Mutex mParametersLock;
+
+
+    Mutex mCallbackLock;
+    Mutex mOverlayLock;
+    Mutex mRecordLock;
+    Mutex mRecordFrameLock;
+    Condition mRecordWait;
+    Condition mStateWait;
+
+    /* mJpegSize keeps track of the size of the accumulated JPEG.  We clear it
+       when we are about to take a picture, so at any time it contains either
+       zero, or the size of the last JPEG picture taken.
+    */
+    uint32_t mJpegSize;
+    unsigned int        mPreviewFrameSize;
+    unsigned int        mRecordFrameSize;
+    int                 mRawSize;
+    int                 mCbCrOffsetRaw;
+    int                 mYOffset;
+    int                 mJpegMaxSize;
+    int32_t                 mStatSize;
+
+
+    cam_ctrl_dimension_t mDimension;
+    bool mAutoFocusThreadRunning;
+    Mutex mAutoFocusThreadLock;
+
+    Mutex mAfLock;
+
+    pthread_t mFrameThread;
+    pthread_t mVideoThread;
+    pthread_t mPreviewThread;
+    pthread_t mSnapshotThread;
+    pthread_t mDeviceOpenThread;
+    pthread_t mSmoothzoomThread;
+    pthread_t mHFRThread;
+
+    common_crop_t mCrop;
+
+    bool mInitialized;
+
+    int mBrightness;
+    int mSkinToneEnhancement;
+    int mHJR;
+    unsigned int mThumbnailMapped[MAX_SNAPSHOT_BUFFERS];
+    unsigned int mThumbnailLockState[MAX_SNAPSHOT_BUFFERS];
+    int mRawfd[MAX_SNAPSHOT_BUFFERS];
+    int mRawSnapshotfd;
+    int mJpegfd[MAX_SNAPSHOT_BUFFERS];
+    int mRecordfd[9];
+    camera_memory_t *mPreviewMapped[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+    camera_memory_t *mRawMapped[MAX_SNAPSHOT_BUFFERS];
+    camera_memory_t *mJpegMapped[MAX_SNAPSHOT_BUFFERS];
+    camera_memory_t *mRawSnapshotMapped;
+    camera_memory_t *mStatsMapped[3];
+    camera_memory_t *mRecordMapped[9];
+    camera_memory_t *mJpegCopyMapped;
+    camera_memory_t* metadata_memory[9];
+    camera_memory_t *mJpegLiveSnapMapped;
+    int raw_main_ion_fd[MAX_SNAPSHOT_BUFFERS];
+    int raw_snapshot_main_ion_fd;
+    int Jpeg_main_ion_fd[MAX_SNAPSHOT_BUFFERS];
+    int record_main_ion_fd[9];
+    struct ion_allocation_data raw_alloc[MAX_SNAPSHOT_BUFFERS];
+    struct ion_allocation_data raw_snapshot_alloc;
+    struct ion_allocation_data Jpeg_alloc[MAX_SNAPSHOT_BUFFERS];
+    struct ion_allocation_data record_alloc[9];
+    struct ion_fd_data raw_ion_info_fd[MAX_SNAPSHOT_BUFFERS];
+    struct ion_fd_data raw_snapshot_ion_info_fd;
+    struct ion_fd_data Jpeg_ion_info_fd[MAX_SNAPSHOT_BUFFERS];
+    struct ion_fd_data record_ion_info_fd[9];
+
+    struct msm_frame frames[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+    struct buffer_map frame_buffer[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+    struct msm_frame *recordframes;
+    struct msm_frame *rawframes;
+    bool *record_buffers_tracking_flag;
+    bool mInPreviewCallback;
+    preview_stream_ops_t* mPreviewWindow;
+    android_native_buffer_t *mPostViewBuffer;
+    buffer_handle_t *mThumbnailBuffer[MAX_SNAPSHOT_BUFFERS];
+    bool mIs3DModeOn;
+
+    int32_t mMsgEnabled;    // camera msg to be handled
+    camera_notify_callback mNotifyCallback;
+    camera_data_callback mDataCallback;
+    camera_data_timestamp_callback mDataCallbackTimestamp;
+    camera_request_memory mGetMemory;
+    void *mCallbackCookie;  // same for all callbacks
+    int mDebugFps;
+    int kPreviewBufferCountActual;
+    int previewWidth, previewHeight;
+    int yv12framesize;
+    bool mSnapshotDone;
+    int maxSnapshotWidth;
+    int maxSnapshotHeight;
+    bool mHasAutoFocusSupport;
+    int videoWidth, videoHeight;
+
+    bool mDisEnabled;
+    int mRotation;
+    bool mResetWindowCrop;
+    int mThumbnailWidth, mThumbnailHeight;
+    status_t setVpeParameters();
+    status_t setDIS();
+    bool strTexturesOn;
+    int mPictureWidth;
+    int mPictureHeight;
+    int mPostviewWidth;
+    int mPostviewHeight;
+    int mTotalPreviewBufferCount;
+    int mDenoiseValue;
+    int mZslEnable;
+    int mZslPanorama;
+    bool mZslFlashEnable;
+    cam_3d_frame_format_t mSnapshot3DFormat;
+    bool mSnapshotCancel;
+    bool mHFRMode;
+    Mutex mSnapshotCancelLock;
+    int mActualPictWidth;
+    int mActualPictHeight;
+    bool mUseJpegDownScaling;
+    bool mPreviewStopping;
+    bool mInHFRThread;
+    Mutex mPmemWaitLock;
+    Condition mPmemWait;
+    bool mPrevHeapDeallocRunning;
+    bool mHdrMode;
+    bool mExpBracketMode;
+
+    bool mMultiTouch;
+
+    int mRecordingState;
+
+    int mNumFDRcvd;
+    int mFacesDetected;
+    int mFaceArray[MAX_ROI * 4 + 1];
+
+};
+
+extern "C" int HAL_getNumberOfCameras();
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo);
+extern "C" QualcommCameraHardware* HAL_openCameraHardware(int cameraId);
+}; // namespace android
+
+#endif
diff --git a/camera/configure.ac b/camera/configure.ac
new file mode 100644
index 0000000..790d7e0
--- /dev/null
+++ b/camera/configure.ac
@@ -0,0 +1,81 @@
+AC_PREREQ(2.61)
+
+AC_INIT([camera-hal],1.0.0)
+
+AM_INIT_AUTOMAKE([-Werror -Wall gnu foreign])
+
+AM_MAINTAINER_MODE
+
+AC_CONFIG_HEADER([configure.h])
+AC_CONFIG_MACRO_DIR([m4])
+
+# Checks for programs.
+AM_PROG_AS
+AC_PROG_CC
+AC_PROG_CXX
+AM_PROG_CC_C_O
+AC_PROG_LIBTOOL
+AC_PROG_AWK
+AC_PROG_CPP
+AC_PROG_INSTALL
+AC_PROG_LN_S
+AC_PROG_MAKE_SET
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_TYPE_SIZE_T
+AC_TYPE_SSIZE_T
+AC_TYPE_UINT16_T
+AC_TYPE_UINT32_T
+AC_TYPE_UINT64_T
+AC_TYPE_UINT8_T
+
+AC_CHECK_TOOL(OBJCOPY, objcopy, false)
+
+COMPILE_CAMERA=yes
+AMSS_VERSION=
+VFE_VERS=
+MSM_VERSION=
+DEBUG_CPPFLAGS=
+DEBUG_CFLAGS=
+DEBUG_LDFLAGS=
+
+AC_ARG_ENABLE([target],
+	[AS_HELP_STRING([--enable-target=TARGET],[target to build for])],
+	[],
+	[enable_target=none]
+)
+
+if test "x$enable_target" = "xmsm7627a"; then
+	MSM_VERSION=7x27A
+	BUILD_UNIFIED_CODE=false
+	BUILD_JPEG=false
+elif test "x$enable_target" = "xmsm8960"; then
+	MSM_VERSION=8960
+	BUILD_UNIFIED_CODE=false
+	BUILD_JPEG=true
+else
+	MSM_VERSION=
+	BUILD_UNIFIED_CODE=false
+	BUILD_JPEG=false
+fi
+
+AC_ARG_ENABLE([debug],
+	[AS_HELP_STRING([--enable-debug],[Build with debug flags and options])],
+	[DEBUG=$enableval],
+	[DEBUG=no]
+)
+if test "x$DEBUG" = "xyes"; then
+	DEBUG_CPPFLAGS="${DEBUG_CPPFLAGS} -DLOG_DEBUG -DLOG_TAG=\"CameraService\""
+	DEBUG_CFLAGS="${DEBUG_CFLAGS} -DLOG_DEBUG -DLOG_TAG=\"CameraService\" -g -O0"
+fi
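+
+# Example invocation (illustrative): configure for an msm8960 target with
+# debug logging enabled:
+#   ./configure --enable-target=msm8960 --enable-debug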
+
+AC_SUBST([MSM_VERSION])
+AM_CONDITIONAL([MSM7X27A], [test "x$MSM_VERSION" = "x7x27A"])
+AM_CONDITIONAL([MSM8960], [test "x$MSM_VERSION" = "x8960"])
+AM_CONDITIONAL([BUILD_UNIFIED_CODE], [test "x$BUILD_UNIFIED_CODE" = "xtrue"])
+AM_CONDITIONAL([BUILD_JPEG], [test "x$BUILD_JPEG" = "xtrue"])
+
+AC_OUTPUT([ \
+	Makefile
+])
+
diff --git a/camera/mm-camera-interface/Android.mk b/camera/mm-camera-interface/Android.mk
new file mode 100644
index 0000000..a3372c6
--- /dev/null
+++ b/camera/mm-camera-interface/Android.mk
@@ -0,0 +1,40 @@
+LOCAL_PATH:= $(call my-dir)
+LOCAL_DIR_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+MM_CAM_FILES:= \
+        mm_camera_interface2.c \
+        mm_camera_stream.c \
+        mm_camera_channel.c \
+        mm_camera.c \
+        mm_camera_poll_thread.c \
+        mm_camera_notify.c \
+        mm_camera_sock.c \
+        mm_camera_helper.c \
+        mm_omx_jpeg_encoder.c
+
+LOCAL_CFLAGS+= -D_ANDROID_
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS += mm_camera_interface2.h
+LOCAL_COPY_HEADERS += mm_omx_jpeg_encoder.h
+
+LOCAL_C_INCLUDES+= $(LOCAL_PATH)/..
+LOCAL_C_INCLUDES+= \
+    $(TARGET_OUT_HEADERS)/mm-camera \
+    $(TARGET_OUT_HEADERS)/mm-camera/common \
+    $(TARGET_OUT_HEADERS)/mm-still \
+    $(TARGET_OUT_HEADERS)/mm-still/jpeg \
+    $(TARGET_OUT_HEADERS)/mm-still/mm-omx
+
+LOCAL_C_INCLUDES+= hardware/qcom/media/mm-core/inc
+LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/socket.h
+LOCAL_CFLAGS += -include bionic/libc/kernel/common/linux/un.h
+
+LOCAL_SRC_FILES := $(MM_CAM_FILES)
+
+LOCAL_MODULE           := libmmcamera_interface2
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libmmstillomx libimage-jpeg-enc-omx-comp
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/camera/mm-camera-interface/mm_camera.c b/camera/mm-camera-interface/mm_camera.c
new file mode 100644
index 0000000..31dfadf
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera.c
@@ -0,0 +1,1089 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_camera_sock.h"
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+static int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+    cam_ctrl_type type, uint32_t length, void *value);
+static int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+    cam_ctrl_type type, uint32_t length, void *value, int timeout);
+static int32_t mm_camera_ctrl_set_specialEffect (mm_camera_obj_t *my_obj, int effect) {
+    struct v4l2_control ctrl;
+    if (effect == CAMERA_EFFECT_MAX)
+        effect = CAMERA_EFFECT_OFF;
+    int rc = 0;
+
+    ctrl.id = MSM_V4L2_PID_EFFECT;
+    ctrl.value = effect;
+    rc = ioctl(my_obj->ctrl_fd, VIDIOC_S_CTRL, &ctrl);
+    return rc;
+}
+
+static int32_t mm_camera_ctrl_set_antibanding (mm_camera_obj_t *my_obj, int antibanding) {
+    int rc = 0;
+    struct v4l2_control ctrl;
+
+    ctrl.id = V4L2_CID_POWER_LINE_FREQUENCY;
+    ctrl.value = antibanding;
+    rc = ioctl(my_obj->ctrl_fd, VIDIOC_S_CTRL, &ctrl);
+    return rc;
+}
+
+static int32_t mm_camera_ctrl_set_auto_focus (mm_camera_obj_t *my_obj, int value)
+{
+    int rc = 0;
+    struct v4l2_queryctrl queryctrl;
+
+    memset (&queryctrl, 0, sizeof (queryctrl));
+    queryctrl.id = V4L2_CID_FOCUS_AUTO;
+
+    if(value != 0 && value != 1) {
+        CDBG("%s:boolean required, invalid value = %d\n",__func__, value);
+        return -MM_CAMERA_E_INVALID_INPUT;
+    }
+    if (-1 == ioctl (my_obj->ctrl_fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+        CDBG ("V4L2_CID_FOCUS_AUTO is not supported\n");
+    } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+        CDBG ("%s:V4L2_CID_FOCUS_AUTO is not supported\n", __func__);
+    } else {
+        if(0 != (rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                V4L2_CID_FOCUS_AUTO, value))){
+            CDBG("%s: error, id=0x%x, value=%d, rc = %d\n",
+                     __func__, V4L2_CID_FOCUS_AUTO, value, rc);
+            rc = -1;
+        }
+    }
+    return rc;
+}
+
+static int32_t mm_camera_ctrl_set_whitebalance (mm_camera_obj_t *my_obj, int mode) {
+
+    int rc = 0, value;
+    uint32_t id;
+
+    switch(mode) {
+    case MM_CAMERA_WHITE_BALANCE_AUTO:
+        id = V4L2_CID_AUTO_WHITE_BALANCE;
+        value = 1; /* TRUE */
+        break;
+    case MM_CAMERA_WHITE_BALANCE_OFF:
+        id = V4L2_CID_AUTO_WHITE_BALANCE;
+        value = 0; /* FALSE */
+        break;
+    default:
+        id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+        if(mode == WHITE_BALANCE_DAYLIGHT) value = 6500;
+        else if(mode == WHITE_BALANCE_INCANDESCENT) value = 2800;
+        else if(mode == WHITE_BALANCE_FLUORESCENT ) value = 4200;
+        else if(mode == WHITE_BALANCE_CLOUDY) value = 7500;
+        else
+            value = 4200;
+    }
+    if(0 != (rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+            id, value))){
+        CDBG("%s: error, exp_metering_action_param=%d, rc = %d\n", __func__, value, rc);
+        goto end;
+    }
+end:
+    return rc;
+}
+
+static int32_t mm_camera_ctrl_set_toggle_afr (mm_camera_obj_t *my_obj) {
+    int rc = 0;
+    int value = 0;
+    if(0 != (rc =  mm_camera_util_g_ctrl(my_obj->ctrl_fd,
+            V4L2_CID_EXPOSURE_AUTO, &value))){
+        goto end;
+    }
+    /* V4L2_CID_EXPOSURE_AUTO needs to be AUTO or SHUTTER_PRIORITY */
+    if (value != V4L2_EXPOSURE_AUTO && value != V4L2_EXPOSURE_SHUTTER_PRIORITY) {
+    CDBG("%s: V4L2_CID_EXPOSURE_AUTO needs to be AUTO/SHUTTER_PRIORITY\n",
+        __func__);
+    return -1;
+  }
+    if(0 != (rc =  mm_camera_util_g_ctrl(my_obj->ctrl_fd,
+            V4L2_CID_EXPOSURE_AUTO_PRIORITY, &value))){
+        goto end;
+    }
+    value = !value;
+    if(0 != (rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+            V4L2_CID_EXPOSURE_AUTO_PRIORITY, value))){
+        goto end;
+    }
+end:
+    return rc;
+}
+
+static mm_camera_channel_type_t mm_camera_util_opcode_2_ch_type(
+                             mm_camera_obj_t *my_obj,
+                             mm_camera_ops_type_t opcode)
+{
+    mm_camera_channel_type_t type = MM_CAMERA_CH_MAX;
+    switch(opcode) {
+    case MM_CAMERA_OPS_PREVIEW:
+        return MM_CAMERA_CH_PREVIEW;
+    case MM_CAMERA_OPS_VIDEO:
+        return MM_CAMERA_CH_VIDEO;
+    case MM_CAMERA_OPS_SNAPSHOT:
+        return MM_CAMERA_CH_SNAPSHOT;
+    case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+        return MM_CAMERA_CH_SNAPSHOT;
+    case MM_CAMERA_OPS_RAW:
+        return MM_CAMERA_CH_RAW;
+    case MM_CAMERA_OPS_ZSL:
+        return MM_CAMERA_CH_SNAPSHOT;
+    default:
+        break;
+    }
+    return type;
+}
+
+static int32_t mm_camera_util_set_op_mode(mm_camera_obj_t * my_obj,
+    mm_camera_op_mode_type_t *op_mode)
+{
+    int32_t rc = MM_CAMERA_OK;
+    uint32_t v4l2_op_mode = MSM_V4L2_CAM_OP_DEFAULT;
+
+    if (my_obj->op_mode == *op_mode)
+        goto end;
+    if(mm_camera_poll_busy(my_obj) == TRUE) {
+        CDBG("%s: cannot change op_mode while stream on\n", __func__);
+        rc = -MM_CAMERA_E_INVALID_OPERATION;
+        goto end;
+    }
+    switch(*op_mode) {
+    case MM_CAMERA_OP_MODE_ZSL:
+        v4l2_op_mode = MSM_V4L2_CAM_OP_ZSL;
+            break;
+    case MM_CAMERA_OP_MODE_CAPTURE:
+        v4l2_op_mode = MSM_V4L2_CAM_OP_CAPTURE;
+            break;
+    case MM_CAMERA_OP_MODE_VIDEO:
+        v4l2_op_mode = MSM_V4L2_CAM_OP_VIDEO;
+            break;
+    default:
+        rc = - MM_CAMERA_E_INVALID_INPUT;
+        goto end;
+        break;
+    }
+    if(0 != (rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+            MSM_V4L2_PID_CAM_MODE, v4l2_op_mode))){
+        CDBG("%s: input op_mode=%d, s_ctrl rc=%d\n", __func__, *op_mode, rc);
+        goto end;
+    }
+    /* if success update mode field */
+    my_obj->op_mode = *op_mode;
+end:
+    CDBG("%s: op_mode=%d,rc=%d\n", __func__, *op_mode, rc);
+    return rc;
+}
+
+int32_t mm_camera_set_general_parm(mm_camera_obj_t * my_obj, mm_camera_parm_t *parm)
+{
+    int rc = -MM_CAMERA_E_NOT_SUPPORTED;
+    int isZSL =0;
+
+    switch(parm->parm_type)  {
+    case MM_CAMERA_PARM_EXPOSURE:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_EXP_METERING,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_SHARPNESS:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_SHARPNESS,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_CONTRAST:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_CONTRAST,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_SATURATION:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_SATURATION,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_BRIGHTNESS:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_BRIGHTNESS,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_WHITE_BALANCE:
+        return mm_camera_ctrl_set_whitebalance(my_obj, *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_ISO:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_ISO,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_ZOOM:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_ZOOM_ABSOLUTE,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_LUMA_ADAPTATION:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_LUMA_ADAPTATION,
+                                     *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_ANTIBANDING:
+        return mm_camera_ctrl_set_antibanding (my_obj, *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_CONTINUOUS_AF:
+        return mm_camera_ctrl_set_auto_focus(my_obj, *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_HJR:
+        return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_HJR, *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_EFFECT:
+        return mm_camera_ctrl_set_specialEffect (my_obj, *((int *)(parm->p_value)));
+    case MM_CAMERA_PARM_FPS:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_FPS, sizeof(uint32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FPS_MODE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_FPS_MODE, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_EXPOSURE_COMPENSATION:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_EXPOSURE_COMPENSATION, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_LED_MODE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_LED_MODE, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_ROLLOFF:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_ROLLOFF, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_MODE:
+        my_obj->current_mode = *((camera_mode_t *)parm->p_value);
+        break;
+    case MM_CAMERA_PARM_FOCUS_RECT:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_FOCUS_RECT, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_AEC_ROI:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_AF_ROI:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_AF_ROI, sizeof(roi_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FOCUS_MODE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_AF_MODE, sizeof(int32_t), (void *)parm->p_value);
+#if 0 //to be enabled later: @punits
+    case MM_CAMERA_PARM_AF_MTR_AREA:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_AF_MTR_AREA, sizeof(af_mtr_area_t), (void *)parm->p_value);*/
+    case MM_CAMERA_PARM_AEC_MTR_AREA:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_AEC_MTR_AREA, sizeof(aec_mtr_area_t), (void *)parm->p_value);
+#endif
+    case MM_CAMERA_PARM_CAF_ENABLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_CAF, sizeof(uint32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_BESTSHOT_MODE:
+        CDBG("%s : MM_CAMERA_PARM_BESTSHOT_MODE value : %d",__func__,*((int *)(parm->p_value)));
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_BESTSHOT_MODE, sizeof(int32_t), (void *)parm->p_value);
+        break;
+    case MM_CAMERA_PARM_VIDEO_DIS:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_VIDEO_DIS_PARAMS, sizeof(video_dis_param_ctrl_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_VIDEO_ROT:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_VIDEO_ROT_PARAMS, sizeof(video_rotation_param_ctrl_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_SCE_FACTOR:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_SCE_FACTOR, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FD:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_FD, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_AEC_LOCK:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_AEC_LOCK, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_AWB_LOCK:
+        return mm_camera_send_native_ctrl_cmd(my_obj,   CAMERA_SET_AWB_LOCK,
+                                                     sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_MCE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,   CAMERA_SET_PARM_MCE,
+                                                     sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,   CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+                                                     sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,   CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+                                                     sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_RESET_LENS_TO_INFINITY:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                            CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+                            0, NULL);
+    case MM_CAMERA_PARM_SNAPSHOTDATA:
+        return mm_camera_send_native_ctrl_cmd(my_obj,   CAMERA_GET_PARM_SNAPSHOTDATA,
+                                                     sizeof(snapshotData_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_HFR:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_HFR, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_REDEYE_REDUCTION:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_REDEYE_REDUCTION, sizeof(int32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_WAVELET_DENOISE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_WAVELET_DENOISE, sizeof(denoise_param_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_3D_DISPLAY_DISTANCE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_3D_DISPLAY_DISTANCE, sizeof(float), (void *)parm->p_value);
+    case MM_CAMERA_PARM_3D_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_3D_VIEW_ANGLE, sizeof(uint32_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_ZOOM_RATIO:
+        break;
+    case MM_CAMERA_PARM_HISTOGRAM:
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_HISTOGRAM, sizeof(int8_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_JPEG_ROTATION:
+        if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL){
+           isZSL =1;
+        }
+        mm_jpeg_encoder_setRotation(*((int *)parm->p_value),isZSL);
+        return MM_CAMERA_OK;
+
+    case MM_CAMERA_PARM_ASD_ENABLE:
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                  CAMERA_SET_ASD_ENABLE, sizeof(uint32_t), (void *)parm->p_value);
+
+    case MM_CAMERA_PARM_RECORDING_HINT:
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                  CAMERA_SET_RECORDING_HINT, sizeof(uint32_t), (void *)parm->p_value);
+
+    case MM_CAMERA_PARM_PREVIEW_FORMAT:
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                  CAMERA_SET_PARM_PREVIEW_FORMAT, sizeof(uint32_t), (void *)parm->p_value);
+
+    case MM_CAMERA_PARM_DIS_ENABLE:
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                  CAMERA_SET_DIS_ENABLE, sizeof(uint32_t), (void *)parm->p_value);
+
+    case MM_CAMERA_PARM_FULL_LIVESHOT: {
+      my_obj->full_liveshot = *((int *)(parm->p_value));
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                  CAMERA_SET_FULL_LIVESHOT, sizeof(uint32_t), (void *)parm->p_value);
+    }
+
+    case MM_CAMERA_PARM_LOW_POWER_MODE:
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                  CAMERA_SET_LOW_POWER_MODE, sizeof(uint32_t), (void *)parm->p_value);
+
+    case MM_CAMERA_PARM_HDR:
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                        CAMERA_SET_PARM_HDR, sizeof(exp_bracketing_t), (void *)parm->p_value);
+
+    default:
+        CDBG("%s: default: parm %d not supported\n", __func__, parm->parm_type);
+        break;
+    }
+    return rc;
+}
+static int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+                    cam_ctrl_type type, uint32_t length, void *value)
+{
+    int rc = -1;
+    struct msm_ctrl_cmd ctrl_cmd;
+    memset(&ctrl_cmd, 0, sizeof(ctrl_cmd));
+    ctrl_cmd.type = type;
+    ctrl_cmd.length = (uint16_t)length;
+    ctrl_cmd.timeout_ms = 1000;
+    ctrl_cmd.value = value;
+    ctrl_cmd.status = CAM_CTRL_SUCCESS;
+    rc = mm_camera_util_private_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_CTRL_CMD,
+                                                                            (int)&ctrl_cmd);
+    CDBG("%s: type=%d, rc = %d, status = %d\n",
+                __func__, type, rc, ctrl_cmd.status);
+
+    if(rc != MM_CAMERA_OK || ((ctrl_cmd.status != CAM_CTRL_ACCEPTED) &&
+      (ctrl_cmd.status != CAM_CTRL_SUCCESS) &&
+      (ctrl_cmd.status != CAM_CTRL_INVALID_PARM)))
+        rc = -1;
+    return rc;
+}
+
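+/* Variant of mm_camera_send_native_ctrl_cmd() that lets the caller choose
+ * the timeout (in milliseconds) instead of the default 1000 ms. */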
+static int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+  cam_ctrl_type type, uint32_t length, void *value,int timeout)
+{
+    int rc = -1;
+    struct msm_ctrl_cmd ctrl_cmd;
+    memset(&ctrl_cmd, 0, sizeof(ctrl_cmd));
+    ctrl_cmd.type = type;
+    ctrl_cmd.length = (uint16_t)length;
+    ctrl_cmd.timeout_ms = timeout;
+    ctrl_cmd.value = value;
+    ctrl_cmd.status = CAM_CTRL_SUCCESS;
+    rc = mm_camera_util_private_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_CTRL_CMD,
+        (int)&ctrl_cmd);
+    CDBG("%s: type=%d, rc = %d, status = %d\n",
+        __func__, type, rc, ctrl_cmd.status);
+    if(rc != MM_CAMERA_OK || ((ctrl_cmd.status != CAM_CTRL_ACCEPTED) &&
+        (ctrl_cmd.status != CAM_CTRL_SUCCESS) &&
+        (ctrl_cmd.status != CAM_CTRL_INVALID_PARM)))
+        rc = -1;
+    return rc;
+}
+
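+/* Set a parameter on an opened camera. Op mode, dimensions, snapshot burst
+ * count and per-channel image format are handled locally; every other
+ * parameter is forwarded to mm_camera_set_general_parm(), which maps it to
+ * a native control command. */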
+int32_t mm_camera_set_parm(mm_camera_obj_t * my_obj,
+    mm_camera_parm_t *parm)
+{
+    int32_t rc = -1;
+    uint16_t len;
+    CDBG("%s type =%d", __func__, parm->parm_type);
+    switch(parm->parm_type) {
+    case MM_CAMERA_PARM_OP_MODE:
+        rc = mm_camera_util_set_op_mode(my_obj,
+                        (mm_camera_op_mode_type_t *)parm->p_value);
+        break;
+    case MM_CAMERA_PARM_DIMENSION:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_DIMENSION, sizeof(cam_ctrl_dimension_t), parm->p_value);
+        if(rc != MM_CAMERA_OK) {
+            CDBG("%s: mm_camera_send_native_ctrl_cmd err=%d\n", __func__, rc);
+            break;
+        }
+        memcpy(&my_obj->dim, (cam_ctrl_dimension_t *)parm->p_value,
+                     sizeof(cam_ctrl_dimension_t));
+        CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,raw_w=%d,raw_h=%d\n",
+                 __func__,
+                 my_obj->dim.display_width,my_obj->dim.display_height,
+                 my_obj->dim.video_width, my_obj->dim.video_height,
+                 my_obj->dim.picture_width,my_obj->dim.picture_height,
+                 my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+                 my_obj->dim.raw_picture_width,my_obj->dim.raw_picture_height);
+        break;
+    case MM_CAMERA_PARM_SNAPSHOT_BURST_NUM:
+        CDBG("%s: Setting snapshot burst number: %d\n", __func__, *((int *)parm->p_value));
+        my_obj->snap_burst_num_by_user = *((int *)parm->p_value);
+        rc = MM_CAMERA_OK;
+        break;
+    case MM_CAMERA_PARM_CH_IMAGE_FMT:
+        {
+            mm_camera_ch_image_fmt_parm_t *fmt;
+            fmt = (mm_camera_ch_image_fmt_parm_t *)parm->p_value;
+            rc = mm_camera_ch_fn(my_obj,    fmt->ch_type,
+                            MM_CAMERA_STATE_EVT_SET_FMT, fmt);
+        }
+        break;
+    default:
+        rc = mm_camera_set_general_parm(my_obj, parm);
+        break;
+    }
+    return rc;
+}
+
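+/* Query a parameter. Values cached in the camera object (dimensions,
+ * capability properties, op mode, burst count) are returned directly;
+ * the remaining parameters are fetched from the driver with native GET
+ * control commands. */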
+int32_t mm_camera_get_parm(mm_camera_obj_t * my_obj,
+                            mm_camera_parm_t *parm)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    switch(parm->parm_type) {
+    case MM_CAMERA_PARM_CROP:
+        return mm_camera_ch_fn(my_obj,
+                    ((mm_camera_ch_crop_t *)parm->p_value)->ch_type,
+                    MM_CAMERA_STATE_EVT_GET_CROP, parm->p_value);
+    case MM_CAMERA_PARM_DIMENSION:
+        memcpy(parm->p_value, &my_obj->dim, sizeof(my_obj->dim));
+        CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,ovx=%x,ovy=%d,opx=%d,opy=%d, m_fmt=%d, t_ftm=%d\n",
+                 __func__,
+                 my_obj->dim.display_width,my_obj->dim.display_height,
+                 my_obj->dim.video_width,my_obj->dim.video_height,
+                 my_obj->dim.picture_width,my_obj->dim.picture_height,
+                 my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+                 my_obj->dim.orig_video_width,my_obj->dim.orig_video_height,
+                 my_obj->dim.orig_picture_width,my_obj->dim.orig_picture_height,
+                 my_obj->dim.main_img_format, my_obj->dim.thumb_format);
+        break;
+    case MM_CAMERA_PARM_MAX_PICTURE_SIZE: {
+        mm_camera_dimension_t *dim =
+            (mm_camera_dimension_t *)parm->p_value;
+        dim->height = my_obj->properties.max_pict_height;
+        dim->width = my_obj->properties.max_pict_width;
+        CDBG("%s: Max Picture Size: %d X %d\n", __func__,
+             dim->width, dim->height);
+    }
+        break;
+    case MM_CAMERA_PARM_PREVIEW_FORMAT:
+        *((int *)parm->p_value) = my_obj->properties.preview_format;
+        break;
+    case MM_CAMERA_PARM_PREVIEW_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.preview_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_VIDEO_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.video_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_THUMB_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.thumb_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_HFR_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.hfr_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_HFR_FRAME_SKIP:
+        *((int *)parm->p_value) = my_obj->properties.hfr_frame_skip;
+        break;
+    case MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH:
+        *((int *)parm->p_value) = my_obj->properties.default_preview_width;
+        break;
+    case MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT:
+        *((int *)parm->p_value) = my_obj->properties.default_preview_height;
+        break;
+    case MM_CAMERA_PARM_BESTSHOT_RECONFIGURE:
+        *((int *)parm->p_value) = my_obj->properties.bestshot_reconfigure;
+        break;
+    case MM_CAMERA_PARM_MAX_PREVIEW_SIZE: {
+        mm_camera_dimension_t *dim =
+            (mm_camera_dimension_t *)parm->p_value;
+        dim->height = my_obj->properties.max_preview_height;
+        dim->width = my_obj->properties.max_preview_width;
+        CDBG("%s: Max Preview Size: %d X %d\n", __func__,
+             dim->width, dim->height);
+    }
+        break;
+    case MM_CAMERA_PARM_MAX_VIDEO_SIZE: {
+        mm_camera_dimension_t *dim =
+            (mm_camera_dimension_t *)parm->p_value;
+        dim->height = my_obj->properties.max_video_height;
+        dim->width = my_obj->properties.max_video_width;
+        CDBG("%s: Max Video Size: %d X %d\n", __func__,
+             dim->width, dim->height);
+    }
+        break;
+    case MM_CAMERA_PARM_MAX_HFR_MODE:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_MAX_HFR_MODE,
+                sizeof(camera_hfr_mode_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FOCAL_LENGTH:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_FOCAL_LENGTH,
+                     sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+                     sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+                     sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FOCUS_DISTANCES:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_FOCUS_DISTANCES,
+                     sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_QUERY_FALSH4SNAP:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+                     sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_3D_FRAME_FORMAT:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_3D_FRAME_FORMAT,
+                     sizeof(camera_3d_frame_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_MAXZOOM:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_MAXZOOM,
+                     sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_ZOOM_RATIO: {
+        mm_camera_zoom_tbl_t *tbl = (mm_camera_zoom_tbl_t *)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_ZOOMRATIOS,
+                     sizeof(int16_t)*tbl->size, tbl->zoom_ratio_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_PREVIEW_SIZES: {
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+                     sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_VIDEO_SIZES: {
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+                     sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_THUMB_SIZES: {
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_THUMB_SIZES,
+                     sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_HFR_SIZES:{
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_HFR_SIZES,
+                     sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_OP_MODE:
+        *((mm_camera_op_mode_type_t *)parm->p_value) = my_obj->op_mode;
+        break;
+    case MM_CAMERA_PARM_SNAPSHOT_BURST_NUM:
+        *((int *)parm->p_value) = my_obj->snap_burst_num_by_user;
+        break;
+    case MM_CAMERA_PARM_VFE_OUTPUT_ENABLE:
+        *((int *)parm->p_value) = my_obj->properties.vfe_output_enable;
+        break;
+    default:
+        /* remaining parameters are not implemented yet */
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
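+/* Buffer management entry points: each call is routed to the channel
+ * state machine (mm_camera_ch_fn) with the corresponding state event. */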
+int32_t mm_camera_request_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    rc = mm_camera_ch_fn(my_obj,    buf->ch_type,
+                    MM_CAMERA_STATE_EVT_REQUEST_BUF, (void *)&buf->preview);
+    return rc;
+}
+
+int32_t mm_camera_enqueue_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    rc = mm_camera_ch_fn(my_obj,    buf->ch_type,
+                    MM_CAMERA_STATE_EVT_ENQUEUE_BUF, (void *)&buf->preview);
+    return rc;
+}
+
+int32_t mm_camera_prepare_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    rc = mm_camera_ch_fn(my_obj,    buf->ch_type,
+                    MM_CAMERA_STATE_EVT_REG_BUF, (void *)&buf->preview);
+    return rc;
+}
+int32_t mm_camera_unprepare_buf(mm_camera_obj_t * my_obj, mm_camera_channel_type_t ch_type)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    pthread_mutex_lock(&my_obj->ch[ch_type].mutex);
+    rc = mm_camera_ch_fn(my_obj, ch_type,
+                    MM_CAMERA_STATE_EVT_UNREG_BUF, NULL);
+    pthread_mutex_unlock(&my_obj->ch[ch_type].mutex);
+    return rc;
+}
+
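+/* Subscribe to or unsubscribe from the private MSM notify/error V4L2
+ * events. The event polling thread is launched when the first event type
+ * is registered and released once the last one is unregistered. */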
+static int mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                             mm_camera_event_type_t evt_type, int reg_count)
+{
+    int rc = MM_CAMERA_OK;
+    struct v4l2_event_subscription sub;
+
+    memset(&sub, 0, sizeof(sub));
+    sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_EVENT;
+    if(reg_count == 0) {
+        /* unsubscribe */
+        if(my_obj->evt_type_mask == (uint32_t)(1 << evt_type)) {
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+            CDBG("%s: unsubscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT;
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+            CDBG("%s: unsubscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+        }
+        my_obj->evt_type_mask &= ~(1 << evt_type);
+        if(my_obj->evt_type_mask == 0) {
+            /* kill the polling thread when unregistering the last event */
+            mm_camera_poll_thread_release(my_obj, MM_CAMERA_CH_MAX);
+        }
+    } else {
+        if(!my_obj->evt_type_mask) {
+            /* this is the first reg event */
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+            CDBG("%s: subscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            if (rc < 0)
+                goto end;
+            sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT;
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+            CDBG("%s: subscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            if (rc < 0)
+                goto end;
+        }
+        my_obj->evt_type_mask |= (1 << evt_type);
+        if(my_obj->evt_type_mask == (uint32_t)(1 << evt_type)) {
+            /* launch the event polling thread when the first event is subscribed */
+            rc = mm_camera_poll_thread_launch(my_obj, MM_CAMERA_CH_MAX);
+        }
+    }
+end:
+    return rc;
+}
+
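+/* Register (evt_cb != NULL) or unregister (evt_cb == NULL) an application
+ * event callback. When the resulting callback count for this event type is
+ * zero or one, the kernel subscription is updated via mm_camera_evt_sub(). */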
+int mm_camera_reg_event(mm_camera_obj_t * my_obj, mm_camera_event_notify_t evt_cb,
+                           void *user_data, mm_camera_event_type_t evt_type)
+{
+    int i;
+    int rc = -1;
+    mm_camera_evt_obj_t *evt_array = &my_obj->evt[evt_type];
+    if(evt_cb) {
+        /* this is reg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == NULL) {
+                evt_array->evt[i].evt_cb = evt_cb;
+                evt_array->evt[i].user_data = user_data;
+                evt_array->reg_count++;
+                rc = MM_CAMERA_OK;
+                break;
+            }
+        }
+    } else {
+        /* this is unreg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == user_data) {
+                evt_array->evt[i].evt_cb = NULL;
+                evt_array->evt[i].user_data = NULL;
+                evt_array->reg_count--;
+                rc = MM_CAMERA_OK;
+                break;
+            }
+        }
+    }
+    if(rc == MM_CAMERA_OK && evt_array->reg_count <= 1) {
+        /* subscribe/unsubscribe event to kernel */
+        rc = mm_camera_evt_sub(my_obj, evt_type, evt_array->reg_count);
+    }
+    return rc;
+}
+
+
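+/* Notify the app layer that auto focus failed, as an AUTO_FOCUS_DONE
+ * control event carrying CAM_CTRL_FAILED status. */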
+static int32_t mm_camera_send_af_failed_event(mm_camera_obj_t *my_obj)
+{
+    int rc = 0;
+    mm_camera_event_t event;
+    event.event_type = MM_CAMERA_EVT_TYPE_CTRL;
+    event.e.ctrl.evt= MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE;
+    event.e.ctrl.status=CAM_CTRL_FAILED;
+    CDBG_HIGH("%s: Issuing call",__func__);
+    rc = mm_camera_poll_send_ch_event(my_obj, &event);
+    return rc;
+}
+
+static int32_t mm_camera_send_ch_on_off_event(mm_camera_obj_t *my_obj,
+                                       mm_camera_channel_type_t ch_type,
+                                       mm_camera_ch_event_type_t evt)
+{
+    int rc = 0;
+    mm_camera_event_t event;
+    event.event_type = MM_CAMERA_EVT_TYPE_CH;
+    event.e.ch.evt = evt;
+    event.e.ch.ch = ch_type;
+    CDBG("%s: stream on event, type=0x%x, ch=%d, evt=%d",
+         __func__, event.event_type, event.e.ch.ch, event.e.ch.evt);
+    rc = mm_camera_poll_send_ch_event(my_obj, &event);
+    return rc;
+}
+
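+/* Start an operation. Auto focus and buffered-frame dispatch are handled
+ * directly; other opcodes are mapped to a channel and translated into a
+ * STREAM_ON state event, followed by a CH_EVT_STREAMING_ON notification
+ * (PREPARE_SNAPSHOT only issues the native prepare command). */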
+int32_t mm_camera_action_start(mm_camera_obj_t *my_obj,
+                            mm_camera_ops_type_t opcode, void *parm)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    int send_on_off_evt = 1;
+    mm_camera_channel_type_t ch_type;
+    switch(opcode) {
+    case MM_CAMERA_OPS_FOCUS: {
+        if(!parm) return rc;
+        if(0 > mm_camera_send_native_ctrl_cmd(my_obj,
+          CAMERA_SET_PARM_AUTO_FOCUS,
+          sizeof(isp3a_af_mode_t), parm))
+          mm_camera_send_af_failed_event(my_obj);
+        return MM_CAMERA_OK;
+    }
+    case MM_CAMERA_OPS_GET_BUFFERED_FRAME: {
+        mm_camera_ops_parm_get_buffered_frame_t *tmp =
+            (mm_camera_ops_parm_get_buffered_frame_t *)parm;
+        rc = mm_camera_ch_fn(my_obj, tmp->ch_type,
+                 MM_CAMERA_STATE_EVT_DISPATCH_BUFFERED_FRAME, NULL);
+        return rc;
+    }
+    default:
+        break;
+    }
+    ch_type = mm_camera_util_opcode_2_ch_type(my_obj, opcode);
+    CDBG("%s:ch=%d,op_mode=%d,opcode=%d\n",
+        __func__,ch_type,my_obj->op_mode,opcode);
+    switch(my_obj->op_mode) {
+    case MM_CAMERA_OP_MODE_ZSL:
+    case MM_CAMERA_OP_MODE_CAPTURE:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_SNAPSHOT:
+        case MM_CAMERA_OPS_ZSL:
+        case MM_CAMERA_OPS_RAW:
+            rc = mm_camera_ch_fn(my_obj, ch_type,
+                    MM_CAMERA_STATE_EVT_STREAM_ON, NULL);
+            break;
+        default:
+            break;
+        }
+        break;
+    case MM_CAMERA_OP_MODE_VIDEO:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_VIDEO:
+        case MM_CAMERA_OPS_SNAPSHOT:
+            rc = mm_camera_ch_fn(my_obj,    ch_type,
+                    MM_CAMERA_STATE_EVT_STREAM_ON, NULL);
+            CDBG("%s: op_mode=%d, ch %d, rc=%d\n",
+                __func__, MM_CAMERA_OP_MODE_VIDEO, ch_type ,rc);
+            break;
+        case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+            send_on_off_evt = 0;
+            rc = mm_camera_send_native_ctrl_timeout_cmd(my_obj,CAMERA_PREPARE_SNAPSHOT, 0, NULL, 2000);
+            CDBG("%s: prepare snapshot done opcode = %d, rc= %d\n", __func__, opcode, rc);
+            break;
+        default:
+            break;
+        }
+        break;
+    default:
+        break;
+    }
+    CDBG("%s: ch=%d,op_mode=%d,opcode=%d\n", __func__, ch_type,
+      my_obj->op_mode, opcode);
+    if(send_on_off_evt)
+      rc = mm_camera_send_ch_on_off_event(my_obj,ch_type,MM_CAMERA_CH_EVT_STREAMING_ON);
+    return rc;
+}
+
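+/* Stop an operation: cancel auto focus directly, otherwise stream the
+ * channel off and post a CH_EVT_STREAMING_OFF notification. */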
+int32_t mm_camera_action_stop(mm_camera_obj_t *my_obj,
+    mm_camera_ops_type_t opcode, void *parm)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    mm_camera_channel_type_t ch_type;
+
+    if(opcode == MM_CAMERA_OPS_FOCUS) {
+      return mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_AUTO_FOCUS_CANCEL, 0, NULL);
+    }
+
+    ch_type = mm_camera_util_opcode_2_ch_type(my_obj, opcode);
+    switch(my_obj->op_mode) {
+    case MM_CAMERA_OP_MODE_ZSL:
+    case MM_CAMERA_OP_MODE_CAPTURE:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_SNAPSHOT:
+        case MM_CAMERA_OPS_ZSL:
+        case MM_CAMERA_OPS_RAW:
+            rc = mm_camera_ch_fn(my_obj, ch_type,
+                            MM_CAMERA_STATE_EVT_STREAM_OFF, NULL);
+            CDBG("%s:CAPTURE mode STREAMOFF rc=%d\n",__func__, rc);
+            break;
+        default:
+            break;
+        }
+        break;
+    case MM_CAMERA_OP_MODE_VIDEO:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_VIDEO:
+        case MM_CAMERA_OPS_SNAPSHOT:
+            rc = mm_camera_ch_fn(my_obj, ch_type,
+                            MM_CAMERA_STATE_EVT_STREAM_OFF, NULL);
+            CDBG("%s:VIDEO mode STREAMOFF rc=%d\n",__func__, rc);
+            break;
+        default:
+            break;
+        }
+        break;
+    default:
+        break;
+    }
+    CDBG("%s:ch=%d\n",__func__, ch_type);
+    rc = mm_camera_send_ch_on_off_event(my_obj,ch_type,MM_CAMERA_CH_EVT_STREAMING_OFF);
+    return rc;
+}
+
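+/* The snapshot channel carries two streams (main + thumbnail) and so
+ * expects two stream on/off confirmations; every other channel expects one. */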
+static void mm_camera_init_ch_stream_count(mm_camera_obj_t *my_obj)
+{
+    int i;
+
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++) {
+        if(i == MM_CAMERA_CH_SNAPSHOT) {
+            my_obj->ch_stream_count[i].stream_on_count_cfg = 2;
+            my_obj->ch_stream_count[i].stream_off_count_cfg = 2;
+        } else {
+            my_obj->ch_stream_count[i].stream_on_count_cfg = 1;
+            my_obj->ch_stream_count[i].stream_off_count_cfg = 1;
+        }
+    }
+}
+
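+/* Open the camera: open the control node and the domain socket (both with
+ * retries), select the memory-map instance and the requested op mode,
+ * query the capabilities into my_obj->properties, and launch one poll
+ * thread per channel. */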
+int32_t mm_camera_open(mm_camera_obj_t *my_obj,
+                    mm_camera_op_mode_type_t op_mode)
+{
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    int32_t rc = MM_CAMERA_OK;
+    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+    uint8_t i;
+
+    CDBG("%s:  begin\n", __func__);
+
+    if(my_obj->op_mode != MM_CAMERA_OP_MODE_NOTUSED) {
+        CDBG("%s: not allowed in existing op mode %d\n",
+                 __func__, my_obj->op_mode);
+        return -MM_CAMERA_E_INVALID_OPERATION;
+    }
+    if(op_mode >= MM_CAMERA_OP_MODE_MAX) {
+        CDBG("%s: invalid input %d\n",
+                 __func__, op_mode);
+        return -MM_CAMERA_E_INVALID_INPUT;
+    }
+    snprintf(dev_name, sizeof(dev_name), "/dev/%s", mm_camera_util_get_dev_name(my_obj));
+    //rc = mm_camera_dev_open(&my_obj->ctrl_fd, dev_name);
+
+    do {
+        n_try--;
+        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        ALOGE("%s:  ctrl_fd = %d", __func__, my_obj->ctrl_fd);
+        ALOGE("Errno:%d", errno);
+        if((my_obj->ctrl_fd > 0) || (errno != EIO) || (n_try <= 0)) {
+            ALOGE("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s: failed with I/O error, retrying after %d milliseconds",
+             __func__, sleep_msec);
+        usleep(sleep_msec * 1000);
+    } while(n_try > 0);
+
+    ALOGE("%s:  after while loop", __func__);
+    if (my_obj->ctrl_fd <= 0) {
+        CDBG("%s: cannot open control fd of '%s' Errno = %d\n",
+                 __func__, mm_camera_util_get_dev_name(my_obj), errno);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    /* open domain socket*/
+    n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    do{
+        n_try--;
+        my_obj->ds_fd = mm_camera_socket_create(my_obj->my_id, MM_CAMERA_SOCK_TYPE_UDP); // TODO: UDP for now, change to TCP
+        ALOGE("%s:  ds_fd = %d", __func__, my_obj->ds_fd);
+        ALOGE("Errno:%d",errno);
+        if((my_obj->ds_fd > 0) || (n_try <= 0 )) {
+            ALOGE("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__,sleep_msec);
+        usleep(sleep_msec*1000);
+    }while(n_try>0);
+
+    ALOGE("%s:  after while loop for domain socket open", __func__);
+    if (my_obj->ds_fd <= 0) {
+        CDBG_ERROR("%s: cannot open domain socket fd of '%s' Errno = %d\n",
+                 __func__, mm_camera_util_get_dev_name(my_obj),errno);
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = -1;
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    /* set ctrl_fd to be the mem_mapping fd */
+    rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                        MSM_V4L2_PID_MMAP_INST, 0);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("error: ioctl VIDIOC_S_CTRL MSM_V4L2_PID_MMAP_INST failed: %s\n",
+        strerror(errno));
+        close(my_obj->ctrl_fd);
+        close(my_obj->ds_fd);
+        my_obj->ctrl_fd = -1;
+        my_obj->ds_fd = -1;
+        return -MM_CAMERA_E_GENERAL;
+    }
+    if(op_mode != MM_CAMERA_OP_MODE_NOTUSED)
+        rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                            MSM_V4L2_PID_CAM_MODE, op_mode);
+    if(!rc) {
+        my_obj->op_mode = op_mode;
+        my_obj->current_mode = CAMERA_MODE_2D; /* set geo mode to 2D by default */
+    }
+
+    /* get camera capabilities */
+    memset(&my_obj->properties, 0, sizeof(cam_prop_t));
+    rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                        CAMERA_GET_CAPABILITIES,
+                                        sizeof(cam_prop_t),
+                                        (void *)& my_obj->properties);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: cannot get camera capabilities\n", __func__);
+        close(my_obj->ctrl_fd);
+        close(my_obj->ds_fd);
+        my_obj->ctrl_fd = -1;
+        my_obj->ds_fd = -1;
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    mm_camera_poll_threads_init(my_obj);
+    mm_camera_init_ch_stream_count(my_obj);
+    CDBG("%s : Launch Threads in Cam Open",__func__);
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++) {
+        mm_camera_poll_thread_launch(my_obj,(mm_camera_channel_type_t)i);
+    }
+    CDBG("%s: '%s', ctrl_fd=%d,op_mode=%d,rc=%d\n",
+             __func__, dev_name, my_obj->ctrl_fd, my_obj->op_mode, rc);
+    return rc;
+}
+
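+/* Tear down in roughly the reverse order of mm_camera_open(): release all
+ * channels, stop the poll threads, and close the control and domain
+ * socket fds. */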
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+    int i, rc = 0;
+
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++){
+        mm_camera_ch_fn(my_obj, (mm_camera_channel_type_t)i,
+                                MM_CAMERA_STATE_EVT_RELEASE, NULL);
+    }
+
+    CDBG("%s : Close Threads in Cam Close",__func__);
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++) {
+        mm_camera_poll_thread_release(my_obj,(mm_camera_channel_type_t)i);
+    }
+    mm_camera_poll_threads_deinit(my_obj);
+    my_obj->op_mode = MM_CAMERA_OP_MODE_NOTUSED;
+    if(my_obj->ctrl_fd > 0) {
+        rc = close(my_obj->ctrl_fd);
+        if(rc < 0) {
+            /* this is a dead end. */
+            CDBG("%s: !!!!FATAL ERROR!!!! ctrl_fd = %d, rc = %d",
+                 __func__, my_obj->ctrl_fd, rc);
+        }
+        my_obj->ctrl_fd = 0;
+    }
+    if(my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = 0;
+    }
+    return MM_CAMERA_OK;
+}
+
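+/* Thin dispatcher used by the interface layer: start != 0 starts the
+ * requested operation, otherwise it is stopped. */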
+int32_t mm_camera_action(mm_camera_obj_t *my_obj, uint8_t start,
+                        mm_camera_ops_type_t opcode, void *parm)
+{
+    int32_t rc = - MM_CAMERA_E_INVALID_OPERATION;
+
+    if(start)
+        rc = mm_camera_action_start(my_obj, opcode, parm);
+    else
+        rc = mm_camera_action_stop(my_obj, opcode, parm);
+    CDBG("%s:start_flag=%d,opcode=%d,parm=%p,rc=%d\n",__func__,start,opcode,parm, rc);
+    return rc;
+}
+
+int32_t mm_camera_ch_acquire(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type)
+{
+    return mm_camera_ch_fn(my_obj,ch_type, MM_CAMERA_STATE_EVT_ACQUIRE, 0);
+}
+
+void mm_camera_ch_release(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type)
+{
+    mm_camera_ch_fn(my_obj,ch_type, MM_CAMERA_STATE_EVT_RELEASE, 0);
+}
+
+int32_t mm_camera_sendmsg(mm_camera_obj_t *my_obj, void *msg, uint32_t buf_size, int sendfd)
+{
+    return mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd);
+}
diff --git a/camera/mm-camera-interface/mm_camera.h b/camera/mm-camera-interface/mm_camera.h
new file mode 100644
index 0000000..e6031e1
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera.h
@@ -0,0 +1,352 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+typedef enum {
+    MM_CAMERA_STREAM_STATE_NOTUSED,     /* not used */
+    MM_CAMERA_STREAM_STATE_ACQUIRED,    /* acquired, fd opened  */
+    MM_CAMERA_STREAM_STATE_CFG,             /* fmt & dim configured */
+    MM_CAMERA_STREAM_STATE_REG,             /* buf regged, stream off */
+    MM_CAMERA_STREAM_STATE_ACTIVE,      /* stream on */
+    MM_CAMERA_STREAM_STATE_MAX
+} mm_camera_stream_state_type_t;
+
+typedef enum {
+    MM_CAMERA_STATE_EVT_NOTUSED,
+    MM_CAMERA_STATE_EVT_ACQUIRE,
+    MM_CAMERA_STATE_EVT_ATTR,
+    MM_CAMERA_STATE_EVT_RELEASE,
+    MM_CAMERA_STATE_EVT_REG_BUF_CB,
+    MM_CAMERA_STATE_EVT_SET_FMT,
+    MM_CAMERA_STATE_EVT_SET_DIM,
+    MM_CAMERA_STATE_EVT_REG_BUF, // request the buffers and enqueue all of them to the kernel
+    MM_CAMERA_STATE_EVT_UNREG_BUF,
+    MM_CAMERA_STATE_EVT_STREAM_ON,
+    MM_CAMERA_STATE_EVT_STREAM_OFF,
+    MM_CAMERA_STATE_EVT_QBUF,
+    MM_CAMERA_STATE_EVT_GET_CROP,
+    MM_CAMERA_STATE_EVT_DISPATCH_BUFFERED_FRAME,
+    MM_CAMERA_STATE_EVT_REQUEST_BUF, // request the number of buffers from the kernel only
+    MM_CAMERA_STATE_EVT_ENQUEUE_BUF, // enqueue only a subset of the buffers to the kernel
+    MM_CAMERA_STATE_EVT_MAX
+} mm_camera_state_evt_type_t;
+
+typedef struct {
+    mm_camera_event_notify_t evt_cb;
+    void * user_data;
+} mm_camera_notify_cb_t;
+
+typedef enum {
+    MM_CAMERA_BUF_CB_ONCE,
+    MM_CAMERA_BUF_CB_COUNT,
+    MM_CAMERA_BUF_CB_INFINITE
+} mm_camera_buf_cb_type_t;
+
+typedef struct {
+    mm_camera_buf_notify_t cb;
+    mm_camera_buf_cb_type_t cb_type;
+    uint32_t cb_count;
+    void *user_data;
+} mm_camera_buf_cb_t;
+
+typedef enum {
+    MM_CAMERA_STREAM_PIPE,
+    MM_CAMERA_STREAM_PREVIEW,
+    MM_CAMERA_STREAM_VIDEO,
+    MM_CAMERA_STREAM_SNAPSHOT,
+    MM_CAMERA_STREAM_THUMBNAIL,
+    MM_CAMERA_STREAM_RAW,
+    MM_CAMERA_STREAM_VIDEO_MAIN,
+    MM_CAMERA_STREAM_MAX
+} mm_camera_stream_type_t;
+
+typedef struct mm_camera_frame_t mm_camera_frame_t;
+struct mm_camera_frame_t{
+    struct msm_frame frame;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    uint8_t num_planes;
+    int idx;
+    int match;
+    int valid_entry;
+    mm_camera_frame_t *next;
+};
+
+typedef struct {
+    pthread_mutex_t mutex;
+    int cnt;
+    int match_cnt;
+    mm_camera_frame_t *head;
+    mm_camera_frame_t *tail;
+} mm_camera_frame_queue_t;
+
+typedef struct {
+    mm_camera_frame_queue_t readyq;
+    int32_t num_frame;
+    uint32_t frame_len;
+    int8_t reg_flag;
+    uint32_t frame_offset[MM_CAMERA_MAX_NUM_FRAMES];
+    mm_camera_frame_t frame[MM_CAMERA_MAX_NUM_FRAMES];
+    int8_t ref_count[MM_CAMERA_MAX_NUM_FRAMES];
+    int32_t use_multi_fd;
+    int qbuf;
+    pthread_mutex_t mutex;
+} mm_camera_stream_frame_t;
+
+typedef struct {
+    int32_t fd;
+    mm_camera_stream_state_type_t state;
+    mm_camera_stream_type_t stream_type;
+    struct v4l2_format fmt;
+    cam_format_t cam_fmt;
+    mm_camera_stream_frame_t frame;
+} mm_camera_stream_t;
+
+typedef struct {
+    mm_camera_stream_t stream;
+    mm_camera_raw_streaming_type_t mode;
+} mm_camera_ch_raw_t;
+
+typedef struct {
+    mm_camera_stream_t stream;
+} mm_camera_ch_preview_t;
+
+typedef struct {
+    mm_camera_stream_t thumbnail;
+    mm_camera_stream_t main;
+    int    delivered_cnt;
+    int8_t pending_cnt;
+    uint32_t expected_matching_id;
+} mm_camera_ch_snapshot_t;
+
+typedef struct {
+    int8_t fifo[MM_CAMERA_MAX_FRAME_NUM];
+    int8_t low;
+    int8_t high;
+    int8_t len;
+    int8_t water_mark;
+} mm_camera_circule_fifo_t;
+
+typedef struct {
+    mm_camera_stream_t video;
+    mm_camera_stream_t main;
+    uint8_t has_main;
+} mm_camera_ch_video_t;
+
+#define MM_CAMERA_BUF_CB_MAX 4
+typedef struct {
+    mm_camera_channel_type_t type;
+    pthread_mutex_t mutex;
+    uint8_t acquired;
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_channel_attr_buffering_frame_t buffering_frame;
+    union {
+        mm_camera_ch_raw_t raw;
+        mm_camera_ch_preview_t preview;
+        mm_camera_ch_snapshot_t snapshot;
+        mm_camera_ch_video_t video;
+    };
+} mm_camera_ch_t;
+
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+typedef struct {
+    mm_camera_event_notify_t evt_cb;
+    void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+    mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+    int reg_count;
+} mm_camera_evt_obj_t;
+
+#define MM_CAMERA_CH_STREAM_MAX 2
+typedef enum {
+    MM_CAMERA_POLL_TYPE_EVT,
+    MM_CAMERA_POLL_TYPE_CH,
+    MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
+typedef struct {
+    mm_camera_poll_thread_type_t poll_type;
+    int32_t pfds[2];
+    int poll_fd[MM_CAMERA_CH_STREAM_MAX+1];
+    int num_fds;
+    int used;
+    pthread_t pid;
+    int32_t state;
+    int timeoutms;
+    void *my_obj;
+    mm_camera_channel_type_t ch_type;
+    mm_camera_stream_t *poll_streams[MM_CAMERA_CH_STREAM_MAX];
+    uint32_t cmd;
+} mm_camera_poll_thread_data_t;
+
+typedef struct {
+    pthread_mutex_t mutex;
+    pthread_cond_t cond_v;
+    int32_t status;
+    mm_camera_poll_thread_data_t data;
+} mm_camera_poll_thread_t;
+
+typedef struct {
+    int stream_on_count_cfg;
+    int stream_off_count_cfg;
+    int stream_on_count;
+    int stream_off_count;
+} mm_camera_ch_stream_count_t;
+#define MM_CAMERA_POLL_THRAED_MAX (MM_CAMERA_CH_MAX+1)
+
+typedef struct {
+  struct msm_mem_map_info cookie;
+  uint32_t vaddr;
+} mm_camera_mem_map_entry_t;
+
+#define MM_CAMERA_MEM_MAP_MAX 8
+typedef struct {
+  int num;
+  mm_camera_mem_map_entry_t entry[MM_CAMERA_MEM_MAP_MAX];
+} mm_camera_mem_map_t;
+
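+/* Per-camera runtime state: control and domain-socket fds, cached
+ * dimensions and capabilities, per-channel state and stream counters,
+ * registered event callbacks, and the per-channel poll threads. */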
+typedef struct {
+    int8_t my_id;
+    camera_mode_t current_mode;
+    mm_camera_op_mode_type_t op_mode;
+    mm_camera_notify_cb_t *notify;
+    mm_camera_ch_t ch[MM_CAMERA_CH_MAX];
+    int ref_count;
+    uint32_t ch_streaming_mask;
+    int32_t ctrl_fd;
+    int32_t ds_fd; // domain socket fd
+    cam_ctrl_dimension_t dim;
+    cam_prop_t properties;
+    pthread_mutex_t mutex;
+    mm_camera_evt_obj_t evt[MM_CAMERA_EVT_TYPE_MAX];
+    mm_camera_ch_stream_count_t ch_stream_count[MM_CAMERA_CH_MAX];
+    uint32_t evt_type_mask;
+    mm_camera_poll_thread_t poll_threads[MM_CAMERA_POLL_THRAED_MAX];
+    mm_camera_mem_map_t hist_mem_map;
+    int full_liveshot;
+    int snap_burst_num_by_user;
+} mm_camera_obj_t;
+
+#define MM_CAMERA_DEV_NAME_LEN 32
+#define MM_CAMERA_DEV_OPEN_TRIES 2
+#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20
+
+typedef struct {
+    mm_camera_t camera[MSM_MAX_CAMERA_SENSORS];
+    int8_t num_cam;
+    char video_dev_name[MSM_MAX_CAMERA_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+    mm_camera_obj_t *cam_obj[MSM_MAX_CAMERA_SENSORS];
+} mm_camera_ctrl_t;
+
+typedef struct {
+    mm_camera_parm_type_t parm_type;
+     void *p_value;
+} mm_camera_parm_t;
+
+extern int32_t mm_camera_stream_fsm_fn_vtbl (mm_camera_obj_t * my_obj,
+                                            mm_camera_stream_t *stream,
+                                            mm_camera_state_evt_type_t evt, void *val);
+extern const char *mm_camera_util_get_dev_name(mm_camera_obj_t * my_obj);
+extern int32_t mm_camera_util_s_ctrl( int32_t fd,
+                                            uint32_t id, int32_t value);
+extern int32_t mm_camera_util_private_s_ctrl( int32_t fd,
+                                            uint32_t id, int32_t value);
+extern int32_t mm_camera_util_g_ctrl( int32_t fd,
+                                            uint32_t id, int32_t *value);
+extern int32_t mm_camera_ch_fn(mm_camera_obj_t * my_obj,
+                                            mm_camera_channel_type_t ch_type,
+                                            mm_camera_state_evt_type_t evt, void *val);
+extern int32_t mm_camera_action(mm_camera_obj_t *my_obj, uint8_t start,
+                                            mm_camera_ops_type_t opcode, void *parm);
+extern int32_t mm_camera_open(mm_camera_obj_t *my_obj,
+                                            mm_camera_op_mode_type_t op_mode);
+extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_start(mm_camera_obj_t *my_obj,
+                                            mm_camera_ops_type_t opcode, void *parm);
+extern int32_t mm_camera_stop(mm_camera_obj_t *my_obj,
+                                            mm_camera_ops_type_t opcode, void *parm);
+extern int32_t mm_camera_get_parm(mm_camera_obj_t * my_obj,
+                                            mm_camera_parm_t *parm);
+extern int32_t mm_camera_set_parm(mm_camera_obj_t * my_obj,
+                                            mm_camera_parm_t *parm);
+extern int32_t mm_camera_request_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf);
+extern int32_t mm_camera_enqueue_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf);
+extern int32_t mm_camera_prepare_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf);
+extern int32_t mm_camera_unprepare_buf(mm_camera_obj_t * my_obj, mm_camera_channel_type_t ch_type);
+extern int mm_camera_poll_thread_launch(mm_camera_obj_t * my_obj, int ch_type);
+
+int mm_camera_poll_thread_del_ch(mm_camera_obj_t * my_obj, int ch_type);
+int mm_camera_poll_thread_add_ch(mm_camera_obj_t * my_obj, int ch_type);
+extern int32_t mm_camera_poll_dispatch_buffered_frames(mm_camera_obj_t * my_obj, int ch_type);
+extern int mm_camera_poll_thread_release(mm_camera_obj_t * my_obj, int ch_type);
+extern void mm_camera_poll_threads_init(mm_camera_obj_t * my_obj);
+extern void mm_camera_poll_threads_deinit(mm_camera_obj_t * my_obj);
+extern int mm_camera_poll_busy(mm_camera_obj_t * my_obj);
+extern void mm_camera_msm_data_notify(mm_camera_obj_t * my_obj, int fd,
+                                            mm_camera_stream_type_t stream_type);
+extern void mm_camera_msm_evt_notify(mm_camera_obj_t * my_obj, int fd);
+extern int mm_camera_read_msm_frame(mm_camera_obj_t * my_obj,
+                        mm_camera_stream_t *stream);
+extern int32_t mm_camera_ch_acquire(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type);
+extern void mm_camera_ch_release(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type);
+extern int mm_camera_ch_is_active(mm_camera_obj_t * my_obj, mm_camera_channel_type_t ch_type);
+extern void mm_camera_ch_util_get_stream_objs(mm_camera_obj_t * my_obj,
+                                                            mm_camera_channel_type_t ch_type,
+                                                            mm_camera_stream_t **stream1,
+                                                            mm_camera_stream_t **stream2);
+extern int mm_camera_stream_qbuf(mm_camera_obj_t * my_obj,
+                                                            mm_camera_stream_t *stream,
+                                                            int idx);
+extern int mm_camera_stream_frame_get_q_cnt(mm_camera_frame_queue_t *q);
+extern mm_camera_frame_t *mm_camera_stream_frame_deq(mm_camera_frame_queue_t *q);
+extern mm_camera_frame_t *mm_camera_stream_frame_deq_no_lock(mm_camera_frame_queue_t *q);
+extern void mm_camera_stream_frame_enq(mm_camera_frame_queue_t *q, mm_camera_frame_t *node);
+extern void mm_camera_stream_frame_enq_no_lock(mm_camera_frame_queue_t *q, mm_camera_frame_t *node);
+extern void mm_camera_stream_frame_refill_q(mm_camera_frame_queue_t *q, mm_camera_frame_t *node, int num);
+extern int mm_camera_stream_is_active(mm_camera_stream_t *stream);
+extern int32_t mm_camera_stream_util_buf_done(mm_camera_obj_t * my_obj,
+                    mm_camera_stream_t *stream,
+                    mm_camera_notify_frame_t *frame);
+//extern int mm_camera_poll_add_stream(mm_camera_obj_t * my_obj, mm_camera_stream_t *stream);
+//extern int mm_camera_poll_del_stream(mm_camera_obj_t * my_obj, mm_camera_stream_t *stream);
+extern int mm_camera_dev_open(int *fd, char *dev_name);
+extern int mm_camera_reg_event(mm_camera_obj_t * my_obj, mm_camera_event_notify_t evt_cb,
+                           void *user_data, uint32_t evt_type);
+extern int mm_camera_poll_send_ch_event(mm_camera_obj_t * my_obj, mm_camera_event_t *event);
+extern void mm_camera_msm_proc_ch_event(mm_camera_obj_t *my_obj, mm_camera_event_t *event);
+extern void mm_camera_dispatch_app_event(mm_camera_obj_t *my_obj, mm_camera_event_t *event);
+extern void mm_camera_dispatch_buffered_frames(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type);
+extern void mm_camera_check_pending_zsl_frames(mm_camera_obj_t *my_obj,
+                                        mm_camera_channel_type_t ch_type);
+extern int mm_camera_ch_util_get_num_stream(mm_camera_obj_t * my_obj,mm_camera_channel_type_t ch_type);
+extern int32_t mm_camera_sendmsg(mm_camera_obj_t *my_obj, void *msg, uint32_t buf_size, int sendfd);
+#endif /* __MM_CAMERA_H__ */
diff --git a/camera/mm-camera-interface/mm_camera_channel.c b/camera/mm-camera-interface/mm_camera_channel.c
new file mode 100644
index 0000000..93d5389
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_channel.c
@@ -0,0 +1,761 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+#if 0
+#undef CDBG
+#define CDBG ALOGE
+#endif
+/* static functions prototype declarations */
+static int mm_camera_channel_skip_frames(mm_camera_obj_t *my_obj,
+                                          mm_camera_frame_queue_t *mq,
+                                          mm_camera_frame_queue_t *sq,
+                                          mm_camera_stream_t *mstream,
+                                          mm_camera_stream_t *sstream,
+                                          mm_camera_channel_attr_buffering_frame_t *frame_attr);
+static int mm_camera_channel_get_starting_frame(mm_camera_obj_t *my_obj,
+                                                mm_camera_ch_t *ch,
+                                                mm_camera_stream_t *mstream,
+                                                mm_camera_stream_t *sstream,
+                                                mm_camera_frame_queue_t *mq,
+                                                mm_camera_frame_queue_t *sq,
+                                                mm_camera_frame_t **mframe,
+                                                mm_camera_frame_t **sframe);
+static int mm_camera_ch_search_frame_based_on_time(mm_camera_obj_t *my_obj,
+                                                   mm_camera_ch_t *ch,
+                                                   mm_camera_stream_t *mstream,
+                                                   mm_camera_stream_t *sstream,
+                                                   mm_camera_frame_queue_t *mq,
+                                                   mm_camera_frame_queue_t *sq,
+                                                   mm_camera_frame_t **mframe,
+                                                   mm_camera_frame_t **sframe);
+
+
+
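+/* Number of concrete streams behind a logical channel: one for raw,
+ * preview and video (two if the video channel also has a main image
+ * stream), and two for snapshot (main + thumbnail). */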
+int mm_camera_ch_util_get_num_stream(mm_camera_obj_t * my_obj,mm_camera_channel_type_t ch_type)
+{
+    int num = 0;
+    switch(ch_type) {
+    case MM_CAMERA_CH_RAW:
+        num =  1;
+        break;
+    case MM_CAMERA_CH_PREVIEW:
+        num =  1;
+        break;
+    case MM_CAMERA_CH_VIDEO:
+        num =  1;
+        if(my_obj->ch[ch_type].video.has_main) {
+            num +=  1;
+        }
+        break;
+    case MM_CAMERA_CH_SNAPSHOT:
+        num =  2;
+        break;
+    default:
+        break;
+    }
+    return num;
+}
+
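+/* Resolve the stream object(s) backing a channel. stream2 is only set for
+ * video with a main image stream, or for snapshot when full-size liveshot
+ * is disabled (the thumbnail stream). */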
+void mm_camera_ch_util_get_stream_objs(mm_camera_obj_t * my_obj,
+                                       mm_camera_channel_type_t ch_type,
+                                       mm_camera_stream_t **stream1,
+                                       mm_camera_stream_t **stream2)
+{
+    *stream1 = NULL;
+    *stream2 = NULL;
+
+    switch(ch_type) {
+    case MM_CAMERA_CH_RAW:
+        *stream1 = &my_obj->ch[ch_type].raw.stream;
+        break;
+    case MM_CAMERA_CH_PREVIEW:
+        *stream1 = &my_obj->ch[ch_type].preview.stream;
+        break;
+    case MM_CAMERA_CH_VIDEO:
+        *stream1 = &my_obj->ch[ch_type].video.video;
+        if(my_obj->ch[ch_type].video.has_main) {
+            *stream2 = &my_obj->ch[ch_type].video.main;
+        }
+        break;
+    case MM_CAMERA_CH_SNAPSHOT:
+        *stream1 = &my_obj->ch[ch_type].snapshot.main;
+        if (!my_obj->full_liveshot)
+            *stream2 = &my_obj->ch[ch_type].snapshot.thumbnail;
+        break;
+    default:
+        break;
+    }
+}
+
+static int32_t mm_camera_ch_util_set_fmt(mm_camera_obj_t * my_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_ch_image_fmt_parm_t *fmt)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream1 = NULL;
+    mm_camera_stream_t *stream2 = NULL;
+    mm_camera_image_fmt_t *fmt1 = NULL;
+    mm_camera_image_fmt_t *fmt2 = NULL;
+
+    switch(ch_type) {
+    case MM_CAMERA_CH_RAW:
+        stream1 = &my_obj->ch[ch_type].raw.stream;
+        fmt1 = &fmt->def;
+        break;
+    case MM_CAMERA_CH_PREVIEW:
+        stream1 = &my_obj->ch[ch_type].preview.stream;
+        fmt1 = &fmt->def;
+        break;
+    case MM_CAMERA_CH_VIDEO:
+        stream1 = &my_obj->ch[ch_type].video.video;
+        fmt1 = &fmt->video.video;
+        if(my_obj->ch[ch_type].video.has_main) {
+            CDBG("%s:video channel has main image stream\n", __func__);
+            stream2 = &my_obj->ch[ch_type].video.main;
+            fmt2 = &fmt->video.main;
+        }
+        break;
+    case MM_CAMERA_CH_SNAPSHOT:
+        stream1 = &my_obj->ch[ch_type].snapshot.main;
+        fmt1 = &fmt->snapshot.main;
+        if (!my_obj->full_liveshot) {
+            stream2 = &my_obj->ch[ch_type].snapshot.thumbnail;
+            fmt2 = &fmt->snapshot.thumbnail;
+        }
+        break;
+    default:
+        rc = -1;
+        break;
+    }
+    CDBG("%s:ch=%d, streams[0x%x,0x%x]\n", __func__, ch_type,
+             (uint32_t)stream1, (uint32_t)stream2);
+    if(stream1)
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream1,
+                         MM_CAMERA_STATE_EVT_SET_FMT, fmt1);
+    if(stream2 && !rc)
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream2,
+                         MM_CAMERA_STATE_EVT_SET_FMT, fmt2);
+    return rc;
+}
+
+static int32_t mm_camera_ch_util_acquire(mm_camera_obj_t * my_obj,
+                                         mm_camera_channel_type_t ch_type)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream1 = NULL;
+    mm_camera_stream_t *stream2 = NULL;
+    mm_camera_stream_type_t type1;
+    mm_camera_stream_type_t type2;
+
+    if(my_obj->ch[ch_type].acquired) {
+        rc = MM_CAMERA_OK;
+        goto end;
+    }
+    pthread_mutex_init(&my_obj->ch[ch_type].mutex, NULL);
+    switch(ch_type) {
+    case MM_CAMERA_CH_RAW:
+        stream1 = &my_obj->ch[ch_type].raw.stream;
+        type1 = MM_CAMERA_STREAM_RAW;
+        break;
+    case MM_CAMERA_CH_PREVIEW:
+        stream1 = &my_obj->ch[ch_type].preview.stream;
+        type1 = MM_CAMERA_STREAM_PREVIEW;
+        break;
+    case MM_CAMERA_CH_VIDEO:
+        stream1 = &my_obj->ch[ch_type].video.video;
+        type1 = MM_CAMERA_STREAM_VIDEO;
+        /* no full image live shot by default */
+        my_obj->ch[ch_type].video.has_main = FALSE;
+        break;
+    case MM_CAMERA_CH_SNAPSHOT:
+        stream1 = &my_obj->ch[ch_type].snapshot.main;
+        type1 = MM_CAMERA_STREAM_SNAPSHOT;
+        if (!my_obj->full_liveshot) {
+            stream2 = &my_obj->ch[ch_type].snapshot.thumbnail;
+            type2 = MM_CAMERA_STREAM_THUMBNAIL;
+        }
+        break;
+    default:
+        return -1;
+        break;
+    }
+    if(stream1) rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream1,
+                                            MM_CAMERA_STATE_EVT_ACQUIRE, &type1);
+    if(stream2 && !rc) rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream2,
+                                            MM_CAMERA_STATE_EVT_ACQUIRE, &type2);
+    if(rc == MM_CAMERA_OK) {
+        my_obj->ch[ch_type].acquired = TRUE;
+    }
+
+end:
+    return rc;
+}
+
+static int32_t mm_camera_ch_util_release(mm_camera_obj_t * my_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_state_evt_type_t evt)
+{
+    mm_camera_stream_t *stream1, *stream2;
+
+    if(!my_obj->ch[ch_type].acquired) return MM_CAMERA_OK;
+
+    mm_camera_ch_util_get_stream_objs(my_obj,ch_type, &stream1, &stream2);
+    if(stream1)
+        mm_camera_stream_fsm_fn_vtbl(my_obj, stream1, evt, NULL);
+    if(stream2)
+        mm_camera_stream_fsm_fn_vtbl(my_obj, stream2, evt, NULL);
+    pthread_mutex_destroy(&my_obj->ch[ch_type].mutex);
+    memset(&my_obj->ch[ch_type],0,sizeof(my_obj->ch[ch_type]));
+    return 0;
+}
+
+static int32_t mm_camera_ch_util_stream_null_val(mm_camera_obj_t * my_obj,
+                                                 mm_camera_channel_type_t ch_type,
+                                                            mm_camera_state_evt_type_t evt, void *val)
+{
+        int32_t rc = 0;
+        switch(ch_type) {
+        case MM_CAMERA_CH_RAW:
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj, &my_obj->ch[ch_type].raw.stream,
+                                              evt, NULL);
+            break;
+        case MM_CAMERA_CH_PREVIEW:
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj, &my_obj->ch[ch_type].preview.stream,
+                                              evt, NULL);
+            break;
+        case MM_CAMERA_CH_VIDEO:
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                            &my_obj->ch[ch_type].video.video, evt,
+                            NULL);
+            if(!rc && my_obj->ch[ch_type].video.main.fd)
+                rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                &my_obj->ch[ch_type].video.main, evt,
+                                NULL);
+            break;
+        case MM_CAMERA_CH_SNAPSHOT:
+            my_obj->ch[ch_type].snapshot.expected_matching_id = 0;
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                            &my_obj->ch[ch_type].snapshot.main, evt,
+                            NULL);
+            if(!rc && !my_obj->full_liveshot)
+                rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                &my_obj->ch[ch_type].snapshot.thumbnail, evt,
+                                NULL);
+            break;
+        default:
+            CDBG_ERROR("%s: Invalid ch_type=%d", __func__, ch_type);
+            rc = -1;
+            break;
+        }
+        return rc;
+}
+
+static int32_t mm_camera_ch_util_reg_buf(mm_camera_obj_t * my_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_state_evt_type_t evt, void *val)
+{
+        int32_t rc = 0;
+        switch(ch_type) {
+        case MM_CAMERA_CH_RAW:
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                             &my_obj->ch[ch_type].raw.stream, evt,
+                                             (mm_camera_buf_def_t *)val);
+            break;
+        case MM_CAMERA_CH_PREVIEW:
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                             &my_obj->ch[ch_type].preview.stream, evt,
+                                             (mm_camera_buf_def_t *)val);
+            break;
+        case MM_CAMERA_CH_VIDEO:
+            {
+                mm_camera_buf_video_t * buf = (mm_camera_buf_video_t *)val;
+                rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                &my_obj->ch[ch_type].video.video, evt,
+                                &buf->video);
+                if(!rc && my_obj->ch[ch_type].video.has_main) {
+                    rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                    &my_obj->ch[ch_type].video.main, evt,
+                                    &buf->main);
+                }
+            }
+            break;
+        case MM_CAMERA_CH_SNAPSHOT:
+            {
+                mm_camera_buf_snapshot_t * buf = (mm_camera_buf_snapshot_t *)val;
+                rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                &my_obj->ch[ch_type].snapshot.main, evt,
+                                &buf->main);
+                if(!rc && !my_obj->full_liveshot) {
+                    rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                    &my_obj->ch[ch_type].snapshot.thumbnail, evt,
+                                    & buf->thumbnail);
+                }
+            }
+            break;
+        default:
+            return -1;
+            break;
+        }
+        return rc;
+}
+
+static int32_t mm_camera_ch_util_attr(mm_camera_obj_t *my_obj,
+                                      mm_camera_channel_type_t ch_type,
+                                      mm_camera_channel_attr_t *val)
+{
+    int rc = -MM_CAMERA_E_NOT_SUPPORTED;
+    /*if(ch_type != MM_CAMERA_CH_RAW) {
+        CDBG("%s: attr type %d not support for ch %d\n", __func__, val->type, ch_type);
+        return rc;
+    }*/
+    if(my_obj->ch[ch_type].acquired == 0) {
+      CDBG_ERROR("%s Channel %d not yet acquired ", __func__, ch_type);
+      return -MM_CAMERA_E_INVALID_OPERATION;
+    }
+    switch(val->type) {
+    case MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE:
+        if(val->raw_streaming_mode == MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE) {
+            my_obj->ch[ch_type].raw.mode = val->raw_streaming_mode;
+            rc = MM_CAMERA_OK;
+        }
+        break;
+    case MM_CAMERA_CH_ATTR_BUFFERING_FRAME:
+        /* TODO: validate the stream state before applying this attribute */
+        memcpy(&my_obj->ch[ch_type].buffering_frame, &val->buffering_frame,
+               sizeof(val->buffering_frame));
+        break;
+    default:
+        break;
+    }
+    return MM_CAMERA_OK;
+}
+
+static int32_t mm_camera_ch_util_reg_buf_cb(mm_camera_obj_t *my_obj,
+                                            mm_camera_channel_type_t ch_type,
+                                            mm_camera_buf_cb_t *val)
+{
+    /* TODO: this should return failure once all MM_CAMERA_BUF_CB_MAX
+     * callback slots are in use; doing so needs a thread-safe rc. */
+    int i;
+    ALOGE("%s: Trying to register",__func__);
+//    pthread_mutex_lock(&my_obj->ch[ch_type].mutex);
+    for( i=0 ;i < MM_CAMERA_BUF_CB_MAX; i++ ) {
+        if(my_obj->ch[ch_type].buf_cb[i].cb==NULL) {
+            memcpy(&my_obj->ch[ch_type].buf_cb[i],val,sizeof(mm_camera_buf_cb_t));
+            break;
+        }
+    }
+//    pthread_mutex_unlock(&my_obj->ch[ch_type].mutex);
+    ALOGE("%s: Done register",__func__);
+    return MM_CAMERA_OK;
+}
+
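+/* Re-queue (QBUF) a consumed buffer to the stream(s) backing the channel
+ * and, when USE_ION is defined, invalidate the CPU cache for the returned
+ * frame(s). */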
+static int32_t mm_camera_ch_util_qbuf(mm_camera_obj_t *my_obj,
+                                    mm_camera_channel_type_t ch_type,
+                                    mm_camera_state_evt_type_t evt,
+                                    mm_camera_ch_data_buf_t *val)
+{
+    int32_t rc = -1;
+    mm_camera_stream_t *stream;
+    struct ion_flush_data cache_inv_data;
+    int ion_fd;
+    struct msm_frame *cache_frame;
+    struct msm_frame *cache_frame1 = NULL;
+
+    CDBG("<DEBUG>: %s:ch_type:%d",__func__,ch_type);
+    switch(ch_type) {
+    case MM_CAMERA_CH_RAW:
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                          &my_obj->ch[ch_type].raw.stream, evt,
+                                                                     &val->def);
+        cache_frame = val->def.frame;
+        break;
+    case MM_CAMERA_CH_PREVIEW:
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                         &my_obj->ch[ch_type].preview.stream, evt,
+                                         &val->def);
+        cache_frame = val->def.frame;
+        CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
+         val->def.frame->fd, val->def.frame->ion_alloc.len, val->def.frame->buffer);
+        break;
+    case MM_CAMERA_CH_VIDEO:
+        {
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                            &my_obj->ch[ch_type].video.video, evt,
+                            &val->video.video);
+            cache_frame = val->video.video.frame;
+            CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
+                 val->video.video.frame->fd, val->video.video.frame->ion_alloc.len, val->video.video.frame->buffer);
+
+            if(!rc && val->video.main.frame) {
+                rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                &my_obj->ch[ch_type].video.main, evt,
+                                &val->video.main);
+                cache_frame1 = val->video.main.frame;
+            }
+        }
+        break;
+    case MM_CAMERA_CH_SNAPSHOT:
+        {
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                            &my_obj->ch[ch_type].snapshot.main, evt,
+                            &val->snapshot.main);
+            cache_frame = val->snapshot.main.frame;
+            CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
+                 val->snapshot.main.frame->fd, val->snapshot.main.frame->ion_alloc.len, val->snapshot.main.frame->buffer);
+            if(!rc) {
+                if (my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL)
+                  stream = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
+                else
+                  stream = &my_obj->ch[ch_type].snapshot.thumbnail;
+                rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                stream, evt,
+                                &val->snapshot.thumbnail);
+                cache_frame1 = val->snapshot.thumbnail.frame;
+                CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
+                 val->snapshot.thumbnail.frame->fd, val->snapshot.thumbnail.frame->ion_alloc.len, val->snapshot.thumbnail.frame->buffer);
+            }
+        }
+        break;
+    default:
+        return -1;
+    }
+#ifdef USE_ION
+    cache_inv_data.vaddr = cache_frame->buffer;
+    cache_inv_data.fd = cache_frame->fd;
+    cache_inv_data.handle = cache_frame->fd_data.handle;
+    cache_inv_data.length = cache_frame->ion_alloc.len;
+    ion_fd = cache_frame->ion_dev_fd;
+    if(ion_fd > 0) {
+        if(ioctl(ion_fd, ION_IOC_INV_CACHES, &cache_inv_data) < 0)
+            CDBG_ERROR("%s: Cache Invalidate failed\n", __func__);
+        else {
+            CDBG("%s: Successful cache invalidate\n", __func__);
+            if(cache_frame1) {
+              ion_fd = cache_frame1->ion_dev_fd;
+              cache_inv_data.vaddr = cache_frame1->buffer;
+              cache_inv_data.fd = cache_frame1->fd;
+              cache_inv_data.handle = cache_frame1->fd_data.handle;
+              cache_inv_data.length = cache_frame1->ion_alloc.len;
+              if(ioctl(ion_fd, ION_IOC_INV_CACHES, &cache_inv_data) < 0)
+                CDBG_ERROR("%s: Cache Invalidate failed\n", __func__);
+              else
+                CDBG("%s: Successful cache invalidate\n", __func__);
+            }
+        }
+    }
+#endif
+
+    return rc;
+}
+
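+/* Fetch crop information for the stream(s) that make up the channel; for
+ * snapshot channels the thumbnail crop is also fetched unless a full-size
+ * liveshot is in progress. */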
+static int mm_camera_ch_util_get_crop(mm_camera_obj_t *my_obj,
+                                mm_camera_channel_type_t ch_type,
+                                mm_camera_state_evt_type_t evt,
+                                mm_camera_ch_crop_t *crop)
+{
+    int rc = MM_CAMERA_OK;
+    switch(ch_type) {
+    case MM_CAMERA_CH_RAW:
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                       &my_obj->ch[ch_type].raw.stream, evt,
+                                       &crop->crop);
+        break;
+    case MM_CAMERA_CH_PREVIEW:
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                                    &my_obj->ch[ch_type].preview.stream, evt,
+                                    &crop->crop);
+        break;
+    case MM_CAMERA_CH_VIDEO:
+        rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                          &my_obj->ch[ch_type].video.video, evt,
+                          &crop->crop);
+        break;
+    case MM_CAMERA_CH_SNAPSHOT:
+        {
+            rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                          &my_obj->ch[ch_type].snapshot.main, evt,
+                          &crop->snapshot.main_crop);
+            if(!rc && !my_obj->full_liveshot) {
+              ALOGE("%s: should not come here for Live Shot", __func__);
+              rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
+                              &my_obj->ch[ch_type].snapshot.thumbnail, evt,
+                              &crop->snapshot.thumbnail_crop);
+            }
+        }
+        break;
+    default:
+        return -1;
+    }
+    return rc;
+}
+
+static int mm_camera_ch_util_dispatch_buffered_frame(mm_camera_obj_t *my_obj,
+                mm_camera_channel_type_t ch_type)
+{
+    return mm_camera_poll_dispatch_buffered_frames(my_obj, ch_type);
+}
+
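+/* Return (cur_ts - frame_ts) in microseconds, minus usec_target; a positive
+ * result means the frame is older than the target interval. */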
+int mm_camera_channel_get_time_diff(struct timespec *cur_ts, int usec_target, struct timespec *frame_ts)
+{
+    int dtusec = (cur_ts->tv_nsec - frame_ts->tv_nsec)/1000;
+    dtusec += (cur_ts->tv_sec - frame_ts->tv_sec)*1000000 - usec_target;
+    return dtusec;
+}
+
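+/* ZSL helper: drain matched main/thumbnail frames from the ready queues,
+ * keeping only the most recent 'look_back' entries, and hand the drained
+ * buffers back to the driver via buf_done. */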
+static int mm_camera_channel_skip_frames(mm_camera_obj_t *my_obj,
+                                          mm_camera_frame_queue_t *mq,
+                                          mm_camera_frame_queue_t *sq,
+                                          mm_camera_stream_t *mstream,
+                                          mm_camera_stream_t *sstream,
+                                          mm_camera_channel_attr_buffering_frame_t *frame_attr)
+{
+    int count = 0;
+    int i = 0;
+    mm_camera_frame_t *mframe = NULL, *sframe = NULL;
+    mm_camera_notify_frame_t notify_frame;
+
+    count = mm_camera_stream_frame_get_q_cnt(mq);
+    if(count < mm_camera_stream_frame_get_q_cnt(sq))
+        count = mm_camera_stream_frame_get_q_cnt(sq);
+    CDBG("%s: Q-size=%d, look_back =%d, M_match=%d, T_match=%d", __func__,
+         count, frame_attr->look_back, mq->match_cnt, sq->match_cnt);
+
+    count -= frame_attr->look_back;
+    CDBG("count=%d, frame_attr->look_back=%d,mq->match_cnt=%d, sq->match_cnt=%d",
+               count, frame_attr->look_back, mq->match_cnt,sq->match_cnt);
+    for(i=0; i < count; i++) {
+        mframe = mm_camera_stream_frame_deq(mq);
+        sframe = mm_camera_stream_frame_deq(sq);
+        if(mframe && sframe && mframe->frame.frame_id ==
+           sframe->frame.frame_id) {
+          mq->match_cnt--;
+          sq->match_cnt--;
+        }
+        if(mframe) {
+            notify_frame.frame = &mframe->frame;
+            notify_frame.idx = mframe->idx;
+            mm_camera_stream_util_buf_done(my_obj, mstream, &notify_frame);
+        }
+        if(sframe) {
+            notify_frame.frame = &sframe->frame;
+            notify_frame.idx = sframe->idx;
+            mm_camera_stream_util_buf_done(my_obj, sstream, &notify_frame);
+        }
+    }
+
+    CDBG("Post %s: Q-size=%d, look_back =%d, M_match=%d, T_match=%d", __func__,
+         count, frame_attr->look_back, mq->match_cnt, sq->match_cnt);
+    return MM_CAMERA_OK;
+}
+
+/* For ZSL mode: send matched main/thumbnail image pairs to the client. */
+void mm_camera_dispatch_buffered_frames(mm_camera_obj_t *my_obj,
+                                        mm_camera_channel_type_t ch_type)
+{
+    int i, rc = MM_CAMERA_E_GENERAL;
+    int num_of_req_frame = 0;
+    int j;
+    mm_camera_ch_data_buf_t data;
+    mm_camera_frame_t *mframe = NULL, *sframe = NULL;
+    mm_camera_frame_t *qmframe = NULL, *qsframe = NULL;
+    mm_camera_ch_t *ch = &my_obj->ch[ch_type];
+    mm_camera_frame_queue_t *mq = NULL;
+    mm_camera_frame_queue_t *sq = NULL;
+    mm_camera_stream_t *stream1 = NULL;
+    mm_camera_stream_t *stream2 = NULL;
+    ALOGE("%s: E", __func__);
+    mm_camera_ch_util_get_stream_objs(my_obj, ch_type, &stream1, &stream2);
+    stream2 = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
+    if(stream1) {
+      mq = &stream1->frame.readyq;
+    }
+    if(stream2) {
+      sq = &stream2->frame.readyq;
+    }
+    CDBG("mq=%p, sq=%p, stream1=%p, stream2=%p", mq, sq, stream1, stream2);
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+    if (mq && sq && stream1 && stream2) {
+        rc = mm_camera_channel_skip_frames(my_obj, mq, sq, stream1, stream2, &ch->buffering_frame);
+        if(rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s: Error getting right frame!", __func__);
+            goto end;
+        }
+        num_of_req_frame = my_obj->snap_burst_num_by_user;
+        ch->snapshot.pending_cnt = num_of_req_frame;
+
+        CDBG("num_of_req_frame =%d", num_of_req_frame);
+        for(i = 0; i < num_of_req_frame; i++) {
+            mframe = mm_camera_stream_frame_deq(mq);
+            sframe = mm_camera_stream_frame_deq(sq);
+            if(mframe && sframe) {
+                CDBG("%s: frame_id = 0x%x|0x%x, main idx = %d, thumbnail idx = %d", __func__,
+                     mframe->frame.frame_id, sframe->frame.frame_id, mframe->idx, sframe->idx);
+                if(mframe->frame.frame_id != sframe->frame.frame_id) {
+                    CDBG_ERROR("%s: ZSL algorithm error, main and thumbnail "
+                        "frame_ids not same. Need bug fix", __func__);
+                }
+                memset(&data, 0, sizeof(data));
+                data.type = ch_type;
+                data.snapshot.main.frame = &mframe->frame;
+                data.snapshot.main.idx = mframe->idx;
+                data.snapshot.thumbnail.frame = &sframe->frame;
+                data.snapshot.thumbnail.idx = sframe->idx;
+                ch->snapshot.pending_cnt--;
+                mq->match_cnt--;
+                sq->match_cnt--;
+                for(j=0;j<MM_CAMERA_BUF_CB_MAX;j++) {
+                    if( ch->buf_cb[j].cb!=NULL )
+                        ch->buf_cb[j].cb(&data, ch->buf_cb[j].user_data);
+                }
+            } else {
+               CDBG_ERROR("%s: mframe %p, sframe = %p", __func__, mframe, sframe);
+                qmframe = mframe;
+                qsframe = sframe;
+                rc = -1;
+                break;
+            }
+        }
+        if(qmframe) {
+            mm_camera_stream_frame_enq(mq, &stream1->frame.frame[qmframe->idx]);
+            qmframe = NULL;
+        }
+        if(qsframe) {
+            mm_camera_stream_frame_enq(sq, &stream2->frame.frame[qsframe->idx]);
+            qsframe = NULL;
+        }
+    } else {
+      CDBG_ERROR(" mq =%p sq =%p stream1 =%p stream2 =%p", mq, sq , stream1 , stream2);
+
+    }
+    CDBG("%s: burst number: %d, pending_count: %d", __func__,
+        my_obj->snap_burst_num_by_user, ch->snapshot.pending_cnt);
+end:
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+    /* If we are done sending callbacks for all the requested number of snapshots
+       send data delivery done event*/
+    if((rc == MM_CAMERA_OK) && (!ch->snapshot.pending_cnt)) {
+        mm_camera_event_t data;
+        data.event_type = MM_CAMERA_EVT_TYPE_CH;
+        data.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+        data.e.ch.ch = ch_type;
+        mm_camera_poll_send_ch_event(my_obj, &data);
+    }
+}
+
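+/* Channel state-machine entry point: dispatches a channel-level event
+ * (acquire/release, attributes, buffer registration, stream on/off, qbuf,
+ * crop query, buffered-frame dispatch) to the helper routines above. */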
+int32_t mm_camera_ch_fn(mm_camera_obj_t * my_obj,
+        mm_camera_channel_type_t ch_type,
+        mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    CDBG("%s:ch = %d, evt=%d\n", __func__, ch_type, evt);
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_ACQUIRE:
+        rc = mm_camera_ch_util_acquire(my_obj, ch_type);
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+      /* safe code in case no stream off before release. */
+        //mm_camera_poll_thread_release(my_obj, ch_type);
+        rc = mm_camera_ch_util_release(my_obj, ch_type, evt);
+        break;
+    case MM_CAMERA_STATE_EVT_ATTR:
+        rc = mm_camera_ch_util_attr(my_obj, ch_type,
+                                    (mm_camera_channel_attr_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_REG_BUF_CB:
+        rc = mm_camera_ch_util_reg_buf_cb(my_obj, ch_type,
+                                          (mm_camera_buf_cb_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_SET_FMT:
+        rc = mm_camera_ch_util_set_fmt(my_obj, ch_type,
+                                       (mm_camera_ch_image_fmt_parm_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_REG_BUF:
+    case MM_CAMERA_STATE_EVT_REQUEST_BUF:
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_ch_util_reg_buf(my_obj, ch_type, evt, val);
+        break;
+    case MM_CAMERA_STATE_EVT_UNREG_BUF:
+        rc = mm_camera_ch_util_stream_null_val(my_obj, ch_type, evt, NULL);
+        break;
+    case MM_CAMERA_STATE_EVT_STREAM_ON: {
+        if(ch_type == MM_CAMERA_CH_RAW &&
+             my_obj->ch[ch_type].raw.mode == MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE) {
+            if( MM_CAMERA_OK != (rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                MSM_V4L2_PID_CAM_MODE, MSM_V4L2_CAM_OP_RAW))) {
+                CDBG("%s:set MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE err=%d\n", __func__, rc);
+                break;
+            }
+        }
+        mm_camera_poll_thread_add_ch(my_obj, ch_type);
+        rc = mm_camera_ch_util_stream_null_val(my_obj, ch_type, evt, NULL);
+        if(rc < 0) {
+          CDBG_ERROR("%s: Failed in STREAM ON", __func__);
+          mm_camera_poll_thread_release(my_obj, ch_type);
+        }
+        break;
+    }
+    case MM_CAMERA_STATE_EVT_STREAM_OFF: {
+        mm_camera_poll_thread_del_ch(my_obj, ch_type);
+        rc = mm_camera_ch_util_stream_null_val(my_obj, ch_type, evt, NULL);
+        break;
+    }
+    case MM_CAMERA_STATE_EVT_QBUF:
+        rc = mm_camera_ch_util_qbuf(my_obj, ch_type, evt,
+                                    (mm_camera_ch_data_buf_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+      rc = mm_camera_ch_util_get_crop(my_obj, ch_type, evt,
+                                  (mm_camera_ch_crop_t *)val);
+      break;
+    case MM_CAMERA_STATE_EVT_DISPATCH_BUFFERED_FRAME:
+      rc = mm_camera_ch_util_dispatch_buffered_frame(my_obj, ch_type);
+      break;
+    default:
+        break;
+    }
+    return rc;
+}
diff --git a/camera/mm-camera-interface/mm_camera_dbg.h b/camera/mm-camera-interface/mm_camera_dbg.h
new file mode 100644
index 0000000..5204429
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_dbg.h
@@ -0,0 +1,70 @@
+/*
+Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+//#define LOG_DEBUG 1
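+/* Define LOG_DEBUG above to keep verbose CDBG() traces; when it is left
+ * undefined, CDBG() compiles to a no-op and only CDBG_HIGH()/CDBG_ERROR()
+ * remain (ALOGE on Android builds, stderr otherwise). */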
+
+#ifndef LOG_DEBUG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-libcamera2"
+    #include <utils/Log.h>
+  #else
+    #include <stdio.h>
+    #define ALOGE CDBG
+  #endif
+  #undef CDBG
+  #define CDBG(fmt, args...) do{}while(0)
+#else
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-libcamera2"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+  #else
+    #include <stdio.h>
+    #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+    #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+  #endif
+#endif
+
+#ifdef _ANDROID_
+  #define CDBG_HIGH(fmt, args...)  ALOGE(fmt, ##args)
+  #define CDBG_ERROR(fmt, args...)  ALOGE(fmt, ##args)
+#else
+  #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_CAMERA_DBG_H__ */
diff --git a/camera/mm-camera-interface/mm_camera_helper.c b/camera/mm-camera-interface/mm_camera_helper.c
new file mode 100644
index 0000000..7d0377d
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_helper.c
@@ -0,0 +1,318 @@
+/*
+Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <errno.h>
+#include <string.h>
+#include "mm_camera_dbg.h"
+#include <time.h>
+#include "mm_camera_interface2.h"
+#include <linux/ion.h>
+#include "camera.h"
+
+#define MM_CAMERA_PROFILE 1
+
+struct file;
+struct inode;
+struct vm_area_struct;
+
+/*===========================================================================
+ * FUNCTION    - do_mmap -
+ *
+ * DESCRIPTION: returns the mmap'ed virtual address of the allocated buffer
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd)
+{
+    void *ret; /* returned virtual address */
+    int  pmem_fd = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+
+    if (pmem_fd <= 0) {
+        CDBG("do_mmap: Open device /dev/pmem_adsp failed!\n");
+        return NULL;
+    }
+    /* to make it page size aligned */
+    size = (size + 4095) & (~4095);
+    ret = mmap(NULL,
+               size,
+               PROT_READ | PROT_WRITE,
+               MAP_SHARED,
+               pmem_fd,
+               0);
+    if (ret == MAP_FAILED) {
+        CDBG("do_mmap: pmem mmap() failed: %s (%d)\n", strerror(errno), errno);
+        close(pmem_fd);
+        return NULL;
+    }
+    CDBG("do_mmap: pmem mmap fd %d ptr %p len %u\n", pmem_fd, ret, size);
+    *pmemFd = pmem_fd;
+    return(uint8_t *)ret;
+}
+
+/*===========================================================================
+ * FUNCTION    - do_munmap -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int mm_camera_do_munmap(int pmem_fd, void *addr, size_t size)
+{
+    int rc;
+
+    if (pmem_fd <= 0) {
+        CDBG("%s:invalid fd=%d\n", __func__, pmem_fd);
+        return -1;
+    }
+    size = (size + 4095) & (~4095);
+    CDBG("munmapped size = %d, virt_addr = 0x%p\n",
+    size, addr);
+    rc = (munmap(addr, size));
+    close(pmem_fd);
+    CDBG("do_mmap: pmem munmap fd %d ptr %p len %u rc %d\n", pmem_fd, addr,
+    size, rc);
+    return rc;
+}
+
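+/* When USE_ION is defined, frame buffers are allocated and mapped through
+ * the ION allocator rather than /dev/pmem_adsp. */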
+#ifdef USE_ION
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+		     struct ion_fd_data *ion_info_fd, int *mapFd)
+{
+  void *ret; /* returned virtual address */
+  int rc = 0;
+  struct ion_handle_data handle_data;
+
+  /* to make it page size aligned */
+  alloc->len = (alloc->len + 4095) & (~4095);
+
+  rc = ioctl(ion_fd, ION_IOC_ALLOC, alloc);
+  if (rc < 0) {
+    CDBG_ERROR("ION allocation failed\n");
+    goto ION_ALLOC_FAILED;
+  }
+
+  ion_info_fd->handle = alloc->handle;
+  rc = ioctl(ion_fd, ION_IOC_SHARE, ion_info_fd);
+  if (rc < 0) {
+    CDBG_ERROR("ION map failed %s\n", strerror(errno));
+    goto ION_MAP_FAILED;
+  }
+  *mapFd = ion_info_fd->fd;
+  ret = mmap(NULL,
+    alloc->len,
+    PROT_READ  | PROT_WRITE,
+    MAP_SHARED,
+    *mapFd,
+    0);
+
+  if (ret == MAP_FAILED) {
+    CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+    goto ION_MAP_FAILED;
+  }
+
+  return ret;
+
+ION_MAP_FAILED:
+  handle_data.handle = ion_info_fd->handle;
+  ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+  return NULL;
+}
+
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+                   void *addr, size_t size)
+{
+  int rc = 0;
+  rc = munmap(addr, size);
+  close(ion_info_fd->fd);
+
+  struct ion_handle_data handle_data;
+  handle_data.handle = ion_info_fd->handle;
+  ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+  return rc;
+}
+#endif
+
+/*============================================================
+   FUNCTION mm_camera_dump_image
+   DESCRIPTION:
+==============================================================*/
+int mm_camera_dump_image(void *addr, uint32_t size, char *filename)
+{
+  int file_fd = open(filename, O_RDWR | O_CREAT, 0777);
+
+  if (file_fd < 0) {
+    CDBG_HIGH("%s: cannot open file\n", __func__);
+    return -1;
+  }
+  write(file_fd, addr, size);
+  close(file_fd);
+  CDBG("%s: %s, size=%d\n", __func__, filename, size);
+  return 0;
+}
+
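+/* Compute per-plane sizes and the total frame length (bytes) for the given
+ * pixel format, resolution and output image type; planes are padded per the
+ * output type (2K alignment for video, word alignment otherwise). */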
+uint32_t mm_camera_get_msm_frame_len(cam_format_t fmt_type,
+                                     camera_mode_t mode,
+                                     int width,
+                                     int height,
+                                     int image_type,
+                                     uint8_t *num_planes,
+                                     uint32_t plane[])
+{
+    uint32_t size;
+    *num_planes = 0;
+    int local_height;
+
+    switch (fmt_type) {
+    case CAMERA_YUV_420_NV12:
+    case CAMERA_YUV_420_NV21:
+        *num_planes = 2;
+        if(CAMERA_MODE_3D == mode) {
+            size = (uint32_t)(PAD_TO_2K(width*height)*3/2);
+            plane[0] = PAD_TO_WORD(width*height);
+        } else {
+            if (image_type == OUTPUT_TYPE_V) {
+                plane[0] = PAD_TO_2K(width * height);
+                plane[1] = PAD_TO_2K(width * height/2);
+            } else if (image_type == OUTPUT_TYPE_P) {
+                plane[0] = PAD_TO_WORD(width * height);
+                plane[1] = PAD_TO_WORD(width * height/2);
+            } else {
+                plane[0] = PAD_TO_WORD(width * CEILING16(height));
+                plane[1] = PAD_TO_WORD(width * CEILING16(height)/2);
+            }
+            size = plane[0] + plane[1];
+        }
+        break;
+    case CAMERA_BAYER_SBGGR10:
+        *num_planes = 1;
+        plane[0] = PAD_TO_WORD(width * height);
+        size = plane[0];
+        break;
+    case CAMERA_YUV_422_NV16:
+    case CAMERA_YUV_422_NV61:
+        if (image_type == OUTPUT_TYPE_S || image_type == OUTPUT_TYPE_V) {
+            local_height = CEILING16(height);
+        } else {
+            local_height = height;
+        }
+        *num_planes = 2;
+        plane[0] = PAD_TO_WORD(width * height);
+        plane[1] = PAD_TO_WORD(width * height);
+        size = plane[0] + plane[1];
+        break;
+    default:
+        CDBG("%s: format %d not supported.\n",
+            __func__, fmt_type);
+        size = 0;
+    }
+    CDBG("%s:fmt=%d,image_type=%d,width=%d,height=%d,frame_len=%d\n",
+        __func__, fmt_type, image_type, width, height, size);
+    return size;
+}
+
+void mm_camera_util_profile(const char *str)
+{
+#if (MM_CAMERA_PROFILE)
+    struct timespec cur_time;
+
+    clock_gettime(CLOCK_REALTIME, &cur_time);
+    CDBG_HIGH("PROFILE %s: %ld.%09ld\n", str,
+    cur_time.tv_sec, cur_time.tv_nsec);
+#endif
+}
diff --git a/camera/mm-camera-interface/mm_camera_interface2.c b/camera/mm-camera-interface/mm_camera_interface2.c
new file mode 100644
index 0000000..5a7540e
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_interface2.c
@@ -0,0 +1,995 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/media.h>
+
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+#define SET_PARM_BIT32(parm, parm_arr) \
+    (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+    ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
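+/* g_mutex serializes access to the global camera table (g_cam_ctrl) below;
+ * per-camera state is protected by each mm_camera_obj_t's own mutex. */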
+static pthread_mutex_t g_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl;
+
+static int mm_camera_util_opcode_2_ch_type(mm_camera_obj_t *my_obj,
+                                        mm_camera_ops_type_t opcode)
+{
+    switch(opcode) {
+    case MM_CAMERA_OPS_PREVIEW:
+        return MM_CAMERA_CH_PREVIEW;
+    case MM_CAMERA_OPS_ZSL:
+    case MM_CAMERA_OPS_SNAPSHOT:
+        return MM_CAMERA_CH_SNAPSHOT;
+    case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+        return MM_CAMERA_CH_SNAPSHOT;
+    case MM_CAMERA_OPS_RAW:
+        return MM_CAMERA_CH_RAW;
+    default:
+        break;
+    }
+    return -1;
+}
+
+const char *mm_camera_util_get_dev_name(mm_camera_obj_t * my_obj)
+{
+    CDBG("%s: Returning %s at index :%d\n",
+        __func__,g_cam_ctrl.camera[my_obj->my_id].video_dev_name,my_obj->my_id);
+    return g_cam_ctrl.camera[my_obj->my_id].video_dev_name;
+}
+
+/* used for querying the camera_info of the given camera_id */
+static const camera_info_t * mm_camera_cfg_query_camera_info (int8_t camera_id)
+{
+    if(camera_id >= MSM_MAX_CAMERA_SENSORS)
+        return NULL;
+    return &g_cam_ctrl.camera[camera_id].camera_info;
+}
+/* check if the parm is supported */
+static uint8_t mm_camera_cfg_is_parm_supported (mm_camera_t * camera,
+                                                mm_camera_parm_type_t parm_type)
+{
+    int is_parm_supported = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        is_parm_supported = GET_PARM_BIT32(parm_type,
+                                           my_obj->properties.parm);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+
+    return is_parm_supported;
+}
+
+/* check if the channel is supported */
+static uint8_t mm_camera_cfg_is_ch_supported (mm_camera_t * camera,
+                                  mm_camera_channel_type_t ch_type)
+{
+    switch(ch_type) {
+    case MM_CAMERA_CH_PREVIEW:
+    case MM_CAMERA_CH_VIDEO:
+    case MM_CAMERA_CH_SNAPSHOT:
+    case MM_CAMERA_CH_RAW:
+        return TRUE;
+    case MM_CAMERA_CH_MAX:
+    default:
+        return FALSE;
+    }
+    return FALSE;
+}
+
+/* set a parm's current value */
+static int32_t mm_camera_cfg_set_parm (mm_camera_t * camera,
+                                       mm_camera_parm_type_t parm_type,
+                                       void *p_value)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    uint32_t tmp;
+    mm_camera_obj_t * my_obj = NULL;
+    mm_camera_parm_t parm = {.parm_type = parm_type, .p_value = p_value};
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc = mm_camera_set_parm(my_obj, &parm);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+/* get a parm's current value */
+static int32_t mm_camera_cfg_get_parm (mm_camera_t * camera,
+                                       mm_camera_parm_type_t parm_type,
+                                       void* p_value)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    uint32_t tmp;
+    mm_camera_obj_t * my_obj = NULL;
+    mm_camera_parm_t parm = {.parm_type = parm_type, .p_value = p_value};
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc =  mm_camera_get_parm(my_obj, &parm);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+static int32_t mm_camera_cfg_request_buf(mm_camera_t * camera,
+                                         mm_camera_reg_buf_t *buf)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    uint32_t tmp;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc =  mm_camera_request_buf(my_obj, buf);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+static int32_t mm_camera_cfg_enqueue_buf(mm_camera_t * camera,
+                                         mm_camera_reg_buf_t *buf)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    uint32_t tmp;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc =  mm_camera_enqueue_buf(my_obj, buf);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+static int32_t mm_camera_cfg_prepare_buf(mm_camera_t * camera,
+                                         mm_camera_reg_buf_t *buf)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    uint32_t tmp;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc =  mm_camera_prepare_buf(my_obj, buf);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+static int32_t mm_camera_cfg_unprepare_buf(mm_camera_t * camera,
+                                           mm_camera_channel_type_t ch_type)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    uint32_t tmp;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc =  mm_camera_unprepare_buf(my_obj,ch_type);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
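+/* Configuration vtable handed to clients through mm_camera_t.cfg. */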
+static mm_camera_config_t mm_camera_cfg = {
+  .is_parm_supported = mm_camera_cfg_is_parm_supported,
+  .is_ch_supported = mm_camera_cfg_is_ch_supported,
+  .set_parm = mm_camera_cfg_set_parm,
+  .get_parm = mm_camera_cfg_get_parm,
+  .request_buf = mm_camera_cfg_request_buf,
+  .enqueue_buf = mm_camera_cfg_enqueue_buf,
+  .prepare_buf = mm_camera_cfg_prepare_buf,
+  .unprepare_buf = mm_camera_cfg_unprepare_buf
+};
+
+static uint8_t mm_camera_ops_is_op_supported (mm_camera_t * camera,
+                                              mm_camera_ops_type_t opcode)
+{
+    uint8_t is_ops_supported;
+    mm_camera_obj_t * my_obj = NULL;
+    int index = 0;
+    mm_camera_legacy_ops_type_t legacy_opcode = CAMERA_OPS_MAX;
+
+    /* Temp: We will be translating our new opcode
+       to legacy ops type. This is just a hack to
+       temporarily unblock APT team. New design is
+       under discussion */
+    switch (opcode) {
+    case MM_CAMERA_OPS_PREVIEW:
+        legacy_opcode = CAMERA_OPS_STREAMING_PREVIEW;
+        break;
+    case MM_CAMERA_OPS_VIDEO:
+        legacy_opcode = CAMERA_OPS_STREAMING_VIDEO;
+        break;
+    case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+        legacy_opcode = CAMERA_OPS_PREPARE_SNAPSHOT;
+        break;
+    case MM_CAMERA_OPS_SNAPSHOT:
+        legacy_opcode = CAMERA_OPS_SNAPSHOT;
+        break;
+    case MM_CAMERA_OPS_RAW:
+        legacy_opcode = CAMERA_OPS_RAW_CAPTURE;
+        break;
+    case MM_CAMERA_OPS_ZSL:
+        legacy_opcode = CAMERA_OPS_STREAMING_ZSL;
+        break;
+    case MM_CAMERA_OPS_FOCUS:
+        legacy_opcode = CAMERA_OPS_FOCUS;
+        break;
+    case MM_CAMERA_OPS_GET_BUFFERED_FRAME:
+      legacy_opcode = CAMERA_OPS_LOCAL;
+      is_ops_supported = TRUE;
+      CDBG("MM_CAMERA_OPS_GET_BUFFERED_FRAME not handled");
+      break;
+    default:
+      CDBG_ERROR("%s: case %d not handled", __func__, opcode);
+      legacy_opcode = CAMERA_OPS_LOCAL;
+      is_ops_supported = FALSE;
+      break;
+    }
+    if (legacy_opcode != CAMERA_OPS_LOCAL) {
+        pthread_mutex_lock(&g_mutex);
+        my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+        pthread_mutex_unlock(&g_mutex);
+        if(my_obj) {
+            pthread_mutex_lock(&my_obj->mutex);
+            index = legacy_opcode/32;  /* 32 bits per word */
+            is_ops_supported = ((my_obj->properties.ops[index] &
+                (1 << (legacy_opcode % 32))) != 0);
+            pthread_mutex_unlock(&my_obj->mutex);
+        } else {
+            is_ops_supported = FALSE;
+        }
+    }
+
+    return is_ops_supported;
+}
+
+static int32_t mm_camera_ops_action (mm_camera_t * camera, uint8_t start,
+                                    mm_camera_ops_type_t opcode, void *val)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    mm_camera_obj_t * my_obj = NULL;
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc = mm_camera_action(my_obj, start, opcode, val);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+/* open uses flags to optionally disable jpeg/vpe interface. */
+static int32_t mm_camera_ops_open (mm_camera_t * camera,
+                                   mm_camera_op_mode_type_t op_mode)
+{
+    int8_t camera_id = camera->camera_info.camera_id;
+    int32_t rc = MM_CAMERA_OK;
+
+    CDBG("%s: BEGIN\n", __func__);
+    pthread_mutex_lock(&g_mutex);
+    /* not first open */
+    if(g_cam_ctrl.cam_obj[camera_id]) {
+        g_cam_ctrl.cam_obj[camera_id]->ref_count++;
+    CDBG("%s:  opened alreadyn", __func__);
+        goto end;
+    }
+    g_cam_ctrl.cam_obj[camera_id] =
+        (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+    if(!g_cam_ctrl.cam_obj[camera_id]) {
+        rc = -MM_CAMERA_E_NO_MEMORY;
+        CDBG("%s: no mem", __func__);
+        goto end;
+    }
+    memset(g_cam_ctrl.cam_obj[camera_id], 0,
+                 sizeof(mm_camera_obj_t));
+    //g_cam_ctrl.cam_obj[camera_id]->ctrl_fd = -1;
+    g_cam_ctrl.cam_obj[camera_id]->ref_count++;
+    g_cam_ctrl.cam_obj[camera_id]->my_id=camera_id;
+
+    pthread_mutex_init(&g_cam_ctrl.cam_obj[camera_id]->mutex, NULL);
+    rc = mm_camera_open(g_cam_ctrl.cam_obj[camera_id], op_mode);
+    if(rc < 0) {
+        CDBG("%s: open failed, rc = %d\n", __func__, rc);
+        pthread_mutex_destroy(&g_cam_ctrl.cam_obj[camera_id]->mutex);
+        g_cam_ctrl.cam_obj[camera_id]->ref_count--;
+        free(g_cam_ctrl.cam_obj[camera_id]);
+        g_cam_ctrl.cam_obj[camera_id]=NULL;
+    CDBG("%s: mm_camera_open err = %d", __func__, rc);
+        goto end;
+    } else {
+        CDBG("%s: open succeeded\n", __func__);
+    }
+end:
+    pthread_mutex_unlock(&g_mutex);
+    CDBG("%s: END, rc=%d\n", __func__, rc);
+    return rc;
+}
+
+static void mm_camera_ops_close (mm_camera_t * camera)
+{
+    mm_camera_obj_t * my_obj;
+    int i;
+    int8_t camera_id = camera->camera_info.camera_id;
+
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera_id];
+    if(my_obj) {
+      my_obj->ref_count--;
+      if(my_obj->ref_count > 0) {
+        CDBG("%s: ref_count=%d\n", __func__, my_obj->ref_count);
+      } else {
+        mm_camera_poll_thread_release(my_obj, MM_CAMERA_CH_MAX);
+        (void)mm_camera_close(g_cam_ctrl.cam_obj[camera_id]);
+        pthread_mutex_destroy(&my_obj->mutex);
+        free(my_obj);
+        g_cam_ctrl.cam_obj[camera_id] = NULL;
+      }
+    }
+    pthread_mutex_unlock(&g_mutex);
+}
+
+static int32_t mm_camera_ops_ch_acquire(mm_camera_t * camera,
+                                        mm_camera_channel_type_t ch_type)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    mm_camera_obj_t * my_obj = NULL;
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc = mm_camera_ch_acquire(my_obj, ch_type);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+
+}
+static void mm_camera_ops_ch_release(mm_camera_t * camera, mm_camera_channel_type_t ch_type)
+{
+    mm_camera_obj_t * my_obj = NULL;
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        mm_camera_ch_release(my_obj, ch_type);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+}
+
+static int32_t mm_camera_ops_ch_attr(mm_camera_t * camera,
+                                     mm_camera_channel_type_t ch_type,
+                                     mm_camera_channel_attr_t *attr)
+{
+    mm_camera_obj_t * my_obj = NULL;
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc = mm_camera_ch_fn(my_obj, ch_type, MM_CAMERA_STATE_EVT_ATTR,
+                            (void *)attr);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+static int32_t mm_camera_ops_sendmsg(mm_camera_t * camera,
+                                     void *msg,
+                                     uint32_t buf_size,
+                                     int sendfd)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    mm_camera_obj_t * my_obj = NULL;
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc = mm_camera_sendmsg(my_obj, msg, buf_size, sendfd);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+
+static mm_camera_ops_t mm_camera_ops = {
+  .is_op_supported = mm_camera_ops_is_op_supported,
+    .action = mm_camera_ops_action,
+    .open = mm_camera_ops_open,
+    .close = mm_camera_ops_close,
+    .ch_acquire = mm_camera_ops_ch_acquire,
+    .ch_release = mm_camera_ops_ch_release,
+    .ch_set_attr = mm_camera_ops_ch_attr,
+    .sendmsg = mm_camera_ops_sendmsg
+};
+
+static uint8_t mm_camera_notify_is_event_supported(mm_camera_t * camera,
+                                mm_camera_event_type_t evt_type)
+{
+  switch(evt_type) {
+  case MM_CAMERA_EVT_TYPE_CH:
+  case MM_CAMERA_EVT_TYPE_CTRL:
+  case MM_CAMERA_EVT_TYPE_STATS:
+  case MM_CAMERA_EVT_TYPE_INFO:
+    return 1;
+  default:
+    return 0;
+  }
+  return 0;
+}
+
+static int32_t mm_camera_notify_register_event_cb(mm_camera_t * camera,
+                                   mm_camera_event_notify_t evt_cb,
+                                    void * user_data,
+                                   mm_camera_event_type_t evt_type)
+{
+  mm_camera_obj_t * my_obj = NULL;
+  int rc = -1;
+
+  pthread_mutex_lock(&g_mutex);
+  my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+  pthread_mutex_unlock(&g_mutex);
+  if(my_obj) {
+      pthread_mutex_lock(&my_obj->mutex);
+      rc = mm_camera_reg_event(my_obj, evt_cb, user_data, evt_type);
+      pthread_mutex_unlock(&my_obj->mutex);
+  }
+  return rc;
+}
+
+static int32_t mm_camera_register_buf_notify (
+          mm_camera_t * camera,
+          mm_camera_channel_type_t ch_type,
+          mm_camera_buf_notify_t  buf_cb,
+          mm_camera_register_buf_cb_type_t cb_type,
+          uint32_t cb_count,
+          void * user_data)
+{
+    mm_camera_obj_t * my_obj = NULL;
+    mm_camera_buf_cb_t reg ;
+    int rc = -1;
+
+    reg.cb = buf_cb;
+    reg.user_data = user_data;
+    reg.cb_type=cb_type;
+    reg.cb_count=cb_count;
+    pthread_mutex_lock(&g_mutex);
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    pthread_mutex_unlock(&g_mutex);
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->mutex);
+        rc = mm_camera_ch_fn(my_obj,ch_type,
+                            MM_CAMERA_STATE_EVT_REG_BUF_CB, (void *)&reg);
+        pthread_mutex_unlock(&my_obj->mutex);
+    }
+    return rc;
+}
+static int32_t mm_camera_buf_done(mm_camera_t * camera, mm_camera_ch_data_buf_t * bufs)
+{
+    mm_camera_obj_t * my_obj = NULL;
+    int rc = -1;
+    my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+    if(my_obj) {
+        /*pthread_mutex_lock(&my_obj->mutex);*/
+        rc = mm_camera_ch_fn(my_obj, bufs->type,
+                 MM_CAMERA_STATE_EVT_QBUF,   (void *)bufs);
+        /*pthread_mutex_unlock(&my_obj->mutex);*/
+    }
+    return rc;
+}
+
+static mm_camera_notify_t mm_camera_notify = {
+    .is_event_supported = mm_camera_notify_is_event_supported,
+    .register_event_notify = mm_camera_notify_register_event_cb,
+    .register_buf_notify = mm_camera_register_buf_notify,
+    .buf_done = mm_camera_buf_done
+};
+
+static uint8_t mm_camera_jpeg_is_jpeg_supported (mm_camera_t * camera)
+{
+    return FALSE;
+}
+static int32_t mm_camera_jpeg_set_parm (mm_camera_t * camera,
+                    mm_camera_jpeg_parm_type_t parm_type,
+                    void* p_value)
+{
+    return -1;
+}
+static int32_t mm_camera_jpeg_get_parm (mm_camera_t * camera,
+                    mm_camera_jpeg_parm_type_t parm_type,
+                    void* p_value)
+{
+    return -1;
+}
+static int32_t mm_camera_jpeg_register_event_cb(mm_camera_t * camera,
+                    mm_camera_jpeg_cb_t * evt_cb,
+                    void * user_data)
+{
+    return -1;
+}
+static int32_t mm_camera_jpeg_encode (mm_camera_t * camera, uint8_t start,
+                    mm_camera_jpeg_encode_t *data)
+{
+    return -1;
+}
+
+static mm_camera_jpeg_t mm_camera_jpeg =  {
+    .is_jpeg_supported = mm_camera_jpeg_is_jpeg_supported,
+    .set_parm = mm_camera_jpeg_set_parm,
+    .get_parm = mm_camera_jpeg_get_parm,
+    .register_event_cb = mm_camera_jpeg_register_event_cb,
+    .encode = mm_camera_jpeg_encode,
+};
+
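+/* Enumerate /dev/media<N> nodes, pick the ones whose model matches
+ * QCAMERA_NAME, parse "<name>-<mount_angle>-<cam_type>" from the device
+ * serial string, record the v4l2 video node name and fill in
+ * g_cam_ctrl.camera[]. Returns the camera array (num_cameras entries),
+ * or NULL on failure. */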
+extern mm_camera_t * mm_camera_query (uint8_t *num_cameras)
+{
+    int i = 0, rc = MM_CAMERA_OK;
+    int dev_fd = 0;
+    struct media_device_info mdev_info;
+    int num_media_devices = 0;
+    if (!num_cameras)
+      return NULL;
+    /* lock the mutex */
+    pthread_mutex_lock(&g_mutex);
+    *num_cameras = 0;
+    while (1) {
+      char dev_name[32];
+      snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+      dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+      if (dev_fd < 0) {
+        CDBG("Done discovering media devices\n");
+        break;
+      }
+      num_media_devices++;
+      rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+      if (rc < 0) {
+        CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+        close(dev_fd);
+        break;
+      }
+
+      if(strncmp(mdev_info.model, QCAMERA_NAME, sizeof(mdev_info.model)) != 0) {
+        close(dev_fd);
+        continue;
+      }
+
+      char * mdev_cfg;
+      int cam_type = 0, mount_angle = 0, info_index = 0;
+      mdev_cfg = strtok(mdev_info.serial, "-");
+      while(mdev_cfg != NULL) {
+          if(info_index == 0) {
+              if(strcmp(mdev_cfg, QCAMERA_NAME))
+                  break;
+          } else if(info_index == 1) {
+              mount_angle = atoi(mdev_cfg);
+          } else if(info_index == 2) {
+              cam_type = atoi(mdev_cfg);
+          }
+          mdev_cfg = strtok(NULL, "-");
+          info_index++;
+      }
+
+      if(info_index == 0) {
+          close(dev_fd);
+          continue;
+      }
+
+      int num_entities = 1;
+      while (1) {
+        struct media_entity_desc entity;
+        memset(&entity, 0, sizeof(entity));
+        entity.id = num_entities++;
+        rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+        if (rc < 0) {
+            CDBG("Done enumerating media entities\n");
+            rc = 0;
+            break;
+        }
+        if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+             strncpy(g_cam_ctrl.camera[*num_cameras].video_dev_name,
+                     entity.name, sizeof(entity.name));
+             break;
+        }
+      }
+
+      g_cam_ctrl.camera[*num_cameras].camera_info.camera_id = *num_cameras;
+
+      g_cam_ctrl.camera[*num_cameras].
+          camera_info.modes_supported = CAMERA_MODE_2D;
+      if(cam_type > 1)
+        g_cam_ctrl.camera[*num_cameras].
+          camera_info.modes_supported |= CAMERA_MODE_3D;
+
+      g_cam_ctrl.camera[*num_cameras].camera_info.position =
+        (cam_type == 1) ? FRONT_CAMERA : BACK_CAMERA;
+      g_cam_ctrl.camera[*num_cameras].camera_info.sensor_mount_angle =
+          mount_angle;
+      g_cam_ctrl.camera[*num_cameras].sensor_type = 0;
+      g_cam_ctrl.camera[*num_cameras].cfg = &mm_camera_cfg;
+      g_cam_ctrl.camera[*num_cameras].ops = &mm_camera_ops;
+      g_cam_ctrl.camera[*num_cameras].evt = &mm_camera_notify;
+      g_cam_ctrl.camera[*num_cameras].jpeg_ops = NULL;
+
+      CDBG("%s: dev_info[id=%d,name='%s',pos=%d,modes=0x%x,sensor=%d]\n",
+        __func__, *num_cameras,
+        g_cam_ctrl.camera[*num_cameras].video_dev_name,
+        g_cam_ctrl.camera[*num_cameras].camera_info.position,
+        g_cam_ctrl.camera[*num_cameras].camera_info.modes_supported,
+        g_cam_ctrl.camera[*num_cameras].sensor_type);
+
+      *num_cameras+=1;
+      if (dev_fd > 0) {
+          close(dev_fd);
+      }
+    }
+    g_cam_ctrl.num_cam = *num_cameras;
+end:
+    /* unlock the mutex */
+    pthread_mutex_unlock(&g_mutex);
+    CDBG("%s: num_cameras=%d\n", __func__, g_cam_ctrl.num_cam);
+    if(rc == 0)
+        return &g_cam_ctrl.camera[0];
+    else
+        return NULL;
+}
+
+
+static mm_camera_t * get_camera_by_id( int cam_id)
+{
+  mm_camera_t * mm_cam;
+  if( cam_id < 0 || cam_id >= g_cam_ctrl.num_cam) {
+     mm_cam = NULL;
+  } else {
+    mm_cam = & g_cam_ctrl.camera[cam_id];
+  }
+  return mm_cam;
+}
+
+/*configure methods*/
+uint8_t cam_config_is_parm_supported(
+  int cam_id,
+  mm_camera_parm_type_t parm_type)
+{
+  uint8_t rc = 0;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam && mm_cam->cfg) {
+    rc = mm_cam->cfg->is_parm_supported(mm_cam, parm_type);
+  }
+  return rc;
+}
+
+uint8_t cam_config_is_ch_supported(
+  int cam_id,
+  mm_camera_channel_type_t ch_type)
+{
+  uint8_t rc = 0;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->is_ch_supported(mm_cam, ch_type);
+  }
+  return rc;
+
+}
+
+/* set a parm's current value */
+int32_t cam_config_set_parm(
+  int cam_id,
+  mm_camera_parm_type_t parm_type,
+  void* p_value)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->set_parm(mm_cam, parm_type, p_value);
+  }
+  return rc;
+}
+
+/* get a parm's current value */
+int32_t cam_config_get_parm(
+  int cam_id,
+  mm_camera_parm_type_t parm_type,
+  void* p_value)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->get_parm(mm_cam, parm_type, p_value);
+  }
+  return rc;
+}
+
+int32_t cam_config_request_buf(int cam_id, mm_camera_reg_buf_t *buf)
+{
+
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->request_buf(mm_cam, buf);
+  }
+  return rc;
+}
+
+int32_t cam_config_enqueue_buf(int cam_id, mm_camera_reg_buf_t *buf)
+{
+
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->enqueue_buf(mm_cam, buf);
+  }
+  return rc;
+}
+
+int32_t cam_config_prepare_buf(int cam_id, mm_camera_reg_buf_t *buf)
+{
+
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->prepare_buf(mm_cam, buf);
+  }
+  return rc;
+}
+int32_t cam_config_unprepare_buf(int cam_id, mm_camera_channel_type_t ch_type)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->cfg->unprepare_buf(mm_cam, ch_type);
+  }
+  return rc;
+}
+
+/*operation methods*/
+uint8_t cam_ops_is_op_supported(int cam_id, mm_camera_ops_type_t opcode)
+{
+  uint8_t rc = 0;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->ops->is_op_supported(mm_cam, opcode);
+  }
+  return rc;
+}
+/* val is reserved for some action such as MM_CAMERA_OPS_FOCUS */
+int32_t cam_ops_action(int cam_id, uint8_t start,
+  mm_camera_ops_type_t opcode, void *val)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->ops->action(mm_cam, start, opcode, val);
+  }
+  return rc;
+}
+
+int32_t cam_ops_open(int cam_id, mm_camera_op_mode_type_t op_mode)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->ops->open(mm_cam, op_mode);
+  }
+  return rc;
+}
+
+void cam_ops_close(int cam_id)
+{
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    mm_cam->ops->close(mm_cam);
+  }
+}
+
+int32_t cam_ops_ch_acquire(int cam_id, mm_camera_channel_type_t ch_type)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->ops->ch_acquire(mm_cam, ch_type);
+  }
+  return rc;
+}
+
+void cam_ops_ch_release(int cam_id, mm_camera_channel_type_t ch_type)
+{
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    mm_cam->ops->ch_release(mm_cam, ch_type);
+  }
+}
+
+int32_t cam_ops_ch_set_attr(int cam_id, mm_camera_channel_type_t ch_type,
+  mm_camera_channel_attr_t *attr)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->ops->ch_set_attr(mm_cam, ch_type, attr);
+  }
+  return rc;
+}
+
+int32_t cam_ops_sendmsg(int cam_id, void *msg, uint32_t buf_size, int sendfd)
+{
+    int32_t rc = -1;
+    mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+    if (mm_cam) {
+      rc = mm_cam->ops->sendmsg(mm_cam, msg, buf_size, sendfd);
+    }
+    return rc;
+}
+
+/*call-back notify methods*/
+uint8_t cam_evt_is_event_supported(int cam_id, mm_camera_event_type_t evt_type)
+{
+  uint8_t rc = 0;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->evt->is_event_supported(mm_cam, evt_type);
+  }
+  return rc;
+}
+
+int32_t cam_evt_register_event_notify(int cam_id,
+  mm_camera_event_notify_t evt_cb,
+  void * user_data,
+  mm_camera_event_type_t evt_type)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->evt->register_event_notify(
+      mm_cam, evt_cb, user_data, evt_type);
+  }
+  return rc;
+}
+
+int32_t cam_evt_register_buf_notify(int cam_id,
+  mm_camera_channel_type_t ch_type,
+  mm_camera_buf_notify_t buf_cb,
+  mm_camera_register_buf_cb_type_t cb_type,
+  uint32_t cb_count,
+  void * user_data)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->evt->register_buf_notify(
+      mm_cam, ch_type, buf_cb, cb_type,
+      cb_count, user_data);
+  }
+  return rc;
+}
+
+int32_t cam_evt_buf_done(int cam_id, mm_camera_ch_data_buf_t *bufs)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->evt->buf_done(mm_cam, bufs);
+  }
+  return rc;
+}
+
+/*camera JPEG methods*/
+uint8_t cam_jpeg_is_jpeg_supported(int cam_id)
+{
+  uint8_t rc = 0;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->jpeg_ops->is_jpeg_supported(mm_cam);
+  }
+  return rc;
+}
+
+int32_t cam_jpeg_set_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+  void* p_value)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->jpeg_ops->set_parm(mm_cam, parm_type, p_value);
+  }
+  return rc;
+}
+
+int32_t cam_jpeg_get_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+  void* p_value)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->jpeg_ops->get_parm(mm_cam, parm_type, p_value);
+  }
+  return rc;
+}
+int32_t cam_jpeg_register_event_cb(int cam_id, mm_camera_jpeg_cb_t * evt_cb,
+  void * user_data)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->jpeg_ops->register_event_cb(mm_cam, evt_cb, user_data);
+  }
+  return rc;
+}
+int32_t cam_jpeg_encode(int cam_id, uint8_t start,
+  mm_camera_jpeg_encode_t *data)
+{
+  int32_t rc = -1;
+  mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+  if (mm_cam) {
+    rc = mm_cam->jpeg_ops->encode(mm_cam, start, data);
+  }
+  return rc;
+}
+
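+/* The wrapper functions above resolve a camera index to its mm_camera_t
+ * function tables and forward each call.  The sketch below (kept under
+ * "#if 0" so it is illustrative only and never compiled) shows the expected
+ * calling order for a client that opens a camera and acquires the preview
+ * channel; camera index 0 and the omitted buffer/callback setup are
+ * assumptions, not part of this interface file. */
+#if 0
+static int example_open_preview_channel(void)
+{
+    uint8_t num_cameras = 0;
+    mm_camera_t *cams = mm_camera_query(&num_cameras);
+
+    if (!cams || num_cameras == 0)
+        return -1;
+
+    /* Open camera 0 for streaming preview/video. */
+    if (cam_ops_open(0, MM_CAMERA_OP_MODE_VIDEO) < 0)
+        return -1;
+
+    /* Acquire the preview channel before preparing buffers on it. */
+    if (cam_ops_ch_acquire(0, MM_CAMERA_CH_PREVIEW) < 0) {
+        cam_ops_close(0);
+        return -1;
+    }
+
+    /* ... register buffers/callbacks and start MM_CAMERA_OPS_PREVIEW ... */
+
+    cam_ops_ch_release(0, MM_CAMERA_CH_PREVIEW);
+    cam_ops_close(0);
+    return 0;
+}
+#endif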
diff --git a/camera/mm-camera-interface/mm_camera_interface2.h b/camera/mm-camera-interface/mm_camera_interface2.h
new file mode 100644
index 0000000..c614314
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_interface2.h
@@ -0,0 +1,526 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_INTERFACE2_H__
+#define __MM_CAMERA_INTERFACE2_H__
+#include <linux/ion.h>
+#include "mm_omx_jpeg_encoder.h"
+
+#define MM_CAMERA_MAX_NUM_FRAMES        16
+
+typedef struct {
+    int num;
+    uint32_t frame_len;
+    struct msm_frame frame[MM_CAMERA_MAX_NUM_FRAMES];
+} mm_cameara_stream_buf_t;
+
+typedef struct {
+    int32_t width;
+    int32_t height;
+}mm_camera_dimension_t;
+
+typedef enum {
+    MM_CAMERA_OK,
+    MM_CAMERA_E_GENERAL,
+    MM_CAMERA_E_NO_MEMORY,
+    MM_CAMERA_E_NOT_SUPPORTED,
+    MM_CAMERA_E_INVALID_INPUT,
+    MM_CAMERA_E_INVALID_OPERATION, /* 5 */
+    MM_CAMERA_E_ENCODE,
+    MM_CAMERA_E_BUFFER_REG,
+    MM_CAMERA_E_PMEM_ALLOC,
+    MM_CAMERA_E_CAPTURE_FAILED,
+    MM_CAMERA_E_CAPTURE_TIMEOUT, /* 10 */
+}mm_camera_status_type_t;
+
+typedef enum {
+    MM_CAMERA_OP_MODE_NOTUSED,
+    MM_CAMERA_OP_MODE_CAPTURE,
+    MM_CAMERA_OP_MODE_VIDEO,
+    MM_CAMERA_OP_MODE_ZSL,
+    MM_CAMERA_OP_MODE_MAX
+}mm_camera_op_mode_type_t;
+
+#define MM_CAMERA_PARM_SUPPORT_SET      0x01
+#define MM_CAMERA_PARM_SUPPORT_GET      0x02
+#define MM_CAMERA_PARM_SUPPORT_BOTH     0x03
+
+typedef struct  {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} mm_camera_rect_t;
+
+typedef enum {
+    WHITE_BALANCE_AUTO         = 1,
+    WHITE_BALANCE_INCANDESCENT = 3,
+    WHITE_BALANCE_FLUORESCENT  = 4,
+    WHITE_BALANCE_DAYLIGHT     = 5,
+    WHITE_BALANCE_CLOUDY       = 6,
+    WHITE_BALANCE_OFF          = 9,
+} White_Balance_modes;
+
+typedef enum {
+    MM_CAMERA_CH_PREVIEW,
+    MM_CAMERA_CH_VIDEO,
+    MM_CAMERA_CH_SNAPSHOT,
+    MM_CAMERA_CH_RAW,
+    MM_CAMERA_CH_MAX
+} mm_camera_channel_type_t;
+
+typedef enum {
+    MM_CAMERA_WHITE_BALANCE_AUTO         = 1,
+    MM_CAMERA_WHITE_BALANCE_OFF          = 2,
+    MM_CAMERA_WHITE_BALANCE_DAYLIGHT     = 3,
+    MM_CAMERA_WHITE_BALANCE_INCANDESCENT = 4,
+    MM_CAMERA_WHITE_BALANCE_FLUORESCENT  = 5,
+} mm_camera_white_balance_mode_type_t;
+/* MM_CAMERA_PARM_RAW_IMAGE_FMT */
+typedef struct {
+    cam_format_t fmt;
+    mm_camera_dimension_t dim;
+} mm_camera_image_fmt_t;
+
+typedef struct {
+    mm_camera_image_fmt_t main;
+    mm_camera_image_fmt_t thumbnail;
+} mm_camera_ch_image_fmt_snapshot_t;
+
+typedef enum {
+    MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE,
+    MM_CAMERA_RAW_STREAMING_MAX
+} mm_camera_raw_streaming_type_t;
+
+typedef struct {
+    mm_camera_image_fmt_t main;
+    mm_camera_image_fmt_t video;
+} mm_camera_ch_image_fmt_video_t;
+
+typedef struct {
+    mm_camera_channel_type_t ch_type;
+    union {
+        mm_camera_image_fmt_t def;
+        mm_camera_ch_image_fmt_snapshot_t snapshot;
+        mm_camera_ch_image_fmt_video_t video;
+    };
+} mm_camera_ch_image_fmt_parm_t;
+typedef struct {
+    mm_camera_rect_t main_crop;
+    mm_camera_rect_t thumbnail_crop;
+} mm_camera_crop_snapshot_t;
+
+typedef struct {
+    mm_camera_channel_type_t ch_type;
+    union {
+    mm_camera_rect_t crop;
+    mm_camera_crop_snapshot_t snapshot;
+    };
+} mm_camera_ch_crop_t;
+
+typedef struct {
+    uint8_t name[32];
+    int32_t min_value;
+    int32_t max_value;
+    int32_t step;
+    int32_t default_value;
+} mm_camera_ctrl_cap_sharpness_t;
+
+typedef struct {
+    int16_t *zoom_ratio_tbl;
+    int32_t size;
+} mm_camera_zoom_tbl_t;
+
+#define MM_CAMERA_MAX_FRAME_NUM 16
+
+typedef struct {
+    uint32_t *frame_offset;
+    struct msm_frame *frame;
+} mm_camera_sp_buf_t;
+
+typedef struct {
+    int8_t num_planes;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    uint32_t frame_offset;
+    struct msm_frame frame;
+    int idx; /* index to stream frame */
+} mm_camera_mp_buf_t;
+
+typedef struct {
+  int8_t num;
+  union {
+      mm_camera_sp_buf_t sp;
+      mm_camera_mp_buf_t *mp;
+  }buf;
+} mm_camera_buf_def_t;
+
+typedef struct {
+    mm_camera_buf_def_t thumbnail;
+    mm_camera_buf_def_t main;
+} mm_camera_buf_snapshot_t;
+
+typedef struct {
+    mm_camera_buf_def_t video;
+    mm_camera_buf_def_t main;
+} mm_camera_buf_video_t;
+
+typedef struct {
+    mm_camera_channel_type_t ch_type;
+    union {
+        mm_camera_buf_def_t def;
+        mm_camera_buf_def_t preview;
+        mm_camera_buf_snapshot_t snapshot;
+        mm_camera_buf_video_t video;
+    };
+} mm_camera_reg_buf_t;
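+
+/*
+ * Usage sketch (illustrative only): a registration for the preview channel
+ * tags the struct with the channel type and fills the matching union member.
+ * The multi-plane array and its count are assumed to be allocated and
+ * populated by the caller, one entry per preview buffer, before the struct
+ * is handed to cam_config_prepare_buf() declared below:
+ *
+ *   mm_camera_reg_buf_t reg;
+ *   memset(&reg, 0, sizeof(reg));
+ *   reg.ch_type = MM_CAMERA_CH_PREVIEW;
+ *   reg.preview.num = num_preview_bufs;    // number of buffers the HAL allocated
+ *   reg.preview.buf.mp = preview_mp_bufs;  // caller-owned mm_camera_mp_buf_t array
+ *   rc = cam_config_prepare_buf(cam_id, &reg);
+ */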
+
+typedef enum {
+    MM_CAMERA_OPS_PREVIEW,                    // start/stop preview
+    MM_CAMERA_OPS_VIDEO,                      // start/stop video
+    MM_CAMERA_OPS_PREPARE_SNAPSHOT,           // prepare capture in capture mode
+    MM_CAMERA_OPS_SNAPSHOT,                   // take snapshot (HDR,ZSL,live shot)
+    MM_CAMERA_OPS_RAW,                        // take raw streaming (raw snapshot, etc)
+    MM_CAMERA_OPS_ZSL,                        // start/stop zsl
+    // mm_camera_ops_parm_get_buffered_frame_t is used for MM_CAMERA_OPS_GET_BUFFERED_FRAME
+    MM_CAMERA_OPS_GET_BUFFERED_FRAME,         // dispatch a buffered frame to the app through the callback
+    MM_CAMERA_OPS_FOCUS,                      // change focus; isp3a_af_mode_t* is passed in val
+    MM_CAMERA_OPS_MAX                         // max ops
+}mm_camera_ops_type_t;
+
+/* Temp: declared here so that we can still use the legacy
+    GET_CAPABILITIES call to the config thread. A new design to
+    query capabilities based on the V4L2 interface is being
+    discussed. */
+typedef enum {
+    CAMERA_OPS_LOCAL = -1,  /*no need to query mm-camera*/
+    CAMERA_OPS_STREAMING_PREVIEW = 0,
+    CAMERA_OPS_STREAMING_ZSL,
+    CAMERA_OPS_STREAMING_VIDEO,
+    CAMERA_OPS_CAPTURE, /*not supported*/
+    CAMERA_OPS_FOCUS,
+    CAMERA_OPS_GET_PICTURE, /*5*/
+    CAMERA_OPS_PREPARE_SNAPSHOT,
+    CAMERA_OPS_SNAPSHOT,
+    CAMERA_OPS_LIVESHOT,
+    CAMERA_OPS_RAW_SNAPSHOT,
+    CAMERA_OPS_VIDEO_RECORDING, /*10*/
+    CAMERA_OPS_REGISTER_BUFFER,
+    CAMERA_OPS_UNREGISTER_BUFFER,
+    CAMERA_OPS_CAPTURE_AND_ENCODE,
+    CAMERA_OPS_RAW_CAPTURE,
+    CAMERA_OPS_ENCODE, /*15*/
+    CAMERA_OPS_ZSL_STREAMING_CB,
+    /* add new above*/
+    CAMERA_OPS_MAX
+}mm_camera_legacy_ops_type_t;
+
+typedef enum {
+    MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE,
+    MM_CAMERA_CH_ATTR_BUFFERING_FRAME,
+    MM_CAMERA_CH_ATTR_MAX
+} mm_camera_channel_attr_type_t;
+
+typedef struct {
+    /* depth of the circular frame queue */
+    int water_mark;
+    int look_back;
+    int interval;  /*skipping n-1 frames*/
+} mm_camera_channel_attr_buffering_frame_t;
+
+typedef struct {
+    mm_camera_channel_attr_type_t type;
+    union {
+        /* add more if needed */
+        mm_camera_raw_streaming_type_t raw_streaming_mode;
+        mm_camera_channel_attr_buffering_frame_t buffering_frame;
+    };
+} mm_camera_channel_attr_t;
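+
+/*
+ * Usage sketch (illustrative only): for ZSL the snapshot channel is typically
+ * given a buffering attribute so the interface layer keeps a queue of matched
+ * frames.  The values below are placeholders, not recommended settings:
+ *
+ *   mm_camera_channel_attr_t attr;
+ *   memset(&attr, 0, sizeof(attr));
+ *   attr.type = MM_CAMERA_CH_ATTR_BUFFERING_FRAME;
+ *   attr.buffering_frame.water_mark = 2;  // matched frames kept per stream
+ *   attr.buffering_frame.look_back  = 1;
+ *   attr.buffering_frame.interval   = 1;  // deliver every frame (skip none)
+ *   rc = cam_ops_ch_set_attr(cam_id, MM_CAMERA_CH_SNAPSHOT, &attr);
+ */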
+
+typedef struct {
+    mm_camera_channel_type_t ch_type;
+} mm_camera_ops_parm_get_buffered_frame_t;
+
+typedef struct mm_camera mm_camera_t;
+
+typedef struct {
+    /* if the parm is supported */
+    uint8_t (*is_parm_supported)(mm_camera_t *camera, mm_camera_parm_type_t parm_type);
+    /* if the channel is supported */
+    uint8_t (*is_ch_supported)(mm_camera_t *camera, mm_camera_channel_type_t ch_type);
+    /* set a parm's current value */
+    int32_t (*set_parm)(mm_camera_t *camera, mm_camera_parm_type_t parm_type,
+          void* p_value);
+    /* get a parm's current value */
+    int32_t (*get_parm)(mm_camera_t *camera, mm_camera_parm_type_t parm_type,
+          void* p_value);
+    int32_t (*request_buf) (mm_camera_t *camera, mm_camera_reg_buf_t *buf);
+    int32_t (*enqueue_buf) (mm_camera_t *camera, mm_camera_reg_buf_t *buf);
+    int32_t (*prepare_buf) (mm_camera_t *camera, mm_camera_reg_buf_t *buf);
+    int32_t (*unprepare_buf) (mm_camera_t *camera, mm_camera_channel_type_t ch_type);
+} mm_camera_config_t;
+
+typedef struct {
+    uint8_t (*is_op_supported)(mm_camera_t * camera, mm_camera_ops_type_t opcode);
+    /* val is reserved for some action such as MM_CAMERA_OPS_FOCUS */
+    int32_t (*action)(mm_camera_t * camera, uint8_t start,
+                    mm_camera_ops_type_t opcode, void *val);
+    int32_t (*open)(mm_camera_t * camera, mm_camera_op_mode_type_t op_mode);
+    void (*close)(mm_camera_t * camera);
+    int32_t (*ch_acquire)(mm_camera_t * camera, mm_camera_channel_type_t ch_type);
+    void (*ch_release)(mm_camera_t * camera, mm_camera_channel_type_t ch_type);
+    int32_t (*ch_set_attr)(mm_camera_t * camera, mm_camera_channel_type_t ch_type,
+                                                 mm_camera_channel_attr_t *attr);
+    int32_t (*sendmsg)(mm_camera_t * camera, void *msg, uint32_t buf_size, int sendfd);
+} mm_camera_ops_t;
+
+typedef struct {
+    int type;
+    uint32_t length;
+    void *value;
+} mm_camera_stats_t;
+
+typedef struct {
+    int idx;
+    struct msm_frame *frame;
+} mm_camera_notify_frame_t;
+
+typedef struct {
+    mm_camera_notify_frame_t video;
+    mm_camera_notify_frame_t main;
+} mm_camera_notify_video_buf_t;
+
+typedef struct {
+    mm_camera_notify_frame_t thumbnail;
+    mm_camera_notify_frame_t main;
+} mm_camera_notify_snapshot_buf_t;
+
+typedef struct {
+    mm_camera_channel_type_t type;
+//    union {
+        mm_camera_notify_snapshot_buf_t snapshot;
+        mm_camera_notify_video_buf_t video;
+        mm_camera_notify_frame_t def;
+//    };
+} mm_camera_ch_data_buf_t;
+
+
+typedef enum {
+    MM_CAMERA_REG_BUF_CB_ONCE,
+    MM_CAMERA_REG_BUF_CB_COUNT,
+    MM_CAMERA_REG_BUF_CB_INFINITE
+} mm_camera_register_buf_cb_type_t;
+
+
+typedef void (*mm_camera_event_notify_t)(mm_camera_event_t *evt,
+    void *user_data);
+
+typedef void (*mm_camera_buf_notify_t)(mm_camera_ch_data_buf_t *bufs,
+    void *user_data);
+
+typedef struct {
+    uint8_t (*is_event_supported)(mm_camera_t * camera,
+                                 mm_camera_event_type_t evt_type);
+    int32_t (*register_event_notify)(mm_camera_t * camera,
+                                    mm_camera_event_notify_t evt_cb,
+                                     void * user_data,
+                                    mm_camera_event_type_t evt_type);
+    int32_t (*register_buf_notify)(mm_camera_t * camera,
+                                mm_camera_channel_type_t ch_type,
+                                mm_camera_buf_notify_t buf_cb,
+                                mm_camera_register_buf_cb_type_t cb_type,
+                                uint32_t cb_count,
+                                void * user_data);
+    int32_t (*buf_done)(mm_camera_t * camera, mm_camera_ch_data_buf_t *bufs);
+} mm_camera_notify_t;
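+
+/*
+ * Usage sketch (illustrative only): a preview data callback is registered
+ * through the cam_evt_register_buf_notify() wrapper declared below;
+ * on_preview_frame, cam_id and my_hal_obj stand in for caller-owned code and
+ * state.  A buffer handed to the callback must eventually be returned with
+ * cam_evt_buf_done() so its ref count can drop:
+ *
+ *   static void on_preview_frame(mm_camera_ch_data_buf_t *bufs, void *user)
+ *   {
+ *       // ... consume bufs->def.frame ...
+ *       cam_evt_buf_done(cam_id, bufs);
+ *   }
+ *
+ *   rc = cam_evt_register_buf_notify(cam_id, MM_CAMERA_CH_PREVIEW,
+ *                                    on_preview_frame,
+ *                                    MM_CAMERA_REG_BUF_CB_INFINITE,
+ *                                    0, my_hal_obj);
+ */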
+
+typedef enum {
+    MM_CAMERA_JPEG_PARM_ROTATION,
+    MM_CAMERA_JPEG_PARM_MAINIMG_QUALITY,
+    MM_CAMERA_JPEG_PARM_THUMB_QUALITY,
+    MM_CAMERA_JPEG_PARM_MAX
+} mm_camera_jpeg_parm_type_t;
+
+typedef struct {
+    uint8_t* ptr;
+    uint32_t filled_size;
+    uint32_t size;
+    int32_t fd;
+    uint32_t offset;
+}mm_camera_buffer_t;
+
+typedef struct {
+    exif_tags_info_t* exif_data;
+    int exif_numEntries;
+    mm_camera_buffer_t* p_output_buffer;
+    uint8_t buffer_count;
+    uint32_t rotation;
+    uint32_t quality;
+    int y_offset;
+    int cbcr_offset;
+    /* bitmask for the images to be encoded. if capture_and_encode
+     * option is selected, all the images will be encoded irrespective
+     * of bitmask.
+     */
+    uint8_t encodeBitMask;
+    uint32_t output_picture_width;
+    uint32_t output_picture_height;
+    int format3d;
+}encode_params_t;
+
+typedef struct {
+    void * src_img1_buf;            // input main image buffer
+    uint32_t src_img1_size;     // input main image size
+    void * src_img2_buf;            // input thumbnail image buffer
+    uint32_t src_img2_size;     // input thumbnail image size
+    void* out_jpeg1_buf;            // out jpeg buffer
+    uint32_t out_jpeg1_size;    // IN/OUT-result buf size/jpeg image size
+    void* out_jpeg2_buf;            // out jpeg buffer
+    uint32_t out_jpeg2_size;    // IN/OUT-result buf size/jpeg image size
+    mm_camera_status_type_t status; // result status place holder
+} mm_camera_jpeg_encode_t;
+
+typedef void (*mm_camera_jpeg_cb_t)(mm_camera_jpeg_encode_t *result,
+    void *user_data);
+
+typedef struct {
+    uint8_t (*is_jpeg_supported)( mm_camera_t * camera);
+    int32_t (*set_parm)(mm_camera_t * camera, mm_camera_jpeg_parm_type_t parm_type,
+             void* p_value);
+    int32_t (*get_parm)(mm_camera_t * camera, mm_camera_jpeg_parm_type_t parm_type,
+                        void* p_value);
+    int32_t (* register_event_cb)(mm_camera_t * camera, mm_camera_jpeg_cb_t * evt_cb,
+             void * user_data);
+    int32_t (*encode)(mm_camera_t * camera, uint8_t start,
+        mm_camera_jpeg_encode_t *data);
+} mm_camera_jpeg_t;
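+
+/*
+ * Usage sketch (illustrative only): encoding one captured main/thumbnail pair.
+ * The image buffers, their sizes and the my_jpeg_done completion callback are
+ * caller-defined; the rotation value type is assumed to be uint32_t here:
+ *
+ *   uint32_t rotation = 90;
+ *   mm_camera_jpeg_cb_t cb = my_jpeg_done;  // void (*)(mm_camera_jpeg_encode_t*, void*)
+ *   mm_camera_jpeg_encode_t enc;
+ *
+ *   memset(&enc, 0, sizeof(enc));
+ *   enc.src_img1_buf  = main_img;   enc.src_img1_size  = main_img_size;
+ *   enc.src_img2_buf  = thumb_img;  enc.src_img2_size  = thumb_img_size;
+ *   enc.out_jpeg1_buf = out_buf;    enc.out_jpeg1_size = out_buf_size;  // in: capacity, out: bytes written
+ *
+ *   cam_jpeg_register_event_cb(cam_id, &cb, my_user_data);
+ *   cam_jpeg_set_parm(cam_id, MM_CAMERA_JPEG_PARM_ROTATION, &rotation);
+ *   cam_jpeg_encode(cam_id, 1, &enc);       // start == 1 kicks off the encode
+ */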
+
+struct mm_camera {
+    mm_camera_config_t *cfg;                // config interface
+    mm_camera_ops_t *ops;                   // operation interface
+    mm_camera_notify_t *evt;                // evt callback interface
+    mm_camera_jpeg_t *jpeg_ops;         // jpeg config and encoding interface
+    camera_info_t camera_info;      // position, mount_angle, etc.
+    enum sensor_type_t sensor_type; // BAYER, YUV, JPEG_SOC, etc.
+    char video_dev_name[32];           // device node name, e.g. /dev/video1
+};
+
+typedef enum {
+    MM_CAMERA_PAD_WORD,
+    MM_CAMERA_PAD_2K,
+    MM_CAMERA_PAD_MAX
+} mm_camera_pad_type_t;
+
+typedef struct
+{
+    struct camera_size_type *sizes_tbl;
+    uint32_t tbl_size;
+}default_sizes_tbl_t;
+
+/*configure methods*/
+uint8_t cam_config_is_parm_supported(
+  int cam_id,
+  mm_camera_parm_type_t parm_type);
+uint8_t cam_config_is_ch_supported(
+  int cam_id,
+  mm_camera_channel_type_t ch_type);
+/* set a parm's current value */
+int32_t cam_config_set_parm(
+  int cam_id,
+  mm_camera_parm_type_t parm_type,
+  void* p_value);
+/* get a parm's current value */
+int32_t cam_config_get_parm(
+  int cam_id,
+  mm_camera_parm_type_t parm_type,
+  void* p_value);
+int32_t cam_config_request_buf(int cam_id, mm_camera_reg_buf_t *buf);
+int32_t cam_config_enqueue_buf(int cam_id, mm_camera_reg_buf_t *buf);
+int32_t cam_config_prepare_buf(int cam_id, mm_camera_reg_buf_t *buf);
+int32_t cam_config_unprepare_buf(int cam_id, mm_camera_channel_type_t ch_type);
+
+/*operation methods*/
+uint8_t cam_ops_is_op_supported(int cam_id, mm_camera_ops_type_t opcode);
+/* val is reserved for some action such as MM_CAMERA_OPS_FOCUS */
+int32_t cam_ops_action(int cam_id, uint8_t start,
+  mm_camera_ops_type_t opcode, void *val);
+int32_t cam_ops_open(int cam_id, mm_camera_op_mode_type_t op_mode);
+void cam_ops_close(int cam_id);
+int32_t cam_ops_ch_acquire(int cam_id, mm_camera_channel_type_t ch_type);
+void cam_ops_ch_release(int cam_id, mm_camera_channel_type_t ch_type);
+int32_t cam_ops_ch_set_attr(int cam_id, mm_camera_channel_type_t ch_type,
+  mm_camera_channel_attr_t *attr);
+int32_t cam_ops_sendmsg(int cam_id, void *msg, uint32_t buf_size, int sendfd);
+
+/*call-back notify methods*/
+uint8_t cam_evt_is_event_supported(int cam_id, mm_camera_event_type_t evt_type);
+int32_t cam_evt_register_event_notify(int cam_id,
+  mm_camera_event_notify_t evt_cb,
+  void * user_data,
+  mm_camera_event_type_t evt_type);
+int32_t cam_evt_register_buf_notify(int cam_id,
+  mm_camera_channel_type_t ch_type,
+  mm_camera_buf_notify_t buf_cb,
+  mm_camera_register_buf_cb_type_t cb_type,
+  uint32_t cb_count,
+  void * user_data);
+
+int32_t cam_evt_buf_done(int cam_id, mm_camera_ch_data_buf_t *bufs);
+
+/*camera JPEG methods*/
+uint8_t cam_jpeg_is_jpeg_supported(int cam_id);
+int32_t cam_jpeg_set_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+  void* p_value);
+int32_t cam_jpeg_get_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+  void* p_value);
+int32_t cam_jpeg_register_event_cb(int cam_id, mm_camera_jpeg_cb_t * evt_cb,
+  void * user_data);
+int32_t cam_jpeg_encode(int cam_id, uint8_t start,
+  mm_camera_jpeg_encode_t *data);
+
+extern mm_camera_t * mm_camera_query(uint8_t *num_cameras);
+extern uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd);
+extern int mm_camera_do_munmap(int pmem_fd, void *addr, size_t size);
+extern uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+		     struct ion_fd_data *ion_info_fd, int *mapFd);
+extern int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+                   void *addr, size_t size);
+extern int mm_camera_dump_image(void *addr, uint32_t size, char *filename);
+extern uint32_t mm_camera_get_msm_frame_len(cam_format_t fmt_type,
+                                            camera_mode_t mode,
+                                            int width,
+                                            int height,
+                                            int image_type,
+                                            uint8_t *num_planes,
+                                            uint32_t planes[]);
+extern void mm_camera_util_profile(const char *str);
+#endif /*__MM_CAMERA_INTERFACE2_H__*/
diff --git a/camera/mm-camera-interface/mm_camera_notify.c b/camera/mm-camera-interface/mm_camera_notify.c
new file mode 100644
index 0000000..3825820
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_notify.c
@@ -0,0 +1,838 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/ion.h>
+#include <camera.h>
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+#if 0
+#undef CDBG
+#undef LOG_TAG
+#define CDBG ALOGE
+#define LOG_TAG "NotifyLogs"
+#endif
+
+static void mm_camera_read_raw_frame(mm_camera_obj_t * my_obj)
+{
+    int rc = 0;
+    int idx;
+    int i;
+    mm_camera_stream_t *stream;
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+    memset(buf_cb, 0, sizeof(buf_cb));
+    memset(data, 0, sizeof(data));
+
+    stream = &my_obj->ch[MM_CAMERA_CH_RAW].raw.stream;
+    idx =  mm_camera_read_msm_frame(my_obj, stream);
+    if (idx < 0) {
+        return;
+    }
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_RAW].mutex);
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if((my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i].cb) &&
+                (my_obj->poll_threads[MM_CAMERA_CH_RAW].data.used == 1)){
+            data[i].type = MM_CAMERA_CH_RAW;
+            data[i].def.idx = idx;
+            data[i].def.frame = &my_obj->ch[MM_CAMERA_CH_RAW].raw.stream.frame.frame[idx].frame;
+            my_obj->ch[MM_CAMERA_CH_RAW].raw.stream.frame.ref_count[idx]++;
+            CDBG("%s:calling data notify cb 0x%x, 0x%x\n", __func__,
+                     (uint32_t)my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i].cb,
+                     (uint32_t)my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i].user_data);
+            memcpy(&buf_cb[i], &my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i],
+               sizeof(mm_camera_buf_cb_t));
+        }
+    }
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_RAW].mutex);
+
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_RAW].data.used == 1){
+            buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+        }
+    }
+}
+
+int mm_camera_zsl_frame_cmp_and_enq(mm_camera_obj_t * my_obj,
+                               mm_camera_frame_t *node,
+                               mm_camera_stream_t *mystream)
+{
+    int watermark, interval;
+    mm_camera_frame_queue_t *myq;
+    mm_camera_frame_queue_t *peerq;
+    mm_camera_stream_t *peerstream;
+    int rc = 0;
+    int deliver_done = 0;
+    mm_camera_frame_t *peer_frame;
+    mm_camera_frame_t *peer_frame_prev;
+    mm_camera_frame_t *peer_frame_tmp;
+    mm_camera_notify_frame_t notify_frame;
+    uint32_t expected_id;
+    mm_camera_ch_data_buf_t data;
+    mm_camera_frame_t *my_frame = NULL;
+    int i;
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+    if(mystream->stream_type == MM_CAMERA_STREAM_PREVIEW) {
+        peerstream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main;
+    } else
+        peerstream = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
+    myq = &mystream->frame.readyq;
+    peerq = &peerstream->frame.readyq;
+    watermark = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.water_mark;
+    interval = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.interval;
+    expected_id = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.expected_matching_id;
+    peer_frame = peerq->tail;
+    /* For 30-120 fps streaming there is no need to consider frame_id wrap-around;
+       expected_matching_id is used when frames need to be skipped between matches */
+    if(!peer_frame || (node->frame.frame_id > peer_frame->frame.frame_id &&
+        node->frame.frame_id >= expected_id)) {
+        /* new frame is newer than all stored peer frames. simply keep the node */
+        /* in case the frame_id wraps back, the peer frame's frame_id will be
+           larger than the new frame's frame id */
+        CDBG("%s New frame. Just enqueue it into the queue ", __func__);
+        mm_camera_stream_frame_enq_no_lock(myq, node);
+        node->valid_entry = 1;
+    }
+    CDBG("%s Need to find match for the frame id %d ,exped_id =%d, strm type =%d",
+         __func__, node->frame.frame_id, expected_id, mystream->stream_type);
+    /* the node is older than the peer, we will either find a match or drop it */
+    peer_frame = peerq->head;
+    peer_frame_prev = NULL;
+    peer_frame_tmp = NULL;
+    while(peer_frame) {
+        CDBG("%s peer frame_id = %d node frame_id = %d, expected_id =%d, interval=%d", __func__,
+             peer_frame->frame.frame_id, node->frame.frame_id,
+             expected_id, interval);
+        if(peer_frame->match) {
+            CDBG("%s Peer frame already matched, keep looking in the list ",
+                 __func__);
+            /* matched frame, skip */
+            peer_frame_prev = peer_frame;
+            peer_frame = peer_frame->next;
+            continue;
+        }
+        if(peer_frame->frame.frame_id == node->frame.frame_id &&
+           node->frame.frame_id >= my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.expected_matching_id) {
+            /* find a match keep the frame */
+            node->match = 1;
+            peer_frame->match = 1;
+            CDBG("%s Found match, add to myq, frame_id=%d ", __func__, node->frame.frame_id);
+            mm_camera_stream_frame_enq_no_lock(myq, node);
+            myq->match_cnt++;
+            peerq->match_cnt++;
+            /*set next min matching id*/
+            my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.expected_matching_id =
+              node->frame.frame_id + interval;
+            goto water_mark;
+        } else {
+            /* no match */
+            if(node->frame.frame_id > peer_frame->frame.frame_id) {
+                /* the incoming frame is newer than the peer's unmatched frame.
+                   drop the peer frame */
+                CDBG("%s node frame is newer, release old peer frame ",
+                     __func__);
+                if(!peer_frame_prev) {
+                    /* this is the head */
+                    peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(peerq);
+                    notify_frame.frame = &peer_frame_tmp->frame;
+                    notify_frame.idx = peer_frame_tmp->idx;
+                    mm_camera_stream_util_buf_done(my_obj, peerstream,
+                                                   &notify_frame);
+                    peer_frame = peerq->head;
+                    peer_frame_prev = NULL;
+                    continue;
+                } else {
+                    /* this is not the head. */
+                    peer_frame_tmp = peer_frame;
+                    peer_frame_prev->next = peer_frame->next;
+                    if(peer_frame == peerq->tail) {
+                        /* peer_frame is the tail */
+                        peerq->tail = peer_frame_prev;
+                    }
+                    notify_frame.frame = &peer_frame_tmp->frame;
+                    notify_frame.idx = peer_frame_tmp->idx;
+                    mm_camera_stream_util_buf_done(my_obj, peerstream,
+                                                   &notify_frame);
+                    peer_frame = peer_frame_prev->next;
+                    peerq->cnt--;
+                    continue;
+                }
+            } else {
+                /* The current frame is older than the peer's unmatched frame;
+                 * don't add it to the queue, just drop it */
+                CDBG("%s node frame is older than peer's unmatched frame. "
+                     "Drop the current frame.", __func__);
+                notify_frame.frame = &node->frame;
+                notify_frame.idx = node->idx;
+                mm_camera_stream_util_buf_done(my_obj, mystream, &notify_frame);
+                goto end;
+            }
+        }
+    }
+    if(!node->match && !node->valid_entry) {
+        /* If there is no match and the node is not a valid entry,
+         * it was never added to the queue; it is a dirty node. */
+        CDBG_ERROR("%s: stream type = %d and fd = %d, frame 0x%x is dirty;"
+                   " queueing it back to the kernel", __func__, mystream->stream_type,
+                    mystream->fd, node->frame.frame_id);
+        notify_frame.frame = &node->frame;
+        notify_frame.idx = node->idx;
+        mm_camera_stream_util_buf_done(my_obj, mystream, &notify_frame);
+    }
+water_mark:
+    while((myq->match_cnt > watermark) && (peerq->match_cnt > watermark)) {
+        peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(peerq);
+        if (NULL == peer_frame_tmp) {
+            break;
+        }
+        notify_frame.frame = &peer_frame_tmp->frame;
+        notify_frame.idx = peer_frame_tmp->idx;
+        CDBG("%s match_cnt %d > watermark %d, buf_done on "
+                   "peer frame idx %d id = %d", __func__,
+                   myq->match_cnt, watermark, notify_frame.idx,
+                   notify_frame.frame->frame_id);
+        mm_camera_stream_util_buf_done(my_obj, peerstream, &notify_frame);
+        peerq->match_cnt--;
+        peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(myq);
+        notify_frame.frame = &peer_frame_tmp->frame;
+        notify_frame.idx = peer_frame_tmp->idx;
+        mm_camera_stream_util_buf_done(my_obj, mystream, &notify_frame);
+        myq->match_cnt--;
+    }
+end:
+    CDBG("%s myQ->cnt = %d myQ->match_cnt = %d ", __func__,
+         myq->cnt, myq->match_cnt);
+    if(myq->cnt > myq->match_cnt + 1) {
+        /* drop the first unmatched frame */
+        mm_camera_frame_t *peer_frame = myq->head;
+        mm_camera_frame_t *peer_frame_prev = NULL;
+        while(peer_frame) {
+            CDBG("%s myQ->cnt = %d myQ->match_cnt = %d ", __func__,
+                   myq->cnt, myq->match_cnt);
+            if(peer_frame->match == 0) {
+                /* first unmatched frame */
+                if(!peer_frame_prev) {
+                    /* this is the head */
+                    peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(myq);
+                    notify_frame.frame = &peer_frame_tmp->frame;
+                    notify_frame.idx = peer_frame_tmp->idx;
+                    CDBG("%s Head Issuing buf_done on my frame idx %d id %d",
+                         __func__, notify_frame.idx,
+                         notify_frame.frame->frame_id);
+                    mm_camera_stream_util_buf_done(my_obj, mystream,
+                                                   &notify_frame);
+                } else {
+                    /* this is not the head. */
+                    peer_frame_tmp = peer_frame;
+                    peer_frame_prev->next = peer_frame->next;
+                    if(peer_frame == myq->tail) {
+                        /* this frame is the tail of myq */
+                        myq->tail = peer_frame_prev;
+                    }
+                    notify_frame.frame = &peer_frame_tmp->frame;
+                    notify_frame.idx = peer_frame_tmp->idx;
+                    CDBG("%s Issuing buf_done on my frame idx %d id = %d",
+                         __func__, notify_frame.idx,
+                         notify_frame.frame->frame_id);
+                    mm_camera_stream_util_buf_done(my_obj, mystream,
+                                                   &notify_frame);
+                    myq->cnt--;
+                }
+                break;
+            } else {
+                peer_frame_prev= peer_frame;
+                peer_frame = peer_frame_prev->next;
+            }
+        }
+    }
+    CDBG("%s peerQ->cnt = %d peerQ->match_cnt = %d ", __func__,
+         peerq->cnt, peerq->match_cnt);
+    if(peerq->cnt > peerq->match_cnt + 1) {
+        /* drop the first unmatched frame */
+        mm_camera_frame_t *peer_frame = peerq->head;
+        mm_camera_frame_t *peer_frame_prev = NULL;
+        while(peer_frame) {
+            CDBG("%s Traverse peerq list frame idx %d frame_id = %d match %d ",
+                  __func__, peer_frame->idx, peer_frame->frame.frame_id,
+                  peer_frame->match);
+            if(peer_frame->match == 0) {
+                /* first unmatched frame */
+                if(!peer_frame_prev) {
+                    /* this is the head */
+                    peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(peerq);
+                    notify_frame.frame = &peer_frame_tmp->frame;
+                    notify_frame.idx = peer_frame_tmp->idx;
+                    CDBG("%s Head Issuing buf_done on peer frame idx %d "
+                         "id = %d", __func__, notify_frame.idx,
+                         notify_frame.frame->frame_id);
+                    mm_camera_stream_util_buf_done(my_obj, peerstream,
+                                                   &notify_frame);
+                } else {
+                    /* this is not the head. */
+                    peer_frame_tmp = peer_frame;
+                    peer_frame_prev->next = peer_frame->next;
+                    if(peer_frame == peerq->tail) {
+                        /* peer_frame is the tail */
+                        peerq->tail = peer_frame_prev;
+                    }
+                    notify_frame.frame = &peer_frame_tmp->frame;
+                    notify_frame.idx = peer_frame_tmp->idx;
+                    CDBG("%s Issuing buf_done on peer frame idx %d id = %d",
+                         __func__, notify_frame.idx,
+                         notify_frame.frame->frame_id);
+                    mm_camera_stream_util_buf_done(my_obj, peerstream,
+                                                   &notify_frame);
+                    peerq->cnt--;
+                }
+                break;
+            } else {
+                peer_frame_prev= peer_frame;
+                peer_frame = peer_frame_prev->next;
+            }
+        }
+    }
+
+    CDBG("%s Dispatching ZSL frame ", __func__);
+    if(my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.pending_cnt > 0) {
+        if(!myq->match_cnt || !peerq->match_cnt) {
+            pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+            pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+            return 0;
+        }
+        /* dequeue one by one and then pass to HAL */
+        my_frame = mm_camera_stream_frame_deq_no_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq);
+        peer_frame = mm_camera_stream_frame_deq_no_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.readyq);
+        if (!my_frame || !peer_frame) {
+            pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+            pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+            return 0;
+        }
+        myq->match_cnt--;
+        peerq->match_cnt--;
+        CDBG("%s: Dequeued frame: main frame idx: %d thumbnail "
+             "frame idx: %d", __func__, my_frame->idx, peer_frame->idx);
+        /* dispatch this pair of frames */
+        memset(&data, 0, sizeof(data));
+        data.type = MM_CAMERA_CH_SNAPSHOT;
+        data.snapshot.main.frame = &my_frame->frame;
+        data.snapshot.main.idx = my_frame->idx;
+        data.snapshot.thumbnail.frame = &peer_frame->frame;
+        data.snapshot.thumbnail.idx = peer_frame->idx;
+        my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.pending_cnt--;
+        memcpy(&buf_cb[0], &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[0], 
+               sizeof(mm_camera_buf_cb_t)* MM_CAMERA_BUF_CB_MAX);
+        if(my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.pending_cnt == 0)
+            deliver_done = 1;
+        pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+        pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+
+        goto send_to_hal;
+    }
+
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+    return rc;
+
+send_to_hal:
+    for( i=0;i < MM_CAMERA_BUF_CB_MAX;i++) {
+        if (buf_cb[i].cb && my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1)
+          buf_cb[i].cb(&data,buf_cb[i].user_data);
+    }
+    if(deliver_done > 0) {
+        mm_camera_event_t data_evt;
+        CDBG("%s: ZSL delivered", __func__);
+        data_evt.event_type = MM_CAMERA_EVT_TYPE_CH;
+        data_evt.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+        data_evt.e.ch.ch = MM_CAMERA_CH_SNAPSHOT;
+        mm_camera_poll_send_ch_event(my_obj, &data_evt);
+    }
+    return rc;
+}
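+
+/* The pairing logic above maintains one invariant that is easy to lose in the
+ * traversal details: each ready queue keeps at most 'water_mark' matched
+ * frames, and matched frames are always trimmed from both queues together so
+ * main/thumbnail pairs stay aligned.  The helper below is a simplified
+ * restatement of that trim step only (kept under "#if 0", illustrative and
+ * never compiled). */
+#if 0
+static void zsl_trim_matched_pairs(mm_camera_obj_t *my_obj,
+                                   mm_camera_stream_t *mystream,
+                                   mm_camera_stream_t *peerstream,
+                                   int watermark)
+{
+    mm_camera_frame_queue_t *myq = &mystream->frame.readyq;
+    mm_camera_frame_queue_t *peerq = &peerstream->frame.readyq;
+    mm_camera_notify_frame_t nf;
+    mm_camera_frame_t *f;
+
+    while (myq->match_cnt > watermark && peerq->match_cnt > watermark) {
+        /* the oldest matched peer frame goes back to the kernel ... */
+        f = mm_camera_stream_frame_deq_no_lock(peerq);
+        if (!f)
+            break;
+        nf.frame = &f->frame;
+        nf.idx = f->idx;
+        mm_camera_stream_util_buf_done(my_obj, peerstream, &nf);
+        peerq->match_cnt--;
+
+        /* ... together with its counterpart on this stream's queue */
+        f = mm_camera_stream_frame_deq_no_lock(myq);
+        if (!f)
+            break;
+        nf.frame = &f->frame;
+        nf.idx = f->idx;
+        mm_camera_stream_util_buf_done(my_obj, mystream, &nf);
+        myq->match_cnt--;
+    }
+}
+#endif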
+
+static void mm_camera_read_preview_frame(mm_camera_obj_t * my_obj)
+{
+    int rc = 0;
+    int idx;
+    int i;
+    mm_camera_stream_t *stream;
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+    memset(buf_cb, 0, sizeof(buf_cb));
+    memset(data, 0, sizeof(data));
+
+    if (!my_obj->ch[MM_CAMERA_CH_PREVIEW].acquired) {
+        ALOGE("Preview channel is not in acquired state \n");
+        return;
+    }
+    stream = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
+    idx =  mm_camera_read_msm_frame(my_obj, stream);
+    if (idx < 0) {
+        return;
+    }
+    CDBG("%s Read Preview frame %d ", __func__, idx);
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if((my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].cb) &&
+                (my_obj->poll_threads[MM_CAMERA_CH_PREVIEW].data.used == 1)) {
+            data[i].type = MM_CAMERA_CH_PREVIEW;
+            data[i].def.idx = idx;
+            data[i].def.frame = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.frame[idx].frame;
+            /* Since the frame is originating here, reset the ref count to either
+             * 2(ZSL case) or 1(non-ZSL case). */
+            if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL)
+                my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.ref_count[idx] = 2;
+            else
+                my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.ref_count[idx] = 1;
+            CDBG("%s:calling data notify cb 0x%x, 0x%x\n", __func__,
+                     (uint32_t)my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].cb,
+                     (uint32_t)my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].user_data);
+            /*my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].cb(&data,
+                                        my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].user_data);*/
+            memcpy(&buf_cb[i], &my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i],
+                   sizeof(mm_camera_buf_cb_t));
+        }
+    }
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_PREVIEW].data.used == 1) {
+            buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+        }
+    }
+
+    if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL) {
+        /* Reset match to 0. */
+        stream->frame.frame[idx].match = 0;
+        stream->frame.frame[idx].valid_entry = 0;
+        mm_camera_zsl_frame_cmp_and_enq(my_obj,
+          &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.frame[idx],
+          stream);
+    }
+}
+
+static void mm_camera_snapshot_send_liveshot_notify(mm_camera_obj_t * my_obj)
+{
+    int delivered = 0;
+    mm_camera_frame_queue_t *s_q;
+    int i;
+//    mm_camera_frame_queue_t *s_q, *t_q;
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_frame_t *frame;
+
+    memset(buf_cb, 0, sizeof(buf_cb));
+    memset(data, 0, sizeof(data));
+    s_q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if(s_q->cnt && my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb) {
+            data[i].type = MM_CAMERA_CH_SNAPSHOT;
+            frame = mm_camera_stream_frame_deq(s_q);
+            data[i].snapshot.main.frame = &frame->frame;
+            data[i].snapshot.main.idx = frame->idx;
+            data[i].snapshot.thumbnail.frame = NULL;
+            my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.ref_count[data[i].snapshot.main.idx]++;
+            /*my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb(&data,
+                                    my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].user_data);*/
+            memcpy(&buf_cb[i], &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i],
+                   sizeof(mm_camera_buf_cb_t));
+            my_obj->snap_burst_num_by_user -= 1;
+            CDBG("%s: burst number =%d", __func__, my_obj->snap_burst_num_by_user);
+            delivered = 1;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1) {
+            buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+        }
+    }
+
+    if(delivered) {
+      mm_camera_event_t data;
+      data.event_type = MM_CAMERA_EVT_TYPE_CH;
+      data.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+      data.e.ch.ch = MM_CAMERA_CH_SNAPSHOT;
+      mm_camera_poll_send_ch_event(my_obj, &data);
+    }
+}
+
+static void mm_camera_snapshot_send_snapshot_notify(mm_camera_obj_t * my_obj)
+{
+    int delivered = 0;
+    int i;
+    mm_camera_frame_queue_t *s_q, *t_q;
+    mm_camera_frame_t *frame;
+    //mm_camera_buf_cb_t buf_cb;
+
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+    memset(&buf_cb, 0, sizeof(buf_cb));
+    s_q =   &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+    t_q =   &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.readyq;
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        CDBG("%s Got notify: s_q->cnt = %d, t_q->cnt = %d, buf_cb = %x, "
+             "data.used = %d ", __func__, s_q->cnt, t_q->cnt,
+             (uint32_t)my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb,
+             my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used);
+        if((s_q->cnt && t_q->cnt && my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb) &&
+                (my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1)) {
+            data[i].type = MM_CAMERA_CH_SNAPSHOT;
+            frame = mm_camera_stream_frame_deq(s_q);
+            data[i].snapshot.main.frame = &frame->frame;
+            data[i].snapshot.main.idx = frame->idx;
+            frame = mm_camera_stream_frame_deq(t_q);
+            data[i].snapshot.thumbnail.frame = &frame->frame;
+            data[i].snapshot.thumbnail.idx = frame->idx;
+            my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.ref_count[data[i].snapshot.main.idx]++;
+            my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.ref_count[data[i].snapshot.thumbnail.idx]++;
+            if(my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1){
+                //bu = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i];
+                memcpy(&buf_cb[i], &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i],
+                   sizeof(mm_camera_buf_cb_t));
+                //buf_cb.cb(&data,buf_cb.user_data);
+                my_obj->snap_burst_num_by_user -= 1;
+                CDBG("%s: burst number =%d", __func__, my_obj->snap_burst_num_by_user);
+                delivered = 1;
+            }
+        }
+    }
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1) {
+            buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+        }
+    }
+
+    CDBG("%s Delivered = %d ", __func__, delivered );
+    if(delivered) {
+        mm_camera_event_t edata;
+        /*for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++){
+            buf_cb = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i];
+            if((buf_cb) && (my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1)) {
+                buf_cb->cb(&data,buf_cb->user_data);
+            }
+        }*/
+        edata.event_type = MM_CAMERA_EVT_TYPE_CH;
+        edata.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+        edata.e.ch.ch = MM_CAMERA_CH_SNAPSHOT;
+        mm_camera_poll_send_ch_event(my_obj, &edata);
+    }
+}
+
+static void mm_camera_read_snapshot_main_frame(mm_camera_obj_t * my_obj)
+{
+    int rc = 0;
+    int idx;
+    mm_camera_stream_t *stream;
+    mm_camera_frame_queue_t *q;
+    if (!my_obj->ch[MM_CAMERA_CH_SNAPSHOT].acquired) {
+        ALOGE("Snapshot channel is not in acquired state \n");
+        return;
+    }
+    q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+    stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main;
+    idx =  mm_camera_read_msm_frame(my_obj,stream);
+    if (idx < 0)
+        return;
+
+    CDBG("%s Read Snapshot frame %d ", __func__, idx);
+    if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL) {
+        my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.ref_count[idx]++;
+        /* Reset match to 0. */
+        stream->frame.frame[idx].match = 0;
+        stream->frame.frame[idx].valid_entry = 0;
+        mm_camera_zsl_frame_cmp_and_enq(my_obj,
+          &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.frame[idx], stream);
+    } else {
+        /* send to HAL */
+        mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+        if (!my_obj->full_liveshot)
+          mm_camera_snapshot_send_snapshot_notify(my_obj);
+        else
+          mm_camera_snapshot_send_liveshot_notify(my_obj);
+    }
+}
+static void mm_camera_read_snapshot_thumbnail_frame(mm_camera_obj_t * my_obj)
+{
+    int idx, rc = 0;
+    mm_camera_stream_t *stream;
+    mm_camera_frame_queue_t *q;
+
+    if (!my_obj->ch[MM_CAMERA_CH_SNAPSHOT].acquired) {
+        ALOGE("Snapshot channel is not in acquired state \n");
+        return;
+    }
+    q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.readyq;
+    stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail;
+    idx =  mm_camera_read_msm_frame(my_obj,stream);
+    if (idx < 0)
+        return;
+    if(my_obj->op_mode != MM_CAMERA_OP_MODE_ZSL) {
+        mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+        mm_camera_snapshot_send_snapshot_notify(my_obj);
+    } else {
+//        CDBG("%s: ZSL does not use thumbnail stream",  __func__);
+        rc = mm_camera_stream_qbuf(my_obj, stream, idx);
+//        CDBG("%s Q back thumbnail buffer rc = %d ", __func__, rc);
+    }
+}
+
+static void mm_camera_read_video_frame(mm_camera_obj_t * my_obj)
+{
+    int idx, rc = 0;
+    mm_camera_stream_t *stream;
+    mm_camera_frame_queue_t *q;
+    int i;
+    mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+    mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+    memset(buf_cb, 0, sizeof(buf_cb));
+    memset(data, 0, sizeof(data));
+
+    if (!my_obj->ch[MM_CAMERA_CH_VIDEO].acquired) {
+        ALOGE("Snapshot channel is not in acquired state \n");
+        return;
+    }
+    stream = &my_obj->ch[MM_CAMERA_CH_VIDEO].video.video;
+    idx =  mm_camera_read_msm_frame(my_obj,stream);
+    if (idx < 0)
+        return;
+    ALOGE("Video thread locked");
+    pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_VIDEO].mutex);
+    for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if((my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb) &&
+                (my_obj->poll_threads[MM_CAMERA_CH_VIDEO].data.used == 1)){
+            data[i].type = MM_CAMERA_CH_VIDEO;
+            data[i].video.main.frame = NULL;
+            data[i].video.main.idx = -1;
+            data[i].video.video.idx = idx;
+            data[i].video.video.frame = &my_obj->ch[MM_CAMERA_CH_VIDEO].video.video.
+                frame.frame[idx].frame;
+            my_obj->ch[MM_CAMERA_CH_VIDEO].video.video.frame.ref_count[idx]++;
+            ALOGE("Video thread callback issued");
+            //my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb(&data,
+            //                        my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].user_data);
+            memcpy(&buf_cb[i], &my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i],
+                   sizeof(mm_camera_buf_cb_t));
+            ALOGE("Video thread callback returned");
+            if( my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb_type==MM_CAMERA_BUF_CB_COUNT ) {
+                ALOGE("<DEBUG>:%s: Additional cb called for buffer %p:%d",__func__,stream,idx);
+                if(--(my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb_count) == 0 )
+                    my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb=NULL;
+            }
+        }
+    }
+    pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_VIDEO].mutex);
+
+     for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+        if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_VIDEO].data.used == 1) {
+            buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+        }
+        /*if( buf_cb[i].cb_type==MM_CAMERA_BUF_CB_COUNT ) {
+                ALOGE("<DEBUG>:%s: Additional cb called for buffer %p:%d",__func__,stream,idx);
+                if(--(buf_cb[i].cb_count) == 0 )
+                    buf_cb[i].cb=NULL;
+        }*/
+    }
+
+    ALOGE("Video thread unlocked");
+}
+
+static void mm_camera_read_video_main_frame(mm_camera_obj_t * my_obj)
+{
+    /* no-op: video main stream frames are not consumed here */
+    (void)my_obj;
+    return;
+}
+
+static void mm_camera_read_zsl_main_frame(mm_camera_obj_t * my_obj)
+{
+    int idx, rc = 0;
+    mm_camera_stream_t *stream;
+    mm_camera_frame_queue_t *q;
+    mm_camera_frame_t *frame;
+    int cnt, watermark;
+
+    q =   &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+    stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main;
+    idx =  mm_camera_read_msm_frame(my_obj,stream);
+    if (idx < 0)
+        return;
+
+    CDBG("%s: Enqueuing frame id: %d", __func__, idx);
+    mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+    cnt = mm_camera_stream_frame_get_q_cnt(q);
+    watermark = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.water_mark;
+
+    CDBG("%s: Watermark: %d Queue in a frame: %d", __func__, watermark, cnt);
+    if(watermark < cnt) {
+        /* water overflow, queue head back to kernel */
+        frame = mm_camera_stream_frame_deq(q);
+        if(frame) {
+            rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+            if(rc < 0) {
+                CDBG("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+                     __func__, frame->idx, rc);
+                return;
+            }
+        }
+    }
+    mm_camera_check_pending_zsl_frames(my_obj, MM_CAMERA_CH_SNAPSHOT);
+}
+
+static void mm_camera_read_zsl_postview_frame(mm_camera_obj_t * my_obj)
+{
+    int idx, rc = 0;
+    mm_camera_stream_t *stream;
+    mm_camera_frame_queue_t *q;
+    mm_camera_frame_t *frame;
+    int cnt, watermark;
+    q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.readyq;
+    stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail;
+    idx =  mm_camera_read_msm_frame(my_obj,stream);
+    if (idx < 0)
+        return;
+    mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+    watermark = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.water_mark;
+    cnt = mm_camera_stream_frame_get_q_cnt(q);
+    if(watermark < cnt) {
+        /* water overflow, queue head back to kernel */
+        frame = mm_camera_stream_frame_deq(q);
+        if(frame) {
+            rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+            if(rc < 0) {
+                CDBG("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+                     __func__, frame->idx, rc);
+                return;
+            }
+        }
+    }
+    mm_camera_check_pending_zsl_frames(my_obj, MM_CAMERA_CH_SNAPSHOT);
+}
+
+void mm_camera_msm_data_notify(mm_camera_obj_t * my_obj, int fd,
+                               mm_camera_stream_type_t stream_type)
+{
+    switch(stream_type) {
+    case MM_CAMERA_STREAM_RAW:
+        mm_camera_read_raw_frame(my_obj);
+        break;
+    case MM_CAMERA_STREAM_PREVIEW:
+        mm_camera_read_preview_frame(my_obj);
+        break;
+    case MM_CAMERA_STREAM_SNAPSHOT:
+        mm_camera_read_snapshot_main_frame(my_obj);
+        break;
+    case MM_CAMERA_STREAM_THUMBNAIL:
+        mm_camera_read_snapshot_thumbnail_frame(my_obj);
+        break;
+    case MM_CAMERA_STREAM_VIDEO:
+        mm_camera_read_video_frame(my_obj);
+        break;
+    case MM_CAMERA_STREAM_VIDEO_MAIN:
+        mm_camera_read_video_main_frame(my_obj);
+        break;
+    default:
+        break;
+    }
+}
+
+static mm_camera_channel_type_t mm_camera_image_mode_to_ch(int image_mode)
+{
+    switch(image_mode) {
+    case MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW:
+        return MM_CAMERA_CH_PREVIEW;
+    case MSM_V4L2_EXT_CAPTURE_MODE_MAIN:
+    case MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL:
+        return MM_CAMERA_CH_SNAPSHOT;
+    case MSM_V4L2_EXT_CAPTURE_MODE_VIDEO:
+        return MM_CAMERA_CH_VIDEO;
+    case MSM_V4L2_EXT_CAPTURE_MODE_RAW:
+        return MM_CAMERA_CH_RAW;
+    default:
+        return MM_CAMERA_CH_MAX;
+    }
+}
+
+void mm_camera_dispatch_app_event(mm_camera_obj_t *my_obj, mm_camera_event_t *event)
+{
+    int i;
+    mm_camera_evt_obj_t evtcb;
+
+    if(event->event_type <  MM_CAMERA_EVT_TYPE_MAX) {
+      pthread_mutex_lock(&my_obj->mutex);
+      memcpy(&evtcb,
+       &my_obj->evt[event->event_type],
+       sizeof(mm_camera_evt_obj_t));
+      pthread_mutex_unlock(&my_obj->mutex);
+      for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+        if(evtcb.evt[i].evt_cb) {
+          evtcb.evt[i].evt_cb(event, evtcb.evt[i].user_data);
+        }
+      }
+    }
+}
+
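+/* Dequeues a pending v4l2 event from the control fd (VIDIOC_DQEVENT) and
+ * dispatches the mm_camera_event_t carried in ev.u.data to the callbacks
+ * registered for that event type. A private-start + error notification from
+ * the driver is translated into an MM_CAMERA_CTRL_EVT_ERROR control event. */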
+void mm_camera_msm_evt_notify(mm_camera_obj_t * my_obj, int fd)
+{
+    struct v4l2_event ev;
+    int rc;
+    mm_camera_event_t *evt = NULL;
+
+    memset(&ev, 0, sizeof(ev));
+    rc = ioctl(fd, VIDIOC_DQEVENT, &ev);
+    evt = (mm_camera_event_t *)ev.u.data;
+
+    if (rc >= 0) {
+        if(ev.type == V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT) {
+            evt->event_type = MM_CAMERA_EVT_TYPE_CTRL;
+            evt->e.ctrl.evt = MM_CAMERA_CTRL_EVT_ERROR;
+        }
+        switch(evt->event_type) {
+        case MM_CAMERA_EVT_TYPE_INFO:
+           break;
+        case MM_CAMERA_EVT_TYPE_STATS:
+           break;
+        case MM_CAMERA_EVT_TYPE_CTRL:
+           break;
+        default:
+            break;
+        }
+        mm_camera_dispatch_app_event(my_obj, evt);
+    }
+}
diff --git a/camera/mm-camera-interface/mm_camera_poll_thread.c b/camera/mm-camera-interface/mm_camera_poll_thread.c
new file mode 100644
index 0000000..8f433d4
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_poll_thread.c
@@ -0,0 +1,448 @@
+/*
+Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+typedef enum {
+    /* ask the channel to flush out the queued frames. */
+    MM_CAMERA_PIPE_CMD_FLASH_QUEUED_FRAME,
+    /* ask ctrl fd to generate ch event to HAL */
+    MM_CAMERA_PIPE_CMD_CH_EVENT,
+    /*start*/
+    MM_CAMERA_PIPE_CMD_ADD_CH,
+
+    /*stop*/
+    MM_CAMERA_PIPE_CMD_DEL_CH,
+
+    /* exit */
+    MM_CAMERA_PIPE_CMD_EXIT,
+    /* max count */
+    MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+    MM_CAMERA_POLL_TASK_STATE_POLL,     /* polling pid in polling state. */
+    MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+    uint8_t cmd;
+    mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
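+/* Sends a command to the poll worker over the thread's pipe and blocks on
+ * cond_v until the worker acknowledges it via mm_camera_poll_sig_done();
+ * poll_cb->status is the predicate, guarded by poll_cb->mutex. */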
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+                                  uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    int len;
+    CDBG("%s: begin", __func__);
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the status to false */
+    poll_cb->status = FALSE;
+    /* send cmd to worker */
+    len = write(poll_cb->data.pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if(len < 1) {
+      CDBG("%s: len = %d, errno = %d", __func__, len, errno);
+      //pthread_mutex_unlock(&poll_cb->mutex);
+      //return -1;
+    }
+    CDBG("%s: begin IN mutex write done, len = %d", __func__, len);
+    /* wait till worker task gives positive signal */
+    if(FALSE == poll_cb->status) {
+      CDBG("%s: wait", __func__);
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    /* done */
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: end, len = %d, size = %d", __func__, len, sizeof(cmd_evt));
+    return MM_CAMERA_OK;
+}
+
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = TRUE;
+    pthread_cond_signal(&poll_cb->cond_v);
+    CDBG("%s: done, in mutex", __func__);
+    pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+static int32_t mm_camera_poll_proc_msm(mm_camera_poll_thread_t *poll_cb, struct pollfd *fds)
+{
+   int i;
+
+    for(i = 0; i < poll_cb->data.num_fds-1; i++) {
+        /*Checking for data events*/
+        if((poll_cb->data.poll_type == MM_CAMERA_POLL_TYPE_CH) &&
+           (fds[i].revents & POLLIN) &&
+           (fds[i].revents & POLLRDNORM)) {
+            if(poll_cb->data.used) {
+                mm_camera_msm_data_notify(poll_cb->data.my_obj,
+                                        fds[i].fd,
+                                        poll_cb->data.poll_streams[i]->stream_type);
+            }
+
+        }
+        /*Checking for ctrl events*/
+        if((poll_cb->data.poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+           (fds[i].revents & POLLPRI)) {
+          CDBG("%s: mm_camera_msm_evt_notify\n", __func__);
+          mm_camera_msm_evt_notify(poll_cb->data.my_obj, fds[i].fd);
+        }
+
+    }
+    return 0;
+}
+
+static void cm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+                                     mm_camera_poll_task_state_type_t state)
+{
+    poll_cb->data.state = state;
+}
+
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+    ssize_t read_len;
+    int i;
+    mm_camera_sig_evt_t cmd_evt;
+    read_len = read(poll_cb->data.pfds[0], &cmd_evt, sizeof(cmd_evt));
+    CDBG("%s: read_fd = %d, read_len = %d, expect_len = %d",
+         __func__, poll_cb->data.pfds[0], (int)read_len, (int)sizeof(cmd_evt));
+    switch(cmd_evt.cmd) {
+    case MM_CAMERA_PIPE_CMD_FLASH_QUEUED_FRAME:
+      mm_camera_dispatch_buffered_frames(poll_cb->data.my_obj,
+                                         poll_cb->data.ch_type);
+      break;
+    case MM_CAMERA_PIPE_CMD_CH_EVENT: {
+      mm_camera_event_t *event = &cmd_evt.event;
+      CDBG("%s: ch event, type=0x%x, ch=%d, evt=%d",
+           __func__, event->event_type, event->e.ch.ch, event->e.ch.evt);
+      mm_camera_dispatch_app_event(poll_cb->data.my_obj, event);
+      break;
+    }
+    case MM_CAMERA_PIPE_CMD_ADD_CH:
+        if(poll_cb->data.poll_type == MM_CAMERA_POLL_TYPE_CH) {
+            for(i = 0; i < MM_CAMERA_CH_STREAM_MAX; i++) {
+                if(poll_cb->data.poll_streams[i]) {
+                    poll_cb->data.poll_fd[poll_cb->data.num_fds + i] = poll_cb->data.poll_streams[i]->fd;
+                }
+            }
+        }
+        poll_cb->data.num_fds += mm_camera_ch_util_get_num_stream(poll_cb->data.my_obj,
+                                                                      poll_cb->data.ch_type);
+        poll_cb->data.used = 1;
+        CDBG("Num fds after MM_CAMERA_PIPE_CMD_ADD_CH = %d",poll_cb->data.num_fds);
+        break;
+
+    case MM_CAMERA_PIPE_CMD_DEL_CH:
+        poll_cb->data.num_fds -= mm_camera_ch_util_get_num_stream(poll_cb->data.my_obj,
+                                                                  poll_cb->data.ch_type);
+        poll_cb->data.used = 0;
+        CDBG("Num fds after MM_CAMERA_PIPE_CMD_DEL_CH = %d",poll_cb->data.num_fds);
+        break;
+
+    case MM_CAMERA_PIPE_CMD_EXIT:
+    default:
+        cm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_MAX);
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    }
+}
+
+static int mm_camera_poll_ch_busy(mm_camera_obj_t * my_obj, int ch_type)
+{
+    int i;
+    int used = 0;
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+    pthread_mutex_lock(&poll_cb->mutex);
+    used = poll_cb->data.used;
+    pthread_mutex_unlock(&poll_cb->mutex);
+    if(used)
+        return 1;
+    else
+        return 0;
+}
+int32_t mm_camera_poll_dispatch_buffered_frames(mm_camera_obj_t * my_obj, int ch_type)
+{
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+    mm_camera_sig_evt_t cmd;
+    int len;
+
+    cmd.cmd = MM_CAMERA_PIPE_CMD_FLASH_QUEUED_FRAME;
+    memset(&cmd.event, 0, sizeof(cmd.event));
+    pthread_mutex_lock(&poll_cb->mutex);
+    len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+    pthread_mutex_unlock(&poll_cb->mutex);
+    return MM_CAMERA_OK;
+}
+
+int mm_camera_poll_busy(mm_camera_obj_t * my_obj)
+{
+    int i;
+    mm_camera_poll_thread_t *poll_cb;
+    for(i = 0; i < (MM_CAMERA_POLL_THRAED_MAX - 1); i++) {
+        if(mm_camera_poll_ch_busy(my_obj,  i) > 0)
+          return 1;
+    }
+    return 0;
+}
+
+int mm_camera_poll_send_ch_event(mm_camera_obj_t * my_obj, mm_camera_event_t *event)
+{
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[MM_CAMERA_CH_MAX];
+    mm_camera_sig_evt_t cmd;
+    int len;
+
+    cmd.cmd = MM_CAMERA_PIPE_CMD_CH_EVENT;
+    memcpy(&cmd.event, event, sizeof(cmd.event));
+    CDBG("%s: ch event, type=0x%x, ch=%d, evt=%d, poll_type = %d, read_fd=%d, write_fd=%d",
+        __func__, event->event_type, event->e.ch.ch, event->e.ch.evt, poll_cb->data.poll_type,
+        poll_cb->data.pfds[0], poll_cb->data.pfds[1]);
+    pthread_mutex_lock(&poll_cb->mutex);
+    len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+    pthread_mutex_unlock(&poll_cb->mutex);
+    return MM_CAMERA_OK;
+}
+
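+/* Main loop of a poll thread. fds[0] is always the read end of the command
+ * pipe; the remaining fds are either the channel's stream fds
+ * (MM_CAMERA_POLL_TYPE_CH) or the control fd (MM_CAMERA_POLL_TYPE_EVT).
+ * Pipe traffic is handled by mm_camera_poll_proc_pipe(), device traffic by
+ * mm_camera_poll_proc_msm(). The loop runs until a MM_CAMERA_PIPE_CMD_EXIT
+ * moves the state out of MM_CAMERA_POLL_TASK_STATE_POLL. */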
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+    int rc = 0, i;
+    struct pollfd fds[MM_CAMERA_CH_STREAM_MAX+1];
+    int timeoutms;
+    CDBG("%s: poll type = %d, num_fd = %d\n",
+         __func__, poll_cb->data.poll_type, poll_cb->data.num_fds);
+    do {
+        for(i = 0; i < poll_cb->data.num_fds; i++) {
+            fds[i].fd = poll_cb->data.poll_fd[i];
+            fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+        }
+        timeoutms = poll_cb->data.timeoutms;
+        rc = poll(fds, poll_cb->data.num_fds, timeoutms);
+        if(rc > 0) {
+            if((fds[0].revents & POLLIN) && (fds[0].revents & POLLRDNORM))
+                mm_camera_poll_proc_pipe(poll_cb);
+            else
+                mm_camera_poll_proc_msm(poll_cb, &fds[1]);
+        } else {
+            /* in error case sleep 10 us and then continue. hard coded here */
+            usleep(10);
+            continue;
+        }
+    } while (poll_cb->data.state == MM_CAMERA_POLL_TASK_STATE_POLL);
+    return NULL;
+}
+
+static void *mm_camera_poll_thread(void *data)
+{
+    int rc = 0;
+    int i;
+    void *ret = NULL;
+    mm_camera_poll_thread_t *poll_cb = data;
+
+    poll_cb->data.poll_fd[poll_cb->data.num_fds++] = poll_cb->data.pfds[0];
+    switch(poll_cb->data.poll_type) {
+    case MM_CAMERA_POLL_TYPE_EVT:
+        poll_cb->data.poll_fd[poll_cb->data.num_fds++] =
+          ((mm_camera_obj_t *)(poll_cb->data.my_obj))->ctrl_fd;
+        break;
+    case MM_CAMERA_POLL_TYPE_CH:
+    default:
+        break;
+    }
+    mm_camera_poll_sig_done(poll_cb);
+    ret = mm_camera_poll_fn(poll_cb);
+    return ret;
+}
+
+int mm_camera_poll_start(mm_camera_obj_t * my_obj,  mm_camera_poll_thread_t *poll_cb)
+{
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = 0;
+    pthread_create(&poll_cb->data.pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+    if(!poll_cb->status) {
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    pthread_mutex_unlock(&poll_cb->mutex);
+    return MM_CAMERA_OK;
+}
+
+int mm_camera_poll_stop(mm_camera_obj_t * my_obj, mm_camera_poll_thread_t *poll_cb)
+{
+    CDBG("%s, my_obj=0x%x\n", __func__, (uint32_t)my_obj);
+    mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+    if (pthread_join(poll_cb->data.pid, NULL) != 0) {
+        CDBG("%s: pthread dead already\n", __func__);
+    }
+    return MM_CAMERA_OK;
+}
+
+
+int mm_camera_poll_thread_add_ch(mm_camera_obj_t * my_obj, int ch_type)
+{
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+    mm_camera_sig_evt_t cmd;
+    int len;
+
+    if(poll_cb->data.used == 1){
+        CDBG_ERROR("%s : Thread is Active",__func__);
+        return MM_CAMERA_OK;
+    }
+    CDBG("Run thread for ch_type = %d ",ch_type);
+    cmd.cmd = MM_CAMERA_PIPE_CMD_ADD_CH;
+    poll_cb->data.ch_type = ch_type;
+
+    pthread_mutex_lock(&poll_cb->mutex);
+    len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+    pthread_mutex_unlock(&poll_cb->mutex);
+    poll_cb->data.used = 1;
+    return MM_CAMERA_OK;
+}
+
+int mm_camera_poll_thread_del_ch(mm_camera_obj_t * my_obj, int ch_type)
+{
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+    mm_camera_sig_evt_t cmd;
+    int len;
+
+    if(poll_cb->data.used == 0){
+        CDBG_ERROR("%s : Thread is Not Active",__func__);
+        return MM_CAMERA_OK;
+    }
+    CDBG("Stop thread for ch_type = %d ",ch_type);
+    cmd.cmd = MM_CAMERA_PIPE_CMD_DEL_CH;
+    poll_cb->data.ch_type = (mm_camera_channel_type_t)ch_type;
+
+    pthread_mutex_lock(&poll_cb->mutex);
+    len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+    pthread_mutex_unlock(&poll_cb->mutex);
+    poll_cb->data.used = 0;
+    return MM_CAMERA_OK;
+
+}
+
+
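+/* Launches the poll thread for a channel. Channel indices below
+ * MM_CAMERA_CH_MAX get a data poll thread (MM_CAMERA_POLL_TYPE_CH) that
+ * watches the channel's stream fds; the entry at MM_CAMERA_CH_MAX is the
+ * event poll thread (MM_CAMERA_POLL_TYPE_EVT) that watches the control fd.
+ * A pipe is created first so the thread can be signalled with the
+ * MM_CAMERA_PIPE_CMD_* commands defined above. */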
+int mm_camera_poll_thread_launch(mm_camera_obj_t * my_obj, int ch_type)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+    if(mm_camera_poll_ch_busy(my_obj, ch_type) > 0) {
+        CDBG_ERROR("%s: err, poll thread of channel %d already running. cam_id=%d\n",
+             __func__, ch_type, my_obj->my_id);
+        return -MM_CAMERA_E_INVALID_OPERATION;
+    }
+    poll_cb->data.ch_type = ch_type;
+    rc = pipe(poll_cb->data.pfds);
+    if(rc < 0) {
+        CDBG_ERROR("%s: camera_id = %d, pipe open rc=%d\n", __func__, my_obj->my_id, rc);
+        return -MM_CAMERA_E_GENERAL;
+    }
+    CDBG("%s: ch = %d, poll_type = %d, read fd = %d, write fd = %d",
+        __func__, ch_type, poll_cb->data.poll_type,
+        poll_cb->data.pfds[0], poll_cb->data.pfds[1]);
+    poll_cb->data.my_obj = my_obj;
+    poll_cb->data.used = 0;
+    poll_cb->data.timeoutms = -1;  /* infinite poll timeout */
+
+    if(ch_type < MM_CAMERA_CH_MAX) {
+        poll_cb->data.poll_type = MM_CAMERA_POLL_TYPE_CH;
+        mm_camera_ch_util_get_stream_objs(my_obj, ch_type,
+                                      &poll_cb->data.poll_streams[0],
+                                      &poll_cb->data.poll_streams[1]);
+    } else{
+        poll_cb->data.poll_type = MM_CAMERA_POLL_TYPE_EVT;
+    }
+
+    ALOGE("%s: ch_type = %d, poll_type = %d, read fd = %d, write fd = %d",
+         __func__, ch_type, poll_cb->data.poll_type,
+         poll_cb->data.pfds[0], poll_cb->data.pfds[1]);
+    /* launch the thread */
+    rc = mm_camera_poll_start(my_obj, poll_cb);
+    return rc;
+}
+
+int mm_camera_poll_thread_release(mm_camera_obj_t * my_obj, int ch_type)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+    if(MM_CAMERA_POLL_TASK_STATE_MAX == poll_cb->data.state) {
+        CDBG("%s: err, poll thread of channel % is not running. cam_id=%d\n",
+             __func__, ch_type, my_obj->my_id);
+        return -MM_CAMERA_E_INVALID_OPERATION;
+    }
+    rc = mm_camera_poll_stop(my_obj, poll_cb);
+
+    if(poll_cb->data.pfds[0]) {
+        close(poll_cb->data.pfds[0]);
+    }
+    if(poll_cb->data.pfds[1]) {
+        close(poll_cb->data.pfds[1]);
+    }
+    memset(&poll_cb->data, 0, sizeof(poll_cb->data));
+    return MM_CAMERA_OK;
+}
+
+void mm_camera_poll_threads_init(mm_camera_obj_t * my_obj)
+{
+    int i;
+    mm_camera_poll_thread_t *poll_cb;
+
+    for(i = 0; i < MM_CAMERA_POLL_THRAED_MAX; i++) {
+        poll_cb = &my_obj->poll_threads[i];
+        pthread_mutex_init(&poll_cb->mutex, NULL);
+        pthread_cond_init(&poll_cb->cond_v, NULL);
+    }
+}
+
+void mm_camera_poll_threads_deinit(mm_camera_obj_t * my_obj)
+{
+    int i;
+    mm_camera_poll_thread_t *poll_cb;
+
+    for(i = 0; i < MM_CAMERA_POLL_THRAED_MAX; i++) {
+        poll_cb = &my_obj->poll_threads[i];
+        if(poll_cb->data.used)
+            mm_camera_poll_stop(my_obj, poll_cb);
+        pthread_mutex_destroy(&poll_cb->mutex);
+        pthread_cond_destroy(&poll_cb->cond_v);
+        memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+    }
+}
diff --git a/camera/mm-camera-interface/mm_camera_sock.c b/camera/mm-camera-interface/mm_camera_sock.c
new file mode 100644
index 0000000..6d85a66
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_sock.c
@@ -0,0 +1,224 @@
+/*
+Copyright (c) 2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/uio.h>
+#include <linux/un.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION    - mm_camera_socket_create -
+ *
+ * DESCRIPTION: opens a domain socket tied to the camera ID and socket type
+ *                        int cam_id: camera ID
+ *                        mm_camera_sock_type_t sock_type: socket type, TCP/UDP
+ * returns the fd of the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+    int socket_fd;
+    struct sockaddr_un sock_addr;
+    int sktype;
+    int rc;
+
+    switch (sock_type)
+    {
+      case MM_CAMERA_SOCK_TYPE_UDP:
+        sktype = SOCK_DGRAM;
+        break;
+      case MM_CAMERA_SOCK_TYPE_TCP:
+        sktype = SOCK_STREAM;
+        break;
+      default:
+        CDBG_ERROR("%s: unknown socket type =%d", __func__, sock_type);
+        return -1;
+    }
+    socket_fd = socket(AF_UNIX, sktype, 0);
+    if (socket_fd < 0) {
+        CDBG_ERROR("%s: error create socket fd =%d", __func__, socket_fd);
+        return socket_fd;
+    }
+
+    memset(&sock_addr, 0, sizeof(sock_addr));
+    sock_addr.sun_family = AF_UNIX;
+    snprintf(sock_addr.sun_path, UNIX_PATH_MAX, "/data/cam_socket%d", cam_id);
+    if((rc = connect(socket_fd, (struct sockaddr *) &sock_addr,
+      sizeof(sock_addr))) != 0) {
+      CDBG_ERROR("%s: connect to %s failed: %s", __func__,
+        sock_addr.sun_path, strerror(errno));
+      close(socket_fd);
+      socket_fd = -1;
+    }
+
+    CDBG("%s: socket_fd=%d %s", __func__, socket_fd, sock_addr.sun_path);
+    return socket_fd;
+}
+
+/*===========================================================================
+ * FUNCTION    - mm_camera_socket_close -
+ *
+ * DESCRIPTION:  close domain socket by its fd
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+    if (fd > 0) {
+      close(fd);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION    - mm_camera_socket_sendmsg -
+ *
+ * DESCRIPTION:  sends a msg through the domain socket
+ *                         int fd: socket fd
+ *                         void *msg: pointer to the msg to be sent over the domain socket
+ *                         uint32_t buf_size: size of the msg in bytes
+ *                         int sendfd: file descriptor to be sent along with the msg
+ * returns the total bytes of the sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int sendfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr * cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+
+    if (msg == NULL) {
+      CDBG("%s: msg is NULL", __func__);
+      return -1;
+    }
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+    CDBG("%s: iov_len=%d", __func__, iov[0].iov_len);
+
+    msgh.msg_control = NULL;
+    msgh.msg_controllen = 0;
+
+    // if sendfd is valid, we need to pass it through the control msg
+    if( sendfd > 0) {
+      msgh.msg_control = control;
+      msgh.msg_controllen = sizeof(control);
+      cmsghp = CMSG_FIRSTHDR(&msgh);
+      if (cmsghp != NULL) {
+        CDBG("%s: Got ctrl msg pointer", __func__);
+        cmsghp->cmsg_level = SOL_SOCKET;
+        cmsghp->cmsg_type = SCM_RIGHTS;
+        cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+        *((int *)CMSG_DATA(cmsghp)) = sendfd;
+        CDBG("%s: cmsg data=%d", __func__, *((int *) CMSG_DATA(cmsghp)));
+      } else {
+        CDBG("%s: ctrl msg NULL", __func__);
+        return -1;
+      }
+    }
+
+    return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION    - mm_camera_socket_recvmsg -
+ *
+ * DESCRIPTION:  receives a msg from the domain socket.
+ *                         int fd: socket fd
+ *                         void *msg: buffer to hold the incoming msg,
+ *                                    needs to be allocated by the caller
+ *                         uint32_t buf_size: the size of the buf that holds the incoming msg
+ *                         int *rcvdfd: pointer to hold the received file descriptor if not NULL.
+ * returns the total bytes of the received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr *cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+    int rcvd_fd = -1;
+    int rcvd_len = 0;
+
+    if ( (msg == NULL) || (buf_size == 0) ) {
+      CDBG_ERROR(" %s: msg buf is NULL or buf_size is 0", __func__);
+      return -1;
+    }
+
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+    msgh.msg_control = control;
+    msgh.msg_controllen = sizeof(control);
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+
+    if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+      CDBG_ERROR(" %s: recvmsg failed", __func__);
+      return rcvd_len;
+    }
+
+    CDBG("%s:  msg_ctrl %p len %d", __func__, msgh.msg_control, msgh.msg_controllen);
+
+    if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+        (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+      if (cmsghp->cmsg_level == SOL_SOCKET &&
+        cmsghp->cmsg_type == SCM_RIGHTS) {
+        CDBG("%s:  CtrlMsg is valid", __func__);
+        rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+        CDBG("%s:  Receieved fd=%d", __func__, rcvd_fd);
+      } else {
+        CDBG_ERROR("%s:  Unexpected Control Msg. Line=%d", __func__, __LINE__);
+      }
+    }
+
+    if (rcvdfd) {
+      *rcvdfd = rcvd_fd;
+    }
+
+    return rcvd_len;
+}
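+
+/*
+ * Illustrative usage sketch only (the packet contents, the daemon at the
+ * other end of /data/cam_socket<N> and the fd being shared are assumptions,
+ * not defined in this file):
+ *
+ *   int sock_fd = mm_camera_socket_create(cam_id, MM_CAMERA_SOCK_TYPE_UDP);
+ *   if (sock_fd >= 0) {
+ *     // send a request plus a buffer fd via SCM_RIGHTS
+ *     mm_camera_socket_sendmsg(sock_fd, &packet, sizeof(packet), buf_fd);
+ *     // read the reply; any fd passed back lands in rcvd_fd
+ *     int rcvd_fd = -1;
+ *     mm_camera_socket_recvmsg(sock_fd, &packet, sizeof(packet), &rcvd_fd);
+ *     mm_camera_socket_close(sock_fd);
+ *   }
+ */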
+
diff --git a/camera/mm-camera-interface/mm_camera_sock.h b/camera/mm-camera-interface/mm_camera_sock.h
new file mode 100644
index 0000000..9519754
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_sock.h
@@ -0,0 +1,57 @@
+/*
+Copyright (c) 2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+#include <inttypes.h>
+
+typedef enum {
+    MM_CAMERA_SOCK_TYPE_UDP,
+    MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int sendfd);
+
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd);
+
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
diff --git a/camera/mm-camera-interface/mm_camera_stream.c b/camera/mm-camera-interface/mm_camera_stream.c
new file mode 100644
index 0000000..4b12757
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_stream.c
@@ -0,0 +1,887 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <time.h>
+
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+static void mm_camera_stream_util_set_state(mm_camera_stream_t *stream,
+                         mm_camera_stream_state_type_t state);
+
+int mm_camera_stream_init_q(mm_camera_frame_queue_t *q)
+{
+    pthread_mutex_init(&q->mutex, NULL);
+    return MM_CAMERA_OK;
+}
+int mm_camera_stream_deinit_q(mm_camera_frame_queue_t *q)
+{
+    pthread_mutex_destroy(&q->mutex);
+    return MM_CAMERA_OK;
+}
+
+int mm_camera_stream_frame_get_q_cnt(mm_camera_frame_queue_t *q)
+{
+    int cnt;
+    pthread_mutex_lock(&q->mutex);
+    cnt = q->cnt;
+    pthread_mutex_unlock(&q->mutex);
+    return cnt;
+}
+
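+/* The frame ready queue is a simple singly linked list with head/tail
+ * pointers and a count, protected by q->mutex. The *_no_lock variants below
+ * assume the caller already holds q->mutex. */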
+mm_camera_frame_t *mm_camera_stream_frame_deq_no_lock(mm_camera_frame_queue_t *q)
+{
+    mm_camera_frame_t *tmp;
+
+    tmp = q->head;
+
+    if(tmp == NULL) goto end;
+    if(q->head == q->tail) {
+        q->head = NULL;
+        q->tail = NULL;
+    } else {
+        q->head = tmp->next;
+    }
+    tmp->next = NULL;
+    q->cnt--;
+end:
+    return tmp;
+}
+
+void mm_camera_stream_frame_enq_no_lock(mm_camera_frame_queue_t *q, mm_camera_frame_t *node)
+{
+    node->next = NULL;
+    if(q->head == NULL) {
+        q->head = node;
+        q->tail = node;
+    } else {
+        q->tail->next = node;
+        q->tail = node;
+    }
+    q->cnt++;
+}
+
+mm_camera_frame_t *mm_camera_stream_frame_deq(mm_camera_frame_queue_t *q)
+{
+    mm_camera_frame_t *tmp;
+
+    pthread_mutex_lock(&q->mutex);
+    tmp = q->head;
+
+    if(tmp == NULL) goto end;
+    if(q->head == q->tail) {
+        q->head = NULL;
+        q->tail = NULL;
+    } else {
+        q->head = tmp->next;
+    }
+    tmp->next = NULL;
+    q->cnt--;
+end:
+    pthread_mutex_unlock(&q->mutex);
+    return tmp;
+}
+
+void mm_camera_stream_frame_enq(mm_camera_frame_queue_t *q, mm_camera_frame_t *node)
+{
+    pthread_mutex_lock(&q->mutex);
+    node->next = NULL;
+    if(q->head == NULL) {
+        q->head = node;
+        q->tail = node;
+    } else {
+        q->tail->next = node;
+        q->tail = node;
+    }
+    q->cnt++;
+    pthread_mutex_unlock(&q->mutex);
+}
+
+void mm_stream_frame_flash_q(mm_camera_frame_queue_t *q)
+{
+    pthread_mutex_lock(&q->mutex);
+    q->cnt = 0;
+    q->match_cnt = 0;
+    q->head = NULL;
+    q->tail = NULL;
+    pthread_mutex_unlock(&q->mutex);
+}
+
+void mm_camera_stream_frame_refill_q(mm_camera_frame_queue_t *q, mm_camera_frame_t *node, int num)
+{
+    int i;
+
+    mm_stream_frame_flash_q(q);
+    for(i = 0; i < num; i++)
+        mm_camera_stream_frame_enq(q, &node[i]);
+    CDBG("%s: q=0x%x, num = %d, q->cnt=%d\n",
+             __func__,(uint32_t)q,num, mm_camera_stream_frame_get_q_cnt(q));
+}
+
+void mm_camera_stream_deinit_frame(mm_camera_stream_frame_t *frame)
+{
+    pthread_mutex_destroy(&frame->mutex);
+    mm_camera_stream_deinit_q(&frame->readyq);
+    memset(frame, 0, sizeof(mm_camera_stream_frame_t));
+}
+
+void mm_camera_stream_init_frame(mm_camera_stream_frame_t *frame)
+{
+    memset(frame, 0, sizeof(mm_camera_stream_frame_t));
+    pthread_mutex_init(&frame->mutex, NULL);
+    mm_camera_stream_init_q(&frame->readyq);
+}
+
+void mm_camera_stream_release(mm_camera_stream_t *stream)
+{
+    mm_camera_stream_deinit_frame(&stream->frame);
+    if(stream->fd > 0) close(stream->fd);
+    memset(stream, 0, sizeof(*stream));
+    //stream->fd = -1;
+    mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_NOTUSED);
+}
+
+int mm_camera_stream_is_active(mm_camera_stream_t *stream)
+{
+    return (stream->state == MM_CAMERA_STREAM_STATE_ACTIVE)? TRUE : FALSE;
+}
+
+static void mm_camera_stream_util_set_state(mm_camera_stream_t *stream,
+                                 mm_camera_stream_state_type_t state)
+{
+    CDBG("%s:stream fd=%d, stream type=%d, cur_state=%d,new_state=%d\n",
+      __func__, stream->fd, stream->stream_type, stream->state, state);
+    stream->state = state;
+}
+
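+/* Dequeues one filled buffer from the stream's V4L2 multi-planar capture
+ * queue (VIDIOC_DQBUF), copies the per-plane reserved/data offsets, frame id
+ * and timestamp into the stream's frame table, and returns the buffer index,
+ * or -1 if the dequeue failed. */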
+int mm_camera_read_msm_frame(mm_camera_obj_t * my_obj,
+                        mm_camera_stream_t *stream)
+{
+    int idx = -1, rc = MM_CAMERA_OK;
+    uint32_t i = 0;
+    struct v4l2_buffer vb;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+
+    memset(&vb,  0,  sizeof(vb));
+    vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    vb.memory = V4L2_MEMORY_USERPTR;
+    vb.m.planes = &planes[0];
+    vb.length = stream->fmt.fmt.pix_mp.num_planes;
+
+    CDBG("%s: VIDIOC_DQBUF ioctl call\n", __func__);
+    rc = ioctl(stream->fd, VIDIOC_DQBUF, &vb);
+    if (rc < 0)
+        return idx;
+    idx = vb.index;
+    for(i = 0; i < vb.length; i++) {
+        CDBG("%s plane %d addr offset: %d data offset:%d\n",
+             __func__, i, vb.m.planes[i].reserved[0],
+             vb.m.planes[i].data_offset);
+        stream->frame.frame[idx].planes[i].reserved[0] =
+            vb.m.planes[i].reserved[0];
+        stream->frame.frame[idx].planes[i].data_offset =
+            vb.m.planes[i].data_offset;
+    }
+
+    stream->frame.frame[idx].frame.frame_id = vb.sequence;
+    stream->frame.frame[idx].frame.ts.tv_sec  = vb.timestamp.tv_sec;
+    stream->frame.frame[idx].frame.ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+    return idx;
+}
+
+static int mm_camera_stream_util_proc_get_crop(mm_camera_obj_t *my_obj,
+                            mm_camera_stream_t *stream,
+                            mm_camera_rect_t *val)
+{
+  struct v4l2_crop crop;
+  int rc = MM_CAMERA_OK;
+  memset(&crop, 0, sizeof(crop));
+  crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+  rc = ioctl(stream->fd, VIDIOC_G_CROP, &crop);
+  if (rc < 0)
+      return rc;
+  val->left = crop.c.left;
+  val->top = crop.c.top;
+  val->width = crop.c.width;
+  val->height = crop.c.height;
+  return rc;
+}
+
+int32_t mm_camera_util_s_ctrl( int32_t fd,  uint32_t id, int32_t value)
+{
+    int rc = MM_CAMERA_OK;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    control.value = value;
+    rc = ioctl (fd, VIDIOC_S_CTRL, &control);
+
+    if(rc) {
+        CDBG("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %ld\n",
+                 __func__, fd, id, (uint32_t)value, rc);
+        rc = MM_CAMERA_E_GENERAL;
+    }
+    return rc;
+}
+
+int32_t mm_camera_util_private_s_ctrl(int32_t fd,  uint32_t id, int32_t value)
+{
+    int rc = MM_CAMERA_OK;
+    struct msm_camera_v4l2_ioctl_t v4l2_ioctl;
+
+    memset(&v4l2_ioctl, 0, sizeof(v4l2_ioctl));
+    v4l2_ioctl.id = id;
+    v4l2_ioctl.ioctl_ptr = value;
+    rc = ioctl (fd, MSM_CAM_V4L2_IOCTL_PRIVATE_S_CTRL, &v4l2_ioctl);
+
+    if(rc) {
+        CDBG_ERROR("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %ld\n",
+                 __func__, fd, id, (uint32_t)value, rc);
+        rc = MM_CAMERA_E_GENERAL;
+    }
+    return rc;
+}
+
+int32_t mm_camera_util_g_ctrl( int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = MM_CAMERA_OK;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    control.value = (int32_t)value;
+    rc = ioctl (fd, VIDIOC_G_CTRL, &control);
+    if(rc) {
+        CDBG("%s: fd=%d, G_CTRL, id=0x%x, rc = %d\n", __func__, fd, id, rc);
+        rc = MM_CAMERA_E_GENERAL;
+    }
+    *value = control.value;
+    return rc;
+}
+
+static uint32_t mm_camera_util_get_v4l2_fmt(cam_format_t fmt,
+                                            uint8_t *num_planes)
+{
+    uint32_t val;
+    switch(fmt) {
+    case CAMERA_YUV_420_NV12:
+        val = V4L2_PIX_FMT_NV12;
+        *num_planes = 2;
+        break;
+    case CAMERA_YUV_420_NV21:
+        val = V4L2_PIX_FMT_NV21;
+        *num_planes = 2;
+        break;
+    case CAMERA_BAYER_SBGGR10:
+        val= V4L2_PIX_FMT_SBGGR10;
+        *num_planes = 1;
+        break;
+    case CAMERA_YUV_422_NV61:
+        val= V4L2_PIX_FMT_NV61;
+        *num_planes = 2;
+        break;
+    default:
+        val = 0;
+        *num_planes = 0;
+        break;
+    }
+    return val;
+}
+
+static int mm_camera_stream_util_set_ext_mode(mm_camera_stream_t *stream)
+{
+    int rc = 0;
+    struct v4l2_streamparm s_parm;
+
+    memset(&s_parm, 0, sizeof(s_parm));
+    s_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    switch(stream->stream_type) {
+    case MM_CAMERA_STREAM_PREVIEW:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW;
+        break;
+    case MM_CAMERA_STREAM_SNAPSHOT:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_MAIN;
+        break;
+    case MM_CAMERA_STREAM_THUMBNAIL:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL;
+        break;
+    case MM_CAMERA_STREAM_VIDEO:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_VIDEO;
+        break;
+    case MM_CAMERA_STREAM_RAW:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_MAIN; //MSM_V4L2_EXT_CAPTURE_MODE_RAW;
+        break;
+    case MM_CAMERA_STREAM_VIDEO_MAIN:
+    default:
+        return 0;
+    }
+
+    rc = ioctl(stream->fd, VIDIOC_S_PARM, &s_parm);
+    CDBG("%s:stream fd=%d,type=%d,rc=%d,extended_mode=%d\n",
+         __func__, stream->fd, stream->stream_type, rc,
+         s_parm.parm.capture.extendedmode);
+    return rc;
+}
+
+static int mm_camera_util_set_op_mode(int fd, int opmode)
+{
+    int rc = 0;
+    struct v4l2_control s_ctrl;
+    s_ctrl.id = MSM_V4L2_PID_CAM_MODE;
+    s_ctrl.value = opmode;
+
+    rc = ioctl(fd, VIDIOC_S_CTRL, &s_ctrl);
+    if (rc < 0)
+        CDBG("%s: VIDIOC_S_CTRL failed, rc=%d\n",
+             __func__, rc);
+    return rc;
+}
+
+int mm_camera_stream_qbuf(mm_camera_obj_t * my_obj, mm_camera_stream_t *stream,
+  int idx)
+{
+  int32_t i, rc = MM_CAMERA_OK;
+  int *ret;
+  struct v4l2_buffer buffer;
+
+  memset(&buffer, 0, sizeof(buffer));
+  buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+  buffer.memory = V4L2_MEMORY_USERPTR;
+  buffer.index = idx;
+  buffer.m.planes = &(stream->frame.frame[idx].planes[0]);
+  buffer.length = stream->frame.frame[idx].num_planes;
+
+  CDBG("%s Ref : PREVIEW=%d VIDEO=%d SNAPSHOT=%d THUMB=%d ", __func__,
+    MM_CAMERA_STREAM_PREVIEW, MM_CAMERA_STREAM_VIDEO,
+    MM_CAMERA_STREAM_SNAPSHOT, MM_CAMERA_STREAM_THUMBNAIL);
+  CDBG("%s:fd=%d,type=%d,frame idx=%d,num planes %d\n", __func__,
+    stream->fd, stream->stream_type, idx, buffer.length);
+
+  rc = ioctl(stream->fd, VIDIOC_QBUF, &buffer);
+  if (rc < 0) {
+      CDBG_ERROR("%s: VIDIOC_QBUF error = %d, stream type=%d\n", __func__, rc, stream->stream_type);
+      return rc;
+  }
+  CDBG("%s: X idx: %d, stream_type:%d", __func__, idx, stream->stream_type);
+  return rc;
+}
+
+/* This function lets the kernel know how many buffers will be registered */
+static int mm_camera_stream_util_request_buf(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      int8_t buf_num)
+{
+    int32_t rc = MM_CAMERA_OK;
+    struct v4l2_requestbuffers bufreq;
+
+    if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+        rc = -MM_CAMERA_E_GENERAL;
+        CDBG("%s: buf num %d > max limit %d\n",
+                 __func__, buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+        goto end;
+    }
+
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count = buf_num;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(stream->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+      CDBG("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+        __func__, stream->fd, rc);
+      goto end;
+    }
+    ALOGE("%s: stream fd=%d, ioctl VIDIOC_REQBUFS: memtype = %d, num_frames = %d, rc=%d\n",
+        __func__, stream->fd, bufreq.memory, bufreq.count, rc);
+
+end:
+    return rc;
+}
+
+/* This function enqueues existing buffers (not first-time allocated buffers from Surface) to the kernel */
+static int mm_camera_stream_util_enqueue_buf(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      mm_camera_buf_def_t *vbuf)
+{
+    int32_t i, rc = MM_CAMERA_OK, j;
+
+    if(vbuf->num > MM_CAMERA_MAX_NUM_FRAMES) {
+        rc = -MM_CAMERA_E_GENERAL;
+        CDBG("%s: buf num %d > max limit %d\n",
+                 __func__, vbuf->num, MM_CAMERA_MAX_NUM_FRAMES);
+        goto end;
+    }
+
+    for(i = 0; i < vbuf->num; i++){
+        int idx = vbuf->buf.mp[i].idx;
+        ALOGE("%s: enqueue buf index = %d\n",__func__, idx);
+        if(idx < MM_CAMERA_MAX_NUM_FRAMES) {
+            ALOGE("%s: stream_fd = %d, frame_fd = %d, frame ID = %d, offset = %d\n",
+                     __func__, stream->fd, stream->frame.frame[i].frame.fd,
+                     idx, stream->frame.frame_offset[idx]);
+            rc = mm_camera_stream_qbuf(my_obj, stream, stream->frame.frame[idx].idx);
+            if (rc < 0) {
+                CDBG("%s: VIDIOC_QBUF rc = %d\n", __func__, rc);
+                goto end;
+            }
+            stream->frame.ref_count[idx] = 0;
+        }
+    }
+    stream->frame.qbuf = 1;
+end:
+    return rc;
+}
+
+static int mm_camera_stream_util_reg_buf(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      mm_camera_buf_def_t *vbuf)
+{
+    int32_t i, rc = MM_CAMERA_OK, j;
+    int *ret;
+    struct v4l2_requestbuffers bufreq;
+    int image_type;
+    uint8_t num_planes;
+    uint32_t planes[VIDEO_MAX_PLANES];
+
+    if(vbuf->num > MM_CAMERA_MAX_NUM_FRAMES) {
+        rc = -MM_CAMERA_E_GENERAL;
+        CDBG_ERROR("%s: buf num %d > max limit %d\n",
+                 __func__, vbuf->num, MM_CAMERA_MAX_NUM_FRAMES);
+        goto end;
+    }
+    switch(stream->stream_type) {
+    case MM_CAMERA_STREAM_PREVIEW:
+      image_type = OUTPUT_TYPE_P;
+      break;
+    case MM_CAMERA_STREAM_SNAPSHOT:
+    case MM_CAMERA_STREAM_RAW:
+      image_type = OUTPUT_TYPE_S;
+      break;
+    case MM_CAMERA_STREAM_THUMBNAIL:
+      image_type = OUTPUT_TYPE_T;
+      break;
+    case MM_CAMERA_STREAM_VIDEO:
+    default:
+      image_type = OUTPUT_TYPE_V;
+      break;
+    }
+    stream->frame.frame_len = mm_camera_get_msm_frame_len(stream->cam_fmt,
+                              my_obj->current_mode,
+                              stream->fmt.fmt.pix.width,
+                              stream->fmt.fmt.pix.height,
+                              image_type, &num_planes, planes);
+    if(stream->frame.frame_len == 0) {
+        CDBG_ERROR("%s:incorrect frame size = %d\n", __func__, stream->frame.frame_len);
+        rc = -1;
+        goto end;
+    }
+    stream->frame.num_frame = vbuf->num;
+    bufreq.count = stream->frame.num_frame;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    CDBG("%s: calling VIDIOC_REQBUFS - fd=%d, num_buf=%d, type=%d, memory=%d\n",
+             __func__,stream->fd, bufreq.count, bufreq.type, bufreq.memory);
+    rc = ioctl(stream->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+      CDBG_ERROR("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+        __func__, stream->fd, rc);
+      goto end;
+    }
+    CDBG("%s: stream fd=%d, ioctl VIDIOC_REQBUFS: memtype = %d,"
+      "num_frames = %d, rc=%d\n", __func__, stream->fd, bufreq.memory,
+      bufreq.count, rc);
+
+    for(i = 0; i < vbuf->num; i++){
+        vbuf->buf.mp[i].idx = i; /* remember the index to stream frame if first time qbuf */
+        memcpy(&stream->frame.frame[i].frame, &(vbuf->buf.mp[i].frame),
+                     sizeof(vbuf->buf.mp[i].frame));
+        stream->frame.frame[i].idx = i;
+        stream->frame.frame[i].num_planes = vbuf->buf.mp[i].num_planes;
+        for(j = 0; j < vbuf->buf.mp[i].num_planes; j++) {
+            stream->frame.frame[i].planes[j] = vbuf->buf.mp[i].planes[j];
+        }
+
+        if(vbuf->buf.mp[i].frame_offset) {
+            stream->frame.frame_offset[i] = vbuf->buf.mp[i].frame_offset;
+        } else {
+            stream->frame.frame_offset[i] = 0;
+        }
+
+        rc = mm_camera_stream_qbuf(my_obj, stream, stream->frame.frame[i].idx);
+        if (rc < 0) {
+            CDBG_ERROR("%s: VIDIOC_QBUF rc = %d\n", __func__, rc);
+            goto end;
+        }
+        stream->frame.ref_count[i] = 0;
+        CDBG("%s: stream_fd = %d, frame_fd = %d, frame ID = %d, offset = %d\n",
+          __func__, stream->fd, stream->frame.frame[i].frame.fd,
+          i, stream->frame.frame_offset[i]);
+    }
+    stream->frame.qbuf = 1;
+end:
+    return rc;
+}
+static int mm_camera_stream_util_unreg_buf(mm_camera_obj_t * my_obj,
+                          mm_camera_stream_t *stream)
+{
+    struct v4l2_requestbuffers bufreq;
+    int32_t i, rc = MM_CAMERA_OK;
+
+    bufreq.count = 0;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(stream->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        CDBG_ERROR("%s: fd=%d, VIDIOC_REQBUFS failed, rc=%d\n",
+              __func__, stream->fd, rc);
+        return rc;
+    }
+    mm_stream_frame_flash_q(&stream->frame.readyq);
+    memset(stream->frame.ref_count,0,(stream->frame.num_frame * sizeof(int8_t)));
+    stream->frame.qbuf = 0;
+    CDBG("%s:fd=%d,type=%d,rc=%d\n", __func__, stream->fd,
+      stream->stream_type, rc);
+    return rc;
+}
+
+static int32_t mm_camera_stream_fsm_notused(mm_camera_obj_t * my_obj,
+                         mm_camera_stream_t *stream,
+                         mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_ACQUIRE:
+        snprintf(dev_name, sizeof(dev_name), "/dev/%s", mm_camera_util_get_dev_name(my_obj));
+        CDBG("%s: open dev '%s', stream type = %d\n",
+                 __func__, dev_name, *((mm_camera_stream_type_t *)val));
+        stream->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if(stream->fd <= 0){
+            CDBG("%s: open dev returned %d\n", __func__, stream->fd);
+            return -1;
+        }
+        stream->stream_type = *((mm_camera_stream_type_t *)val);
+        rc = mm_camera_stream_util_set_ext_mode(stream);
+        CDBG("%s: fd=%d, stream type=%d, mm_camera_stream_util_set_ext_mode() err=%d\n",
+                 __func__, stream->fd, stream->stream_type, rc);
+        if(rc == MM_CAMERA_OK) {
+            mm_camera_stream_init_frame(&stream->frame);
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_ACQUIRED);
+        } else if(stream->fd > 0) {
+            close(stream->fd);
+            stream->fd = 0;
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+          stream->state);
+        return -1;
+    }
+    return rc;
+}
+
+static int32_t mm_camera_stream_util_proc_fmt(mm_camera_obj_t *my_obj,
+    mm_camera_stream_t *stream,
+    mm_camera_image_fmt_t *fmt)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    if(fmt->dim.width == 0 || fmt->dim.height == 0) {
+        rc = -MM_CAMERA_E_INVALID_INPUT;
+        CDBG("%s:invalid input[w=%d,h=%d,fmt=%d]\n",
+                 __func__, fmt->dim.width, fmt->dim.height, fmt->fmt);
+        goto end;
+    }
+    CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,raw_w=%d,raw_h=%d,fmt=%d\n",
+       __func__,
+       my_obj->dim.display_width,my_obj->dim.display_height,
+       my_obj->dim.video_width,my_obj->dim.video_height,
+       my_obj->dim.picture_width,my_obj->dim.picture_height,
+       my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+       my_obj->dim.raw_picture_width,my_obj->dim.raw_picture_height,fmt->fmt);
+    stream->cam_fmt = fmt->fmt;
+    stream->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    stream->fmt.fmt.pix_mp.width = fmt->dim.width;
+    stream->fmt.fmt.pix_mp.height= fmt->dim.height;
+    stream->fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
+    stream->fmt.fmt.pix_mp.pixelformat =
+            mm_camera_util_get_v4l2_fmt(stream->cam_fmt,
+                                      &(stream->fmt.fmt.pix_mp.num_planes));
+    rc = ioctl(stream->fd, VIDIOC_S_FMT, &stream->fmt);
+    if (rc < 0) {
+        CDBG("%s: ioctl VIDIOC_S_FMT failed: rc=%d\n", __func__, rc);
+        rc = -MM_CAMERA_E_GENERAL;
+    }
+end:
+    CDBG("%s:fd=%d,type=%d,rc=%d\n",
+             __func__, stream->fd, stream->stream_type, rc);
+    return rc;
+}
+static int32_t mm_camera_stream_fsm_acquired(mm_camera_obj_t * my_obj,
+                  mm_camera_stream_t *stream,
+                  mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_SET_FMT:
+        rc = mm_camera_stream_util_proc_fmt(my_obj,stream,
+                    (mm_camera_image_fmt_t *)val);
+        if(!rc) mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_CFG);
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+      rc = mm_camera_stream_util_proc_get_crop(my_obj,stream, val);
+      break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+          stream->state);
+        return -1;
+    }
+    return rc;
+}
+static int32_t mm_camera_stream_fsm_cfg(mm_camera_obj_t * my_obj,
+                         mm_camera_stream_t *stream,
+                         mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    case MM_CAMERA_STATE_EVT_SET_FMT:
+        rc = mm_camera_stream_util_proc_fmt(my_obj,stream,
+                    (mm_camera_image_fmt_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_REG_BUF:
+        rc = mm_camera_stream_util_reg_buf(my_obj, stream, (mm_camera_buf_def_t *)val);
+        if(!rc) mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_REG);
+        break;
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+      rc = mm_camera_stream_util_proc_get_crop(my_obj,stream, val);
+      break;
+    case MM_CAMERA_STATE_EVT_REQUEST_BUF:
+        rc = mm_camera_stream_util_request_buf(my_obj, stream, ((mm_camera_buf_def_t *)val)->num);
+        break;
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_stream_util_enqueue_buf(my_obj, stream, (mm_camera_buf_def_t *)val);
+        if(!rc) mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_REG);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+          stream->state);
+        return -1;
+    }
+    return rc;
+}
+
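+/* Releases one reference on a delivered frame and queues the buffer back to
+ * the kernel (VIDIOC_QBUF) once its ref count drops to zero. A ref count
+ * that is already zero on entry is treated as a double release and flagged
+ * as an error. */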
+int32_t mm_camera_stream_util_buf_done(mm_camera_obj_t * my_obj,
+                    mm_camera_stream_t *stream,
+                    mm_camera_notify_frame_t *frame)
+{
+    int32_t rc = MM_CAMERA_OK;
+    pthread_mutex_lock(&stream->frame.mutex);
+
+    if(stream->frame.ref_count[frame->idx] == 0) {
+        rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+        CDBG_ERROR("%s: Error Trying to free second time?(idx=%d) count=%d, stream type=%d\n",
+                   __func__, frame->idx, stream->frame.ref_count[frame->idx], stream->stream_type);
+        rc = -1;
+    }else{
+        stream->frame.ref_count[frame->idx]--;
+        if(0 == stream->frame.ref_count[frame->idx]) {
+            CDBG("<DEBUG> : Buf done for buffer:%p:%d",stream,frame->idx);
+            rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+            if(rc < 0)
+                CDBG_ERROR("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+                     __func__, frame->idx, rc);
+        }else{
+            CDBG("<DEBUG> : Still ref count pending count :%d",stream->frame.ref_count[frame->idx]);
+            CDBG("<DEBUG> : for buffer:%p:%d, stream type=%d",stream,frame->idx, stream->stream_type);
+        }
+    }
+
+#if 0
+    stream->frame.ref_count[frame->idx]--;
+    if(stream->frame.ref_count[frame->idx] == 0) {
+        CDBG("%s: Queue the buffer (idx=%d) count=%d frame id = %d\n",
+                 __func__, frame->idx, stream->frame.ref_count[frame->idx],
+                 frame->frame->frame_id);
+        rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+        if(rc < 0)
+          CDBG_ERROR("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n", __func__,
+            frame->idx, rc);
+    } else if(stream->frame.ref_count[frame->idx] == 1) {
+        ALOGE("<DEBUG> : Buf done for buffer:%p:%d",stream,frame->idx);
+        rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+        if(rc < 0)
+            CDBG("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+                 __func__, frame->idx, rc);
+    } else {
+        CDBG_ERROR("%s: Error Trying to free second time?(idx=%d) count=%d\n",
+          __func__, frame->idx, stream->frame.ref_count[frame->idx]);
+        rc = -1;
+    }
+#endif
+    pthread_mutex_unlock(&stream->frame.mutex);
+    return rc;
+}
+
+static int32_t mm_camera_stream_fsm_reg(mm_camera_obj_t * my_obj,
+                             mm_camera_stream_t *stream,
+                             mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+      rc = mm_camera_stream_util_proc_get_crop(my_obj,stream, val);
+      break;
+    case MM_CAMERA_STATE_EVT_QBUF:
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    case MM_CAMERA_STATE_EVT_UNREG_BUF:
+        rc = mm_camera_stream_util_unreg_buf(my_obj, stream);
+        if(!rc)
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_CFG);
+        break;
+    case MM_CAMERA_STATE_EVT_STREAM_ON:
+        {
+            enum v4l2_buf_type buf_type;
+            int i = 0;
+            mm_camera_frame_t *frame;
+            if(stream->frame.qbuf == 0) {
+                for(i = 0; i < stream->frame.num_frame; i++) {
+                    rc = mm_camera_stream_qbuf(my_obj, stream,
+                             stream->frame.frame[i].idx);
+                    if (rc < 0) {
+                        CDBG_ERROR("%s: ioctl VIDIOC_QBUF error=%d, stream->type=%d\n",
+                           __func__, rc, stream->stream_type);
+                        return rc;
+                    }
+                    stream->frame.ref_count[i] = 0;
+                }
+                stream->frame.qbuf = 1;
+            }
+            buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+            CDBG("%s: STREAMON,fd=%d,stream_type=%d\n",
+                     __func__, stream->fd, stream->stream_type);
+            rc = ioctl(stream->fd, VIDIOC_STREAMON, &buf_type);
+            if (rc < 0) {
+                    CDBG_ERROR("%s: ioctl VIDIOC_STREAMON failed: rc=%d\n",
+                        __func__, rc);
+            }
+            else
+                mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_ACTIVE);
+        }
+        break;
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_stream_util_enqueue_buf(my_obj, stream, (mm_camera_buf_def_t *)val);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+          stream->state);
+        return -1;
+    }
+    return rc;
+}
+static int32_t mm_camera_stream_fsm_active(mm_camera_obj_t * my_obj,
+               mm_camera_stream_t *stream,
+               mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+      rc = mm_camera_stream_util_proc_get_crop(my_obj,stream, val);
+      break;
+    case MM_CAMERA_STATE_EVT_QBUF:
+        rc = mm_camera_stream_util_buf_done(my_obj, stream,
+          (mm_camera_notify_frame_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    case MM_CAMERA_STATE_EVT_STREAM_OFF:
+        {
+            enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+            CDBG("%s: STREAMOFF,fd=%d,type=%d\n",
+                __func__, stream->fd, stream->stream_type);
+            rc = ioctl(stream->fd, VIDIOC_STREAMOFF, &buf_type);
+            if (rc < 0) {
+                    CDBG_ERROR("%s: STREAMOFF failed: %s\n",
+                        __func__, strerror(errno));
+            }
+            else {
+                stream->frame.qbuf = 0;
+                mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_REG);
+            }
+        }
+        break;
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_stream_util_enqueue_buf(my_obj, stream, (mm_camera_buf_def_t *)val);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+          stream->state);
+        return -1;
+    }
+    return rc;
+}
+
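+/* Per-state event handlers: mm_camera_stream_fsm_fn[] below is indexed by the
+ * current stream state, so a state transition simply selects a different
+ * handler for the next event. */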
+typedef int32_t (*mm_camera_stream_fsm_fn_t) (mm_camera_obj_t * my_obj,
+                    mm_camera_stream_t *stream,
+                    mm_camera_state_evt_type_t evt, void *val);
+
+static mm_camera_stream_fsm_fn_t mm_camera_stream_fsm_fn[MM_CAMERA_STREAM_STATE_MAX] = {
+    mm_camera_stream_fsm_notused,
+    mm_camera_stream_fsm_acquired,
+    mm_camera_stream_fsm_cfg,
+    mm_camera_stream_fsm_reg,
+    mm_camera_stream_fsm_active
+};
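+/* Single entry point of the stream state machine: dispatches the event to the
+ * handler of the stream's current state. Illustrative call only (hypothetical
+ * caller, not part of this file):
+ *
+ *   rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream,
+ *                                     MM_CAMERA_STATE_EVT_STREAM_ON, NULL);
+ */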
+int32_t mm_camera_stream_fsm_fn_vtbl (mm_camera_obj_t * my_obj,
+                   mm_camera_stream_t *stream,
+                   mm_camera_state_evt_type_t evt, void *val)
+{
+    CDBG("%s: stream fd=%d, type = %d, state=%d, evt\n",
+                 __func__, stream->fd, stream->stream_type, stream->state, evt);
+    return mm_camera_stream_fsm_fn[stream->state] (my_obj, stream, evt, val);
+}
+
diff --git a/camera/mm-camera-interface/mm_jpeg_encoder.c b/camera/mm-camera-interface/mm_jpeg_encoder.c
new file mode 100644
index 0000000..a53ae15
--- /dev/null
+++ b/camera/mm-camera-interface/mm_jpeg_encoder.c
@@ -0,0 +1,717 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <sys/time.h>
+#include <pthread.h>
+#include <semaphore.h>
+#include <errno.h>
+#include "mm_jpeg_encoder.h"
+#include "mm_camera_dbg.h"
+#include <sys/system_properties.h>
+#include "mm_camera_interface2.h"
+
+#ifdef JPG_DBG
+#undef CDBG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera jpeg"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+    #endif
+#endif
+
+#define JPEG_DEFAULT_MAINIMAGE_QUALITY 75
+#define JPEG_DEFAULT_THUMBNAIL_QUALITY 75
+
+int is_encoding = 0;
+pthread_mutex_t jpege_mutex = PTHREAD_MUTEX_INITIALIZER;
+pthread_mutex_t jpegcb_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+int rc;
+jpege_src_t jpege_source;
+jpege_dst_t jpege_dest;
+jpege_cfg_t jpege_config;
+jpege_img_data_t main_img_info, tn_img_info;
+jpeg_buffer_t temp;
+jpege_obj_t jpeg_encoder;
+exif_info_obj_t exif_info;
+exif_tag_entry_t sample_tag;
+struct timeval tdBefore, tdAfter;
+struct timezone tz;
+static uint32_t jpegMainimageQuality = JPEG_DEFAULT_MAINIMAGE_QUALITY;
+static uint32_t jpegThumbnailQuality = JPEG_DEFAULT_THUMBNAIL_QUALITY;
+static uint32_t jpegRotation = 0;
+static int8_t usethumbnail = 1;
+static int8_t use_thumbnail_padding = 0;
+#ifdef HW_ENCODE
+static uint8_t hw_encode = true;
+#else
+static uint8_t hw_encode = false;
+#endif
+static int8_t is_3dmode = 0;
+static cam_3d_frame_format_t img_format_3d;
+jpegfragment_callback_t mmcamera_jpegfragment_callback = NULL;
+jpeg_callback_t mmcamera_jpeg_callback = NULL;
+
+void* user_data = NULL;
+#define JPEGE_FRAGMENT_SIZE (64*1024)
+
+/*===========================================================================
+FUNCTION      jpege_use_thumb_padding
+
+DESCRIPTION   Enable or disable padding of the thumbnail buffer
+===========================================================================*/
+inline void jpege_use_thumb_padding(uint8_t a_use_thumb_padding)
+{
+  use_thumbnail_padding = a_use_thumb_padding;
+}
+
+void mm_jpeg_encoder_cancel()
+{
+    pthread_mutex_lock(&jpegcb_mutex);
+    mmcamera_jpegfragment_callback = NULL;
+    mmcamera_jpeg_callback = NULL;
+    user_data = NULL;
+    pthread_mutex_unlock(&jpegcb_mutex);
+    mm_jpeg_encoder_join();
+}
+
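+/* Registers the output-fragment and encode-event callbacks together with the
+ * caller's context pointer; the fragment callback is invoked under
+ * jpegcb_mutex as encoded data becomes available. */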
+void set_callbacks(
+   jpegfragment_callback_t fragcallback,
+   jpeg_callback_t eventcallback,
+   void* userdata)
+{
+    pthread_mutex_lock(&jpegcb_mutex);
+    mmcamera_jpegfragment_callback = fragcallback;
+    mmcamera_jpeg_callback = eventcallback;
+    user_data = userdata;
+    pthread_mutex_unlock(&jpegcb_mutex);
+}
+
+/*===========================================================================
+FUNCTION      jpege_event_handler
+
+DESCRIPTION   Handler function for jpeg encoder events
+===========================================================================*/
+void mm_jpege_event_handler(void *p_user_data, jpeg_event_t event, void *p_arg)
+{
+  uint32_t buf_size;
+  uint8_t *buf_ptr = NULL;
+  int mainimg_fd, thumbnail_fd;
+
+  if (event == JPEG_EVENT_DONE) {
+
+    jpeg_buffer_t thumbnail_buffer, snapshot_buffer;
+
+    thumbnail_buffer = tn_img_info.p_fragments[0].color.yuv.luma_buf;
+    thumbnail_fd = jpeg_buffer_get_pmem_fd(thumbnail_buffer);
+    jpeg_buffer_get_actual_size(thumbnail_buffer, &buf_size);
+    jpeg_buffer_get_addr(thumbnail_buffer, &buf_ptr);
+
+    snapshot_buffer = main_img_info.p_fragments[0].color.yuv.luma_buf;
+    mainimg_fd = jpeg_buffer_get_pmem_fd(snapshot_buffer);
+    jpeg_buffer_get_actual_size(snapshot_buffer, &buf_size);
+    jpeg_buffer_get_addr(snapshot_buffer, &buf_ptr);
+
+#if 0
+    gettimeofday(&tdAfter, &tz);
+    CDBG("Profiling: JPEG encoding latency %ld microseconds\n",
+      1000000 * (tdAfter.tv_sec - tdBefore.tv_sec) + tdAfter.tv_usec -
+      tdBefore.tv_usec);
+#endif
+//    mmcamera_util_profile("encoder done");
+  }
+
+  if(mmcamera_jpeg_callback)
+    mmcamera_jpeg_callback(event, user_data);
+}
+
+/*===========================================================================
+FUNCTION      jpege_output_produced_handler
+
+DESCRIPTION   Handler function for when jpeg encoder has output produced
+===========================================================================*/
+void mm_jpege_output_produced_handler(void *p_user_data, void *p_arg,
+  jpeg_buffer_t buffer)
+{
+  uint32_t buf_size;
+  uint8_t *buf_ptr;
+
+  /*  The mutex is to prevent the very rare case where the file writing is */
+  /*  so slow that the next ping-pong output is delivered before the */
+  /*  current one is finished writing, in which case the writing of the new */
+  /*  buffer will be performed after the first one finishes (because of the lock) */
+
+  jpeg_buffer_get_actual_size(buffer, &buf_size);
+  jpeg_buffer_get_addr(buffer, &buf_ptr);
+
+  pthread_mutex_lock(&jpegcb_mutex);
+  if(mmcamera_jpegfragment_callback)
+    mmcamera_jpegfragment_callback(buf_ptr, buf_size, user_data);
+  pthread_mutex_unlock(&jpegcb_mutex);
+}
+
+#if !defined(_TARGET_7x2x_) && !defined(_TARGET_7x27A_)
+/*===========================================================================
+FUNCTION      jpege_output_produced_handler2
+
+DESCRIPTION   Handler function for when jpeg encoder has output produced
+===========================================================================*/
+int mm_jpege_output_produced_handler2(void *p_user_data, void *p_arg,
+  jpeg_buffer_t buffer, uint8_t last_buf_flag)
+{
+  uint32_t buf_size;
+  uint8_t *buf_ptr;
+  int rv;
+
+  /*  The mutex is to prevent the very rare case where the file writing is */
+  /*  so slow that the next ping-pong output is delivered before the */
+  /*  current one is finished writing, in which case the writing of the new */
+  /*  buffer will be performed after the first one finishes (because of the lock) */
+  jpeg_buffer_get_actual_size(buffer, &buf_size);
+  jpeg_buffer_get_addr(buffer, &buf_ptr);
+
+  pthread_mutex_lock(&jpegcb_mutex);
+  if(mmcamera_jpegfragment_callback)
+    mmcamera_jpegfragment_callback(buf_ptr, buf_size, user_data);
+  pthread_mutex_unlock(&jpegcb_mutex);
+
+  rv = jpeg_buffer_set_actual_size(buffer, 0);
+  if(rv == JPEGERR_SUCCESS){
+      rv = jpege_enqueue_output_buffer(
+          jpeg_encoder,
+          &buffer, 1);
+  }
+  return rv;
+}
+#endif
+
+static int jpeg_encoder_initialized = 0;
+
+void mm_jpeg_encoder_set_3D_info(cam_3d_frame_format_t format)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  is_3dmode = 1;
+  img_format_3d = format;
+  pthread_mutex_unlock(&jpege_mutex);
+}
+
+extern int8_t mm_jpeg_encoder_init()
+{
+  pthread_mutex_lock(&jpege_mutex);
+  is_3dmode = 0;
+  /*  Initialize jpeg encoder */
+  rc = jpege_init(&jpeg_encoder, mm_jpege_event_handler, NULL);
+  if (rc) {
+    //CDBG("jpege_init failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+
+  jpeg_encoder_initialized = 1;
+  pthread_mutex_unlock(&jpege_mutex);
+
+  return TRUE;
+}
+
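+/* Tears down the encoder if it was initialized: aborts any encode in progress
+ * and destroys the work buffer, the source and destination JPEG buffers and
+ * the EXIF info object. */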
+void mm_jpeg_encoder_join(void)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  if (jpeg_encoder_initialized) {
+    jpeg_encoder_initialized = 0;
+    pthread_mutex_destroy(&jpege_mutex);
+    jpege_abort(jpeg_encoder);
+    jpeg_buffer_destroy(&temp);
+    if (usethumbnail) {
+        jpeg_buffer_destroy(&tn_img_info.p_fragments[0].color.yuv.luma_buf);
+        jpeg_buffer_destroy(&tn_img_info.p_fragments[0].color.yuv.chroma_buf);
+    }
+    jpeg_buffer_destroy(&main_img_info.p_fragments[0].color.yuv.luma_buf);
+    jpeg_buffer_destroy(&main_img_info.p_fragments[0].color.yuv.chroma_buf);
+    jpeg_buffer_destroy(&jpege_dest.buffers[0]);
+    jpeg_buffer_destroy(&jpege_dest.buffers[1]);
+    exif_destroy(&exif_info);
+    jpege_destroy(&jpeg_encoder);
+  }
+  is_3dmode = 0;
+  pthread_mutex_unlock(&jpege_mutex);
+}
+/* Returns the Y offset, CbCr offset, per-plane sizes and total buffer size required for JPEG encoding. */
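+/* Illustrative sketch only (hypothetical caller, sizes chosen arbitrarily):
+ *
+ *   uint32_t y_off, cbcr_off, buf_size, planes[2];
+ *   uint8_t num_planes;
+ *   if (mm_jpeg_encoder_get_buffer_offset(640, 480, &y_off, &cbcr_off,
+ *                                         &buf_size, &num_planes, planes))
+ *       // allocate buf_size bytes: luma starts at y_off, chroma at cbcr_off
+ */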
+int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height,
+                                      uint32_t* p_y_offset, uint32_t* p_cbcr_offset,
+                                      uint32_t* p_buf_size, uint8_t *num_planes,
+                                      uint32_t planes[])
+{
+  CDBG("jpeg_encoder_get_buffer_offset");
+  if ((NULL == p_y_offset) || (NULL == p_cbcr_offset)) {
+    return FALSE;
+  }
+  /* Hardcode num planes and planes array for now. TBD Check if this
+   * needs to be set based on format. */
+  *num_planes = 2;
+  if (hw_encode) {
+    int cbcr_offset = 0;
+    uint32_t actual_size = width*height;
+    uint32_t padded_size = width * CEILING16(height);
+    *p_y_offset = 0;
+    *p_cbcr_offset = 0;
+    if ((jpegRotation == 90) || (jpegRotation == 180)) {
+      *p_y_offset = padded_size - actual_size;
+      *p_cbcr_offset = ((padded_size - actual_size) >> 1);
+    }
+    *p_buf_size = padded_size * 3/2;
+    planes[0] = width * CEILING16(height);
+    planes[1] = width * CEILING16(height)/2;
+  } else {
+    *p_y_offset = 0;
+    *p_cbcr_offset = PAD_TO_WORD(width*CEILING16(height));
+    *p_buf_size = *p_cbcr_offset * 3/2;
+    planes[0] = PAD_TO_WORD(width*CEILING16(height));
+    planes[1] = PAD_TO_WORD(width*CEILING16(height)/2);
+  }
+  return TRUE;
+}
+
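+/* Configures and starts one JPEG encode: wraps the caller's pmem-backed
+ * snapshot (and optional thumbnail) buffers, applies scaling, rotation and
+ * EXIF settings, and kicks off the encoder. Completion and output fragments
+ * are reported through the callbacks registered via set_callbacks(). */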
+int8_t mm_jpeg_encoder_encode(const cam_ctrl_dimension_t * dimension,
+                              const uint8_t * thumbnail_buf,
+                              int thumbnail_fd, uint32_t thumbnail_offset,
+                              const uint8_t * snapshot_buf,
+                              int snapshot_fd,
+                              uint32_t snapshot_offset,
+                              common_crop_t *scaling_params,
+                              exif_tags_info_t *exif_data,
+                              int exif_numEntries,
+                              const int32_t a_cbcroffset,
+                              cam_point_t* main_crop_offset,
+                              cam_point_t* thumb_crop_offset)
+{
+  int buf_size = 0;
+  int ret = 0;
+  int i = 0;
+  int cbcroffset = 0;
+  int actual_size = 0, padded_size = 0;
+  usethumbnail = thumbnail_buf ? 1 : 0;
+  int w_scale_factor = (is_3dmode && img_format_3d == SIDE_BY_SIDE_FULL) ? 2 : 1;
+
+  pthread_mutex_lock(&jpege_mutex);
+  //mmcamera_util_profile("encoder configure");
+
+  /*  Do not allow snapshot if the previous one is not done */
+  /*  Alternatively we could queue the snapshot to run after the one in progress completes, */
+  /*  but that involves more complex logic. */
+  if (is_encoding) {
+    CDBG("Previous Jpeg Encoding is not done!\n");
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+  CDBG("jpeg_encoder_encode: thumbnail_fd = %d snapshot_fd = %d usethumbnail %d\n",
+    thumbnail_fd, snapshot_fd, usethumbnail);
+
+  gettimeofday(&tdBefore, &tz);
+  /*  Initialize exif info */
+  exif_init(&exif_info);
+  /*  Zero out supporting structures */
+  memset(&main_img_info, 0, sizeof(jpege_img_data_t));
+  memset(&tn_img_info, 0, sizeof(jpege_img_data_t));
+  memset(&jpege_source, 0, sizeof(jpege_src_t));
+  memset(&jpege_dest, 0, sizeof(jpege_dst_t));
+
+  /*  Initialize JPEG buffers */
+  jpege_dest.buffer_cnt = 2;
+  if ((rc = jpeg_buffer_init(&temp)) ||
+    (usethumbnail && (rc = jpeg_buffer_init(&tn_img_info.p_fragments[0].color.yuv.luma_buf))) ||
+    (usethumbnail && (rc = jpeg_buffer_init(&tn_img_info.p_fragments[0].color.yuv.chroma_buf))) ||
+    (rc = jpeg_buffer_init(&main_img_info.p_fragments[0].color.yuv.luma_buf)) ||
+    (rc = jpeg_buffer_init(&main_img_info.p_fragments[0].color.yuv.chroma_buf))
+    || (rc = jpeg_buffer_init(&jpege_dest.buffers[0]))
+    || (rc = jpeg_buffer_init(&jpege_dest.buffers[1]))) {
+    CDBG_ERROR("jpeg_buffer_init failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    jpege_dest.buffer_cnt = 0;
+    return FALSE;
+  }
+#if !defined(_TARGET_7x2x_) && !defined(_TARGET_7x27A_)
+  jpege_dest.p_buffer = &jpege_dest.buffers[0];
+#endif
+
+#if defined(_TARGET_7x27A_)
+  /*  Allocate 2 ping-pong buffers on the heap for jpeg encoder outputs */
+  if ((rc = jpeg_buffer_allocate(jpege_dest.buffers[0], JPEGE_FRAGMENT_SIZE, 1)) ||
+    (rc = jpeg_buffer_allocate(jpege_dest.buffers[1], JPEGE_FRAGMENT_SIZE, 1))) {
+    CDBG("jpeg_buffer_allocate failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+#else
+  /*  Allocate 2 ping-pong buffers on the heap for jpeg encoder outputs */
+  if ((rc = jpeg_buffer_allocate(jpege_dest.buffers[0], JPEGE_FRAGMENT_SIZE, 0)) ||
+    (rc = jpeg_buffer_allocate(jpege_dest.buffers[1], JPEGE_FRAGMENT_SIZE, 0))) {
+    CDBG("jpeg_buffer_allocate failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+#endif
+
+
+  if (usethumbnail) {
+    tn_img_info.width = dimension->thumbnail_width * w_scale_factor;
+    tn_img_info.height = dimension->thumbnail_height;
+    buf_size = tn_img_info.width * tn_img_info.height * 2;
+    tn_img_info.fragment_cnt = 1;
+    tn_img_info.color_format = YCRCBLP_H2V2;
+    tn_img_info.p_fragments[0].width = tn_img_info.width;
+    tn_img_info.p_fragments[0].height = CEILING16(dimension->thumbnail_height);
+    jpeg_buffer_reset(tn_img_info.p_fragments[0].color.yuv.luma_buf);
+    jpeg_buffer_reset(tn_img_info.p_fragments[0].color.yuv.chroma_buf);
+
+    CDBG("%s: Thumbnail: fd: %d offset: %d  Main: fd: %d offset: %d", __func__,
+         thumbnail_fd, thumbnail_offset, snapshot_fd, snapshot_offset);
+    rc = jpeg_buffer_use_external_buffer(
+              tn_img_info.p_fragments[0].color.yuv.luma_buf,
+              (uint8_t *)thumbnail_buf, buf_size,
+              thumbnail_fd);
+
+    if (rc == JPEGERR_EFAILED) {
+      CDBG_ERROR("jpeg_buffer_use_external_buffer Thumbnail pmem failed...\n");
+      pthread_mutex_unlock(&jpege_mutex);
+      return FALSE;
+    }
+
+    cbcroffset = PAD_TO_WORD(tn_img_info.width * tn_img_info.height);
+    if (hw_encode) {
+      actual_size = dimension->thumbnail_width * dimension->thumbnail_height;
+      padded_size = dimension->thumbnail_width *
+        CEILING16(dimension->thumbnail_height);
+      cbcroffset = padded_size;
+    }
+
+    // The chroma plane in YUV4:2:0 semiplanar is at the end of the luma plane,
+    // so we attach the chroma buf to the luma buffer, which we've allocated to
+    // be large enough to hold the entire YUV image.
+    //
+    jpeg_buffer_attach_existing(tn_img_info.p_fragments[0].color.yuv.chroma_buf,
+      tn_img_info.p_fragments[0].color.yuv.luma_buf,
+      cbcroffset);
+    jpeg_buffer_set_actual_size(tn_img_info.p_fragments[0].color.yuv.luma_buf,
+      tn_img_info.width * tn_img_info.height);
+    jpeg_buffer_set_actual_size(
+      tn_img_info.p_fragments[0].color.yuv.chroma_buf, tn_img_info.width *
+      tn_img_info.height / 2);
+
+    if (hw_encode) {
+      if ((jpegRotation == 90) || (jpegRotation == 180)) {
+        jpeg_buffer_set_start_offset(tn_img_info.p_fragments[0].color.yuv.luma_buf, (padded_size - actual_size));
+        jpeg_buffer_set_start_offset(tn_img_info.p_fragments[0].color.yuv.chroma_buf, ((padded_size - actual_size) >> 1));
+      }
+    }
+  }
+
+  /* Set phy offset */
+  jpeg_buffer_set_phy_offset(tn_img_info.p_fragments[0].color.yuv.luma_buf, thumbnail_offset);
+
+  CDBG("jpeg_encoder_encode size %dx%d\n",dimension->orig_picture_dx,dimension->orig_picture_dy);
+  main_img_info.width = dimension->orig_picture_dx * w_scale_factor;
+  main_img_info.height = dimension->orig_picture_dy;
+  buf_size = main_img_info.width * main_img_info.height * 2;
+  main_img_info.fragment_cnt = 1;
+  main_img_info.color_format = YCRCBLP_H2V2;
+  main_img_info.p_fragments[0].width = main_img_info.width;
+  main_img_info.p_fragments[0].height = CEILING16(main_img_info.height);
+  jpeg_buffer_reset(main_img_info.p_fragments[0].color.yuv.luma_buf);
+  jpeg_buffer_reset(main_img_info.p_fragments[0].color.yuv.chroma_buf);
+
+  rc =
+    jpeg_buffer_use_external_buffer(
+            main_img_info.p_fragments[0].color.yuv.luma_buf,
+            (uint8_t *)snapshot_buf, buf_size,
+            snapshot_fd);
+
+  if (rc == JPEGERR_EFAILED) {
+    CDBG("jpeg_buffer_use_external_buffer Snapshot pmem failed...\n");
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+  cbcroffset = PAD_TO_WORD(main_img_info.width * CEILING16(main_img_info.height));
+  actual_size = 0;
+  padded_size = 0;
+  if (a_cbcroffset >= 0) {
+    cbcroffset = a_cbcroffset;
+  } else {
+    if (hw_encode) {
+      actual_size = dimension->orig_picture_dx * dimension->orig_picture_dy;
+      padded_size = dimension->orig_picture_dx * CEILING16(dimension->orig_picture_dy);
+      cbcroffset = padded_size;
+    }
+  }
+
+  CDBG("jpeg_encoder_encode: cbcroffset %d",cbcroffset);
+  jpeg_buffer_attach_existing(main_img_info.p_fragments[0].color.yuv.chroma_buf,
+    main_img_info.p_fragments[0].color.yuv.luma_buf,
+    cbcroffset);
+  jpeg_buffer_set_actual_size(main_img_info.p_fragments[0].color.yuv.luma_buf, main_img_info.width * main_img_info.height);
+  jpeg_buffer_set_actual_size(main_img_info.p_fragments[0].color.yuv.chroma_buf, main_img_info.width * main_img_info.height / 2);
+
+  if (hw_encode) {
+    if ((jpegRotation == 90) || (jpegRotation == 180)) {
+      jpeg_buffer_set_start_offset(main_img_info.p_fragments[0].color.yuv.luma_buf, (padded_size - actual_size));
+      jpeg_buffer_set_start_offset(main_img_info.p_fragments[0].color.yuv.chroma_buf, ((padded_size - actual_size) >> 1));
+    }
+  }
+
+  jpeg_buffer_set_phy_offset(main_img_info.p_fragments[0].color.yuv.luma_buf, snapshot_offset);
+
+  /*  Set Source */
+  jpege_source.p_main = &main_img_info;
+  if (usethumbnail) {
+    jpege_source.p_thumbnail = &tn_img_info;
+    CDBG("fragment_cnt: thumb %d \n", jpege_source.p_thumbnail->fragment_cnt);
+  }
+
+  CDBG("fragment_cnt: main %d \n", jpege_source.p_main->fragment_cnt);
+
+  rc = jpege_set_source(jpeg_encoder, &jpege_source);
+  if (rc) {
+    CDBG("jpege_set_source failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+
+#if defined(_TARGET_7x2x_) || defined(_TARGET_7x27A_)
+  jpege_dest.p_output_handler = (jpege_output_handler_t) mm_jpege_output_produced_handler;
+#else
+  jpege_dest.p_output_handler = mm_jpege_output_produced_handler2;
+#endif
+
+  jpege_dest.buffer_cnt = 2;
+  rc = jpege_set_destination(jpeg_encoder, &jpege_dest);
+  if (rc) {
+    CDBG("jpege_set_desination failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+  /*  Get default configuration */
+  jpege_get_default_config(&jpege_config);
+  jpege_config.thumbnail_present = usethumbnail;
+  if(hw_encode)
+    jpege_config.preference = JPEG_ENCODER_PREF_HW_ACCELERATED_PREFERRED;
+  else
+    jpege_config.preference = JPEG_ENCODER_PREF_SOFTWARE_ONLY;
+
+  CDBG("%s: preference %d ", __func__, jpege_config.preference);
+  jpege_config.main_cfg.quality = jpegMainimageQuality;
+  jpege_config.thumbnail_cfg.quality = jpegThumbnailQuality;
+
+  CDBG("Scaling params thumb in1_w %d in1_h %d out1_w %d out1_h %d "
+       "main_img in2_w %d in2_h %d out2_w %d out2_h %d\n",
+         scaling_params->in1_w, scaling_params->in1_h,
+         scaling_params->out1_w, scaling_params->out1_h,
+         scaling_params->in2_w, scaling_params->in2_h,
+         scaling_params->out2_w, scaling_params->out2_h);
+
+  if(scaling_params->in2_w && scaling_params->in2_h) {
+
+    if(jpegRotation)
+      jpege_config.preference = JPEG_ENCODER_PREF_SOFTWARE_ONLY;
+
+    /* Scaler information  for main image */
+    jpege_config.main_cfg.scale_cfg.enable = TRUE;
+
+    jpege_config.main_cfg.scale_cfg.input_width = CEILING2(scaling_params->in2_w);
+    jpege_config.main_cfg.scale_cfg.input_height = CEILING2(scaling_params->in2_h);
+
+    if (main_crop_offset) {
+      jpege_config.main_cfg.scale_cfg.h_offset = main_crop_offset->x;
+      jpege_config.main_cfg.scale_cfg.v_offset = main_crop_offset->y;
+    } else {
+      jpege_config.main_cfg.scale_cfg.h_offset = 0;
+      jpege_config.main_cfg.scale_cfg.v_offset = 0;
+    }
+
+    jpege_config.main_cfg.scale_cfg.output_width = scaling_params->out2_w;
+    jpege_config.main_cfg.scale_cfg.output_height = scaling_params->out2_h;
+  } else {
+    CDBG("There is no scaling information for JPEG main image scaling.");
+  }
+
+  if(scaling_params->in1_w  && scaling_params->in1_h) {
+    /* Scaler information  for thumbnail */
+    jpege_config.thumbnail_cfg.scale_cfg.enable = TRUE;
+
+    jpege_config.thumbnail_cfg.scale_cfg.input_width = CEILING2(scaling_params->in1_w);
+    jpege_config.thumbnail_cfg.scale_cfg.input_height = CEILING2(scaling_params->in1_h);
+
+    if (thumb_crop_offset) {
+      jpege_config.thumbnail_cfg.scale_cfg.h_offset = thumb_crop_offset->x;
+      jpege_config.thumbnail_cfg.scale_cfg.v_offset = thumb_crop_offset->y;
+    } else {
+      jpege_config.thumbnail_cfg.scale_cfg.h_offset = 0;
+      jpege_config.thumbnail_cfg.scale_cfg.v_offset = 0;
+    }
+
+    jpege_config.thumbnail_cfg.scale_cfg.output_width = scaling_params->out1_w;
+    jpege_config.thumbnail_cfg.scale_cfg.output_height = scaling_params->out1_h;
+  } else {
+    CDBG("There is no scaling information for JPEG thumbnail upscaling.");
+  }
+
+  /* Set rotation based on the mode selected */
+  CDBG(" Setting Jpeg Rotation mode to %d ", jpegRotation );
+  jpege_config.main_cfg.rotation_degree_clk = jpegRotation;
+  jpege_config.thumbnail_cfg.rotation_degree_clk = jpegRotation;
+
+  if( exif_data != NULL) {
+    for(i = 0; i < exif_numEntries; i++) {
+      rc = exif_set_tag(exif_info, exif_data[i].tag_id,
+                         &(exif_data[i].tag_entry));
+      if (rc) {
+        CDBG("exif_set_tag failed: %d\n", rc);
+        pthread_mutex_unlock(&jpege_mutex);
+        return FALSE;
+      }
+    }
+  }
+
+#if 0 /* Enable when JPS/MPO is ready */
+  /* 3D config */
+  CDBG("%s: is_3dmode %d ", __func__, is_3dmode );
+  if (is_3dmode) {
+    jps_cfg_3d_t cfg_3d;
+    if (jpege_config.main_cfg.scale_cfg.enable ||
+      (jpege_config.main_cfg.rotation_degree_clk > 0)) {
+      CDBG("%s: img_format_3d %d ", __func__, img_format_3d );
+      return FALSE;
+    }
+    CDBG("%s: img_format_3d %d ", __func__, img_format_3d );
+
+    switch (img_format_3d) {
+    case TOP_DOWN_HALF:
+      cfg_3d.layout = OVER_UNDER;
+      cfg_3d.width_flag = FULL_WIDTH;
+      cfg_3d.height_flag = HALF_HEIGHT;
+      cfg_3d.field_order = LEFT_FIELD_FIRST;
+      cfg_3d.separation = 0;
+      break;
+    case TOP_DOWN_FULL:
+      cfg_3d.layout = OVER_UNDER;
+      cfg_3d.width_flag = FULL_WIDTH;
+      cfg_3d.height_flag = FULL_HEIGHT;
+      cfg_3d.field_order = LEFT_FIELD_FIRST;
+      cfg_3d.separation = 0;
+      break;
+    case SIDE_BY_SIDE_HALF:
+      cfg_3d.layout = SIDE_BY_SIDE;
+      cfg_3d.width_flag = HALF_WIDTH;
+      cfg_3d.height_flag = FULL_HEIGHT;
+      cfg_3d.field_order = LEFT_FIELD_FIRST;
+      cfg_3d.separation = 0;
+      break;
+    default:
+    case SIDE_BY_SIDE_FULL:
+      cfg_3d.layout = SIDE_BY_SIDE;
+      cfg_3d.width_flag = FULL_WIDTH;
+      cfg_3d.height_flag = FULL_HEIGHT;
+      cfg_3d.field_order = LEFT_FIELD_FIRST;
+      cfg_3d.separation = 0;
+      break;
+    }
+
+    rc = jpse_config_3d(jpeg_encoder, cfg_3d);
+    if (rc) {
+     CDBG_ERROR("%s: jpse_config_3d failed: %d\n", __func__, rc);
+      pthread_mutex_unlock(&jpege_mutex);
+      return FALSE;
+    }
+  }
+#endif
+
+  /*  Start encoder */
+/*
+  if( jpege_config.main_cfg.scale_cfg.enable) {
+    mmcamera_util_profile("SW encoder starting encoding");
+  } else {
+    mmcamera_util_profile("HW encoder starting encoding");
+  }
+*/
+  rc = jpege_start(jpeg_encoder, &jpege_config, &exif_info);
+  if (rc) {
+    CDBG("jpege_start failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+
+  pthread_mutex_unlock(&jpege_mutex);
+  return TRUE;
+}
+
+int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  CDBG(" jpeg_encoder_setMainImageQuality current main inage quality %d ," \
+       " new quality : %d\n", jpegMainimageQuality, quality);
+  if (quality <= 100)
+    jpegMainimageQuality = quality;
+  pthread_mutex_unlock(&jpege_mutex);
+  return TRUE;
+}
+
+int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  CDBG(" jpeg_encoder_setThumbnailQuality current thumbnail quality %d ," \
+       " new quality : %d\n", jpegThumbnailQuality, quality);
+  if (quality <= 100)
+    jpegThumbnailQuality = quality;
+  pthread_mutex_unlock(&jpege_mutex);
+  return TRUE;
+}
+
+int8_t mm_jpeg_encoder_setRotation(int rotation)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  /* Set rotation configuration */
+  switch(rotation)
+  {
+      case 0:
+      case 90:
+      case 180:
+      case 270:
+          jpegRotation = rotation;
+          break;
+      default:
+          /* Invalid rotation mode, set to default */
+          CDBG(" Setting Default rotation mode ");
+          jpegRotation = 0;
+          break;
+  }
+  pthread_mutex_unlock(&jpege_mutex);
+  return TRUE;
+}
diff --git a/camera/mm-camera-interface/mm_jpeg_encoder.h b/camera/mm-camera-interface/mm_jpeg_encoder.h
new file mode 100644
index 0000000..87517d6
--- /dev/null
+++ b/camera/mm-camera-interface/mm_jpeg_encoder.h
@@ -0,0 +1,76 @@
+/*
+Copyright (c) 2011-2012, Code Aurora Forum. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of Code Aurora Forum, Inc. nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef MM_JPEG_ENCODER_H
+#define MM_JPEG_ENCODER_H
+#include <linux/ion.h>
+#include "camera.h"
+#include "jpege.h"
+#include "exif.h"
+#include "camera_defs_i.h"
+
+extern void mm_jpege_event_handler(void*, jpeg_event_t event, void *p_arg);
+
+extern void mm_jpege_output_produced_handler(void*, void *, jpeg_buffer_t);
+extern int mm_jpege_output_produced_handler2(void*, void *, jpeg_buffer_t, uint8_t);
+
+int8_t mm_jpeg_encoder_init(void);
+extern int8_t mm_jpeg_encoder_encode(const cam_ctrl_dimension_t * dimension,
+                                     const uint8_t * thumbnail_buf,
+                                     int thumbnail_fd, uint32_t thumbnail_offset,
+                                     const uint8_t * snapshot_buf,
+                                     int snapshot_fd,
+                                     uint32_t snapshot_offset,
+                                     common_crop_t *crop,
+                                     exif_tags_info_t *exif_data,
+                                     int exif_numEntries,
+                                     const int32_t a_cbcroffset,
+                                     cam_point_t* main_crop_offset,
+                                     cam_point_t* thumb_crop_offset);
+
+extern int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality);
+extern int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality);
+extern int8_t mm_jpeg_encoder_setRotation(int rotation);
+extern void mm_jpeg_encoder_join(void);
+extern int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height, uint32_t* p_y_offset,
+  uint32_t* p_cbcr_offset, uint32_t* p_buf_size, uint8_t *num_planes, uint32_t planes[]);
+extern void mm_jpeg_encoder_set_3D_info(cam_3d_frame_format_t format);
+typedef void (*jpegfragment_callback_t)(uint8_t * buff_ptr,
+                                        uint32_t buff_size,
+                                        void* user_data);
+typedef void (*jpeg_callback_t)(jpeg_event_t, void *);
+
+extern void set_callbacks(
+   jpegfragment_callback_t fragcallback,
+   jpeg_callback_t eventcallback,
+   void* userdata
+);
+
+extern void mm_jpeg_encoder_cancel();
+#endif // MM_JPEG_ENCODER_H
diff --git a/camera/mm-camera-interface/mm_omx_jpeg_encoder.c b/camera/mm-camera-interface/mm_omx_jpeg_encoder.c
new file mode 100644
index 0000000..bd21495
--- /dev/null
+++ b/camera/mm-camera-interface/mm_omx_jpeg_encoder.c
@@ -0,0 +1,814 @@
+/* Copyright (c) 2012, Code Aurora Forum. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of Code Aurora Forum, Inc. nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/types.h>
+#include <fcntl.h>
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "omx_debug.h"
+#include "omx_jpeg_ext.h"
+#include "mm_omx_jpeg_encoder.h"
+
+static uint8_t hw_encode = true;
+static int jpegRotation = 0;
+static int isZSLMode = 0;
+static int jpegThumbnailQuality = 75;
+static int jpegMainimageQuality = 85;
+static uint32_t phy_offset;
+void *user_data;
+
+static int encoding = 0;
+pthread_mutex_t jpege_mutex = PTHREAD_MUTEX_INITIALIZER;
+pthread_mutex_t jpegcb_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+jpegfragment_callback_t mmcamera_jpegfragment_callback;
+jpeg_callback_t mmcamera_jpeg_callback;
+
+
+#define INPUT_PORT 0
+#define OUTPUT_PORT 1
+#define INPUT_PORT1 2
+#define DEFAULT_COLOR_FORMAT YCRCBLP_H2V2
+
+typedef struct {
+  cam_format_t         isp_format;
+  jpeg_color_format_t  jpg_format;
+} color_format_map_t;
+
+
+static const color_format_map_t color_format_map[] = {
+  {CAMERA_YUV_420_NV21, YCRCBLP_H2V2}, /*default*/
+  {CAMERA_YUV_420_NV21_ADRENO, YCRCBLP_H2V2},
+  {CAMERA_YUV_420_NV12, YCBCRLP_H2V2},
+  {CAMERA_YUV_420_YV12, YCBCRLP_H2V2},
+  {CAMERA_YUV_422_NV61, YCRCBLP_H2V1},
+  {CAMERA_YUV_422_NV16, YCBCRLP_H2V1},
+};
+
+static OMX_HANDLETYPE pHandle;
+static OMX_CALLBACKTYPE callbacks;
+static OMX_INDEXTYPE type;
+static OMX_CONFIG_ROTATIONTYPE rotType;
+static omx_jpeg_thumbnail thumbnail;
+static OMX_CONFIG_RECTTYPE recttype;
+static OMX_PARAM_PORTDEFINITIONTYPE * inputPort;
+static OMX_PARAM_PORTDEFINITIONTYPE * outputPort;
+static OMX_PARAM_PORTDEFINITIONTYPE * inputPort1;
+static OMX_BUFFERHEADERTYPE* pInBuffers;
+static OMX_BUFFERHEADERTYPE* pOutBuffers;
+static OMX_BUFFERHEADERTYPE* pInBuffers1;
+OMX_INDEXTYPE user_preferences;
+omx_jpeg_user_preferences userpreferences;
+OMX_INDEXTYPE exif;
+static omx_jpeg_exif_info_tag tag;
+
+
+static pthread_mutex_t lock;
+static pthread_cond_t cond;
+static int expectedEvent = 0;
+static int expectedValue1 = 0;
+static int expectedValue2 = 0;
+static omx_jpeg_pmem_info pmem_info;
+static omx_jpeg_pmem_info pmem_info1;
+static OMX_IMAGE_PARAM_QFACTORTYPE qFactor;
+static omx_jpeg_thumbnail_quality thumbnailQuality;
+static OMX_INDEXTYPE thumbnailQualityType;
+static void *out_buffer;
+static int * out_buffer_size;
+static OMX_INDEXTYPE buffer_offset;
+static omx_jpeg_buffer_offset bufferoffset;
+
+static jpeg_color_format_t get_jpeg_format_from_cam_format(
+  cam_format_t cam_format )
+{
+  jpeg_color_format_t jpg_format = DEFAULT_COLOR_FORMAT;
+  int i, j;
+  j = sizeof (color_format_map) / sizeof(color_format_map_t);
+  ALOGV("%s: j =%d, cam_format =%d", __func__, j, cam_format);
+  for(i =0; i< j; i++) {
+    if (color_format_map[i].isp_format == cam_format){
+      jpg_format = color_format_map[i].jpg_format;
+      break;
+    }
+  }
+  ALOGV("%s x: i =%d, jpg_format=%d", __func__, i, jpg_format);
+
+  return jpg_format;
+}
+static omx_jpeg_buffer_offset bufferoffset1;
+void set_callbacks(
+    jpegfragment_callback_t fragcallback,
+    jpeg_callback_t eventcallback, void* userdata,
+    void* output_buffer,
+    int * outBufferSize) {
+    pthread_mutex_lock(&jpegcb_mutex);
+    mmcamera_jpegfragment_callback = fragcallback;
+    mmcamera_jpeg_callback = eventcallback;
+    user_data = userdata;
+    out_buffer = output_buffer;
+    out_buffer_size = outBufferSize;
+    pthread_mutex_unlock(&jpegcb_mutex);
+}
+
+
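+/* OMX EmptyBufferDone callback: records the event and wakes any thread blocked
+ * in waitForEvent(). */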
+OMX_ERRORTYPE etbdone(OMX_OUT OMX_HANDLETYPE hComponent,
+                      OMX_OUT OMX_PTR pAppData,
+                      OMX_OUT OMX_BUFFERHEADERTYPE* pBuffer)
+{
+    pthread_mutex_lock(&lock);
+    expectedEvent = OMX_EVENT_ETB_DONE;
+    expectedValue1 = 0;
+    expectedValue2 = 0;
+    pthread_cond_signal(&cond);
+    pthread_mutex_unlock(&lock);
+    return 0;
+}
+
+OMX_ERRORTYPE ftbdone(OMX_OUT OMX_HANDLETYPE hComponent,
+                      OMX_OUT OMX_PTR pAppData,
+                      OMX_OUT OMX_BUFFERHEADERTYPE* pBuffer)
+{
+    ALOGE("%s", __func__);
+    *out_buffer_size = pBuffer->nFilledLen;
+    pthread_mutex_lock(&lock);
+    expectedEvent = OMX_EVENT_FTB_DONE;
+    expectedValue1 = 0;
+    expectedValue2 = 0;
+    pthread_cond_signal(&cond);
+    pthread_mutex_unlock(&lock);
+    ALOGI("%s:filled len = %u", __func__, (uint32_t)pBuffer->nFilledLen);
+    if (mmcamera_jpeg_callback && encoding)
+        mmcamera_jpeg_callback(0, user_data);
+    return 0;
+}
+
+OMX_ERRORTYPE handleError(OMX_IN OMX_EVENTTYPE eEvent, OMX_IN OMX_U32 error)
+{
+    ALOGE("%s", __func__);
+    if (error == OMX_EVENT_JPEG_ERROR) {
+        if (mmcamera_jpeg_callback && encoding) {
+            ALOGI("%s:OMX_EVENT_JPEG_ERROR\n", __func__);
+            mmcamera_jpeg_callback(JPEG_EVENT_ERROR, user_data);
+        }
+    } else if (error == OMX_EVENT_THUMBNAIL_DROPPED) {
+        if (mmcamera_jpeg_callback && encoding) {
+            ALOGI("%s:(OMX_EVENT_THUMBNAIL_DROPPED\n", __func__);
+            mmcamera_jpeg_callback(JPEG_EVENT_THUMBNAIL_DROPPED, user_data);
+        }
+    }
+    return 0;
+}
+
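+/* Generic OMX event callback: publishes the event and its two payload words
+ * for waitForEvent(), and forwards JPEG errors and dropped-thumbnail
+ * notifications to handleError(). */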
+OMX_ERRORTYPE eventHandler( OMX_IN OMX_HANDLETYPE hComponent,
+                            OMX_IN OMX_PTR pAppData, OMX_IN OMX_EVENTTYPE eEvent,
+                            OMX_IN OMX_U32 nData1, OMX_IN OMX_U32 nData2,
+                            OMX_IN OMX_PTR pEventData)
+{
+    ALOGI("%s", __func__);
+    ALOGI("%s:got event %d ndata1 %u ndata2 %u", __func__,
+      eEvent, nData1, nData2);
+    pthread_mutex_lock(&lock);
+    expectedEvent = eEvent;
+    expectedValue1 = nData1;
+    expectedValue2 = nData2;
+    pthread_cond_signal(&cond);
+    pthread_mutex_unlock(&lock);
+    if ((nData1 == OMX_EVENT_JPEG_ERROR) || (nData1 == OMX_EVENT_THUMBNAIL_DROPPED))
+        handleError(eEvent,nData1);
+    return 0;
+}
+
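+/* Blocks until the OMX callbacks report exactly the expected (event, value1,
+ * value2) triple; this is what lets the asynchronous OMX component be driven
+ * in a synchronous fashion. */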
+void waitForEvent(int event, int value1, int value2 ){
+    pthread_mutex_lock(&lock);
+    ALOGI("%s:Waiting for:event=%d, value1=%d, value2=%d",
+      __func__, event, value1, value2);
+    while (!(expectedEvent == event &&
+             expectedValue1 == value1 && expectedValue2 == value2)) {
+        pthread_cond_wait(&cond, &lock);
+        ALOGI("%s:After cond_wait:expectedEvent=%d, expectedValue1=%d, expectedValue2=%d",
+          __func__, expectedEvent, expectedValue1, expectedValue2);
+        ALOGI("%s:After cond_wait:event=%d, value1=%d, value2=%d",
+          __func__, event, value1, value2);
+    }
+    ALOGI("%s:done:expectedEvent=%d, expectedValue1=%d, expectedValue2=%d",
+      __func__, expectedEvent, expectedValue1, expectedValue2);
+    pthread_mutex_unlock(&lock);
+}
+
+int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height,
+    uint32_t* p_y_offset, uint32_t* p_cbcr_offset, uint32_t* p_buf_size,
+    uint8_t *num_planes, uint32_t planes[])
+{
+    ALOGI("%s:", __func__);
+    if ((NULL == p_y_offset) || (NULL == p_cbcr_offset)) {
+        return FALSE;
+    }
+    *num_planes = 2;
+    if (hw_encode ) {
+        int cbcr_offset = 0;
+        uint32_t actual_size = width*height;
+        uint32_t padded_size = width * CEILING16(height);
+        *p_y_offset = 0;
+        *p_cbcr_offset = 0;
+        if (!isZSLMode) {
+            if ((jpegRotation == 90) || (jpegRotation == 180)) {
+                *p_y_offset = padded_size - actual_size;
+                *p_cbcr_offset = ((padded_size - actual_size) >> 1);
+            }
+        }
+        *p_buf_size = padded_size * 3/2;
+        planes[0] = width * CEILING16(height);
+        planes[1] = width * CEILING16(height)/2;
+    } else {
+        *p_y_offset = 0;
+        *p_cbcr_offset = PAD_TO_WORD(width*height);
+        *p_buf_size = *p_cbcr_offset * 3/2;
+        planes[0] = PAD_TO_WORD(width*CEILING16(height));
+        planes[1] = PAD_TO_WORD(width*CEILING16(height)/2);
+    }
+    return TRUE;
+}
+
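+/* Acquires a handle to the OMX.qcom.image.jpeg.encoder component. */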
+int8_t omxJpegOpen()
+{
+    ALOGI("%s", __func__);
+    pthread_mutex_lock(&jpege_mutex);
+    OMX_ERRORTYPE ret = OMX_GetHandle(&pHandle, "OMX.qcom.image.jpeg.encoder",
+      NULL, &callbacks);
+    pthread_mutex_unlock(&jpege_mutex);
+    if (ret != OMX_ErrorNone) {
+        ALOGE("%s: OMX_GetHandle failed: %d", __func__, ret);
+        return FALSE;
+    }
+    return TRUE;
+}
+
+int8_t omxJpegStart()
+{
+    int rc = 0;
+    ALOGE("%s", __func__);
+    pthread_mutex_lock(&jpege_mutex);
+    hw_encode = true;
+    callbacks.EmptyBufferDone = etbdone;
+    callbacks.FillBufferDone = ftbdone;
+    callbacks.EventHandler = eventHandler;
+    pthread_mutex_init(&lock, NULL);
+    pthread_cond_init(&cond, NULL);
+    rc = OMX_Init();
+    pthread_mutex_unlock(&jpege_mutex);
+    return rc;
+}
+
+static omx_jpeg_color_format format_cam2jpeg(cam_format_t fmt)
+{
+    omx_jpeg_color_format jpeg_fmt = OMX_YCRCBLP_H2V2;
+    switch (fmt) {
+    case CAMERA_YUV_420_NV12:
+        jpeg_fmt = OMX_YCBCRLP_H2V2;
+        break;
+    case CAMERA_YUV_420_NV21:
+    case CAMERA_YUV_420_NV21_ADRENO:
+        jpeg_fmt = OMX_YCRCBLP_H2V2;
+        break;
+    default:
+        ALOGI("Camera format %d not supported", fmt);
+        break;
+    }
+    return jpeg_fmt;
+}
+
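+/* Encodes a subsequent frame in burst mode: reuses the port configuration from
+ * the first encode, frees the previous OMX buffer headers, wraps the new
+ * snapshot, thumbnail and output buffers, and triggers the encode with
+ * EmptyThisBuffer/FillThisBuffer. */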
+int8_t omxJpegEncodeNext(omx_jpeg_encode_params *encode_params)
+{
+    ALOGI("%s:E", __func__);
+    pthread_mutex_lock(&jpege_mutex);
+    encoding = 1;
+    int orientation;
+    if(inputPort == NULL || inputPort1 == NULL || outputPort == NULL) {
+      ALOGI("%s:pointer is null: X", __func__);
+      pthread_mutex_unlock(&jpege_mutex);
+      return -1;
+    }
+    inputPort->nPortIndex = INPUT_PORT;
+    outputPort->nPortIndex = OUTPUT_PORT;
+    inputPort1->nPortIndex = INPUT_PORT1;
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, outputPort);
+
+    ALOGI("%s:nFrameWidth=%d nFrameHeight=%d nBufferSize=%d w=%d h=%d",
+      __func__, inputPort->format.image.nFrameWidth,
+      inputPort->format.image.nFrameHeight, inputPort->nBufferSize,
+      bufferoffset.width, bufferoffset.height);
+    OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.buffer_offset",
+      &buffer_offset);
+    ALOGI("%s:Buffer w %d h %d yOffset %d cbcrOffset %d totalSize %d\n",
+      __func__, bufferoffset.width, bufferoffset.height, bufferoffset.yOffset,
+      bufferoffset.cbcrOffset,bufferoffset.totalSize);
+    OMX_SetParameter(pHandle, buffer_offset, &bufferoffset);
+    OMX_SetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
+    ALOGI("%s: thumbnail widht %d height %d", __func__,
+      thumbnail.width, thumbnail.height);
+
+    userpreferences.color_format =
+        format_cam2jpeg(encode_params->dimension->main_img_format);
+    userpreferences.thumbnail_color_format =
+        format_cam2jpeg(encode_params->dimension->thumb_format);
+
+
+    pmem_info.fd = encode_params->snapshot_fd;
+    pmem_info.offset = 0;
+
+    //Release previously allocated buffers before doing UseBuffer in burst mode
+    OMX_FreeBuffer(pHandle, 2, pInBuffers1);
+    OMX_FreeBuffer(pHandle, 0, pInBuffers);
+    OMX_FreeBuffer(pHandle, 1, pOutBuffers);
+
+    OMX_UseBuffer(pHandle, &pInBuffers, 0, &pmem_info, inputPort->nBufferSize,
+    (void *) encode_params->snapshot_buf);
+    OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.exif", &exif);
+    /* Temporarily set the rotation in the EXIF data. This is done to avoid
+      image corruption issues in ZSL mode since the rotation is known
+      beforehand. The orientation is set in the EXIF tag and the decoder
+      will decode the image with the right orientation. Double padding would
+      need to be added to fix the issue properly. */
+    if (isZSLMode) {
+        /*Get the orientation tag values depending on rotation*/
+        switch (jpegRotation) {
+        case 0:
+            orientation = 1; /*Normal*/
+            break;
+        case 90:
+            orientation = 6; /*Rotated 90 CCW*/
+            break;
+        case 180:
+            orientation =  3; /*Rotated 180*/
+            break;
+        case 270:
+            orientation = 8; /*Rotated 90 CW*/
+            break;
+        default:
+            orientation = 1;
+            break;
+      }
+      tag.tag_id = EXIFTAGID_ORIENTATION;
+      tag.tag_entry.type = EXIFTAGTYPE_ORIENTATION;
+      tag.tag_entry.count = 1;
+      tag.tag_entry.copy = 1;
+      tag.tag_entry.data._short = orientation;
+      ALOGE("%s jpegRotation = %d , orientation value =%d\n", __func__,
+           jpegRotation, orientation);
+      OMX_SetParameter(pHandle, exif, &tag);
+    }
+
+    /*Set omx parameter for all exif tags*/
+    int i;
+    for (i = 0; i < encode_params->exif_numEntries; i++) {
+        memcpy(&tag, encode_params->exif_data + i,
+               sizeof(omx_jpeg_exif_info_tag));
+        OMX_SetParameter(pHandle, exif, &tag);
+    }
+
+    pmem_info1.fd = encode_params->thumbnail_fd;
+    pmem_info1.offset = 0;
+
+    ALOGI("%s: input1 buff size %d", __func__, inputPort1->nBufferSize);
+    OMX_UseBuffer(pHandle, &pInBuffers1, 2, &pmem_info1,
+      inputPort1->nBufferSize, (void *) encode_params->thumbnail_buf);
+    OMX_UseBuffer(pHandle, &pOutBuffers, 1, NULL, inputPort->nBufferSize,
+      (void *) out_buffer);
+    OMX_EmptyThisBuffer(pHandle, pInBuffers);
+    OMX_EmptyThisBuffer(pHandle, pInBuffers1);
+    OMX_FillThisBuffer(pHandle, pOutBuffers);
+    pthread_mutex_unlock(&jpege_mutex);
+    ALOGI("%s:X", __func__);
+    return TRUE;
+}
+
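+/* First encode of a session: allocates the OMX port definitions and programs
+ * buffer offsets, scaling, rotation, quality and EXIF configuration on the
+ * encoder component. */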
+int8_t omxJpegEncode(omx_jpeg_encode_params *encode_params)
+{
+    int size = 0;
+    uint8_t num_planes;
+    uint32_t planes[10];
+    int orientation;
+    ALOGI("%s:E", __func__);
+
+    inputPort = malloc(sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
+    outputPort = malloc(sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
+    inputPort1 = malloc(sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
+
+    pthread_mutex_lock(&jpege_mutex);
+    encoding = 1;
+    inputPort->nPortIndex = INPUT_PORT;
+    outputPort->nPortIndex = OUTPUT_PORT;
+    inputPort1->nPortIndex = INPUT_PORT1;
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, outputPort);
+
+    bufferoffset.width = encode_params->dimension->orig_picture_dx;
+    bufferoffset.height = encode_params->dimension->orig_picture_dy;
+
+    if (hw_encode)
+        userpreferences.preference = OMX_JPEG_PREF_HW_ACCELERATED_PREFERRED;
+    else
+        userpreferences.preference = OMX_JPEG_PREF_SOFTWARE_ONLY;
+    if (encode_params->a_cbcroffset > 0) {
+        userpreferences.preference = OMX_JPEG_PREF_SOFTWARE_ONLY;
+        hw_encode = 0;
+    }
+    if (encode_params->scaling_params->in2_w &&
+        encode_params->scaling_params->in2_h) {
+        if (jpegRotation) {
+            userpreferences.preference = OMX_JPEG_PREF_SOFTWARE_ONLY;
+            ALOGI("%s:Scaling and roation true: setting pref to sw\n",
+                __func__);
+            hw_encode = 0;
+        }
+    }
+    mm_jpeg_encoder_get_buffer_offset(bufferoffset.width, bufferoffset.height,
+                    &bufferoffset.yOffset,
+                    &bufferoffset.cbcrOffset,
+                    &bufferoffset.totalSize, &num_planes, planes);
+    if (encode_params->a_cbcroffset > 0) {
+        bufferoffset.totalSize = encode_params->a_cbcroffset * 1.5;
+    }
+    OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.buffer_offset",&buffer_offset);
+    ALOGI(" Buffer width = %d, Buffer  height = %d, yOffset =%d, cbcrOffset =%d, totalSize = %d\n",
+                 bufferoffset.width, bufferoffset.height, bufferoffset.yOffset,
+                 bufferoffset.cbcrOffset,bufferoffset.totalSize);
+    OMX_SetParameter(pHandle, buffer_offset, &bufferoffset);
+
+
+    if (encode_params->a_cbcroffset > 0) {
+        ALOGI("Using acbcroffset\n");
+        bufferoffset1.cbcrOffset = encode_params->a_cbcroffset;
+        OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.acbcr_offset",&buffer_offset);
+        OMX_SetParameter(pHandle, buffer_offset, &bufferoffset1);
+    }
+
+    inputPort->format.image.nFrameWidth = encode_params->dimension->orig_picture_dx;
+    inputPort->format.image.nFrameHeight = encode_params->dimension->orig_picture_dy;
+    inputPort->format.image.nStride = encode_params->dimension->orig_picture_dx;
+    inputPort->format.image.nSliceHeight = encode_params->dimension->orig_picture_dy;
+    inputPort->nBufferSize = bufferoffset.totalSize;
+
+    inputPort1->format.image.nFrameWidth =
+      encode_params->dimension->thumbnail_width;
+    inputPort1->format.image.nFrameHeight =
+      encode_params->dimension->thumbnail_height;
+    inputPort1->format.image.nStride =
+      encode_params->dimension->thumbnail_width;
+    inputPort1->format.image.nSliceHeight =
+      encode_params->dimension->thumbnail_height;
+
+    OMX_SetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+    size = inputPort->nBufferSize;
+    thumbnail.width = encode_params->dimension->thumbnail_width;
+    thumbnail.height = encode_params->dimension->thumbnail_height;
+
+    OMX_SetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
+    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
+    ALOGI("%s: thumbnail width %d height %d", __func__,
+      encode_params->dimension->thumbnail_width,
+      encode_params->dimension->thumbnail_height);
+
+    if(encode_params->a_cbcroffset > 0)
+        inputPort1->nBufferSize = inputPort->nBufferSize;
+
+    userpreferences.color_format =
+      get_jpeg_format_from_cam_format(encode_params->main_format);
+    userpreferences.thumbnail_color_format =
+      get_jpeg_format_from_cam_format(encode_params->thumbnail_format);
+
+    ALOGI("%s:Scaling params in1_w %d in1_h %d out1_w %d out1_h %d "
+      "main_img in2_w %d in2_h %d out2_w %d out2_h %d\n", __func__,
+      encode_params->scaling_params->in1_w,
+      encode_params->scaling_params->in1_h,
+      encode_params->scaling_params->out1_w,
+      encode_params->scaling_params->out1_h,
+      encode_params->scaling_params->in2_w,
+      encode_params->scaling_params->in2_h,
+      encode_params->scaling_params->out2_w,
+      encode_params->scaling_params->out2_h);
+
+    /* Main image scaling */
+    ALOGI("%s:%d\n", __func__, __LINE__);
+
+    if (encode_params->scaling_params->in2_w &&
+        encode_params->scaling_params->in2_h) {
+
+        /* Scaler information for the main image */
+        recttype.nWidth = CEILING2(encode_params->scaling_params->in2_w);
+        recttype.nHeight = CEILING2(encode_params->scaling_params->in2_h);
+        ALOGI("%s:%d\n", __func__, __LINE__);
+
+        if (encode_params->main_crop_offset) {
+            recttype.nLeft = encode_params->main_crop_offset->x;
+            recttype.nTop = encode_params->main_crop_offset->y;
+            ALOGI("%s:%d\n", __func__, __LINE__);
+        } else {
+            recttype.nLeft = 0;
+            recttype.nTop = 0;
+            ALOGI("%s:%d\n", __func__, __LINE__);
+        }
+        ALOGI("%s:%d\n", __func__, __LINE__);
+
+        recttype.nPortIndex = 1;
+        OMX_SetConfig(pHandle, OMX_IndexConfigCommonInputCrop, &recttype);
+        ALOGI("%s:%d\n", __func__, __LINE__);
+
+        if (encode_params->scaling_params->out2_w &&
+            encode_params->scaling_params->out2_h) {
+            recttype.nWidth = (encode_params->scaling_params->out2_w);
+            recttype.nHeight = (encode_params->scaling_params->out2_h);
+            ALOGI("%s:%d\n", __func__, __LINE__);
+
+            recttype.nPortIndex = 1;
+            OMX_SetConfig(pHandle, OMX_IndexConfigCommonOutputCrop, &recttype);
+            ALOGI("%s:%d\n", __func__, __LINE__);
+        }
+    } else {
+        ALOGI("%s: There is no main image scaling information",
+          __func__);
+    }
+    /* Thumbnail scaling */
+    if ((encode_params->scaling_params->in1_w &&
+        encode_params->scaling_params->in1_h) ||
+        ((encode_params->scaling_params->out1_w !=
+        encode_params->dimension->thumbnail_width) &&
+        (encode_params->scaling_params->out1_h !=
+        encode_params->dimension->thumbnail_height))) {
+
+        thumbnail.scaling = 0;
+
+        if ((encode_params->scaling_params->out1_w !=
+            encode_params->dimension->thumbnail_width) &&
+            (encode_params->scaling_params->out1_h !=
+            encode_params->dimension->thumbnail_height)) {
+            ALOGI("%s:%d\n", __func__, __LINE__);
+            thumbnail.cropWidth = CEILING2(encode_params->dimension->thumbnail_width);
+            thumbnail.cropHeight = CEILING2(encode_params->dimension->thumbnail_height);
+        }
+        if (encode_params->scaling_params->in1_w &&
+            encode_params->scaling_params->in1_h) {
+            ALOGI("%s:%d\n", __func__, __LINE__);
+            thumbnail.cropWidth = CEILING2(encode_params->scaling_params->in1_w);
+            thumbnail.cropHeight = CEILING2(encode_params->scaling_params->in1_h);
+        }
+        thumbnail.width  = encode_params->scaling_params->out1_w;
+        thumbnail.height = encode_params->scaling_params->out1_h;
+
+        if (encode_params->thumb_crop_offset) {
+            ALOGI("%s:%d\n", __func__, __LINE__);
+            thumbnail.left = encode_params->thumb_crop_offset->x;
+            thumbnail.top = encode_params->thumb_crop_offset->y;
+            thumbnail.scaling = 1;
+        } else {
+            thumbnail.left = 0;
+            thumbnail.top = 0;
+        }
+    } else {
+        thumbnail.scaling = 0;
+        ALOGI("%s: There is no thumbnail scaling information",
+          __func__);
+    }
+    OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.user_preferences",
+      &user_preferences);
+    ALOGI("%s:User Preferences: color_format %d"
+      "thumbnail_color_format = %d encoder preference =%d\n", __func__,
+      userpreferences.color_format,userpreferences.thumbnail_color_format,
+      userpreferences.preference);
+    OMX_SetParameter(pHandle,user_preferences,&userpreferences);
+
+    ALOGI("%s Thumbnail present? : %d ", __func__,
+                 encode_params->hasThumbnail);
+    if (encode_params->hasThumbnail) {
+    OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.thumbnail", &type);
+    OMX_SetParameter(pHandle, type, &thumbnail);
+    }
+    qFactor.nPortIndex = INPUT_PORT;
+    OMX_GetParameter(pHandle, OMX_IndexParamQFactor, &qFactor);
+    qFactor.nQFactor = jpegMainimageQuality;
+    OMX_SetParameter(pHandle, OMX_IndexParamQFactor, &qFactor);
+
+    OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.thumbnail_quality",
+    &thumbnailQualityType);
+
+    ALOGI("%s: thumbnail quality %u %d",
+      __func__, thumbnailQualityType, jpegThumbnailQuality);
+    OMX_GetParameter(pHandle, thumbnailQualityType, &thumbnailQuality);
+    thumbnailQuality.nQFactor = jpegThumbnailQuality;
+    OMX_SetParameter(pHandle, thumbnailQualityType, &thumbnailQuality);
+
+    ALOGE("isZSLMode is %d\n",isZSLMode);
+    if(!isZSLMode){
+    //Pass rotation if not ZSL mode
+    rotType.nPortIndex = OUTPUT_PORT;
+    rotType.nRotation = jpegRotation;
+    OMX_SetConfig(pHandle, OMX_IndexConfigCommonRotate, &rotType);
+    ALOGE("Set rotation to %d\n",jpegRotation);
+    }
+
+    OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.exif", &exif);
+    /* Temporarily set rotation in the EXIF data. This is done to avoid
+       image-corruption issues in ZSL mode, since the rotation is known
+       beforehand. The orientation is written to the EXIF tag so the decoder
+       renders the image with the right orientation. Need to add double
+       padding to fix the issue. */
+    if (isZSLMode) {
+        /* Map the JPEG rotation to the corresponding EXIF orientation value */
+        switch (jpegRotation) {
+        case 0:
+            orientation = 1;  /* 1 = Normal */
+            break;
+        case 90:
+            orientation = 6;  /* 6 = Rotate 90 CW */
+            break;
+        case 180:
+            orientation = 3;  /* 3 = Rotate 180 */
+            break;
+        case 270:
+            orientation = 8;  /* 8 = Rotate 270 CW */
+            break;
+        default:
+            orientation = 1;
+            break;
+        }
+        tag.tag_id = EXIFTAGID_ORIENTATION;
+        tag.tag_entry.type = EXIFTAGTYPE_ORIENTATION;
+        tag.tag_entry.count = 1;
+        tag.tag_entry.copy = 1;
+        tag.tag_entry.data._short = orientation;
+        ALOGE("%s: jpegRotation = %d, orientation value = %d\n", __func__,
+          jpegRotation, orientation);
+        OMX_SetParameter(pHandle, exif, &tag);
+    }
+
+    //Set omx parameter for all exif tags
+    int i;
+    for(i=0; i<encode_params->exif_numEntries; i++) {
+        memcpy(&tag, encode_params->exif_data + i, sizeof(omx_jpeg_exif_info_tag));
+        OMX_SetParameter(pHandle, exif, &tag);
+    }
+
+    pmem_info.fd = encode_params->snapshot_fd;
+    pmem_info.offset = 0;
+
+    ALOGI("input buffer size is %d",size);
+    OMX_UseBuffer(pHandle, &pInBuffers, 0, &pmem_info, size,
+    (void *) encode_params->snapshot_buf);
+
+    pmem_info1.fd = encode_params->thumbnail_fd;
+    pmem_info1.offset = 0;
+
+    ALOGI("%s: input1 buff size %d", __func__, inputPort1->nBufferSize);
+    OMX_UseBuffer(pHandle, &pInBuffers1, 2, &pmem_info1,
+      inputPort1->nBufferSize, (void *) encode_params->thumbnail_buf);
+
+    OMX_UseBuffer(pHandle, &pOutBuffers, 1, NULL, size, (void *) out_buffer);
+
+    waitForEvent(OMX_EventCmdComplete, OMX_CommandStateSet, OMX_StateIdle);
+    ALOGI("%s:State changed to OMX_StateIdle\n", __func__);
+    OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateExecuting, NULL);
+    waitForEvent(OMX_EventCmdComplete, OMX_CommandStateSet, OMX_StateExecuting);
+
+    OMX_EmptyThisBuffer(pHandle, pInBuffers);
+    OMX_EmptyThisBuffer(pHandle, pInBuffers1);
+    OMX_FillThisBuffer(pHandle, pOutBuffers);
+    pthread_mutex_unlock(&jpege_mutex);
+    ALOGI("%s:X", __func__);
+    return TRUE;
+}
+
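+/*===========================================================================
+FUNCTION      omxJpegFinish
+
+DESCRIPTION   If an encode is in progress, move the OMX component to Idle and
+              then Loaded, free the input/output buffer headers and call
+              OMX_Deinit().
+===========================================================================*/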
+void omxJpegFinish()
+{
+    pthread_mutex_lock(&jpege_mutex);
+    ALOGI("%s:encoding=%d", __func__, encoding);
+    if (encoding) {
+        encoding = 0;
+        OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateIdle, NULL);
+        OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateLoaded, NULL);
+        OMX_FreeBuffer(pHandle, 0, pInBuffers);
+        OMX_FreeBuffer(pHandle, 2, pInBuffers1);
+        OMX_FreeBuffer(pHandle, 1, pOutBuffers);
+        OMX_Deinit();
+    }
+    pthread_mutex_unlock(&jpege_mutex);
+}
+
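+/*===========================================================================
+FUNCTION      omxJpegClose
+
+DESCRIPTION   Currently only logs; no teardown is performed here.
+===========================================================================*/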
+void omxJpegClose()
+{
+    ALOGI("%s:", __func__);
+}
+
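+/*===========================================================================
+FUNCTION      omxJpegAbort
+
+DESCRIPTION   Clear the registered JPEG callbacks and, if an encode is in
+              progress, flush the component, wait for the abort event and
+              release the OMX buffers and handle.
+===========================================================================*/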
+void omxJpegAbort()
+{
+    pthread_mutex_lock(&jpegcb_mutex);
+    mmcamera_jpegfragment_callback = NULL;
+    mmcamera_jpeg_callback = NULL;
+    user_data = NULL;
+    pthread_mutex_unlock(&jpegcb_mutex);
+    pthread_mutex_lock(&jpege_mutex);
+    ALOGI("%s: encoding=%d", __func__, encoding);
+    if (encoding) {
+      encoding = 0;
+      OMX_SendCommand(pHandle, OMX_CommandFlush, NULL, NULL);
+      ALOGI("%s:waitForEvent: OMX_CommandFlush", __func__);
+      waitForEvent(OMX_EVENT_JPEG_ABORT, 0, 0);
+      ALOGI("%s:waitForEvent: OMX_CommandFlush: DONE", __func__);
+      OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateIdle, NULL);
+      OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateLoaded, NULL);
+      OMX_FreeBuffer(pHandle, 0, pInBuffers);
+      OMX_FreeBuffer(pHandle, 2, pInBuffers1);
+      OMX_FreeBuffer(pHandle, 1, pOutBuffers);
+      OMX_Deinit();
+    }
+    pthread_mutex_unlock(&jpege_mutex);
+}
+
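+/*===========================================================================
+FUNCTION      mm_jpeg_encoder_setMainImageQuality
+
+DESCRIPTION   Set the main image JPEG quality (accepted only if <= 100).
+===========================================================================*/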
+int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality)
+{
+    pthread_mutex_lock(&jpege_mutex);
+    ALOGE("%s: current main inage quality %d ," \
+    " new quality : %d\n", __func__, jpegMainimageQuality, quality);
+    if (quality <= 100)
+        jpegMainimageQuality = quality;
+    pthread_mutex_unlock(&jpege_mutex);
+    return TRUE;
+}
+
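+/*===========================================================================
+FUNCTION      mm_jpeg_encoder_setThumbnailQuality
+
+DESCRIPTION   Set the thumbnail JPEG quality (accepted only if <= 100).
+===========================================================================*/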
+int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality)
+{
+    pthread_mutex_lock(&jpege_mutex);
+    ALOGE("%s: current thumbnail quality %d ," \
+    " new quality : %d\n", __func__, jpegThumbnailQuality, quality);
+    if (quality <= 100)
+        jpegThumbnailQuality = quality;
+    pthread_mutex_unlock(&jpege_mutex);
+    return TRUE;
+}
+
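+/*===========================================================================
+FUNCTION      mm_jpeg_encoder_setRotation
+
+DESCRIPTION   Record the ZSL mode flag and the JPEG rotation; invalid
+              rotation values fall back to 0.
+===========================================================================*/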
+int8_t mm_jpeg_encoder_setRotation(int rotation, int isZSL)
+{
+    pthread_mutex_lock(&jpege_mutex);
+
+    /* Set ZSL mode */
+    isZSLMode = isZSL;
+    ALOGE("%s: Setting ZSL Mode to %d Rotation = %d\n", __func__, isZSLMode,
+      rotation);
+    /* Set rotation configuration */
+    switch (rotation) {
+    case 0:
+    case 90:
+    case 180:
+    case 270:
+        jpegRotation = rotation;
+        break;
+    default:
+        /* Invalid rotation mode, set to default */
+        ALOGI("%s:Setting Default rotation mode", __func__);
+        jpegRotation = 0;
+        break;
+    }
+    pthread_mutex_unlock(&jpege_mutex);
+    return TRUE;
+}
+
+/*===========================================================================
+FUNCTION      mm_jpege_set_phy_offset
+
+DESCRIPTION   Set physical offset for the buffer
+===========================================================================*/
+void mm_jpege_set_phy_offset(uint32_t a_phy_offset)
+{
+    phy_offset = a_phy_offset;
+}
diff --git a/camera/mm-camera-interface/mm_omx_jpeg_encoder.h b/camera/mm-camera-interface/mm_omx_jpeg_encoder.h
new file mode 100644
index 0000000..c995ead
--- /dev/null
+++ b/camera/mm-camera-interface/mm_omx_jpeg_encoder.h
@@ -0,0 +1,82 @@
+/* Copyright (c) 2011, Code Aurora Forum. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of Code Aurora Forum, Inc. nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MM_OMX_JPEG_ENCODER_H_
+#define MM_OMX_JPEG_ENCODER_H_
+#include <linux/ion.h>
+#include "camera.h"
+
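+/*
+ * Parameters passed to omxJpegEncode()/omxJpegEncodeNext(). As used by the
+ * encoder implementation, scaling_params in1/out1 describe the thumbnail and
+ * in2/out2 describe the main image; main_crop_offset/thumb_crop_offset give
+ * the crop origins, and hasThumbnail controls whether the thumbnail
+ * configuration is sent to the OMX component.
+ */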
+typedef struct omx_jpeg_encode_params_t {
+    const cam_ctrl_dimension_t * dimension;
+    const uint8_t * thumbnail_buf;
+    int thumbnail_fd;
+    uint32_t thumbnail_offset;
+    const uint8_t * snapshot_buf;
+    int snapshot_fd;
+    uint32_t snapshot_offset;
+    common_crop_t *scaling_params;
+    exif_tags_info_t *exif_data;
+    int exif_numEntries;
+    int32_t a_cbcroffset;
+    cam_point_t* main_crop_offset;
+    cam_point_t* thumb_crop_offset;
+    int hasThumbnail;
+    cam_format_t main_format;
+    cam_format_t thumbnail_format;
+} omx_jpeg_encode_params;
+
+int8_t omxJpegOpen();
+int8_t omxJpegStart();
+int8_t omxJpegEncode(omx_jpeg_encode_params *encode_params);
+int8_t omxJpegEncodeNext(omx_jpeg_encode_params *encode_params);
+void omxJpegFinish();
+void omxJpegClose();
+void omxJpegAbort();
+
+int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality);
+int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality);
+int8_t mm_jpeg_encoder_setRotation(int rotation,int isZSL);
+void mm_jpege_set_phy_offset(uint32_t a_phy_offset);
+int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height,
+    uint32_t* p_y_offset, uint32_t* p_cbcr_offset,
+    uint32_t* p_buf_size,uint8_t *num_planes,
+    uint32_t planes[]);
+typedef void (*jpegfragment_callback_t)(uint8_t * buff_ptr,
+    uint32_t buff_size, void* user_data);
+typedef void (*jpeg_callback_t)(jpeg_event_t, void *);
+
+void set_callbacks(
+    jpegfragment_callback_t fragcallback,
+    jpeg_callback_t eventcallback,
+    void* userdata,
+    void* output_buffer,
+    int * outBufferSize
+);
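+
+/*
+ * Illustrative call sequence (a sketch inferred from the declarations above
+ * and the encoder implementation; not a guaranteed contract, and the names
+ * frag_cb, event_cb, user, out_buf, out_size and params are placeholders):
+ *
+ *   set_callbacks(frag_cb, event_cb, user, out_buf, &out_size);
+ *   omxJpegOpen();
+ *   mm_jpeg_encoder_setMainImageQuality(85);
+ *   mm_jpeg_encoder_setThumbnailQuality(75);
+ *   mm_jpeg_encoder_setRotation(90, 0);      (second argument 0 = non-ZSL)
+ *   omxJpegStart();
+ *   omxJpegEncode(&params);                  (JPEG data arrives via callbacks)
+ *   omxJpegFinish();
+ *   omxJpegClose();
+ */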
+
+#endif /* MM_OMX_JPEG_ENCODER_H_ */