am 31326aeb: am 2fb3b804: am 1c524977: Merge "Upgrade SELinux policy for N5 DRM crypto"

* commit '31326aeb6e33ba0d7acc73f391d8c0820946f169':
  Upgrade SELinux policy for N5 DRM crypto
diff --git a/BoardConfig.mk b/BoardConfig.mk
index 197bc34..02b7e53 100644
--- a/BoardConfig.mk
+++ b/BoardConfig.mk
@@ -81,17 +81,12 @@
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 131072
 
-BOARD_CHARGER_DISABLE_INIT_BLANK := true
 BOARD_CHARGER_ENABLE_SUSPEND := true
 
-TARGET_RECOVERY_PIXEL_FORMAT := RGBX_8888
-TARGET_RECOVERY_UI_LIB := librecovery_ui_hammerhead
 TARGET_RECOVERY_FSTAB = device/lge/hammerhead/fstab.hammerhead
 
 TARGET_RELEASETOOLS_EXTENSIONS := device/lge/hammerhead
 
-PDK_PLATFORM_ZIP_PRODUCT_BINARIES := device/lge/hammerhead-kernel/vmlinux.bz2
-
 BOARD_HAL_STATIC_LIBRARIES := libdumpstate.hammerhead
 
 BOARD_SEPOLICY_DIRS += \
@@ -132,9 +127,18 @@
 
 HAVE_ADRENO_SOURCE:= false
 
-#OVERRIDE_RS_DRIVER:= libRSDriver_adreno.so
+OVERRIDE_RS_DRIVER:= libRSDriver_adreno.so
 TARGET_FORCE_HWC_FOR_VIRTUAL_DISPLAYS := true
 
 TARGET_TOUCHBOOST_FREQUENCY:= 1200
 
+USE_DEVICE_SPECIFIC_QCOM_PROPRIETARY:= true
+USE_DEVICE_SPECIFIC_CAMERA:= true
+
 -include vendor/lge/hammerhead/BoardConfigVendor.mk
+
+# Enable Minikin text layout engine (will be the default soon)
+USE_MINIKIN := true
+
+# Include an expanded selection of fonts
+EXTENDED_FONT_FOOTPRINT := true
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 5f93737..2b4c00f 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -68,3 +68,4 @@
 $(call add-clean-step, rm -f $(OUT_DIR)/target/product/hammerhead/system/build.prop)
 $(call add-clean-step, rm -f $(OUT_DIR)/target/product/hammerhead/system/build.prop)
 $(call add-clean-step, rm -f $(OUT_DIR)/target/product/hammerhead/system/build.prop)
+$(call add-clean-step, rm -rf $(OUT_DIR)/target/product/hammerhead/system/build.prop)
diff --git a/audio_effects.conf b/audio_effects.conf
index a7df5f7..1fd0461 100644
--- a/audio_effects.conf
+++ b/audio_effects.conf
@@ -27,6 +27,9 @@
   qcom_pre_processing {
     path /system/lib/soundfx/libqcomvoiceprocessing.so
   }
+  loudness_enhancer {
+    path /system/lib/soundfx/libldnhncr.so
+  }
 }
 
 # Default pre-processing library. Add to audio_effect.conf "libraries" section if
@@ -124,6 +127,10 @@
     library downmix
     uuid 93f04452-e4fe-41cc-91f9-e475b6d1d69f
   }
+  loudness_enhancer {
+    library loudness_enhancer
+    uuid fa415329-2034-4bea-b5dc-5b381c8d1e2c
+  }
   aec {
     library qcom_pre_processing
     uuid 1e5c3ea0-1fcf-11e3-9176-0002a5d5c51b
diff --git a/audio_policy.conf b/audio_policy.conf
index 6613925..6bd644d 100644
--- a/audio_policy.conf
+++ b/audio_policy.conf
@@ -83,12 +83,20 @@
         devices AUDIO_DEVICE_OUT_USB_ACCESSORY
       }
       usb_device {
-        sampling_rates 44100
+        sampling_rates dynamic
         channel_masks AUDIO_CHANNEL_OUT_STEREO
-        formats AUDIO_FORMAT_PCM_16_BIT
+        formats dynamic
         devices AUDIO_DEVICE_OUT_USB_DEVICE
       }
     }
+    inputs {
+      usb_device {
+        sampling_rates dynamic
+        channel_masks AUDIO_CHANNEL_IN_STEREO
+        formats AUDIO_FORMAT_PCM_16_BIT
+        devices AUDIO_DEVICE_IN_USB_DEVICE
+      }
+    }
   }
   r_submix {
     outputs {
diff --git a/bcmdhd.cal b/bcmdhd.cal
index 7c72a19..ca814d0 100644
--- a/bcmdhd.cal
+++ b/bcmdhd.cal
@@ -13,7 +13,7 @@
 #boardnum=57410
 macaddr=00:90:4c:c5:12:38
 ccode=XV
-regrev=17
+regrev=16
 antswitch=0
 pdgain2g=7
 pdgain5g=7
diff --git a/bcmdhd.cal_4335 b/bcmdhd.cal_4335
deleted file mode 100644
index 07ecf21..0000000
--- a/bcmdhd.cal_4335
+++ /dev/null
@@ -1,94 +0,0 @@
-# Sample variables file for BCM94335 WLBGA ePA, eLNA board for production package
-NVRAMRev=$Rev: 356590 $
-sromrev=11
-boardrev=0x1216
-boardtype=0x064c
-boardflags=0x10401001
-boardflags2=0x10000000
-boardflags3=0x9
-#boardnum=57410
-macaddr=00:90:4c:c5:12:38
-ccode=0
-regrev=0
-antswitch=0
-pdgain5g=1
-pdgain2g=1
-tworangetssi2g=0
-tworangetssi5g=0
-femctrl=4
-pcieingress_war=15
-vendid=0x14e4
-devid=0x43ae
-manfid=0x2d0
-#prodid=0x052e
-nocrc=1
-otpimagesize=502
-muxenab=0x10
-xtalfreq=37400
-extpagain2g=1
-pdetrange2g=2
-extpagain5g=1
-pdetrange5g=2
-rxgains2gelnagaina0=3
-rxgains2gtrisoa0=6
-rxgains2gtrelnabypa0=1
-rxgains5gelnagaina0=3
-rxgains5gtrisoa0=6
-rxgains5gtrelnabypa0=1
-rxchain=1
-txchain=1
-aa2g=1
-aa5g=1
-tssipos5g=1
-tssipos2g=1
-pa2ga0=-116,5782,-642
-pa5ga0=-121,5511,-643,-68,5896,-638,-52,5850,-608,-24,6117,-620 
-maxp2ga0=76
-maxp5ga0=76,76,76,76
-pdoffset40ma0=0x2222
-pdoffset80ma0=0x2222
-pdoffsetcckma0=1
-cckbw202gpo=0x0000
-cckbw20ul2gpo=0x0000
-mcsbw202gpo=0x88555530
-mcsbw402gpo=0x88555530
-dot11agofdmhrbw202gpo=0x5511
-ofdmlrbw202gpo=0x0000
-mcsbw205glpo=0x88555530
-mcsbw405glpo=0x88555530
-mcsbw805glpo=0x88555530
-mcsbw1605glpo=0x88555530
-mcsbw205gmpo=0x88555530
-mcsbw405gmpo=0x88555530
-mcsbw805gmpo=0x88555530
-mcsbw1605gmpo=0x88555530
-mcsbw205ghpo=0x88555530
-mcsbw405ghpo=0x88555530
-mcsbw805ghpo=0x88555530
-mcsbw1605ghpo=0x88555530
-mcslr5glpo=0x0000
-mcslr5gmpo=0x0000
-mcslr5ghpo=0x0000
-sb20in40hrrpo=0x0
-sb20in80and160hr5glpo=0x0
-sb40and80hr5glpo=0x0
-sb20in80and160hr5gmpo=0x0
-sb40and80hr5gmpo=0x0
-sb20in80and160hr5ghpo=0x0
-sb40and80hr5ghpo=0x0
-sb20in40lrpo=0x0
-sb20in80and160lr5glpo=0x0
-sb40and80lr5glpo=0x0
-sb20in80and160lr5gmpo=0x0
-sb40and80lr5gmpo=0x0
-sb20in80and160lr5ghpo=0x0
-sb40and80lr5ghpo=0x0
-dot11agduphrpo=0x0
-dot11agduplrpo=0x0
-phycal_tempdelta=5
-tssifloor5g=220,213,218,228
-tssifloor2g=245
-femtable2=0x00FF8F08
-femtable1=0x04000300
-femtable0=0x40805010
-cckdigfilttype=1
diff --git a/bcmdhd.cal_4339 b/bcmdhd.cal_4339
deleted file mode 100644
index 60e05d8..0000000
--- a/bcmdhd.cal_4339
+++ /dev/null
@@ -1,139 +0,0 @@
-#LGE GED Rev.B for 4339 iPA/eLNA / AARDVARK_6_30_271
-#2G CCK Filter / 5G EVM SW Ctrl / Targer power / 130605
-
-sromrev=11
-boardrev=0x1106
-boardtype=0x06b6
-boardflags=0x10081001
-boardflags2=0x00000000
-boardflags3=0x08002180
-#boardnum=57410
-macaddr=00:90:4c:c5:12:38
-ccode=ALL
-regrev=0
-antswitch=0
-pdgain2g=7
-pdgain5g=7
-tworangetssi2g=0
-tworangetssi5g=0
-muxenab=0x10
-#sd_gpout=0
-#sd_oobonly=1
-vendid=0x14e4
-devid=0x43ae
-manfid=0x2d0
-#prodid=0x052e
-nocrc=1
-otpimagesize=502
-xtalfreq=37400
-extpagain2g=2
-pdetrange2g=2
-extpagain5g=2
-pdetrange5g=2
-rxgains2gelnagaina0=3
-rxgains2gtrisoa0=5
-rxgains2gtrelnabypa0=1
-rxgains5gelnagaina0=5
-rxgains5gtrisoa0=11
-rxgains5gtrelnabypa0=1
-rxchain=1
-txchain=1
-aa2g=1
-aa5g=1
-ag0=0
-ag0=0
-tssipos5g=0
-tssipos2g=0
-
-#pa2ga0=-161,6269,-723
-#pa2gccka0=-116,7568,-852
-
-pa2ga0=-180,5862,-702
-pa2gccka0=-185,5862,-702
-
-#pa5ga0=0xFF61,0x163C,0xFD55,0xFF5D,0x1671,0xFD4F,0xFF5F,0x16CA,0xFD45,0xFF60,0x1676,0xFD4D
-#pa5gbw40a0=0xFF61,0x163C,0xFD55,0xFF5D,0x1671,0xFD4F,0xFF5F,0x16CA,0xFD45,0xFF60,0x1676,0xFD4D
-#pa5gbw80a0=0xFF61,0x163C,0xFD55,0xFF5D,0x1671,0xFD4F,0xFF5F,0x16CA,0xFD45,0xFF60,0x1676,0xFD4D
-
-pa5ga0=0xFF49,0x15EA,0xFD51,0xFF65,0x16A4,0xFD5F,0xFF51,0x1606,0xFD54,0xFF64,0x1670,0xFD56
-pa5gbw40a0=0xFF4D,0x1676,0xFD49,0xFF45,0x1689,0xFD39,0xFF38,0x15CE,0xFD41,0xFF45,0x1613,0xFD43
-pa5gbw80a0=0xFF51,0x15C6,0xFD61,0xFF53,0x1636,0xFD4F,0xFF5D,0x1635,0xFD60,0xFF4C,0x15B4,0xFD51
-
-pdoffset40ma0=0
-pdoffset80ma0=0
-pdoffsetcckma0=0
-
-# Default Target Power for 2G -
-# 11b: 18dBm
-# 11g: 14dBm(54M,48M)/15dBm(36M,24M,18M,12M)/16dBm(6M,9M)
-# 11n: 13dBm(MCS7)/14dBmMCS(5~3)/16dBm(MCS2~0)
-
-maxp2ga0=78
-
-cckbw202gpo=0x0000
-cckbw20ul2gpo=0x0000
-cckbw20ul2gpo=0x0
-
-ofdmlrbw202gpo=0x8864
-dot11agofdmhrbw202gpo=0x8866
-
-mcsbw202gpo=0xaaaa8884
-mcsbw402gpo=0xaaaa8884
-
-
-# Default Target Power for 5G
-# 11a : 14dBm (54~24M)/16dBm(18~6M)
-# 11n : 13dBm (MCS7) 14dBm(6~3)/16dBm(MCS2~0)
-# 11nHT40 : 13dBm
-# 11ac : 12dBm (MCS8~3)/16dBm(MCS2~0)
-# 11ac HT40 : 12dBm(MCS9~8) 13dBm(MCS7~0)
-# 11ac HT80 : 12dBm(MCS9~8) 13dBm(MCS7~0)
-
-maxp5ga0=74,74,74,74
-
-tssifloor2g=500
-
-#low
-mcsbw205glpo=0xaa866662
-mcsbw405glpo=0xaa888888
-mcsbw805glpo=0xaa888888
-mcsbw1605glpo=0xaa888888
-#mid
-mcsbw205gmpo=0xaa866662
-mcsbw405gmpo=0xaa888888
-mcsbw805gmpo=0xaa888888
-mcsbw1605gmpo=0xaa888888
-#high
-mcsbw205ghpo=0xaa866662
-mcsbw405ghpo=0xaa888888
-mcsbw805ghpo=0xaa888888
-mcsbw1605ghpo=0xaa888888
-
-mcslr5glpo=0x0000
-mcslr5gmpo=0x0000
-mcslr5ghpo=0x0000
-
-sb20in40hrrpo=0x0
-sb20in80and160hr5glpo=0x0
-sb40and80hr5glpo=0x0
-sb20in80and160hr5gmpo=0x0
-sb40and80hr5gmpo=0x0
-sb20in80and160hr5ghpo=0x0
-sb40and80hr5ghpo=0x0
-sb20in40lrpo=0x0
-sb20in80and160lr5glpo=0x0
-sb40and80lr5glpo=0x0
-sb20in80and160lr5gmpo=0x0
-sb40and80lr5gmpo=0x0
-sb20in80and160lr5ghpo=0x0
-sb40and80lr5ghpo=0x0
-dot11agduphrpo=0x0
-dot11agduplrpo=0x0
-phycal_tempdelta=25
-cckdigfilttype=1
-swctrlmap_5g=0x00080008,0x00040000,0x00080008,0x800301,0x00c
-swctrlmap_2g=0x00000000,0x00030001,0x00010000,0x800301,0x0ff
-swctrlmapext_5g=0x00000000,0x00000000,0x00000000,0x000000,0x000
-swctrlmapext_2g=0x00000001,0x00000000,0x00000000,0x000000,0x001
-rssicorrnorm_c0=-3,-2
-rssicorrnorm5g_c0=-1,0,-3,-1,0,-3,-3,-2,-4,-3,-2,-4
diff --git a/bluetooth/bdroid_buildcfg.h b/bluetooth/bdroid_buildcfg.h
old mode 100644
new mode 100755
index 2350476..4983d19
--- a/bluetooth/bdroid_buildcfg.h
+++ b/bluetooth/bdroid_buildcfg.h
@@ -19,4 +19,7 @@
 
 #define BTA_DISABLE_DELAY 100 /* in milliseconds */
 
+#define BTM_WBS_INCLUDED TRUE
+#define BTIF_HF_WBS_PREFERRED TRUE
+
 #endif
diff --git a/camera/Android.mk b/camera/Android.mk
new file mode 100644
index 0000000..8faf8d7
--- /dev/null
+++ b/camera/Android.mk
@@ -0,0 +1,9 @@
+ifeq ($(strip $(USE_DEVICE_SPECIFIC_CAMERA)),true)
+ifneq ($(filter msm8960 msm8226 msm8974,$(TARGET_BOARD_PLATFORM)),)
+  ifneq ($(USE_CAMERA_STUB),true)
+    ifneq ($(BUILD_TINY_ANDROID),true)
+      include $(call all-subdir-makefiles)
+    endif
+  endif
+endif
+endif
diff --git a/camera/CleanSpec.mk b/camera/CleanSpec.mk
new file mode 100644
index 0000000..2d5df0b
--- /dev/null
+++ b/camera/CleanSpec.mk
@@ -0,0 +1,48 @@
+# Copyright (C) 2007 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If you don't need to do a full clean build but would like to touch
+# a file or delete some intermediate files, add a clean step to the end
+# of the list.  These steps will only be run once, if they haven't been
+# run before.
+#
+# E.g.:
+#     $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
+#     $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
+#
+# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
+# files that are missing or have been moved.
+#
+# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
+# Use $(OUT_DIR) to refer to the "out" directory.
+#
+# If you need to re-do something that's already mentioned, just copy
+# the command and add it to the bottom of the list.  E.g., if a change
+# that you made last week required touching a file and a change you
+# made today requires touching the same file, just copy the old
+# touch step and add it to the end of the list.
+#
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
+
+# For example:
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
+#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
+#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+
+$(call add-clean-step, find $(OUT_DIR) -name "camera.msm8960*" -print0 | xargs -0 rm -rf)
+$(call add-clean-step, rm -f $(PRODUCT_OUT)/system/lib/hw/camera.*.so)
diff --git a/camera/MODULE_LICENSE_BSD b/camera/MODULE_LICENSE_BSD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/camera/MODULE_LICENSE_BSD
diff --git a/camera/QCamera2/Android.mk b/camera/QCamera2/Android.mk
new file mode 100644
index 0000000..c36c538
--- /dev/null
+++ b/camera/QCamera2/Android.mk
@@ -0,0 +1,3 @@
+ifeq ($(TARGET_ARCH),arm)
+  include $(call all-subdir-makefiles)
+endif
diff --git a/camera/QCamera2/HAL/Android.mk b/camera/QCamera2/HAL/Android.mk
new file mode 100644
index 0000000..06b4824
--- /dev/null
+++ b/camera/QCamera2/HAL/Android.mk
@@ -0,0 +1,48 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+        QCamera2Factory.cpp \
+        QCamera2Hal.cpp \
+        QCamera2HWI.cpp \
+        QCameraMem.cpp \
+        ../util/QCameraQueue.cpp \
+        ../util/QCameraCmdThread.cpp \
+        QCameraStateMachine.cpp \
+        QCameraChannel.cpp \
+        QCameraStream.cpp \
+        QCameraPostProc.cpp \
+        QCamera2HWICallbacks.cpp \
+        QCameraParameters.cpp \
+        QCameraThermalAdapter.cpp
+
+LOCAL_CFLAGS = -Wall -Werror -DDEFAULT_ZSL_MODE_ON -DDEFAULT_DENOISE_MODE_ON
+# Debug logs are enabled by default; uncomment the next line to disable them
+#LOCAL_CFLAGS += -DDISABLE_DEBUG_LOG
+
+LOCAL_C_INCLUDES := \
+        $(LOCAL_PATH)/../stack/common \
+        frameworks/native/include/media/hardware \
+        frameworks/native/include/media/openmax \
+        hardware/qcom/media/libstagefrighthw \
+        system/media/camera/include \
+        $(LOCAL_PATH)/../../mm-image-codec/qexif \
+        $(LOCAL_PATH)/../../mm-image-codec/qomx_core \
+        $(LOCAL_PATH)/../util
+
+LOCAL_C_INCLUDES += \
+        hardware/qcom/display/msm8974/libgralloc
+
+LOCAL_SHARED_LIBRARIES := libcamera_client liblog libhardware libutils libcutils libdl
+LOCAL_SHARED_LIBRARIES += libmmcamera_interface libmmjpeg_interface
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM)
+#LOCAL_MODULE := camera.$(TARGET_DEVICE)
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+#include $(LOCAL_PATH)/test/Android.mk
+
diff --git a/camera/QCamera2/HAL/QCamera2Factory.cpp b/camera/QCamera2/HAL/QCamera2Factory.cpp
new file mode 100644
index 0000000..4408cce
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2Factory.cpp
@@ -0,0 +1,208 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCamera2Factory"
+
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include <hardware/camera.h>
+
+#include "QCamera2Factory.h"
+
+namespace qcamera {
+
+QCamera2Factory gQCamera2Factory;
+
+/*===========================================================================
+ * FUNCTION   : QCamera2Factory
+ *
+ * DESCRIPTION: default constructor of QCamera2Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera2Factory::QCamera2Factory()
+{
+    mNumOfCameras = get_num_of_cameras();
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera2Factory
+ *
+ * DESCRIPTION: destructor of QCamera2Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera2Factory::~QCamera2Factory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : get_number_of_cameras
+ *
+ * DESCRIPTION: static function to query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera2Factory::get_number_of_cameras()
+{
+    return gQCamera2Factory.getNumberOfCameras();
+}
+
+/*===========================================================================
+ * FUNCTION   : get_camera_info
+ *
+ * DESCRIPTION: static function to query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::get_camera_info(int camera_id, struct camera_info *info)
+{
+    return gQCamera2Factory.getCameraInfo(camera_id, info);
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera2Factory::getNumberOfCameras()
+{
+    return mNumOfCameras;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCameraInfo
+ *
+ * DESCRIPTION: query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::getCameraInfo(int camera_id, struct camera_info *info)
+{
+    int rc;
+    ALOGE("%s: E, camera_id = %d", __func__, camera_id);
+
+    if (!mNumOfCameras || camera_id >= mNumOfCameras || !info) {
+        return INVALID_OPERATION;
+    }
+
+    rc = QCamera2HardwareInterface::getCapabilities(camera_id, info);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cameraDeviceOpen
+ *
+ * DESCRIPTION: open a camera device with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::cameraDeviceOpen(int camera_id,
+                    struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    if (camera_id < 0 || camera_id >= mNumOfCameras)
+        return BAD_VALUE;
+
+    QCamera2HardwareInterface *hw = new QCamera2HardwareInterface(camera_id);
+    if (!hw) {
+        ALOGE("Allocation of hardware interface failed");
+        return NO_MEMORY;
+    }
+    rc = hw->openCamera(hw_device);
+    if (rc != NO_ERROR) {
+        delete hw;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : camera_device_open
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2Factory::camera_device_open(
+    const struct hw_module_t *module, const char *id,
+    struct hw_device_t **hw_device)
+{
+    if (module != &HAL_MODULE_INFO_SYM.common) {
+        ALOGE("Invalid module. Trying to open %p, expect %p",
+            module, &HAL_MODULE_INFO_SYM.common);
+        return INVALID_OPERATION;
+    }
+    if (!id) {
+        ALOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+    return gQCamera2Factory.cameraDeviceOpen(atoi(id), hw_device);
+}
+
+struct hw_module_methods_t QCamera2Factory::mModuleMethods = {
+    open: QCamera2Factory::camera_device_open,
+};
+
+}; // namespace qcamera
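Note: the camera_module_t definition that exposes these factory hooks to the HAL loader is not part of this excerpt; QCamera2Hal.cpp (listed in the HAL Android.mk above) presumably provides it, and QCamera2Factory.h below declares it extern. A minimal sketch of what it likely looks like, assuming camera module API v1.0 and the same old-style field-label initializers this file already uses:

    // Hypothetical sketch (not from this patch): the module symbol that
    // hw_get_module() resolves, wiring the HAL loader to QCamera2Factory.
    #include <hardware/camera.h>
    #include "QCamera2Factory.h"

    camera_module_t HAL_MODULE_INFO_SYM = {
        common: {
            tag:                HARDWARE_MODULE_TAG,
            module_api_version: CAMERA_MODULE_API_VERSION_1_0,
            hal_api_version:    HARDWARE_HAL_API_VERSION,
            id:                 CAMERA_HARDWARE_MODULE_ID,
            name:               "QCamera2 HAL module",      // assumed name
            author:             "Qualcomm Innovation Center, Inc.",
            methods:            &qcamera::QCamera2Factory::mModuleMethods,
            dso:                NULL,
            reserved:           {0},
        },
        get_number_of_cameras: qcamera::QCamera2Factory::get_number_of_cameras,
        get_camera_info:       qcamera::QCamera2Factory::get_camera_info,
    };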
diff --git a/camera/QCamera2/HAL/QCamera2Factory.h b/camera/QCamera2/HAL/QCamera2Factory.h
new file mode 100644
index 0000000..3504bbd
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2Factory.h
@@ -0,0 +1,68 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2FACTORY_H__
+#define __QCAMERA2FACTORY_H__
+
+#include <hardware/camera.h>
+#include <system/camera.h>
+#include <media/msmb_camera.h>
+
+#include "QCamera2HWI.h"
+
+namespace qcamera {
+
+class QCamera2Factory
+{
+public:
+    QCamera2Factory();
+    virtual ~QCamera2Factory();
+
+    static int get_number_of_cameras();
+    static int get_camera_info(int camera_id, struct camera_info *info);
+
+private:
+    int getNumberOfCameras();
+    int getCameraInfo(int camera_id, struct camera_info *info);
+    int cameraDeviceOpen(int camera_id, struct hw_device_t **hw_device);
+    static int camera_device_open(const struct hw_module_t *module, const char *id,
+                struct hw_device_t **hw_device);
+
+public:
+    static struct hw_module_methods_t mModuleMethods;
+
+private:
+    int mNumOfCameras;
+};
+
+}; /*namespace qcamera*/
+
+extern camera_module_t HAL_MODULE_INFO_SYM;
+
+#endif /* __QCAMERA2FACTORY_H__ */
diff --git a/camera/QCamera2/HAL/QCamera2HWI.cpp b/camera/QCamera2/HAL/QCamera2HWI.cpp
new file mode 100644
index 0000000..9579365
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2HWI.cpp
@@ -0,0 +1,4225 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+
+#include <cutils/properties.h>
+#include <hardware/camera.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include <gralloc_priv.h>
+
+#include "QCamera2HWI.h"
+#include "QCameraMem.h"
+
+#define MAP_TO_DRIVER_COORDINATE(val, base, scale, offset) (val * scale / base + offset)
+#define CAMERA_MIN_STREAMING_BUFFERS     3
+#define CAMERA_MIN_JPEG_ENCODING_BUFFERS 2
+#define CAMERA_MIN_VIDEO_BUFFERS         9
+
+namespace qcamera {
+
+cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
+static pthread_mutex_t g_camlock = PTHREAD_MUTEX_INITIALIZER;
+
+camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
+    set_preview_window:         QCamera2HardwareInterface::set_preview_window,
+    set_callbacks:              QCamera2HardwareInterface::set_CallBacks,
+    enable_msg_type:            QCamera2HardwareInterface::enable_msg_type,
+    disable_msg_type:           QCamera2HardwareInterface::disable_msg_type,
+    msg_type_enabled:           QCamera2HardwareInterface::msg_type_enabled,
+
+    start_preview:              QCamera2HardwareInterface::start_preview,
+    stop_preview:               QCamera2HardwareInterface::stop_preview,
+    preview_enabled:            QCamera2HardwareInterface::preview_enabled,
+    store_meta_data_in_buffers: QCamera2HardwareInterface::store_meta_data_in_buffers,
+
+    start_recording:            QCamera2HardwareInterface::start_recording,
+    stop_recording:             QCamera2HardwareInterface::stop_recording,
+    recording_enabled:          QCamera2HardwareInterface::recording_enabled,
+    release_recording_frame:    QCamera2HardwareInterface::release_recording_frame,
+
+    auto_focus:                 QCamera2HardwareInterface::auto_focus,
+    cancel_auto_focus:          QCamera2HardwareInterface::cancel_auto_focus,
+
+    take_picture:               QCamera2HardwareInterface::take_picture,
+    cancel_picture:             QCamera2HardwareInterface::cancel_picture,
+
+    set_parameters:             QCamera2HardwareInterface::set_parameters,
+    get_parameters:             QCamera2HardwareInterface::get_parameters,
+    put_parameters:             QCamera2HardwareInterface::put_parameters,
+    send_command:               QCamera2HardwareInterface::send_command,
+
+    release:                    QCamera2HardwareInterface::release,
+    dump:                       QCamera2HardwareInterface::dump,
+};
+
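Every entry in the ops table above is a thin static wrapper that recovers the QCamera2HardwareInterface instance from device->priv and funnels the call through the state machine. A hedged illustration (not taken from this patch; error handling trimmed, "0" used as the camera id) of how a native caller reaches this table through the HAL loader:

    // Hypothetical usage sketch: open the camera module by its well-known id
    // and dispatch through the ops registered in mCameraOps above.
    #include <hardware/hardware.h>
    #include <hardware/camera.h>

    static int open_camera0(camera_device_t **dev)
    {
        const hw_module_t *module = NULL;
        int rc = hw_get_module(CAMERA_HARDWARE_MODULE_ID, &module);
        if (rc != 0)
            return rc;

        // Lands in QCamera2Factory::camera_device_open(module, "0", ...),
        // which allocates a QCamera2HardwareInterface and publishes
        // mCameraOps through (*dev)->ops.
        rc = module->methods->open(module, "0",
                                   reinterpret_cast<hw_device_t **>(dev));
        if (rc != 0)
            return rc;

        // From here on, framework calls such as start_preview() or
        // take_picture() go through the static wrappers in this table.
        return (*dev)->ops->set_preview_window(*dev, NULL);
    }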
+/*===========================================================================
+ * FUNCTION   : set_preview_window
+ *
+ * DESCRIPTION: set preview window.
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @window  : window ops table
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
+        struct preview_stream_ops *window)
+{
+    int rc = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return BAD_VALUE;
+    }
+
+    hw->lockAPI();
+    rc = hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW);
+        rc = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_CallBacks
+ *
+ * DESCRIPTION: set callbacks for notify and data
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @notify_cb  : notify cb
+ *   @data_cb    : data cb
+ *   @data_cb_timestamp  : video data cb with timestamp
+ *   @get_memory : ops table for requesting gralloc memory
+ *   @user       : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::set_CallBacks(struct camera_device *device,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+
+    qcamera_sm_evt_setcb_payload_t payload;
+    payload.notify_cb = notify_cb;
+    payload.data_cb = data_cb;
+    payload.data_cb_timestamp = data_cb_timestamp;
+    payload.get_memory = get_memory;
+    payload.user = user;
+
+    hw->lockAPI();
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_SET_CALLBACKS, (void *)&payload);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_CALLBACKS);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : enable_msg_type
+ *
+ * DESCRIPTION: enable certain msg type
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::enable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, (void *)msg_type);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_ENABLE_MSG_TYPE);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : disable_msg_type
+ *
+ * DESCRIPTION: disable certain msg type
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::disable_msg_type(struct camera_device *device, int32_t msg_type)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, (void *)msg_type);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_DISABLE_MSG_TYPE);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : msg_type_enabled
+ *
+ * DESCRIPTION: if certain msg type is enabled
+ *
+ * PARAMETERS :
+ *   @device     : ptr to camera device struct
+ *   @msg_type   : msg type mask
+ *
+ * RETURN     : 1 -- enabled
+ *              0 -- not enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msg_type_enabled(struct camera_device *device, int32_t msg_type)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, (void *)msg_type);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_MSG_TYPE_ENABLED);
+        ret = hw->m_apiResult.enabled;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_preview
+ *
+ * DESCRIPTION: start preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_preview(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    ALOGD("[KPI Perf] %s: E", __func__);
+    hw->lockAPI();
+    qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
+    if (hw->isNoDisplayMode()) {
+        evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
+    }
+    ret = hw->processAPI(evt, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(evt);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+    ALOGD("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_preview
+ *
+ * DESCRIPTION: stop preview
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_preview(struct camera_device *device)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    ALOGD("[KPI Perf] %s: E", __func__);
+    hw->lockAPI();
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_PREVIEW, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_PREVIEW);
+    }
+    hw->unlockAPI();
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_enabled
+ *
+ * DESCRIPTION: if preview is running
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : 1 -- running
+ *              0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::preview_enabled(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_PREVIEW_ENABLED, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PREVIEW_ENABLED);
+        ret = hw->m_apiResult.enabled;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : store_meta_data_in_buffers
+ *
+ * DESCRIPTION: whether to store meta data in buffers for video frames
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @enable  : flag if enable
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::store_meta_data_in_buffers(
+                struct camera_device *device, int enable)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, (void *)enable);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : start_recording
+ *
+ * DESCRIPTION: start recording
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::start_recording(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    ALOGD("[KPI Perf] %s: E", __func__);
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_START_RECORDING, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_START_RECORDING);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+    ALOGD("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop_recording
+ *
+ * DESCRIPTION: stop recording
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::stop_recording(struct camera_device *device)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    ALOGD("[KPI Perf] %s: E", __func__);
+    hw->lockAPI();
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_RECORDING, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_RECORDING);
+    }
+    hw->unlockAPI();
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : recording_enabled
+ *
+ * DESCRIPTION: if recording is running
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : 1 -- running
+ *              0 -- not running
+ *==========================================================================*/
+int QCamera2HardwareInterface::recording_enabled(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_RECORDING_ENABLED, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RECORDING_ENABLED);
+        ret = hw->m_apiResult.enabled;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : release_recording_frame
+ *
+ * DESCRIPTION: return recording frame back
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @opaque  : ptr to frame to be returned
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release_recording_frame(
+            struct camera_device *device, const void *opaque)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    ALOGD("%s: E", __func__);
+    hw->lockAPI();
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME);
+    }
+    hw->unlockAPI();
+    ALOGD("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : auto_focus
+ *
+ * DESCRIPTION: start auto focus
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::auto_focus(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    ALOGD("[KPI Perf] %s : E", __func__);
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_START_AUTO_FOCUS, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_START_AUTO_FOCUS);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+    ALOGD("[KPI Perf] %s : X", __func__);
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_auto_focus(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_STOP_AUTO_FOCUS);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : take_picture
+ *
+ * DESCRIPTION: take picture
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::take_picture(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    ALOGD("[KPI Perf] %s: E", __func__);
+    hw->lockAPI();
+
+    /* Prepare snapshot in case LED needs to be flashed */
+    ret = hw->processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT);
+        ret = hw->m_apiResult.status;
+    }
+
+    /* Regardless of the result of prepare_snapshot,
+     * go ahead with the capture anyway, just as autofocus
+     * is handled in the capture case. */
+
+    /* capture */
+    ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE);
+        ret = hw->m_apiResult.status;
+    }
+
+    hw->unlockAPI();
+    ALOGD("[KPI Perf] %s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancel_picture
+ *
+ * DESCRIPTION: cancel current take picture request
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancel_picture(struct camera_device *device)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_CANCEL_PICTURE);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : set_parameters
+ *
+ * DESCRIPTION: set camera parameters
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @parms   : string of packed parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::set_parameters(struct camera_device *device,
+                                              const char *parms)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS, (void *)parms);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_parameters
+ *
+ * DESCRIPTION: query camera parameters
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : packed parameters in a string
+ *==========================================================================*/
+char* QCamera2HardwareInterface::get_parameters(struct camera_device *device)
+{
+    char *ret = NULL;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return NULL;
+    }
+    hw->lockAPI();
+    int32_t rc = hw->processAPI(QCAMERA_SM_EVT_GET_PARAMS, NULL);
+    if (rc == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_GET_PARAMS);
+        ret = hw->m_apiResult.params;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : put_parameters
+ *
+ * DESCRIPTION: return camera parameters string back to HAL
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @parm    : ptr to parameter string to be returned
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::put_parameters(struct camera_device *device,
+                                               char *parm)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_PUT_PARAMS, (void *)parm);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_PUT_PARAMS);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : send_command
+ *
+ * DESCRIPTION: command to be executed
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @cmd     : cmd to be executed
+ *   @arg1    : ptr to optional argument1
+ *   @arg2    : ptr to optional argument2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::send_command(struct camera_device *device,
+                                            int32_t cmd,
+                                            int32_t arg1,
+                                            int32_t arg2)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+
+    qcamera_sm_evt_command_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
+    payload.cmd = cmd;
+    payload.arg1 = arg1;
+    payload.arg2 = arg2;
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: release camera resource
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::release(struct camera_device *device)
+{
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return;
+    }
+    hw->lockAPI();
+    int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE, NULL);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE);
+    }
+    hw->unlockAPI();
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: dump camera status
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @fd      : fd for status to be dumped to
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(struct camera_device *device, int fd)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_DUMP, (void *)fd);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_DUMP);
+        ret = hw->m_apiResult.status;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : close_camera_device
+ *
+ * DESCRIPTION: close camera device
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::close_camera_device(hw_device_t *hw_dev)
+{
+    int ret = NO_ERROR;
+    ALOGD("[KPI Perf] %s: E",__func__);
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(
+            reinterpret_cast<camera_device_t *>(hw_dev)->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return BAD_VALUE;
+    }
+    delete hw;
+    ALOGD("[KPI Perf] %s: X",__func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : register_face_image
+ *
+ * DESCRIPTION: register a face image into the imaging lib for face authentication /
+ *              face recognition
+ *
+ * PARAMETERS :
+ *   @device  : ptr to camera device struct
+ *   @img_ptr : ptr to image buffer
+ *   @config  : ptr to config about the input image, i.e., format, dimension, etc.
+ *
+ * RETURN     : >=0 unique ID of face registered.
+ *              <0  failure.
+ *==========================================================================*/
+int QCamera2HardwareInterface::register_face_image(struct camera_device *device,
+                                                   void *img_ptr,
+                                                   cam_pp_offline_src_config_t *config)
+{
+    int ret = NO_ERROR;
+    QCamera2HardwareInterface *hw =
+        reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    qcamera_sm_evt_reg_face_payload_t payload;
+    memset(&payload, 0, sizeof(qcamera_sm_evt_reg_face_payload_t));
+    payload.img_ptr = img_ptr;
+    payload.config = config;
+    hw->lockAPI();
+    ret = hw->processAPI(QCAMERA_SM_EVT_REG_FACE_IMAGE, (void *)&payload);
+    if (ret == NO_ERROR) {
+        hw->waitAPIResult(QCAMERA_SM_EVT_REG_FACE_IMAGE);
+        ret = hw->m_apiResult.handle;
+    }
+    hw->unlockAPI();
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera2HardwareInterface
+ *
+ * DESCRIPTION: constructor of QCamera2HardwareInterface
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera ID
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera2HardwareInterface::QCamera2HardwareInterface(int cameraId)
+    : mCameraId(cameraId),
+      mCameraHandle(NULL),
+      mCameraOpened(false),
+      mPreviewWindow(NULL),
+      mMsgEnabled(0),
+      mStoreMetaDataInFrame(0),
+      m_stateMachine(this),
+      m_postprocessor(this),
+      m_thermalAdapter(QCameraThermalAdapter::getInstance()),
+      m_cbNotifier(this),
+      m_bShutterSoundPlayed(false),
+      m_currentFocusState(CAM_AF_NOT_FOCUSED),
+      m_bStartZSLSnapshotCalled(false),
+      m_pPowerModule(NULL),
+      mDumpFrmCnt(0),
+      mDumpSkipCnt(0)
+{
+    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
+    mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
+    mCameraDevice.common.close = close_camera_device;
+    mCameraDevice.ops = &mCameraOps;
+    mCameraDevice.priv = this;
+
+
+    pthread_mutex_init(&m_lock, NULL);
+    pthread_cond_init(&m_cond, NULL);
+    memset(&m_apiResult, 0, sizeof(qcamera_api_result_t));
+
+    pthread_mutex_init(&m_evtLock, NULL);
+    pthread_cond_init(&m_evtCond, NULL);
+    memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+
+    memset(m_channels, 0, sizeof(m_channels));
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
+        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
+    }
+#endif
+
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera2HardwareInterface
+ *
+ * DESCRIPTION: destructor of QCamera2HardwareInterface
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera2HardwareInterface::~QCamera2HardwareInterface()
+{
+    closeCamera();
+    pthread_mutex_destroy(&m_lock);
+    pthread_cond_destroy(&m_cond);
+    pthread_mutex_destroy(&m_evtLock);
+    pthread_cond_destroy(&m_evtCond);
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS :
+ *   @hw_device  : double ptr for camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    if (mCameraOpened) {
+        *hw_device = NULL;
+        return PERMISSION_DENIED;
+    }
+
+    rc = openCamera();
+    if (rc == NO_ERROR)
+        *hw_device = &mCameraDevice.common;
+    else
+        *hw_device = NULL;
+    return rc;
+}
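+
+// Usage sketch (illustrative only, assuming a typical camera HAL module open()
+// hook that is not part of this file):
+//   QCamera2HardwareInterface *hw = new QCamera2HardwareInterface(cameraId);
+//   hw_device_t *dev = NULL;
+//   if (hw->openCamera(&dev) != NO_ERROR) { delete hw; }
+//   // on success, dev points at the embedded camera_device_t's hw_device_t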
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::openCamera()
+{
+    if (mCameraHandle) {
+        ALOGE("Failure: Camera already opened");
+        return ALREADY_EXISTS;
+    }
+    mCameraHandle = camera_open(mCameraId);
+    if (!mCameraHandle) {
+        ALOGE("camera_open failed.");
+        return UNKNOWN_ERROR;
+    }
+
+    mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
+                                              camEvtHandle,
+                                              (void *) this);
+
+    int32_t rc = m_postprocessor.init(jpegEvtHandle, this);
+    if (rc != 0) {
+        ALOGE("Init Postprocessor failed");
+        return UNKNOWN_ERROR;
+    }
+
+    // update padding info from jpeg: keep the larger of the sensor padding and the
+    // JPEG encoder's padding requirement so that allocated buffers satisfy both
+    cam_padding_info_t padding_info;
+    m_postprocessor.getJpegPaddingReq(padding_info);
+    if (gCamCapability[mCameraId]->padding_info.width_padding < padding_info.width_padding) {
+        gCamCapability[mCameraId]->padding_info.width_padding = padding_info.width_padding;
+    }
+    if (gCamCapability[mCameraId]->padding_info.height_padding < padding_info.height_padding) {
+        gCamCapability[mCameraId]->padding_info.height_padding = padding_info.height_padding;
+    }
+    if (gCamCapability[mCameraId]->padding_info.plane_padding < padding_info.plane_padding) {
+        gCamCapability[mCameraId]->padding_info.plane_padding = padding_info.plane_padding;
+    }
+
+    mParameters.init(gCamCapability[mCameraId], mCameraHandle);
+
+    rc = m_thermalAdapter.init(this);
+    if (rc != 0) {
+        ALOGE("Init thermal adapter failed");
+    }
+
+    mCameraOpened = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: close camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::closeCamera()
+{
+    int rc = NO_ERROR;
+    int i;
+
+    if (mCameraHandle == NULL) {
+        // camera was never opened (or is already closed); nothing to tear down
+        return NO_ERROR;
+    }
+
+    // deinit Parameters
+    mParameters.deinit();
+
+    // stop and deinit postprocessor
+    m_postprocessor.stop();
+    m_postprocessor.deinit();
+
+    m_thermalAdapter.deinit();
+
+    // delete all channels if not already deleted
+    for (i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            m_channels[i]->stop();
+            delete m_channels[i];
+            m_channels[i] = NULL;
+        }
+    }
+
+    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+    mCameraHandle = NULL;
+    mCameraOpened = false;
+
+    return rc;
+}
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+
+/*===========================================================================
+ * FUNCTION   : initCapabilities
+ *
+ * DESCRIPTION: initialize camera capabilities in static data struct
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::initCapabilities(int cameraId)
+{
+    int rc = NO_ERROR;
+    mm_camera_vtbl_t *cameraHandle = NULL;
+    QCameraHeapMemory *capabilityHeap = NULL;
+
+    cameraHandle = camera_open(cameraId);
+    if (!cameraHandle) {
+        ALOGE("%s: camera_open failed", __func__);
+        rc = UNKNOWN_ERROR;
+        goto open_failed;
+    }
+
+    /* Allocate memory for capability buffer */
+    capabilityHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t));
+    if(rc != OK) {
+        ALOGE("%s: No memory for cappability", __func__);
+        goto allocate_failed;
+    }
+
+    /* Map memory for capability buffer */
+    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
+    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
+                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
+                                capabilityHeap->getFd(0),
+                                sizeof(cam_capability_t));
+    if(rc < 0) {
+        ALOGE("%s: failed to map capability buffer", __func__);
+        goto map_failed;
+    }
+
+    /* Query Capability */
+    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
+    if(rc < 0) {
+        ALOGE("%s: failed to query capability",__func__);
+        goto query_failed;
+    }
+    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
+    if (!gCamCapability[cameraId]) {
+        ALOGE("%s: out of memory", __func__);
+        goto query_failed;
+    }
+    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
+                                        sizeof(cam_capability_t));
+
+    rc = NO_ERROR;
+
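+    // The labels below fall through in reverse order of acquisition, so the
+    // temporary capability mapping, heap buffer and camera handle used only for
+    // this query are released on both the success and the error paths.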
+query_failed:
+    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
+                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
+map_failed:
+    capabilityHeap->deallocate();
+    delete capabilityHeap;
+allocate_failed:
+    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
+    cameraHandle = NULL;
+open_failed:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCapabilities
+ *
+ * DESCRIPTION: query camera capabilities
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *   @info      : camera info struct to be filled in with camera capabilities
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::getCapabilities(int cameraId,
+                                    struct camera_info *info)
+{
+    int rc = NO_ERROR;
+
+    pthread_mutex_lock(&g_camlock);
+    if (NULL == gCamCapability[cameraId]) {
+        rc = initCapabilities(cameraId);
+        if (rc < 0) {
+            pthread_mutex_unlock(&g_camlock);
+            return rc;
+        }
+    }
+
+    switch(gCamCapability[cameraId]->position) {
+    case CAM_POSITION_BACK:
+        info->facing = CAMERA_FACING_BACK;
+        break;
+
+    case CAM_POSITION_FRONT:
+        info->facing = CAMERA_FACING_FRONT;
+        break;
+
+    default:
+        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
+        rc = BAD_VALUE;
+        break;
+    }
+
+    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
+    pthread_mutex_unlock(&g_camlock);
+    return rc;
+}
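+
+// g_camlock serializes the lazy, one-time fill of gCamCapability[cameraId] above,
+// so concurrent callers trigger at most one initCapabilities() query per camera.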
+
+/*===========================================================================
+ * FUNCTION   : getBufNumRequired
+ *
+ * DESCRIPTION: return number of stream buffers needed for given stream type
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *
+ * RETURN     : number of buffers needed
+ *==========================================================================*/
+uint8_t QCamera2HardwareInterface::getBufNumRequired(cam_stream_type_t stream_type)
+{
+    int bufferCnt = 0;
+    int minCaptureBuffers = mParameters.getNumOfSnapshots();
+
+    int zslQBuffers = mParameters.getZSLQueueDepth() +
+                      mParameters.getMaxUnmatchedFramesInQueue();
+
+    int minCircularBufNum = CAMERA_MIN_STREAMING_BUFFERS +
+                            CAMERA_MIN_JPEG_ENCODING_BUFFERS +
+                            mParameters.getMaxUnmatchedFramesInQueue() +
+                            mParameters.getNumOfHDRBufsIfNeeded();
+
+    // Get buffer count for the particular stream type
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        {
+            if (mParameters.isZSLMode()) {
+                bufferCnt = zslQBuffers + minCircularBufNum;
+            } else {
+                bufferCnt = CAMERA_MIN_STREAMING_BUFFERS +
+                            mParameters.getMaxUnmatchedFramesInQueue();
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        mParameters.getNumOfExtraHDRBufsIfNeeded() +
+                        CAMERA_MIN_STREAMING_BUFFERS;
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+        {
+            if (mParameters.isZSLMode()) {
+                bufferCnt = zslQBuffers + minCircularBufNum;
+            } else {
+                bufferCnt = minCaptureBuffers +
+                            mParameters.getMaxUnmatchedFramesInQueue() +
+                            mParameters.getNumOfExtraHDRBufsIfNeeded() +
+                            CAMERA_MIN_STREAMING_BUFFERS;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        if (mParameters.isZSLMode()) {
+            bufferCnt = zslQBuffers + CAMERA_MIN_STREAMING_BUFFERS;
+        } else {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        mParameters.getNumOfExtraHDRBufsIfNeeded() +
+                        CAMERA_MIN_STREAMING_BUFFERS;
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        {
+            bufferCnt = CAMERA_MIN_VIDEO_BUFFERS +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        CAMERA_MIN_STREAMING_BUFFERS;
+        }
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getMaxUnmatchedFramesInQueue() +
+                        mParameters.getNumOfExtraHDRBufsIfNeeded() +
+                        CAMERA_MIN_STREAMING_BUFFERS;
+            if (bufferCnt < zslQBuffers + minCircularBufNum) {
+                bufferCnt = zslQBuffers + minCircularBufNum;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        {
+            bufferCnt = minCaptureBuffers +
+                        mParameters.getMaxUnmatchedFramesInQueue();
+            if (bufferCnt < CAMERA_MIN_STREAMING_BUFFERS) {
+                bufferCnt = CAMERA_MIN_STREAMING_BUFFERS;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        bufferCnt = 0;
+        break;
+    }
+
+    return bufferCnt;
+}
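+
+// Worked example (illustrative numbers only -- the real values come from
+// mParameters and the CAMERA_MIN_* constants): with a ZSL queue depth of 2,
+// 3 max unmatched frames, no HDR buffers, and CAMERA_MIN_STREAMING_BUFFERS /
+// CAMERA_MIN_JPEG_ENCODING_BUFFERS of 3 / 2, a ZSL preview stream would ask for
+//   zslQBuffers + minCircularBufNum = (2 + 3) + (3 + 2 + 3 + 0) = 13 buffers.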
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamBuf
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *   @size         : size of buffer
+ *   @bufferCnt    : [IN/OUT] minimum num of buffers to be allocated.
+ *                   could be modified during allocation if more buffers needed
+ *
+ * RETURN     : ptr to a memory obj that holds stream buffers.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraMemory *QCamera2HardwareInterface::allocateStreamBuf(cam_stream_type_t stream_type,
+                                                            int size,
+                                                            uint8_t &bufferCnt)
+{
+    int rc = NO_ERROR;
+    QCameraMemory *mem = NULL;
+    bool bCachedMem = QCAMERA_ION_USE_CACHE;
+
+    // Allocate stream buffer memory object
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        {
+            if (isNoDisplayMode()) {
+                mem = new QCameraStreamMemory(mGetMemory, bCachedMem);
+            } else {
+                cam_dimension_t dim;
+                QCameraGrallocMemory *grallocMemory =
+                    new QCameraGrallocMemory(mGetMemory);
+
+                mParameters.getStreamDimension(stream_type, dim);
+                if (grallocMemory)
+                    grallocMemory->setWindowInfo(mPreviewWindow, dim.width, dim.height,
+                            mParameters.getPreviewHalPixelFormat());
+                mem = grallocMemory;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        {
+            cam_dimension_t dim;
+            QCameraGrallocMemory *grallocMemory =
+                new QCameraGrallocMemory(mGetMemory);
+
+            mParameters.getStreamDimension(stream_type, dim);
+            if (grallocMemory)
+                grallocMemory->setWindowInfo(mPreviewWindow, dim.width, dim.height,
+                        mParameters.getPreviewHalPixelFormat());
+            mem = grallocMemory;
+        }
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        mem = new QCameraStreamMemory(mGetMemory, bCachedMem);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        {
+            char value[PROPERTY_VALUE_MAX];
+            property_get("persist.camera.mem.usecache", value, "1");
+            if (atoi(value) == 0) {
+                bCachedMem = QCAMERA_ION_USE_NOCACHE;
+            }
+            ALOGD("%s: vidoe buf using cached memory = %d", __func__, bCachedMem);
+            mem = new QCameraVideoMemory(mGetMemory, bCachedMem);
+        }
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    case CAM_STREAM_TYPE_MAX:
+    default:
+        break;
+    }
+    if (!mem) {
+        return NULL;
+    }
+
+    if (bufferCnt > 0) {
+        rc = mem->allocate(bufferCnt, size);
+        if (rc < 0) {
+            delete mem;
+            return NULL;
+        }
+        bufferCnt = mem->getCnt();
+    }
+    return mem;
+}
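+
+// Call pattern sketch (caller side, assumed): bufferCnt is in/out, so a stream
+// would typically do
+//   uint8_t cnt = getBufNumRequired(CAM_STREAM_TYPE_PREVIEW);
+//   QCameraMemory *mem = allocateStreamBuf(CAM_STREAM_TYPE_PREVIEW, frameLen, cnt);
+// and read cnt back for the number of buffers actually obtained
+// (frameLen here stands in for the stream's real frame length).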
+
+/*===========================================================================
+ * FUNCTION   : allocateStreamInfoBuf
+ *
+ * DESCRIPTION: allocate stream info buffer
+ *
+ * PARAMETERS :
+ *   @stream_type  : type of stream
+ *
+ * RETURN     : ptr to a memory obj that holds stream info buffer.
+ *              NULL if failed
+ *==========================================================================*/
+QCameraHeapMemory *QCamera2HardwareInterface::allocateStreamInfoBuf(
+    cam_stream_type_t stream_type)
+{
+    int rc = NO_ERROR;
+
+    QCameraHeapMemory *streamInfoBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (!streamInfoBuf) {
+        ALOGE("allocateStreamInfoBuf: Unable to allocate streamInfo object");
+        return NULL;
+    }
+
+    rc = streamInfoBuf->allocate(1, sizeof(cam_stream_info_t));
+    if (rc < 0) {
+        ALOGE("allocateStreamInfoBuf: Failed to allocate stream info memory");
+        delete streamInfoBuf;
+        return NULL;
+    }
+
+    cam_stream_info_t *streamInfo = (cam_stream_info_t *)streamInfoBuf->getPtr(0);
+    memset(streamInfo, 0, sizeof(cam_stream_info_t));
+    streamInfo->stream_type = stream_type;
+    rc = mParameters.getStreamFormat(stream_type, streamInfo->fmt);
+    rc = mParameters.getStreamDimension(stream_type, streamInfo->dim);
+
+    streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    switch (stream_type) {
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+    case CAM_STREAM_TYPE_RAW:
+        if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+        } else {
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = mParameters.getNumOfSnapshots();
+        }
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+        streamInfo->num_of_burst = mParameters.getNumOfSnapshots();
+        break;
+    default:
+        break;
+    }
+
+    // set flip mode based on stream type
+    int flipMode = mParameters.getFlipMode(stream_type);
+    if (flipMode > 0) {
+        streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_FLIP;
+        streamInfo->pp_config.flip = flipMode;
+    }
+
+    return streamInfoBuf;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewWindow
+ *
+ * DESCRIPTION: set preview window impl
+ *
+ * PARAMETERS :
+ *   @window  : ptr to window ops table struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setPreviewWindow(
+        struct preview_stream_ops *window)
+{
+    mPreviewWindow = window;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallBacks
+ *
+ * DESCRIPTION: set callbacks impl
+ *
+ * PARAMETERS :
+ *   @notify_cb  : notify cb
+ *   @data_cb    : data cb
+ *   @data_cb_timestamp : data cb with time stamp
+ *   @get_memory : request memory ops table
+ *   @user       : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::setCallBacks(camera_notify_callback notify_cb,
+                                            camera_data_callback data_cb,
+                                            camera_data_timestamp_callback data_cb_timestamp,
+                                            camera_request_memory get_memory,
+                                            void *user)
+{
+    mNotifyCb        = notify_cb;
+    mDataCb          = data_cb;
+    mDataCbTimestamp = data_cb_timestamp;
+    mGetMemory       = get_memory;
+    mCallbackCookie  = user;
+    m_cbNotifier.setCallbacks(notify_cb, data_cb, data_cb_timestamp, user);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : enableMsgType
+ *
+ * DESCRIPTION: enable msg type impl
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask to be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::enableMsgType(int32_t msg_type)
+{
+    mMsgEnabled |= msg_type;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : disableMsgType
+ *
+ * DESCRIPTION: disable msg type impl
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask to be disabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::disableMsgType(int32_t msg_type)
+{
+    mMsgEnabled &= ~msg_type;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : msgTypeEnabled
+ *
+ * DESCRIPTION: impl to determine if certain msg_type is enabled
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask
+ *
+ * RETURN     : 0 -- not enabled
+ *              non-zero -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabled(int32_t msg_type)
+{
+    return (mMsgEnabled & msg_type);
+}
+
+/*===========================================================================
+ * FUNCTION   : msgTypeEnabledWithLock
+ *
+ * DESCRIPTION: impl to determine if certain msg_type is enabled with lock
+ *
+ * PARAMETERS :
+ *   @msg_type  : msg type mask
+ *
+ * RETURN     : 0 -- not enabled
+ *              non-zero -- enabled
+ *==========================================================================*/
+int QCamera2HardwareInterface::msgTypeEnabledWithLock(int32_t msg_type)
+{
+    int enabled = 0;
+    lockAPI();
+    enabled = mMsgEnabled & msg_type;
+    unlockAPI();
+    return enabled;
+}
+
+/*===========================================================================
+ * FUNCTION   : startPreview
+ *
+ * DESCRIPTION: start preview impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startPreview()
+{
+    int32_t rc = NO_ERROR;
+    ALOGD("%s: E", __func__);
+    // start preview stream
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        rc = startChannel(QCAMERA_CH_TYPE_ZSL);
+    } else {
+        rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
+    }
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopPreview
+ *
+ * DESCRIPTION: stop preview impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopPreview()
+{
+    ALOGD("%s: E", __func__);
+    // stop preview stream
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        stopChannel(QCAMERA_CH_TYPE_ZSL);
+    } else {
+        stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+    }
+
+    // delete all channels from preparePreview
+    unpreparePreview();
+    ALOGD("%s: X", __func__);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : storeMetaDataInBuffers
+ *
+ * DESCRIPTION: enable store meta data in buffers for video frames impl
+ *
+ * PARAMETERS :
+ *   @enable  : flag if need enable
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::storeMetaDataInBuffers(int enable)
+{
+    mStoreMetaDataInFrame = enable;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startRecording
+ *
+ * DESCRIPTION: start recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::startRecording()
+{
+    int32_t rc = NO_ERROR;
+    ALOGD("%s: E", __func__);
+    if (mParameters.getRecordingHintValue() == false) {
+        ALOGE("%s: start recording when hint is false, stop preview first", __func__);
+        stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+        delChannel(QCAMERA_CH_TYPE_PREVIEW);
+
+        // Set recording hint to TRUE
+        mParameters.updateRecordingHintValue(TRUE);
+        rc = preparePreview();
+        if (rc == NO_ERROR) {
+            rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
+        }
+    }
+
+    if (rc == NO_ERROR) {
+        rc = startChannel(QCAMERA_CH_TYPE_VIDEO);
+    }
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (rc == NO_ERROR) {
+        if (m_pPowerModule) {
+            if (m_pPowerModule->powerHint) {
+                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=1");
+            }
+        }
+    }
+#endif
+    ALOGD("%s: X", __func__);
+    return rc;
+}
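+
+// The POWER_HINT_VIDEO_ENCODE hint sent above with "state=1" is paired with the
+// "state=0" hint in stopRecording() below, so the power HAL sees a balanced
+// begin/end around each recording session.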
+
+/*===========================================================================
+ * FUNCTION   : stopRecording
+ *
+ * DESCRIPTION: stop recording impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::stopRecording()
+{
+    ALOGD("%s: E", __func__);
+    int rc = stopChannel(QCAMERA_CH_TYPE_VIDEO);
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (m_pPowerModule) {
+        if (m_pPowerModule->powerHint) {
+            m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=0");
+        }
+    }
+#endif
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseRecordingFrame
+ *
+ * DESCRIPTION: return video frame impl
+ *
+ * PARAMETERS :
+ *   @opaque  : ptr to video frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::releaseRecordingFrame(const void * opaque)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    QCameraVideoChannel *pChannel =
+        (QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
+    ALOGD("%s: opaque data = %p", __func__,opaque);
+    if(pChannel != NULL) {
+        rc = pChannel->releaseFrame(opaque, mStoreMetaDataInFrame > 0);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : autoFocus
+ *
+ * DESCRIPTION: start auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::autoFocus()
+{
+    int rc = NO_ERROR;
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+        {
+            rc = mCameraHandle->ops->do_auto_focus(mCameraHandle->camera_handle);
+            if (rc == NO_ERROR) {
+                mParameters.setAFRunning(true);
+            }
+        }
+        break;
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+        // According to Google API definition, the focus callback will immediately
+        // return with a boolean that indicates whether the focus is sharp or not.
+        // The focus position is locked after autoFocus call.
+        // In this sense, the effect is the same as cancel_auto_focus.
+        {
+            rc = mParameters.setLockCAF(true);
+
+            // send evt notify that focus is done
+            sendEvtNotify(CAMERA_MSG_FOCUS,
+                          (m_currentFocusState == CAM_AF_FOCUSED)? true : false,
+                          0);
+        }
+        break;
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        // According to Google API definition, if the autofocus is in the middle
+        // of scanning, the focus callback will return when it completes. If the
+        // autofocus is not scanning, focus callback will immediately return with
+        // a boolean that indicates whether the focus is sharp or not. The apps
+        // can then decide if they want to take a picture immediately or to change
+        // the focus mode to auto, and run a full autofocus cycle. The focus position
+        // is locked after autoFocus call.
+        if (m_currentFocusState != CAM_AF_SCANNING) {
+            // lock focus
+            rc = mParameters.setLockCAF(true);
+
+            // send evt notify that focus is done
+            sendEvtNotify(CAMERA_MSG_FOCUS,
+                          (m_currentFocusState == CAM_AF_FOCUSED)? true : false,
+                          0);
+        } else {
+            // set flag that CAF lock is needed once focus state becomes focused/not focused
+            mParameters.setLockCAFNeeded(true);
+            rc = NO_ERROR;
+        }
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        ALOGE("%s: No ops in focusMode (%d)", __func__, focusMode);
+        rc = BAD_VALUE;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelAutoFocus
+ *
+ * DESCRIPTION: cancel auto focus impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelAutoFocus()
+{
+    int rc = NO_ERROR;
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+        if (mParameters.isAFRunning()) {
+            rc = mCameraHandle->ops->cancel_auto_focus(mCameraHandle->camera_handle);
+            if (rc == NO_ERROR) {
+                mParameters.setAFRunning(false);
+            }
+        }
+        break;
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        if (mParameters.isCAFLocked()) {
+            // resume CAF by unlocking it
+            rc = mParameters.setLockCAF(false);
+            mParameters.setLockCAFNeeded(false);
+        }
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        ALOGI("%s: No ops in focusMode (%d)", __func__, focusMode);
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: take picture impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takePicture()
+{
+    int rc = NO_ERROR;
+    uint8_t numSnapshots = mParameters.getNumOfSnapshots();
+    ALOGD("%s: E", __func__);
+    if (mParameters.isZSLMode()) {
+        QCameraPicChannel *pZSLChannel =
+            (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+        if (NULL != pZSLChannel) {
+            // start postprocessor
+            m_postprocessor.start(pZSLChannel);
+
+            rc = pZSLChannel->takePicture(numSnapshots);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: cannot take ZSL picture", __func__);
+                m_postprocessor.stop();
+                return rc;
+            }
+        } else {
+            ALOGE("%s: ZSL channel is NULL", __func__);
+            return UNKNOWN_ERROR;
+        }
+    } else {
+        // normal capture case
+        // need to stop preview channel
+        stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+        delChannel(QCAMERA_CH_TYPE_PREVIEW);
+
+        // start snapshot
+        if (mParameters.isJpegPictureFormat() ||
+            mParameters.isNV16PictureFormat() ) {
+            rc = addCaptureChannel();
+            if (rc == NO_ERROR) {
+                // start postprocessor
+                m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_CAPTURE]);
+
+                // start capture channel
+                rc = startChannel(QCAMERA_CH_TYPE_CAPTURE);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot start capture channel", __func__);
+                    m_postprocessor.stop();
+                    delChannel(QCAMERA_CH_TYPE_CAPTURE);
+                    return rc;
+                }
+            } else {
+                ALOGE("%s: cannot add capture channel", __func__);
+                return rc;
+            }
+        } else {
+            rc = addRawChannel();
+            if (rc == NO_ERROR) {
+                // start postprocessor
+                m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
+                rc = startChannel(QCAMERA_CH_TYPE_RAW);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: cannot start raw channel", __func__);
+                    m_postprocessor.stop();
+                    delChannel(QCAMERA_CH_TYPE_RAW);
+                    return rc;
+                }
+            } else {
+                ALOGE("%s: cannot add raw channel", __func__);
+                return rc;
+            }
+        }
+    }
+    ALOGD("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel picture impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelPicture()
+{
+    //stop post processor
+    m_postprocessor.stop();
+
+    if (mParameters.isZSLMode()) {
+        QCameraPicChannel *pZSLChannel =
+            (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
+        if (NULL != pZSLChannel) {
+            if (m_bStartZSLSnapshotCalled) {
+                mCameraHandle->ops->stop_zsl_snapshot(
+                        mCameraHandle->camera_handle);
+                m_bStartZSLSnapshotCalled = false;
+            }
+            pZSLChannel->cancelPicture();
+        }
+    } else {
+        // normal capture case
+        if (mParameters.isJpegPictureFormat() ||
+            mParameters.isNV16PictureFormat() ) {
+            stopChannel(QCAMERA_CH_TYPE_CAPTURE);
+            delChannel(QCAMERA_CH_TYPE_CAPTURE);
+        } else {
+            stopChannel(QCAMERA_CH_TYPE_RAW);
+            delChannel(QCAMERA_CH_TYPE_RAW);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : takeLiveSnapshot
+ *
+ * DESCRIPTION: take live snapshot during recording
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::takeLiveSnapshot()
+{
+    int rc = NO_ERROR;
+
+    // start post processor
+    rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_SNAPSHOT]);
+
+    // start snapshot channel
+    if (rc == NO_ERROR) {
+        rc = startChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelLiveSnapshot
+ *
+ * DESCRIPTION: cancel current live snapshot request
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::cancelLiveSnapshot()
+{
+    int rc = NO_ERROR;
+
+    //stop post processor
+    m_postprocessor.stop();
+
+    // stop snapshot channel
+    rc = stopChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getParameters
+ *
+ * DESCRIPTION: get parameters impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : a string containing parameter pairs
+ *==========================================================================*/
+char* QCamera2HardwareInterface::getParameters()
+{
+    char* strParams = NULL;
+    String8 str;
+    str = mParameters.flatten();
+    strParams = (char *)malloc(sizeof(char)*(str.length()+1));
+    if(strParams != NULL){
+        memset(strParams, 0, sizeof(char)*(str.length()+1));
+        strncpy(strParams, str.string(), str.length());
+        strParams[str.length()] = 0;
+    }
+    return strParams;
+}
+
+/*===========================================================================
+ * FUNCTION   : putParameters
+ *
+ * DESCRIPTION: put parameters string impl
+ *
+ * PARAMETERS :
+ *   @parms   : parameters string to be released
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::putParameters(char *parms)
+{
+    free(parms);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendCommand
+ *
+ * DESCRIPTION: send command impl
+ *
+ * PARAMETERS :
+ *   @command : command to be executed
+ *   @arg1    : optional argument 1
+ *   @arg2    : optional argument 2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::sendCommand(int32_t command, int32_t /*arg1*/, int32_t /*arg2*/)
+{
+    int rc = NO_ERROR;
+
+    switch (command) {
+    case CAMERA_CMD_START_FACE_DETECTION:
+    case CAMERA_CMD_STOP_FACE_DETECTION:
+        rc = setFaceDetection(command == CAMERA_CMD_START_FACE_DETECTION? true : false);
+        break;
+    default:
+        rc = NO_ERROR;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : registerFaceImage
+ *
+ * DESCRIPTION: register face image impl
+ *
+ * PARAMETERS :
+ *   @img_ptr : ptr to image buffer
+ *   @config  : ptr to config struct about input image info
+ *   @faceID  : [OUT] face ID to uniquely identify the registered face image
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::registerFaceImage(void *img_ptr,
+                                                 cam_pp_offline_src_config_t *config,
+                                                 int32_t &faceID)
+{
+    int rc = NO_ERROR;
+    faceID = -1;
+
+    if (img_ptr == NULL || config == NULL) {
+        ALOGE("%s: img_ptr or config is NULL", __func__);
+        return BAD_VALUE;
+    }
+
+    // allocate ion memory for source image
+    QCameraHeapMemory *imgBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    if (imgBuf == NULL) {
+        ALOGE("%s: Unable to new heap memory obj for image buf", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = imgBuf->allocate(1, config->input_buf_planes.plane_info.frame_len);
+    if (rc < 0) {
+        ALOGE("%s: Unable to allocate heap memory for image buf", __func__);
+        delete imgBuf;
+        return NO_MEMORY;
+    }
+
+    void *pBufPtr = imgBuf->getPtr(0);
+    if (pBufPtr == NULL) {
+        ALOGE("%s: image buf is NULL", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        return NO_MEMORY;
+    }
+    memcpy(pBufPtr, img_ptr, config->input_buf_planes.plane_info.frame_len);
+
+    cam_pp_feature_config_t pp_feature;
+    memset(&pp_feature, 0, sizeof(cam_pp_feature_config_t));
+    pp_feature.feature_mask = CAM_QCOM_FEATURE_REGISTER_FACE;
+    QCameraReprocessChannel *pChannel =
+        addOfflineReprocChannel(*config, pp_feature, NULL, NULL);
+
+    if (pChannel == NULL) {
+        ALOGE("%s: fail to add offline reprocess channel", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        return UNKNOWN_ERROR;
+    }
+
+    rc = pChannel->start();
+    if (rc != NO_ERROR) {
+        ALOGE("%s: Cannot start reprocess channel", __func__);
+        imgBuf->deallocate();
+        delete imgBuf;
+        delete pChannel;
+        return rc;
+    }
+
+    rc = pChannel->doReprocess(imgBuf->getFd(0), imgBuf->getSize(0), faceID);
+
+    // done with register face image, free imgbuf and delete reprocess channel
+    imgBuf->deallocate();
+    delete imgBuf;
+    imgBuf = NULL;
+    pChannel->stop();
+    delete pChannel;
+    pChannel = NULL;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : release
+ *
+ * DESCRIPTION: release camera resource impl
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::release()
+{
+    // stop and delete all channels
+    for (int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            stopChannel((qcamera_ch_type_enum_t)i);
+            delChannel((qcamera_ch_type_enum_t)i);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: camera status dump impl
+ *
+ * PARAMETERS :
+ *   @fd      : fd for the buffer to be dumped with camera status
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::dump(int /*fd*/)
+{
+    ALOGE("%s: not supported yet", __func__);
+    return INVALID_OPERATION;
+}
+
+/*===========================================================================
+ * FUNCTION   : processAPI
+ *
+ * DESCRIPTION: process API calls from upper layer
+ *
+ * PARAMETERS :
+ *   @api         : API to be processed
+ *   @api_payload : ptr to API payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processAPI(qcamera_sm_evt_enum_t api, void *api_payload)
+{
+    return m_stateMachine.procAPI(api, api_payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : processEvt
+ *
+ * DESCRIPTION: process Evt from backend via mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @evt         : event type to be processed
+ *   @evt_payload : ptr to event payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+    return m_stateMachine.procEvt(evt, evt_payload);
+}
+
+/*===========================================================================
+ * FUNCTION   : processSyncEvt
+ *
+ * DESCRIPTION: process synchronous Evt from backend
+ *
+ * PARAMETERS :
+ *   @evt         : event type to be processed
+ *   @evt_payload : ptr to event payload if any
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload)
+{
+    int rc = NO_ERROR;
+
+    pthread_mutex_lock(&m_evtLock);
+    rc =  processEvt(evt, evt_payload);
+    if (rc == NO_ERROR) {
+        memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
+        while (m_evtResult.request_api != evt) {
+            pthread_cond_wait(&m_evtCond, &m_evtLock);
+        }
+        rc =  m_evtResult.status;
+    }
+    pthread_mutex_unlock(&m_evtLock);
+
+    return rc;
+}
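+
+// The while loop above re-checks m_evtResult.request_api after every wakeup,
+// which guards against spurious pthread_cond_wait() returns; signalEvtResult()
+// further below is the matching producer side.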
+
+/*===========================================================================
+ * FUNCTION   : camEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-camera-interface to handle backend events
+ *
+ * PARAMETERS :
+ *   @camera_handle : camera handle
+ *   @evt           : ptr to event
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
+                                          mm_camera_event_t *evt,
+                                          void *user_data)
+{
+    QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)user_data;
+    if (obj && evt) {
+        mm_camera_event_t *payload =
+            (mm_camera_event_t *)malloc(sizeof(mm_camera_event_t));
+        if (NULL != payload) {
+            *payload = *evt;
+            obj->processEvt(QCAMERA_SM_EVT_EVT_NOTIFY, payload);
+        }
+    } else {
+        ALOGE("%s: NULL user_data", __func__);
+    }
+}
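+
+// The event payload is heap-allocated because processEvt() queues it for the
+// state machine to consume asynchronously; ownership is assumed to pass to that
+// consumer, which frees it once the event has been handled.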
+
+/*===========================================================================
+ * FUNCTION   : jpegEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events
+ *
+ * PARAMETERS :
+ *   @status    : status of jpeg job
+ *   @client_hdl: jpeg client handle
+ *   @jobId     : jpeg job Id
+ *   @p_output  : ptr to jpeg output result struct
+ *   @userdata  : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::jpegEvtHandle(jpeg_job_status_t status,
+                                              uint32_t /*client_hdl*/,
+                                              uint32_t jobId,
+                                              mm_jpeg_output_t *p_output,
+                                              void *userdata)
+{
+    QCamera2HardwareInterface *obj = (QCamera2HardwareInterface *)userdata;
+    if (obj) {
+        qcamera_jpeg_evt_payload_t *payload =
+            (qcamera_jpeg_evt_payload_t *)malloc(sizeof(qcamera_jpeg_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_jpeg_evt_payload_t));
+            payload->status = status;
+            payload->jobId = jobId;
+            if (p_output != NULL) {
+                payload->out_data = *p_output;
+            }
+            obj->processEvt(QCAMERA_SM_EVT_JPEG_EVT_NOTIFY, payload);
+        }
+    } else {
+        ALOGE("%s: NULL user_data", __func__);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : thermalEvtHandle
+ *
+ * DESCRIPTION: routine to handle thermal event notification
+ *
+ * PARAMETERS :
+ *   @level      : thermal level
+ *   @userdata   : userdata passed in during registration
+ *   @data       : opaque data from thermal client
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::thermalEvtHandle(
+        qcamera_thermal_level_enum_t level, void *userdata, void *data)
+{
+    // Make sure thermal events are logged
+    ALOGI("%s: level = %d, userdata = %p, data = %p",
+        __func__, level, userdata, data);
+    // We don't need to lockAPI/waitAPIResult here; QCAMERA_SM_EVT_THERMAL_NOTIFY
+    // becomes an async call. This also means we can only pass the payload
+    // by value, not by address.
+    return processAPI(QCAMERA_SM_EVT_THERMAL_NOTIFY, (void *)level);
+}
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify to notify thread
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type to be sent
+ *   @ext1    : optional extension1
+ *   @ext2    : optional extension2
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::sendEvtNotify(int32_t msg_type,
+                                                 int32_t ext1,
+                                                 int32_t ext2)
+{
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+    cbArg.msg_type = msg_type;
+    cbArg.ext1 = ext1;
+    cbArg.ext2 = ext2;
+    return m_cbNotifier.notifyCallback(cbArg);
+}
+
+/*===========================================================================
+ * FUNCTION   : processAutoFocusEvent
+ *
+ * DESCRIPTION: process auto focus event
+ *
+ * PARAMETERS :
+ *   @focus_data: struct containing auto focus result info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processAutoFocusEvent(cam_auto_focus_data_t &focus_data)
+{
+    int32_t ret = NO_ERROR;
+
+    m_currentFocusState = focus_data.focus_state;
+
+    cam_focus_mode_type focusMode = mParameters.getFocusMode();
+    switch (focusMode) {
+    case CAM_FOCUS_MODE_AUTO:
+    case CAM_FOCUS_MODE_MACRO:
+        if (mParameters.isAFRunning()) {
+            if (focus_data.focus_state == CAM_AF_SCANNING) {
+                // in the middle of focusing, just ignore it
+                break;
+            }
+
+            // update focus distance
+            mParameters.updateFocusDistances(&focus_data.focus_dist);
+            ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+                                (focus_data.focus_state == CAM_AF_FOCUSED)? true : false,
+                                0);
+            mParameters.setAFRunning(false);
+        } else {
+            ret = UNKNOWN_ERROR;
+            ALOGE("%s: autoFocusEvent when no auto_focus running", __func__);
+        }
+        break;
+    case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
+    case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
+        if (focus_data.focus_state == CAM_AF_FOCUSED ||
+            focus_data.focus_state == CAM_AF_NOT_FOCUSED) {
+            // update focus distance
+            mParameters.updateFocusDistances(&focus_data.focus_dist);
+            if (mParameters.isLockCAFNeeded()) {
+                mParameters.setLockCAFNeeded(false);
+                ret = mParameters.setLockCAF(true);
+            }
+
+            ret = sendEvtNotify(CAMERA_MSG_FOCUS,
+                  (focus_data.focus_state == CAM_AF_FOCUSED)? true : false,
+                  0);
+        }
+        ret = sendEvtNotify(CAMERA_MSG_FOCUS_MOVE,
+                (focus_data.focus_state == CAM_AF_SCANNING)? true : false,
+                0);
+        break;
+    case CAM_FOCUS_MODE_INFINITY:
+    case CAM_FOCUS_MODE_FIXED:
+    case CAM_FOCUS_MODE_EDOF:
+    default:
+        ALOGD("%s: no ops for autofocus event in focusmode %d", __func__, focusMode);
+        break;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomEvent
+ *
+ * DESCRIPTION: process zoom event
+ *
+ * PARAMETERS :
+ *   @crop_info : crop info as a result of zoom operation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processZoomEvent(cam_crop_data_t &crop_info)
+{
+    int32_t ret = NO_ERROR;
+
+    for (int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL) {
+            ret = m_channels[i]->processZoomDone(mPreviewWindow, crop_info);
+        }
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPrepSnapshotDoneEvent
+ *
+ * DESCRIPTION: process prep snapshot done event
+ *
+ * PARAMETERS :
+ *   @prep_snapshot_state  : state of prepare snapshot done, i.e. whether
+ *                           future frames are needed for capture.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processPrepSnapshotDoneEvent(
+                        cam_prep_snapshot_state_t prep_snapshot_state)
+{
+    int32_t ret = NO_ERROR;
+
+    if (m_channels[QCAMERA_CH_TYPE_ZSL] &&
+        prep_snapshot_state == NEED_FUTURE_FRAME) {
+
+        ret = mCameraHandle->ops->start_zsl_snapshot(
+                            mCameraHandle->camera_handle);
+        if (ret < 0) {
+            ALOGE("%s: start_led_zsl_capture failed %d",
+                            __func__, ret);
+            return ret;
+        }
+        m_bStartZSLSnapshotCalled = true;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegNotify
+ *
+ * DESCRIPTION: process jpeg event
+ *
+ * PARAMETERS :
+ *   @jpeg_evt: ptr to jpeg event payload
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_evt)
+{
+    return m_postprocessor.processJpegEvt(jpeg_evt);
+}
+
+/*===========================================================================
+ * FUNCTION   : lockAPI
+ *
+ * DESCRIPTION: lock to process API
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::lockAPI()
+{
+    pthread_mutex_lock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : waitAPIResult
+ *
+ * DESCRIPTION: wait for API result coming back. This is a blocking call; it
+ *              returns only when the given API event type arrives
+ *
+ * PARAMETERS :
+ *   @api_evt : API event type
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::waitAPIResult(qcamera_sm_evt_enum_t api_evt)
+{
+    ALOGV("%s: wait for API result of evt (%d)", __func__, api_evt);
+    memset(&m_apiResult, 0, sizeof(qcamera_api_result_t));
+    while (m_apiResult.request_api != api_evt) {
+        pthread_cond_wait(&m_cond, &m_lock);
+    }
+    ALOGV("%s: return (%d) from API result wait for evt (%d)",
+          __func__, m_apiResult.status, api_evt);
+}
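+
+// Callers are expected to hold m_lock via lockAPI() before calling this (see the
+// lockAPI()/processAPI()/waitAPIResult()/unlockAPI() sequence in the static API
+// wrappers), since pthread_cond_wait() releases and re-acquires that mutex.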
+
+/*===========================================================================
+ * FUNCTION   : unlockAPI
+ *
+ * DESCRIPTION: API processing is done, unlock
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unlockAPI()
+{
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : signalAPIResult
+ *
+ * DESCRIPTION: signal condition variable that a certain API event type has arrived
+ *
+ * PARAMETERS :
+ *   @result  : API result
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalAPIResult(qcamera_api_result_t *result)
+{
+    pthread_mutex_lock(&m_lock);
+    m_apiResult = *result;
+    pthread_cond_signal(&m_cond);
+    pthread_mutex_unlock(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : signalEvtResult
+ *
+ * DESCRIPTION: signal condition variable that certain event was processed
+ *
+ * PARAMETERS :
+ *   @result  : Event result
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::signalEvtResult(qcamera_api_result_t *result)
+{
+    pthread_mutex_lock(&m_evtLock);
+    m_evtResult = *result;
+    pthread_cond_signal(&m_evtCond);
+    pthread_mutex_unlock(&m_evtLock);
+}
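+
+/*
+ * Editor's note (illustrative sketch, not part of this patch): the five
+ * helpers above form a simple request/response handshake between the API
+ * caller and the state machine thread. A caller is expected to hold m_lock
+ * across the request and the wait, roughly as below (processAPI() and the
+ * QCAMERA_SM_EVT_* constant are assumed to be the state-machine entry points
+ * declared elsewhere in this HAL):
+ *
+ *     lockAPI();                                        // take m_lock
+ *     int32_t rc = processAPI(QCAMERA_SM_EVT_START_PREVIEW, NULL);
+ *     if (rc == NO_ERROR) {
+ *         waitAPIResult(QCAMERA_SM_EVT_START_PREVIEW);  // sleeps on m_cond
+ *         rc = m_apiResult.status;
+ *     }
+ *     unlockAPI();                                      // release m_lock
+ *
+ * waitAPIResult() relies on pthread_cond_wait() atomically releasing m_lock
+ * while it sleeps, which is why lockAPI() must be taken first and why
+ * signalAPIResult() re-acquires the same mutex before signalling.
+ */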
+
+/*===========================================================================
+ * FUNCTION   : addStreamToChannel
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @pChannel   : ptr to channel obj
+ *   @streamType : type of stream to be added
+ *   @streamCB   : callback of stream
+ *   @userData   : user data ptr to callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addStreamToChannel(QCameraChannel *pChannel,
+                                                      cam_stream_type_t streamType,
+                                                      stream_cb_routine streamCB,
+                                                      void *userData)
+{
+    int32_t rc = NO_ERROR;
+    QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(streamType);
+    if (pStreamInfo == NULL) {
+        ALOGE("%s: no mem for stream info buf", __func__);
+        return NO_MEMORY;
+    }
+    uint8_t minStreamBufNum = getBufNumRequired(streamType);
+    rc = pChannel->addStream(*this,
+                             pStreamInfo,
+                             minStreamBufNum,
+                             &gCamCapability[mCameraId]->padding_info,
+                             streamCB, userData);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add stream type (%d) failed, ret = %d",
+              __func__, streamType, rc);
+        pStreamInfo->deallocate();
+        delete pStreamInfo;
+        return rc;
+    }
+
+    return rc;
+}
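+
+/*
+ * Editor's note (illustrative sketch, not part of this patch): every
+ * addXxxChannel() function below follows the same recipe built on this
+ * helper:
+ *
+ *     QCameraChannel *ch = new QCameraChannel(mCameraHandle->camera_handle,
+ *                                             mCameraHandle->ops);
+ *     ch->init(attr_or_NULL, channel_cb_or_NULL, this);
+ *     addStreamToChannel(ch, CAM_STREAM_TYPE_METADATA,
+ *                        metadata_stream_cb_routine, this);
+ *     addStreamToChannel(ch, <payload stream type>, <stream cb>, this);
+ *     m_channels[<channel type>] = ch;
+ *
+ * On success the stream-info heap buffer allocated here is presumably taken
+ * over by the channel/stream; it is only freed locally on the failure path.
+ */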
+
+/*===========================================================================
+ * FUNCTION   : addPreviewChannel
+ *
+ * DESCRIPTION: add a preview channel that contains a preview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addPreviewChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL) {
+        // if we had preview channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
+        m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for preview channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // preview only channel, don't need bundle attr and cb
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init preview channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with preview if applicable
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (isNoDisplayMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                nodisplay_preview_stream_cb_routine, this);
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+    }
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add preview stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_PREVIEW] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addVideoChannel
+ *
+ * DESCRIPTION: add a video channel that contains a video stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addVideoChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraVideoChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_VIDEO] != NULL) {
+        // if we had video channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_VIDEO];
+        m_channels[QCAMERA_CH_TYPE_VIDEO] = NULL;
+    }
+
+    pChannel = new QCameraVideoChannel(mCameraHandle->camera_handle,
+                                       mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for video channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // video only channel, don't need bundle attr and cb
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != 0) {
+        ALOGE("%s: init video channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_VIDEO,
+                            video_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add video stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_VIDEO] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addSnapshotChannel
+ *
+ * DESCRIPTION: add a snapshot channel that contains a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ * NOTE       : Add this channel for live snapshot usecase. Regular capture will
+ *              use addCaptureChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addSnapshotChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_SNAPSHOT] != NULL) {
+        // if we had snapshot channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_SNAPSHOT];
+        m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for snapshot channel", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init snapshot channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT,
+                            snapshot_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_SNAPSHOT] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addRawChannel
+ *
+ * DESCRIPTION: add a raw channel that contains a raw image stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addRawChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_RAW] != NULL) {
+        // if we had raw channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_RAW];
+        m_channels[QCAMERA_CH_TYPE_RAW] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for raw channel", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init raw channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with snapshot in regular RAW capture case
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_RAW,
+                            raw_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_RAW] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addZSLChannel
+ *
+ * DESCRIPTION: add a ZSL channel that contains a preview stream and
+ *              a snapshot stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addZSLChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraPicChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_ZSL] != NULL) {
+        // if we had ZSL channel before, delete it first
+        delete m_channels[QCAMERA_CH_TYPE_ZSL];
+        m_channels[QCAMERA_CH_TYPE_ZSL] = NULL;
+    }
+
+    pChannel = new QCameraPicChannel(mCameraHandle->camera_handle,
+                                     mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for ZSL channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // ZSL channel, init with bundle attr and cb
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr.look_back = mParameters.getZSLBackLookCount();
+    attr.post_frame_skip = mParameters.getZSLBurstInterval();
+    attr.water_mark = mParameters.getZSLQueueDepth();
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr,
+                        zsl_channel_cb,
+                        this);
+    if (rc != 0) {
+        ALOGE("%s: init ZSL channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with preview if applicable
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    if (isNoDisplayMode()) {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                nodisplay_preview_stream_cb_routine, this);
+    } else {
+        rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
+                                preview_stream_cb_routine, this);
+    }
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add preview stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_SNAPSHOT,
+                            NULL, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_ZSL] = pChannel;
+    return rc;
+}
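+
+/*
+ * Editor's note (illustrative sketch, not part of this patch): the bundle
+ * attributes set above control how the backend matches preview and snapshot
+ * frames into ZSL super-buffers. Read loosely (my interpretation, with
+ * placeholder numbers rather than real defaults):
+ *
+ *     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST; // deliver bursts on request
+ *     attr.look_back = 2;        // may pick a frame captured 2 frames in the past
+ *     attr.post_frame_skip = 1;  // skip 1 frame between consecutive burst frames
+ *     attr.water_mark = 2;       // keep roughly 2 matched super-buffers queued
+ *
+ * The real values come from mParameters (getZSLBackLookCount() and friends).
+ */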
+
+/*===========================================================================
+ * FUNCTION   : addCaptureChannel
+ *
+ * DESCRIPTION: add a capture channel that contains a snapshot stream
+ *              and a postview stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ * NOTE       : Add this channel for regular capture usecase.
+ *              For Live snapshot usecase, use addSnapshotChannel.
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addCaptureChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_CAPTURE] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_CAPTURE];
+        m_channels[QCAMERA_CH_TYPE_CAPTURE] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for capture channel", __func__);
+        return NO_MEMORY;
+    }
+
+    // Capture channel: snapshot and postview streams need to start together
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+
+    rc = pChannel->init(&attr,
+                        capture_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init capture channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    // meta data stream always coexists with snapshot in regular capture case
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_POSTVIEW,
+                            postview_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add postview stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT,
+                            NULL, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add snapshot stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_CAPTURE] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addMetaDataChannel
+ *
+ * DESCRIPTION: add a meta data channel that contains a metadata stream
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addMetaDataChannel()
+{
+    int32_t rc = NO_ERROR;
+    QCameraChannel *pChannel = NULL;
+
+    if (m_channels[QCAMERA_CH_TYPE_METADATA] != NULL) {
+        delete m_channels[QCAMERA_CH_TYPE_METADATA];
+        m_channels[QCAMERA_CH_TYPE_METADATA] = NULL;
+    }
+
+    pChannel = new QCameraChannel(mCameraHandle->camera_handle,
+                                  mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for metadata channel", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pChannel->init(NULL,
+                        NULL,
+                        NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init metadata channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_METADATA,
+                            metadata_stream_cb_routine, this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add metadata stream failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return rc;
+    }
+
+    m_channels[QCAMERA_CH_TYPE_METADATA] = pChannel;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addOnlineReprocChannel
+ *
+ * DESCRIPTION: add an online reprocess channel that will reprocess frames
+ *              coming from the input channel
+ *
+ * PARAMETERS :
+ *   @pInputChannel : ptr to input channel whose frames will be post-processed
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addOnlineReprocChannel(
+                                                      QCameraChannel *pInputChannel)
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+
+    if (pInputChannel == NULL) {
+        ALOGE("%s: input channel obj is NULL", __func__);
+        return NULL;
+    }
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for reprocess channel", __func__);
+        return NULL;
+    }
+
+    // reprocess channel attributes: continuous super-buf notification
+    mm_camera_channel_attr_t attr;
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = mParameters.getMaxUnmatchedFramesInQueue();
+    rc = pChannel->init(&attr,
+                        postproc_channel_cb_routine,
+                        this);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    // pp feature config
+    cam_pp_feature_config_t pp_config;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+    if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+        pp_config.sharpness = mParameters.getInt(QCameraParameters::KEY_QC_SHARPNESS);
+    }
+
+    if (mParameters.isWNREnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+        pp_config.denoise2d.denoise_enable = 1;
+        pp_config.denoise2d.process_plates = mParameters.getWaveletDenoiseProcessPlate();
+    }
+
+    if (isCACEnabled()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_CAC;
+    }
+
+    if (needRotationReprocess()) {
+        pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+        int rotation = mParameters.getJpegRotation();
+        if (rotation == 0) {
+            pp_config.rotation = ROTATE_0;
+        } else if (rotation == 90) {
+            pp_config.rotation = ROTATE_90;
+        } else if (rotation == 180) {
+            pp_config.rotation = ROTATE_180;
+        } else if (rotation == 270) {
+            pp_config.rotation = ROTATE_270;
+        }
+    }
+
+    uint8_t minStreamBufNum = mParameters.getNumOfSnapshots();
+    rc = pChannel->addReprocStreamsFromSource(*this,
+                                              pp_config,
+                                              pInputChannel,
+                                              minStreamBufNum,
+                                              &gCamCapability[mCameraId]->padding_info);
+    if (rc != NO_ERROR) {
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : addOfflineReprocChannel
+ *
+ * DESCRIPTION: add an offline reprocess channel that contains one reproc stream,
+ *              which will reprocess frames coming from external images
+ *
+ * PARAMETERS :
+ *   @img_config  : offline reprocess image info
+ *   @pp_feature  : pp feature config
+ *   @stream_cb   : callback routine for the reprocess stream
+ *   @userdata    : user data ptr passed to the callback
+ *
+ * RETURN     : ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCameraReprocessChannel *QCamera2HardwareInterface::addOfflineReprocChannel(
+                                            cam_pp_offline_src_config_t &img_config,
+                                            cam_pp_feature_config_t &pp_feature,
+                                            stream_cb_routine stream_cb,
+                                            void *userdata)
+{
+    int32_t rc = NO_ERROR;
+    QCameraReprocessChannel *pChannel = NULL;
+
+    pChannel = new QCameraReprocessChannel(mCameraHandle->camera_handle,
+                                           mCameraHandle->ops);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for reprocess channel", __func__);
+        return NULL;
+    }
+
+    rc = pChannel->init(NULL, NULL, NULL);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    QCameraHeapMemory *pStreamInfo = allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+    if (pStreamInfo == NULL) {
+        ALOGE("%s: no mem for stream info buf", __func__);
+        delete pChannel;
+        return NULL;
+    }
+
+    cam_stream_info_t *streamInfoBuf = (cam_stream_info_t *)pStreamInfo->getPtr(0);
+    memset(streamInfoBuf, 0, sizeof(cam_stream_info_t));
+    streamInfoBuf->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+    streamInfoBuf->fmt = img_config.input_fmt;
+    streamInfoBuf->dim = img_config.input_dim;
+    streamInfoBuf->buf_planes = img_config.input_buf_planes;
+    streamInfoBuf->streaming_mode = CAM_STREAMING_MODE_BURST;
+    streamInfoBuf->num_of_burst = img_config.num_of_bufs;
+
+    streamInfoBuf->reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+    streamInfoBuf->reprocess_config.offline = img_config;
+    streamInfoBuf->reprocess_config.pp_feature_config = pp_feature;
+
+    rc = pChannel->addStream(*this,
+                             pStreamInfo, img_config.num_of_bufs,
+                             &gCamCapability[mCameraId]->padding_info,
+                             stream_cb, userdata);
+
+    if (rc != NO_ERROR) {
+        ALOGE("%s: add reprocess stream failed, ret = %d", __func__, rc);
+        pStreamInfo->deallocate();
+        delete pStreamInfo;
+        delete pChannel;
+        return NULL;
+    }
+
+    return pChannel;
+}
+
+/*===========================================================================
+ * FUNCTION   : addChannel
+ *
+ * DESCRIPTION: add a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    switch (ch_type) {
+    case QCAMERA_CH_TYPE_ZSL:
+        rc = addZSLChannel();
+        break;
+    case QCAMERA_CH_TYPE_CAPTURE:
+        rc = addCaptureChannel();
+        break;
+    case QCAMERA_CH_TYPE_PREVIEW:
+        rc = addPreviewChannel();
+        break;
+    case QCAMERA_CH_TYPE_VIDEO:
+        rc = addVideoChannel();
+        break;
+    case QCAMERA_CH_TYPE_SNAPSHOT:
+        rc = addSnapshotChannel();
+        break;
+    case QCAMERA_CH_TYPE_RAW:
+        rc = addRawChannel();
+        break;
+    case QCAMERA_CH_TYPE_METADATA:
+        rc = addMetaDataChannel();
+        break;
+    default:
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : delChannel
+ *
+ * DESCRIPTION: delete a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::delChannel(qcamera_ch_type_enum_t ch_type)
+{
+    if (m_channels[ch_type] != NULL) {
+        delete m_channels[ch_type];
+        m_channels[ch_type] = NULL;
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startChannel
+ *
+ * DESCRIPTION: start a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::startChannel(qcamera_ch_type_enum_t ch_type)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    if (m_channels[ch_type] != NULL) {
+        rc = m_channels[ch_type]->start();
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopChannel
+ *
+ * DESCRIPTION: stop a channel by its type
+ *
+ * PARAMETERS :
+ *   @ch_type : channel type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::stopChannel(qcamera_ch_type_enum_t ch_type)
+{
+    int32_t rc = UNKNOWN_ERROR;
+    if (m_channels[ch_type] != NULL) {
+        rc = m_channels[ch_type]->stop();
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : preparePreview
+ *
+ * DESCRIPTION: add channels needed for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::preparePreview()
+{
+    int32_t rc = NO_ERROR;
+
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        rc = addChannel(QCAMERA_CH_TYPE_ZSL);
+        if (rc != NO_ERROR) {
+            return rc;
+        }
+    } else {
+        bool recordingHint = mParameters.getRecordingHintValue();
+        if (recordingHint) {
+            rc = addChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+            if (rc != NO_ERROR) {
+                return rc;
+            }
+
+            rc = addChannel(QCAMERA_CH_TYPE_VIDEO);
+            if (rc != NO_ERROR) {
+                delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                return rc;
+            }
+        }
+
+        rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
+        if (rc != NO_ERROR) {
+            if (recordingHint) {
+                delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+                delChannel(QCAMERA_CH_TYPE_VIDEO);
+            }
+            return rc;
+        }
+
+    }
+
+    return rc;
+}
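+
+/*
+ * Editor's note (not part of this patch): the channel selection above can be
+ * summarized as:
+ *
+ *     ZSL mode, no recording hint  -> ZSL channel only
+ *     recording hint set           -> SNAPSHOT + VIDEO + PREVIEW channels
+ *     otherwise (plain still mode) -> PREVIEW channel only
+ *
+ * unpreparePreview() below tears down exactly the same set, so the two
+ * functions need to stay in sync if channel types are added or removed.
+ */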
+
+/*===========================================================================
+ * FUNCTION   : unpreparePreview
+ *
+ * DESCRIPTION: delete channels for preview
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::unpreparePreview()
+{
+    if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
+        delChannel(QCAMERA_CH_TYPE_ZSL);
+    } else {
+        delChannel(QCAMERA_CH_TYPE_PREVIEW);
+        if (mParameters.getRecordingHintValue() == true) {
+            delChannel(QCAMERA_CH_TYPE_VIDEO);
+            delChannel(QCAMERA_CH_TYPE_SNAPSHOT);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : playShutter
+ *
+ * DESCRIPTION: send request to play shutter sound
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::playShutter()
+{
+    if (mNotifyCb == NULL ||
+        msgTypeEnabledWithLock(CAMERA_MSG_SHUTTER) == 0) {
+        ALOGV("%s: shutter msg not enabled or NULL cb", __func__);
+        return;
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_SHUTTER;
+    cbArg.ext1 = 0;
+
+    if (!m_bShutterSoundPlayed) {
+        cbArg.ext2 = true;
+        m_cbNotifier.notifyCallback(cbArg);
+    }
+    cbArg.ext2 = false;
+    m_cbNotifier.notifyCallback(cbArg);
+    m_bShutterSoundPlayed = false;
+}
+
+/*===========================================================================
+ * FUNCTION   : getChannelByHandle
+ *
+ * DESCRIPTION: return a channel by its handle
+ *
+ * PARAMETERS :
+ *   @channelHandle : channel handle
+ *
+ * RETURN     : a channel obj if found, NULL if not found
+ *==========================================================================*/
+QCameraChannel *QCamera2HardwareInterface::getChannelByHandle(uint32_t channelHandle)
+{
+    for(int i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
+        if (m_channels[i] != NULL &&
+            m_channels[i]->getMyHandle() == channelHandle) {
+            return m_channels[i];
+        }
+    }
+
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : processFaceDetectionResult
+ *
+ * DESCRIPTION: process face detection result
+ *
+ * PARAMETERS :
+ *   @fd_data : ptr to face detection result struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processFaceDetectionResult(cam_face_detection_data_t *fd_data)
+{
+    if (!mParameters.isFaceDetectionEnabled()) {
+        ALOGD("%s: FaceDetection not enabled, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    if ((NULL == mDataCb) || (msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_METADATA) == 0)) {
+        ALOGD("%s: prevew metadata msgtype not enabled, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    cam_dimension_t display_dim;
+    mParameters.getStreamDimension(CAM_STREAM_TYPE_PREVIEW, display_dim);
+    if (display_dim.width <= 0 || display_dim.height <= 0) {
+        ALOGE("%s: Invalid preview width or height (%d x %d)",
+              __func__, display_dim.width, display_dim.height);
+        return UNKNOWN_ERROR;
+    }
+
+    // process face detection result
+    size_t faceResultSize = sizeof(camera_frame_metadata_t);
+    faceResultSize += sizeof(camera_face_t) * MAX_ROI;
+    camera_memory_t *faceResultBuffer = mGetMemory(-1,
+                                                   faceResultSize,
+                                                   1,
+                                                   mCallbackCookie);
+    if ( NULL == faceResultBuffer ) {
+        ALOGE("%s: Not enough memory for face result data",
+              __func__);
+        return NO_MEMORY;
+    }
+
+    unsigned char *faceData = ( unsigned char * ) faceResultBuffer->data;
+    memset(faceData, 0, faceResultSize);
+    camera_frame_metadata_t *roiData = (camera_frame_metadata_t * ) faceData;
+    camera_face_t *faces = (camera_face_t *) ( faceData + sizeof(camera_frame_metadata_t) );
+
+    roiData->number_of_faces = fd_data->num_faces_detected;
+    roiData->faces = faces;
+    if (roiData->number_of_faces > 0) {
+        for (int i = 0; i < roiData->number_of_faces; i++) {
+            faces[i].id = fd_data->faces[i].face_id;
+            faces[i].score = fd_data->faces[i].score;
+
+            // left
+            faces[i].rect[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.left, display_dim.width, 2000, -1000);
+
+            // top
+            faces[i].rect[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.top, display_dim.height, 2000, -1000);
+
+            // right
+            faces[i].rect[2] = faces[i].rect[0] +
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.width, display_dim.width, 2000, 0);
+
+             // bottom
+            faces[i].rect[3] = faces[i].rect[1] +
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].face_boundary.height, display_dim.height, 2000, 0);
+
+            // Center of left eye
+            faces[i].left_eye[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].left_eye_center.x, display_dim.width, 2000, -1000);
+
+            faces[i].left_eye[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].left_eye_center.y, display_dim.height, 2000, -1000);
+
+            // Center of right eye
+            faces[i].right_eye[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].right_eye_center.x, display_dim.width, 2000, -1000);
+
+            faces[i].right_eye[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].right_eye_center.y, display_dim.height, 2000, -1000);
+
+            // Center of mouth
+            faces[i].mouth[0] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].mouth_center.x, display_dim.width, 2000, -1000);
+
+            faces[i].mouth[1] =
+                MAP_TO_DRIVER_COORDINATE(fd_data->faces[i].mouth_center.y, display_dim.height, 2000, -1000);
+
+#if 0
+            faces[i].smile_degree = fd_data->faces[i].smile_degree;
+            faces[i].smile_score = fd_data->faces[i].smile_confidence;
+            faces[i].blink_detected = fd_data->faces[i].blink_detected;
+            faces[i].face_recognised = fd_data->faces[i].face_recognised;
+            faces[i].gaze_angle = fd_data->faces[i].gaze_angle;
+
+            // upscale by 2 to recover from demaen downscaling
+            faces[i].updown_dir = fd_data->faces[i].updown_dir * 2;
+            faces[i].leftright_dir = fd_data->faces[i].leftright_dir * 2;
+            faces[i].roll_dir = fd_data->faces[i].roll_dir * 2;
+
+            faces[i].leye_blink = fd_data->faces[i].left_blink;
+            faces[i].reye_blink = fd_data->faces[i].right_blink;
+            faces[i].left_right_gaze = fd_data->faces[i].left_right_gaze;
+            faces[i].top_bottom_gaze = fd_data->faces[i].top_bottom_gaze;
+#endif
+
+        }
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+    cbArg.msg_type = CAMERA_MSG_PREVIEW_METADATA;
+    cbArg.data = faceResultBuffer;
+    cbArg.metadata = roiData;
+    cbArg.user_data = faceResultBuffer;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseCameraMemory;
+    m_cbNotifier.notifyCallback(cbArg);
+
+    return NO_ERROR;
+}
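+
+/*
+ * Editor's note (illustrative sketch, not part of this patch): face rectangles
+ * reported to the framework must be in the Android camera coordinate space
+ * [-1000, 1000]. Assuming MAP_TO_DRIVER_COORDINATE(val, base, scale, offset)
+ * expands to (val * scale / base + offset), a face whose left edge sits at
+ * pixel 320 of a 1280-pixel-wide preview maps to:
+ *
+ *     left = 320 * 2000 / 1280 + (-1000) = 500 - 1000 = -500
+ *
+ * i.e. one quarter of the way across the frame in the normalized space. The
+ * width/height terms use offset 0 because they are deltas added onto the
+ * already-offset left/top values.
+ */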
+
+/*===========================================================================
+ * FUNCTION   : releaseCameraMemory
+ *
+ * DESCRIPTION: releases camera memory objects
+ *
+ * PARAMETERS :
+ *   @data    : buffer to be released
+ *   @cookie  : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::releaseCameraMemory(void *data, void */*cookie*/)
+{
+    camera_memory_t *mem = ( camera_memory_t * ) data;
+    if ( NULL != mem ) {
+        mem->release(mem);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : returnStreamBuffer
+ *
+ * DESCRIPTION: returns a stream buffer back to its stream
+ *
+ * PARAMETERS :
+ *   @data    : buffer to be released
+ *   @cookie  : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::returnStreamBuffer(void *data, void *cookie)
+{
+    QCameraStream *stream = ( QCameraStream * ) cookie;
+    int idx = ( int ) data;
+    if ( NULL != stream ) {
+        stream->bufDone(idx);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : processHistogramStats
+ *
+ * DESCRIPTION: process histogram stats
+ *
+ * PARAMETERS :
+ *   @hist_data : ptr to histogram stats struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::processHistogramStats(cam_hist_stats_t &/*stats_data*/)
+{
+    if (!mParameters.isHistogramEnabled()) {
+        ALOGD("%s: Histogram not enabled, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    camera_memory_t *histBuffer = mGetMemory(-1,
+                                             sizeof(cam_histogram_data_t),
+                                             1,
+                                             mCallbackCookie);
+    if ( NULL == histBuffer ) {
+        ALOGE("%s: Not enough memory for histogram data",
+              __func__);
+        return NO_MEMORY;
+    }
+
+    cam_histogram_data_t *pHistData = (cam_histogram_data_t *)histBuffer->data;
+    if (pHistData == NULL) {
+        ALOGE("%s: memory data ptr is NULL", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateThermalLevel
+ *
+ * DESCRIPTION: update thermal level depending on thermal events
+ *
+ * PARAMETERS :
+ *   @level   : thermal level
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateThermalLevel(
+            qcamera_thermal_level_enum_t level)
+{
+    int ret = NO_ERROR;
+    cam_fps_range_t adjustedRange;
+    int minFPS, maxFPS;
+    qcamera_thermal_mode thermalMode = mParameters.getThermalMode();
+    enum msm_vfe_frame_skip_pattern skipPattern;
+
+    mParameters.getPreviewFpsRange(&minFPS, &maxFPS);
+
+    switch(level) {
+    case QCAMERA_THERMAL_NO_ADJUSTMENT:
+        {
+            adjustedRange.min_fps = minFPS / 1000.0f;
+            adjustedRange.max_fps = maxFPS / 1000.0f;
+            skipPattern = NO_SKIP;
+        }
+        break;
+    case QCAMERA_THERMAL_SLIGHT_ADJUSTMENT:
+        {
+            adjustedRange.min_fps = (minFPS / 2) / 1000.0f;
+            adjustedRange.max_fps = (maxFPS / 2) / 1000.0f;
+            if ( adjustedRange.min_fps < 1 ) {
+                adjustedRange.min_fps = 1;
+            }
+            if ( adjustedRange.max_fps < 1 ) {
+                adjustedRange.max_fps = 1;
+            }
+            skipPattern = EVERY_2FRAME;
+        }
+        break;
+    case QCAMERA_THERMAL_BIG_ADJUSTMENT:
+        {
+            adjustedRange.min_fps = (minFPS / 4) / 1000.0f;
+            adjustedRange.max_fps = (maxFPS / 4) / 1000.0f;
+            if ( adjustedRange.min_fps < 1 ) {
+                adjustedRange.min_fps = 1;
+            }
+            if ( adjustedRange.max_fps < 1 ) {
+                adjustedRange.max_fps = 1;
+            }
+            skipPattern = EVERY_4FRAME;
+        }
+        break;
+    case QCAMERA_THERMAL_SHUTDOWN:
+        {
+            // Stop Preview?
+            // Set lowest min FPS for now
+            adjustedRange.min_fps = minFPS/1000.0f;
+            adjustedRange.max_fps = minFPS/1000.0f;
+            for ( int i = 0 ; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt ; i++ ) {
+                if ( gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps < adjustedRange.min_fps ) {
+                    adjustedRange.min_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
+                    adjustedRange.max_fps = adjustedRange.min_fps;
+                }
+            }
+            skipPattern = MAX_SKIP;
+        }
+        break;
+    default:
+        {
+            ALOGE("%s: Invalid thermal level %d", __func__, level);
+            return BAD_VALUE;
+        }
+        break;
+    }
+
+    ALOGI("%s: Thermal level %d, FPS range [%3.2f,%3.2f], frameskip %d",
+          __func__,
+          level,
+          adjustedRange.min_fps,
+          adjustedRange.max_fps,
+          skipPattern);
+
+    if (thermalMode == QCAMERA_THERMAL_ADJUST_FPS)
+        ret = mParameters.adjustPreviewFpsRange(&adjustedRange);
+    else if (thermalMode == QCAMERA_THERMAL_ADJUST_FRAMESKIP)
+        ret = mParameters.setFrameSkip(skipPattern);
+    else
+        ALOGE("%s: Incorrect thermal mode %d", __func__, thermalMode);
+
+    return ret;
+
+}
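+
+/*
+ * Editor's note (illustrative sketch, not part of this patch): FPS values from
+ * getPreviewFpsRange() follow the camera parameter convention of frames per
+ * second x 1000, hence the divisions by 1000.0f above. For a preview range of
+ * [7500, 30000] the levels work out to:
+ *
+ *     QCAMERA_THERMAL_NO_ADJUSTMENT     -> [7.5, 30.0] fps,  NO_SKIP
+ *     QCAMERA_THERMAL_SLIGHT_ADJUSTMENT -> [3.75, 15.0] fps, EVERY_2FRAME
+ *     QCAMERA_THERMAL_BIG_ADJUSTMENT    -> [1.875, 7.5] fps, EVERY_4FRAME
+ *
+ * Whether the FPS range or the skip pattern is actually applied depends on
+ * the thermal mode returned by getThermalMode().
+ */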
+
+/*===========================================================================
+ * FUNCTION   : updateParameters
+ *
+ * DESCRIPTION: update parameters
+ *
+ * PARAMETERS :
+ *   @parms       : input parameters string
+ *   @needRestart : output, flag to indicate if preview restart is needed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera2HardwareInterface::updateParameters(const char *parms, bool &needRestart)
+{
+    String8 str = String8(parms);
+    QCameraParameters param(str);
+    return mParameters.updateParameters(param, needRestart);
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParameterChanges
+ *
+ * DESCRIPTION: commit parameter changes to the backend to take effect
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ * NOTE       : This function must be called after updateParameters.
+ *              Otherwise, no change will be passed to backend to take effect.
+ *==========================================================================*/
+int QCamera2HardwareInterface::commitParameterChanges()
+{
+    int rc = mParameters.commitParameters();
+    if (rc == NO_ERROR) {
+        // update number of snapshot based on committed parameters setting
+        rc = mParameters.setNumOfSnapshot();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : needDebugFps
+ *
+ * DESCRIPTION: if fps log info needs to be printed out
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: need print out fps log
+ *              false: no need to print out fps log
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needDebugFps()
+{
+    return mParameters.isFpsDebugEnabled();
+}
+
+/*===========================================================================
+ * FUNCTION   : isCACEnabled
+ *
+ * DESCRIPTION: if CAC is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: enabled
+ *              false: disabled
+ *==========================================================================*/
+bool QCamera2HardwareInterface::isCACEnabled()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.feature.cac", prop, "0");
+    int enableCAC = atoi(prop);
+    return enableCAC == 1;
+}
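+
+/*
+ * Editor's note (usage example, not part of this patch): because CAC is gated
+ * on an Android system property, it can be toggled at runtime for debugging
+ * without rebuilding the HAL, e.g.:
+ *
+ *     adb shell setprop persist.camera.feature.cac 1   # enable CAC reprocess
+ *     adb shell setprop persist.camera.feature.cac 0   # back to the default
+ *
+ * The property is re-read on every call to isCACEnabled(), so the change takes
+ * effect on the next capture that consults it.
+ */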
+
+/*===========================================================================
+ * FUNCTION   : needReprocess
+ *
+ * DESCRIPTION: if reprocess is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needReprocess()
+{
+    if (!mParameters.isJpegPictureFormat()) {
+        // RAW image, no need to reprocess
+        return false;
+    }
+
+    if (((gCamCapability[mCameraId]->min_required_pp_mask > 0) ||
+         mParameters.isWNREnabled() || isCACEnabled())) {
+        // TODO: add for ZSL HDR later
+        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
+        return true;
+    }
+
+    return needRotationReprocess();
+}
+
+/*===========================================================================
+ * FUNCTION   : needRotationReprocess
+ *
+ * DESCRIPTION: if rotation needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera2HardwareInterface::needRotationReprocess()
+{
+    if (!mParameters.isJpegPictureFormat()) {
+        // RAW image, no need to reprocess
+        return false;
+    }
+
+    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 &&
+        mParameters.getJpegRotation() > 0) {
+        // current rotation is not zero, and pp has the capability to process rotation
+        ALOGD("%s: need do reprocess for rotation", __func__);
+        return true;
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : getThumbnailSize
+ *
+ * DESCRIPTION: get user set thumbnail size
+ *
+ * PARAMETERS :
+ *   @dim     : output of thumbnail dimension
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera2HardwareInterface::getThumbnailSize(cam_dimension_t &dim)
+{
+    mParameters.getThumbnailSize(&dim.width, &dim.height);
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegQuality
+ *
+ * DESCRIPTION: get user set jpeg quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : jpeg quality setting
+ *==========================================================================*/
+int QCamera2HardwareInterface::getJpegQuality()
+{
+    return mParameters.getJpegQuality();
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegRotation
+ *
+ * DESCRIPTION: get rotation information to be passed into jpeg encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation information
+ *==========================================================================*/
+int QCamera2HardwareInterface::getJpegRotation()
+{
+    return mParameters.getJpegRotation();
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifData
+ *
+ * DESCRIPTION: get exif data to be passed into jpeg encoding
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : exif data from user setting and GPS
+ *==========================================================================*/
+QCameraExif *QCamera2HardwareInterface::getExifData()
+{
+    QCameraExif *exif = new QCameraExif();
+    if (exif == NULL) {
+        ALOGE("%s: No memory for QCameraExif", __func__);
+        return NULL;
+    }
+
+    int32_t rc = NO_ERROR;
+    uint32_t count = 0;
+
+    // add exif entries
+    char dateTime[20];
+    memset(dateTime, 0, sizeof(dateTime));
+    count = 20;
+    rc = mParameters.getExifDateTime(dateTime, count);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
+                       EXIF_ASCII,
+                       count,
+                       (void *)dateTime);
+    } else {
+        ALOGE("%s: getExifDateTime failed", __func__);
+    }
+
+    rat_t focalLength;
+    rc = mParameters.getExifFocalLength(&focalLength);
+    if (rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
+                       EXIF_RATIONAL,
+                       1,
+                       (void *)&(focalLength));
+    } else {
+        ALOGE("%s: getExifFocalLength failed", __func__);
+    }
+
+    uint16_t isoSpeed = mParameters.getExifIsoSpeed();
+    exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
+                   EXIF_SHORT,
+                   1,
+                   (void *)&(isoSpeed));
+
+    char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+    count = 0;
+    rc = mParameters.getExifGpsProcessingMethod(gpsProcessingMethod, count);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
+                       EXIF_ASCII,
+                       count,
+                       (void *)gpsProcessingMethod);
+    } else {
+        ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
+    }
+
+    rat_t latitude[3];
+    char latRef[2];
+    rc = mParameters.getExifLatitude(latitude, latRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_LATITUDE,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)latitude);
+        exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
+                       EXIF_ASCII,
+                       2,
+                       (void *)latRef);
+    } else {
+        ALOGE("%s: getExifLatitude failed", __func__);
+    }
+
+    rat_t longitude[3];
+    char lonRef[2];
+    rc = mParameters.getExifLongitude(longitude, lonRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)longitude);
+
+        exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
+                       EXIF_ASCII,
+                       2,
+                       (void *)lonRef);
+    } else {
+        ALOGE("%s: getExifLongitude failed", __func__);
+    }
+
+    rat_t altitude;
+    char altRef;
+    rc = mParameters.getExifAltitude(&altitude, &altRef);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
+                       EXIF_RATIONAL,
+                       1,
+                       (void *)&(altitude));
+
+        exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
+                       EXIF_BYTE,
+                       1,
+                       (void *)&altRef);
+    } else {
+        ALOGE("%s: getExifAltitude failed", __func__);
+    }
+
+    char gpsDateStamp[20];
+    rat_t gpsTimeStamp[3];
+    rc = mParameters.getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp);
+    if(rc == NO_ERROR) {
+        exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
+                       EXIF_ASCII,
+                       strlen(gpsDateStamp) + 1,
+                       (void *)gpsDateStamp);
+
+        exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
+                       EXIF_RATIONAL,
+                       3,
+                       (void *)gpsTimeStamp);
+    } else {
+        ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MAKE,
+                       EXIF_ASCII,
+                       strlen(value) + 1,
+                       (void *)value);
+    } else {
+        ALOGE("%s: getExifMaker failed", __func__);
+    }
+
+    if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MODEL,
+                       EXIF_ASCII,
+                       strlen(value) + 1,
+                       (void *)value);
+    } else {
+        ALOGE("%s: getExifModel failed", __func__);
+    }
+
+    return exif;
+}
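+
+/*
+ * Editor's note (not part of this patch): for EXIF_ASCII entries the count
+ * passed to addEntry() is the byte count including the terminating NUL, which
+ * is why the string entries above use strlen(value) + 1. For example, with
+ * ro.product.model reporting "Nexus 5":
+ *
+ *     strlen("Nexus 5") + 1 == 8   // 7 characters + NUL
+ *
+ * EXIF_RATIONAL entries instead pass the number of rat_t elements: 3 for the
+ * degrees/minutes/seconds GPS values, 1 for scalars such as focal length.
+ */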
+
+/*===========================================================================
+ * FUNCTION   : setHistogram
+ *
+ * DESCRIPTION: set if histogram should be enabled
+ *
+ * PARAMETERS :
+ *   @histogram_en : bool flag if histogram should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setHistogram(bool histogram_en)
+{
+    return mParameters.setHistogram(histogram_en);
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetection
+ *
+ * DESCRIPTION: set if face detection should be enabled
+ *
+ * PARAMETERS :
+ *   @enabled : bool flag if face detection should be enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::setFaceDetection(bool enabled)
+{
+    return mParameters.setFaceDetection(enabled);
+}
+
+/*===========================================================================
+ * FUNCTION   : prepareHardwareForSnapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot, such as LED
+ *
+ * PARAMETERS :
+ *   @afNeeded: flag indicating if Auto Focus needs to be done during preparation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera2HardwareInterface::prepareHardwareForSnapshot(int32_t afNeeded)
+{
+    ALOGD("[KPI Perf] %s: Prepare hardware such as LED",__func__);
+    return mCameraHandle->ops->prepare_snapshot(mCameraHandle->camera_handle,
+                                                afNeeded);
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCamera2HWI.h b/camera/QCamera2/HAL/QCamera2HWI.h
new file mode 100644
index 0000000..cfb413d
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2HWI.h
@@ -0,0 +1,436 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HARDWAREINTERFACE_H__
+#define __QCAMERA2HARDWAREINTERFACE_H__
+
+#include <hardware/camera.h>
+#include <hardware/power.h>
+#include <utils/Log.h>
+#include <QCameraParameters.h>
+
+#include "QCameraQueue.h"
+#include "QCameraCmdThread.h"
+#include "QCameraChannel.h"
+#include "QCameraStream.h"
+#include "QCameraStateMachine.h"
+#include "QCameraAllocator.h"
+#include "QCameraPostProc.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+
+#if DISABLE_DEBUG_LOG
+
+inline void __null_log(int, const char *, const char *, ...) {}
+
+#ifdef ALOGD
+#undef ALOGD
+#define ALOGD(...) do { __null_log(0, LOG_TAG,__VA_ARGS__); } while (0)
+#endif
+
+#ifdef ALOGI
+#undef ALOGI
+#define ALOGI(...) do { __null_log(0, LOG_TAG,__VA_ARGS__); } while (0)
+#endif
+
+#endif
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+    QCAMERA_CH_TYPE_ZSL,
+    QCAMERA_CH_TYPE_CAPTURE,
+    QCAMERA_CH_TYPE_PREVIEW,
+    QCAMERA_CH_TYPE_VIDEO,
+    QCAMERA_CH_TYPE_SNAPSHOT,
+    QCAMERA_CH_TYPE_RAW,
+    QCAMERA_CH_TYPE_METADATA,
+    QCAMERA_CH_TYPE_MAX
+} qcamera_ch_type_enum_t;
+
+typedef struct {
+    int32_t msg_type;
+    int32_t ext1;
+    int32_t ext2;
+} qcamera_evt_argm_t;
+
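+// Frame-dump selection flags; they occupy the low byte of the
+// persist.camera.dumpimg property, which is decoded in dumpFrameToFile().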
+#define QCAMERA_DUMP_FRM_PREVIEW    1
+#define QCAMERA_DUMP_FRM_VIDEO      (1<<1)
+#define QCAMERA_DUMP_FRM_SNAPSHOT   (1<<2)
+#define QCAMERA_DUMP_FRM_THUMBNAIL  (1<<3)
+#define QCAMERA_DUMP_FRM_RAW        (1<<4)
+#define QCAMERA_DUMP_FRM_JPEG       (1<<5)
+
+#define QCAMERA_DUMP_FRM_MASK_ALL    0x000000ff
+
+#define QCAMERA_ION_USE_CACHE   true
+#define QCAMERA_ION_USE_NOCACHE false
+
+typedef enum {
+    QCAMERA_NOTIFY_CALLBACK,
+    QCAMERA_DATA_CALLBACK,
+    QCAMERA_DATA_TIMESTAMP_CALLBACK,
+    QCAMERA_DATA_SNAPSHOT_CALLBACK
+} qcamera_callback_type_m;
+
+typedef void (*camera_release_callback)(void *user_data, void *cookie);
+
+typedef struct {
+    qcamera_callback_type_m  cb_type;    // event type
+    int32_t                  msg_type;   // msg type
+    int32_t                  ext1;       // extended parameter
+    int32_t                  ext2;       // extended parameter
+    camera_memory_t *        data;       // ptr to data memory struct
+    unsigned int             index;      // index of the buf in the whole buffer
+    int64_t                  timestamp;  // buffer timestamp
+    camera_frame_metadata_t *metadata;   // meta data
+    void                    *user_data;  // any data needs to be released after callback
+    void                    *cookie;     // release callback cookie
+    camera_release_callback  release_cb; // release callback
+} qcamera_callback_argm_t;
+
+class QCameraCbNotifier {
+public:
+    QCameraCbNotifier(QCamera2HardwareInterface *parent) :
+                          mNotifyCb (NULL),
+                          mDataCb (NULL),
+                          mDataCbTimestamp (NULL),
+                          mCallbackCookie (NULL),
+                          mParent (parent),
+                          mDataQ(releaseNotifications, this) {}
+
+    virtual ~QCameraCbNotifier();
+
+    virtual int32_t notifyCallback(qcamera_callback_argm_t &cbArgs);
+    virtual void setCallbacks(camera_notify_callback notifyCb,
+                              camera_data_callback dataCb,
+                              camera_data_timestamp_callback dataCbTimestamp,
+                              void *callbackCookie);
+    virtual int32_t startSnapshots();
+    virtual void stopSnapshots();
+    static void * cbNotifyRoutine(void * data);
+    static void releaseNotifications(void *data, void *user_data);
+    static bool matchSnapshotNotifications(void *data, void *user_data);
+private:
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    void                          *mCallbackCookie;
+    QCamera2HardwareInterface     *mParent;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh;
+};
+class QCamera2HardwareInterface : public QCameraAllocator,
+                                    public QCameraThermalCallback
+{
+public:
+    /* static variable and functions accessed by camera service */
+    static camera_device_ops_t mCameraOps;
+
+    static int set_preview_window(struct camera_device *,
+        struct preview_stream_ops *window);
+    static void set_CallBacks(struct camera_device *,
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user);
+    static void enable_msg_type(struct camera_device *, int32_t msg_type);
+    static void disable_msg_type(struct camera_device *, int32_t msg_type);
+    static int msg_type_enabled(struct camera_device *, int32_t msg_type);
+    static int start_preview(struct camera_device *);
+    static void stop_preview(struct camera_device *);
+    static int preview_enabled(struct camera_device *);
+    static int store_meta_data_in_buffers(struct camera_device *, int enable);
+    static int start_recording(struct camera_device *);
+    static void stop_recording(struct camera_device *);
+    static int recording_enabled(struct camera_device *);
+    static void release_recording_frame(struct camera_device *, const void *opaque);
+    static int auto_focus(struct camera_device *);
+    static int cancel_auto_focus(struct camera_device *);
+    static int take_picture(struct camera_device *);
+    static int cancel_picture(struct camera_device *);
+    static int set_parameters(struct camera_device *, const char *parms);
+    static char* get_parameters(struct camera_device *);
+    static void put_parameters(struct camera_device *, char *);
+    static int send_command(struct camera_device *,
+              int32_t cmd, int32_t arg1, int32_t arg2);
+    static void release(struct camera_device *);
+    static int dump(struct camera_device *, int fd);
+    static int close_camera_device(hw_device_t *);
+
+    static int register_face_image(struct camera_device *,
+                                   void *img_ptr,
+                                   cam_pp_offline_src_config_t *config);
+public:
+    QCamera2HardwareInterface(int cameraId);
+    virtual ~QCamera2HardwareInterface();
+    int openCamera(struct hw_device_t **hw_device);
+
+    static int getCapabilities(int cameraId, struct camera_info *info);
+    static int initCapabilities(int cameraId);
+
+    // Implementation of QCameraAllocator
+    virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+                                             int size,
+                                             uint8_t &bufferCnt);
+    virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type);
+
+    // Implementation of QCameraThermalCallback
+    virtual int thermalEvtHandle(qcamera_thermal_level_enum_t level,
+            void *userdata, void *data);
+
+    friend class QCameraStateMachine;
+    friend class QCameraPostProcessor;
+    friend class QCameraCbNotifier;
+
+private:
+    int setPreviewWindow(struct preview_stream_ops *window);
+    int setCallBacks(
+        camera_notify_callback notify_cb,
+        camera_data_callback data_cb,
+        camera_data_timestamp_callback data_cb_timestamp,
+        camera_request_memory get_memory,
+        void *user);
+    int enableMsgType(int32_t msg_type);
+    int disableMsgType(int32_t msg_type);
+    int msgTypeEnabled(int32_t msg_type);
+    int msgTypeEnabledWithLock(int32_t msg_type);
+    int startPreview();
+    int stopPreview();
+    int storeMetaDataInBuffers(int enable);
+    int startRecording();
+    int stopRecording();
+    int releaseRecordingFrame(const void *opaque);
+    int autoFocus();
+    int cancelAutoFocus();
+    int takePicture();
+    int cancelPicture();
+    int takeLiveSnapshot();
+    int cancelLiveSnapshot();
+    char* getParameters();
+    int putParameters(char *);
+    int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+    int release();
+    int dump(int fd);
+    int registerFaceImage(void *img_ptr,
+                          cam_pp_offline_src_config_t *config,
+                          int32_t &faceID);
+
+    int openCamera();
+    int closeCamera();
+
+    int processAPI(qcamera_sm_evt_enum_t api, void *api_payload);
+    int processEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+    int processSyncEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+    void lockAPI();
+    void waitAPIResult(qcamera_sm_evt_enum_t api_evt);
+    void unlockAPI();
+    void signalAPIResult(qcamera_api_result_t *result);
+    void signalEvtResult(qcamera_api_result_t *result);
+
+    int updateThermalLevel(qcamera_thermal_level_enum_t level);
+
+    // update entries to set parameters and check if restart is needed
+    int updateParameters(const char *parms, bool &needRestart);
+    // send request to server to set parameters
+    int commitParameterChanges();
+
+    bool needDebugFps();
+    bool isCACEnabled();
+    bool needReprocess();
+    bool needRotationReprocess();
+    void debugShowVideoFPS();
+    void debugShowPreviewFPS();
+    void dumpFrameToFile(const void *data, uint32_t size,
+                         int index, int dump_type);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    void playShutter();
+    void getThumbnailSize(cam_dimension_t &dim);
+    int getJpegQuality();
+    int getJpegRotation();
+    QCameraExif *getExifData();
+
+    int32_t processAutoFocusEvent(cam_auto_focus_data_t &focus_data);
+    int32_t processZoomEvent(cam_crop_data_t &crop_info);
+    int32_t processPrepSnapshotDoneEvent(cam_prep_snapshot_state_t prep_snapshot_state);
+    int32_t processJpegNotify(qcamera_jpeg_evt_payload_t *jpeg_job);
+
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    int32_t sendDataNotify(int32_t msg_type,
+                           camera_memory_t *data,
+                           uint8_t index,
+                           camera_frame_metadata_t *metadata);
+
+    int32_t addChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t startChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t stopChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t delChannel(qcamera_ch_type_enum_t ch_type);
+    int32_t addPreviewChannel();
+    int32_t addSnapshotChannel();
+    int32_t addVideoChannel();
+    int32_t addZSLChannel();
+    int32_t addCaptureChannel();
+    int32_t addRawChannel();
+    int32_t addMetaDataChannel();
+    QCameraReprocessChannel *addOnlineReprocChannel(QCameraChannel *pInputChannel);
+    QCameraReprocessChannel *addOfflineReprocChannel(
+                                                cam_pp_offline_src_config_t &img_config,
+                                                cam_pp_feature_config_t &pp_feature,
+                                                stream_cb_routine stream_cb,
+                                                void *userdata);
+    int32_t addStreamToChannel(QCameraChannel *pChannel,
+                               cam_stream_type_t streamType,
+                               stream_cb_routine streamCB,
+                               void *userData);
+    int32_t preparePreview();
+    void unpreparePreview();
+    QCameraChannel *getChannelByHandle(uint32_t channelHandle);
+    mm_camera_buf_def_t *getSnapshotFrame(mm_camera_super_buf_t *recvd_frame);
+    int32_t processFaceDetectionResult(cam_face_detection_data_t *fd_data);
+    int32_t processHistogramStats(cam_hist_stats_t &stats_data);
+    int32_t setHistogram(bool histogram_en);
+    int32_t setFaceDetection(bool enabled);
+    int32_t prepareHardwareForSnapshot(int32_t afNeeded);
+    bool needProcessPreviewFrame() {return m_stateMachine.isPreviewRunning();};
+    bool isNoDisplayMode() {return mParameters.isNoDisplayMode();};
+    bool isZSLMode() {return mParameters.isZSLMode();};
+    uint8_t numOfSnapshotsExpected() {return mParameters.getNumOfSnapshots();};
+    uint8_t getBufNumRequired(cam_stream_type_t stream_type);
+
+    static void camEvtHandle(uint32_t camera_handle,
+                          mm_camera_event_t *evt,
+                          void *user_data);
+    static void jpegEvtHandle(jpeg_job_status_t status,
+                              uint32_t client_hdl,
+                              uint32_t jobId,
+                              mm_jpeg_output_t *p_buf,
+                              void *userdata);
+
+    static void *evtNotifyRoutine(void *data);
+
+    // functions for different data notify cb
+    static void zsl_channel_cb(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                           void *userdata);
+    static void postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                            void *userdata);
+    static void nodisplay_preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                                    QCameraStream *stream,
+                                                    void *userdata);
+    static void preview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                          QCameraStream *stream,
+                                          void *userdata);
+    static void postview_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void video_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                        QCameraStream *stream,
+                                        void *userdata);
+    static void snapshot_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void raw_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                      QCameraStream *stream,
+                                      void *userdata);
+    static void metadata_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                           QCameraStream *stream,
+                                           void *userdata);
+    static void reprocess_stream_cb_routine(mm_camera_super_buf_t *frame,
+                                            QCameraStream *stream,
+                                            void *userdata);
+
+    static void releaseCameraMemory(void *data, void *cookie);
+    static void returnStreamBuffer(void *data, void *cookie);
+
+private:
+    camera_device_t   mCameraDevice;
+    uint8_t           mCameraId;
+    mm_camera_vtbl_t *mCameraHandle;
+    bool mCameraOpened;
+
+    preview_stream_ops_t *mPreviewWindow;
+    QCameraParameters mParameters;
+    int32_t               mMsgEnabled;
+    int                   mStoreMetaDataInFrame;
+
+    camera_notify_callback         mNotifyCb;
+    camera_data_callback           mDataCb;
+    camera_data_timestamp_callback mDataCbTimestamp;
+    camera_request_memory          mGetMemory;
+    void                          *mCallbackCookie;
+
+    QCameraStateMachine m_stateMachine;   // state machine
+    QCameraPostProcessor m_postprocessor; // post processor
+    QCameraThermalAdapter &m_thermalAdapter;
+    QCameraCbNotifier m_cbNotifier;
+    pthread_mutex_t m_lock;
+    pthread_cond_t m_cond;
+    qcamera_api_result_t m_apiResult;
+
+    pthread_mutex_t m_evtLock;
+    pthread_cond_t m_evtCond;
+    qcamera_api_result_t m_evtResult;
+
+    QCameraChannel *m_channels[QCAMERA_CH_TYPE_MAX]; // array holding channel ptr
+
+    bool m_bShutterSoundPlayed;         // whether the shutter sound has been played
+
+    // if auto focus is running, in other words, when auto_focus is called from service,
+    // and before any focus callback/cancel_focus happens. This flag is not an indication
+    // of whether the lens is moving or not.
+    bool m_bAutoFocusRunning;
+    cam_autofocus_state_t m_currentFocusState;
+
+    // Whether start_zsl_snapshot has been called to notify the camera daemon about a ZSL snapshot
+    bool m_bStartZSLSnapshotCalled;
+
+    power_module_t *m_pPowerModule;   // power module
+
+    int mDumpFrmCnt;  // frame dump count
+    int mDumpSkipCnt; // frame skip count
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA2HARDWAREINTERFACE_H__ */
diff --git a/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp b/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
new file mode 100644
index 0000000..8277414
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2HWICallbacks.cpp
@@ -0,0 +1,1289 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera2HWI"
+
+#include <fcntl.h>
+#include <utils/Errors.h>
+#include <utils/Timers.h>
+#include "QCamera2HWI.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : zsl_channel_cb
+ *
+ * DESCRIPTION: helper function to handle ZSL superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation is needed for recvd_frame, it's our responsibility
+ *             to save a copy of it for later use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::zsl_channel_cb(mm_camera_super_buf_t *recvd_frame,
+                                               void *userdata)
+{
+    ALOGD("[KPI Perf] %s: E",__func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+       ALOGE("%s: camera obj not valid", __func__);
+       return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_ZSL];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != recvd_frame->ch_id) {
+        ALOGE("%s: ZSL channel doesn't exist, return here", __func__);
+        return;
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    // send to postprocessor
+    pme->m_postprocessor.processData(frame);
+
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : capture_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation is needed for recvd_frame, it's our responsibility
+ *             to save a copy of it for later use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::capture_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                           void *userdata)
+{
+    ALOGD("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(recvd_frame);
+        return;
+    }
+
+    QCameraChannel *pChannel = pme->m_channels[QCAMERA_CH_TYPE_CAPTURE];
+    if (pChannel == NULL ||
+        pChannel->getMyHandle() != recvd_frame->ch_id) {
+        ALOGE("%s: Capture channel doesn't exist, return here", __func__);
+        return;
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        pChannel->bufDone(recvd_frame);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    // send to postprocessor
+    pme->m_postprocessor.processData(frame);
+
+/* START of test register face image for face authentication */
+#ifdef QCOM_TEST_FACE_REGISTER_FACE
+    static uint8_t bRunFaceReg = 1;
+
+    if (bRunFaceReg > 0) {
+        // find snapshot frame
+        QCameraStream *main_stream = NULL;
+        mm_camera_buf_def_t *main_frame = NULL;
+        for (int i = 0; i < recvd_frame->num_bufs; i++) {
+            QCameraStream *pStream =
+                pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+            if (pStream != NULL) {
+                if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                    pStream->isTypeOf(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT)) {
+                    main_stream = pStream;
+                    main_frame = recvd_frame->bufs[i];
+                    break;
+                }
+            }
+        }
+        if (main_stream != NULL && main_frame != NULL) {
+            int32_t faceId = -1;
+            cam_pp_offline_src_config_t config;
+            memset(&config, 0, sizeof(cam_pp_offline_src_config_t));
+            config.num_of_bufs = 1;
+            main_stream->getFormat(config.input_fmt);
+            main_stream->getFrameDimension(config.input_dim);
+            main_stream->getFrameOffset(config.input_buf_planes.plane_info);
+            ALOGD("DEBUG: registerFaceImage E");
+            int32_t rc = pme->registerFaceImage(main_frame->buffer, &config, faceId);
+            ALOGD("DEBUG: registerFaceImage X, ret=%d, faceId=%d", rc, faceId);
+            bRunFaceReg = 0;
+        }
+    }
+
+#endif
+/* END of test register face image for face authentication */
+
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : postproc_channel_cb_routine
+ *
+ * DESCRIPTION: helper function to handle postprocess superbuf callback directly from
+ *              mm-camera-interface
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received super buffer
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : recvd_frame will be released after this call by caller, so if
+ *             async operation is needed for recvd_frame, it's our responsibility
+ *             to save a copy of it for later use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                                            void *userdata)
+{
+    ALOGD("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != recvd_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(recvd_frame);
+        return;
+    }
+
+    // save a copy for the superbuf
+    mm_camera_super_buf_t* frame =
+               (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+        return;
+    }
+    *frame = *recvd_frame;
+
+    // send to postprocessor
+    pme->m_postprocessor.processPPData(frame);
+
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ *              normal case with display.
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. The new
+ *             preview frame will be sent to display, and an older frame
+ *             will be dequeued from display and needs to be returned back
+ *             to kernel for future use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                          QCameraStream * stream,
+                                                          void *userdata)
+{
+    ALOGD("[KPI Perf] %s : BEGIN", __func__);
+    int err = NO_ERROR;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+
+    if (pme == NULL) {
+        ALOGE("%s: Invalid hardware object", __func__);
+        free(super_frame);
+        return;
+    }
+    if (memory == NULL) {
+        ALOGE("%s: Invalid memory object", __func__);
+        free(super_frame);
+        return;
+    }
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        ALOGE("%s: preview frame is NULL", __func__);
+        free(super_frame);
+        return;
+    }
+
+    if (!pme->needProcessPreviewFrame()) {
+        ALOGE("%s: preview is not running, no need to process", __func__);
+        stream->bufDone(frame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+
+    int idx = frame->buf_idx;
+    pme->dumpFrameToFile(frame->buffer, frame->frame_len,
+                         frame->frame_idx, QCAMERA_DUMP_FRM_PREVIEW);
+
+    // Display the buffer.
+    int dequeuedIdx = memory->displayBuffer(idx);
+    if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
+        ALOGD("%s: Invalid dequeued buffer index %d from display",
+              __func__, dequeuedIdx);
+    } else {
+        // Return dequeued buffer back to driver
+        err = stream->bufDone(dequeuedIdx);
+        if ( err < 0) {
+            ALOGE("stream bufDone failed %d", err);
+        }
+    }
+
+    // Handle preview data callback
+    if (pme->mDataCb != NULL && pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0) {
+        camera_memory_t *previewMem = NULL;
+        camera_memory_t *data = NULL;
+        int previewBufSize;
+        cam_dimension_t preview_dim;
+        cam_format_t previewFmt;
+        stream->getFrameDimension(preview_dim);
+        stream->getFormat(previewFmt);
+
+        /* The preview buffer size in the callback should be (width * height * bytes_per_pixel).
+         * Since all preview formats we support use 12 bits per pixel, the buffer size is
+         * previewWidth * previewHeight * 3/2. We need to add a check if other formats are
+         * supported in the future. */
+        if ((previewFmt == CAM_FORMAT_YUV_420_NV21) ||
+            (previewFmt == CAM_FORMAT_YUV_420_NV12) ||
+            (previewFmt == CAM_FORMAT_YUV_420_YV12)) {
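+            // For YV12 the size is computed with 16-aligned strides: the Y
+            // plane plus one aligned half-width chroma stride * height
+            // (covering both Cb and Cr planes); the other formats use a
+            // packed width * height * 3/2 size.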
+            if (previewFmt == CAM_FORMAT_YUV_420_YV12) {
+                previewBufSize = ((preview_dim.width+15)/16) * 16 * preview_dim.height +
+                                 ((preview_dim.width/2+15)/16) * 16 * preview_dim.height;
+            } else {
+                previewBufSize = preview_dim.width * preview_dim.height * 3/2;
+            }
+            if(previewBufSize != memory->getSize(idx)) {
+                previewMem = pme->mGetMemory(memory->getFd(idx),
+                           previewBufSize, 1, pme->mCallbackCookie);
+                if (!previewMem || !previewMem->data) {
+                    ALOGE("%s: mGetMemory failed.\n", __func__);
+                } else {
+                    data = previewMem;
+                }
+            } else
+                data = memory->getMemory(idx, false);
+        } else {
+            data = memory->getMemory(idx, false);
+            ALOGE("%s: Invalid preview format, buffer size in preview callback may be wrong.", __func__);
+        }
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+        cbArg.data = data;
+        if ( previewMem ) {
+            cbArg.user_data = previewMem;
+            cbArg.release_cb = releaseCameraMemory;
+        }
+        cbArg.cookie = pme;
+        pme->m_cbNotifier.notifyCallback(cbArg);
+    }
+
+    free(super_frame);
+    ALOGD("[KPI Perf] %s : END", __func__);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : nodisplay_preview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle preview frame from preview stream in
+ *              no-display case
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::nodisplay_preview_stream_cb_routine(
+                                                          mm_camera_super_buf_t *super_frame,
+                                                          QCameraStream *stream,
+                                                          void * userdata)
+{
+    ALOGD("[KPI Perf] %s E",__func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        ALOGE("%s: preview frame is NULL", __func__);
+        free(super_frame);
+        return;
+    }
+
+    if (!pme->needProcessPreviewFrame()) {
+        ALOGD("%s: preview is not running, no need to process", __func__);
+        stream->bufDone(frame->buf_idx);
+        free(super_frame);
+        return;
+    }
+
+    if (pme->needDebugFps()) {
+        pme->debugShowPreviewFPS();
+    }
+
+    QCameraMemory *previewMemObj = (QCameraMemory *)frame->mem_info;
+    camera_memory_t *preview_mem = NULL;
+    if (previewMemObj != NULL) {
+        preview_mem = previewMemObj->getMemory(frame->buf_idx, false);
+    }
+    if (NULL != previewMemObj && NULL != preview_mem) {
+        pme->dumpFrameToFile(frame->buffer, frame->frame_len,
+                             frame->frame_idx, QCAMERA_DUMP_FRM_PREVIEW);
+
+        if (pme->needProcessPreviewFrame() &&
+            pme->mDataCb != NULL &&
+            pme->msgTypeEnabledWithLock(CAMERA_MSG_PREVIEW_FRAME) > 0 ) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_PREVIEW_FRAME;
+            cbArg.data = preview_mem;
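+            // Pass the buffer index via user_data and the stream via cookie so
+            // that returnStreamBuffer() can hand the buffer back to the stream
+            // once the callback has consumed it.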
+            int user_data = frame->buf_idx;
+            cbArg.user_data = ( void * ) user_data;
+            cbArg.cookie = stream;
+            cbArg.release_cb = returnStreamBuffer;
+            pme->m_cbNotifier.notifyCallback(cbArg);
+        } else {
+            stream->bufDone(frame->buf_idx);
+        }
+    }
+    free(super_frame);
+    ALOGD("[KPI Perf] %s X",__func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : postview_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle post frame from postview stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done.
+ *==========================================================================*/
+void QCamera2HardwareInterface::postview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                           QCameraStream *stream,
+                                                           void *userdata)
+{
+    int err = NO_ERROR;
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    QCameraGrallocMemory *memory = (QCameraGrallocMemory *)super_frame->bufs[0]->mem_info;
+
+    if (pme == NULL) {
+        ALOGE("%s: Invalid hardware object", __func__);
+        free(super_frame);
+        return;
+    }
+    if (memory == NULL) {
+        ALOGE("%s: Invalid memory object", __func__);
+        free(super_frame);
+        return;
+    }
+
+    ALOGD("[KPI Perf] %s : BEGIN", __func__);
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    if (NULL == frame) {
+        ALOGE("%s: preview frame is NULL", __func__);
+        free(super_frame);
+        return;
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)frame->mem_info;
+    if (NULL != memObj) {
+        pme->dumpFrameToFile(frame->buffer, frame->frame_len,
+                             frame->frame_idx, QCAMERA_DUMP_FRM_THUMBNAIL);
+    }
+
+    // Display the buffer.
+    int dequeuedIdx = memory->displayBuffer(frame->buf_idx);
+    if (dequeuedIdx < 0 || dequeuedIdx >= memory->getCnt()) {
+        ALOGD("%s: Invalid dequeued buffer index %d",
+              __func__, dequeuedIdx);
+        free(super_frame);
+        return;
+    }
+
+    // Return dequeued buffer back to driver
+    err = stream->bufDone(dequeuedIdx);
+    if ( err < 0) {
+        ALOGE("stream bufDone failed %d", err);
+    }
+
+    free(super_frame);
+    ALOGD("[KPI Perf] %s : END", __func__);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : video_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle video frame from video stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. video
+ *             frame will be sent to video encoder. Once video encoder is
+ *             done with the video frame, it will call another API
+ *             (release_recording_frame) to return the frame back
+ *==========================================================================*/
+void QCamera2HardwareInterface::video_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                        QCameraStream */*stream*/,
+                                                        void *userdata)
+{
+    ALOGD("[KPI Perf] %s : BEGIN", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+
+    if (pme->needDebugFps()) {
+        pme->debugShowVideoFPS();
+    }
+
+    ALOGE("%s: Stream(%d), Timestamp: %ld %ld",
+          __func__,
+          frame->stream_id,
+          frame->ts.tv_sec,
+          frame->ts.tv_nsec);
+
+    nsecs_t timeStamp = nsecs_t(frame->ts.tv_sec) * 1000000000LL + frame->ts.tv_nsec;
+    ALOGE("Send Video frame to services/encoder TimeStamp : %lld", timeStamp);
+    QCameraMemory *videoMemObj = (QCameraMemory *)frame->mem_info;
+    camera_memory_t *video_mem = NULL;
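+    // When store-meta-data-in-buffers is enabled, hand out the metadata buffer
+    // rather than the raw YUV frame for the encoder.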
+    if (NULL != videoMemObj) {
+        video_mem = videoMemObj->getMemory(frame->buf_idx, (pme->mStoreMetaDataInFrame > 0)? true : false);
+    }
+    if (NULL != videoMemObj && NULL != video_mem) {
+        pme->dumpFrameToFile(frame->buffer, frame->frame_len,
+                             frame->frame_idx, QCAMERA_DUMP_FRM_VIDEO);
+        if ((pme->mDataCbTimestamp != NULL) &&
+            pme->msgTypeEnabledWithLock(CAMERA_MSG_VIDEO_FRAME) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_TIMESTAMP_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_VIDEO_FRAME;
+            cbArg.data = video_mem;
+            cbArg.timestamp = timeStamp;
+            pme->m_cbNotifier.notifyCallback(cbArg);
+        }
+    }
+    free(super_frame);
+    ALOGD("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : snapshot_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle snapshot frame from snapshot stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. For
+ *             snapshot, it needs to be sent to the postprocessor for jpeg
+ *             encoding, so the ownership of super_frame is handed over to
+ *             the postprocessor.
+ *==========================================================================*/
+void QCamera2HardwareInterface::snapshot_stream_cb_routine(mm_camera_super_buf_t *super_frame,
+                                                           QCameraStream * /*stream*/,
+                                                           void *userdata)
+{
+    ALOGD("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    pme->m_postprocessor.processData(super_frame);
+
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : raw_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle raw dump frame from raw stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. For raw
+ *             frames, there is no need to send them to the postprocessor for
+ *             jpeg encoding. This function will play the shutter sound and
+ *             send the data callback to the upper layer. The raw frame buffer
+ *             will be returned to the kernel, and the frame freed after use.
+ *==========================================================================*/
+void QCamera2HardwareInterface::raw_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                      QCameraStream * /*stream*/,
+                                                      void * userdata)
+{
+    ALOGD("[KPI Perf] %s : BEGIN", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    pme->m_postprocessor.processRawData(super_frame);
+    ALOGD("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : metadata_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle metadata frame from metadata stream
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. Metadata
+ *             could have valid entries for face detection result or
+ *             histogram statistics information.
+ *==========================================================================*/
+void QCamera2HardwareInterface::metadata_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                           QCameraStream * stream,
+                                                           void * userdata)
+{
+    ALOGV("[KPI Perf] %s : BEGIN", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    mm_camera_buf_def_t *frame = super_frame->bufs[0];
+    cam_metadata_info_t *pMetaData = (cam_metadata_info_t *)frame->buffer;
+
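+    // A metadata frame can carry several independent payloads (face detection,
+    // histogram stats, focus, crop, prepare-snapshot-done); process each one
+    // that is flagged valid.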
+    if (pMetaData->is_faces_valid) {
+        if (pMetaData->faces_data.num_faces_detected > MAX_ROI) {
+            ALOGE("%s: Invalid number of faces %d",
+                __func__, pMetaData->faces_data.num_faces_detected);
+        } else {
+            // process face detection result
+            ALOGD("[KPI Perf] %s: Number of faces detected %d",__func__,pMetaData->faces_data.num_faces_detected);
+            pme->processFaceDetectionResult(&pMetaData->faces_data);
+        }
+    }
+
+    if (pMetaData->is_stats_valid) {
+        // process histogram statistics info
+        pme->processHistogramStats(pMetaData->stats_data);
+    }
+
+    if (pMetaData->is_focus_valid) {
+        // process focus info
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_FOCUS_UPDATE;
+            payload->focus_data = pMetaData->focus_data;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+
+    if (pMetaData->is_crop_valid) {
+        if (pMetaData->crop_data.num_of_streams > MAX_NUM_STREAMS) {
+            ALOGE("%s: Invalid num_of_streams %d in crop_data", __func__,
+                pMetaData->crop_data.num_of_streams);
+        } else {
+            pme->processZoomEvent(pMetaData->crop_data);
+        }
+    }
+
+    if (pMetaData->is_prep_snapshot_done_valid) {
+        qcamera_sm_internal_evt_payload_t *payload =
+            (qcamera_sm_internal_evt_payload_t *)malloc(sizeof(qcamera_sm_internal_evt_payload_t));
+        if (NULL != payload) {
+            memset(payload, 0, sizeof(qcamera_sm_internal_evt_payload_t));
+            payload->evt_type = QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE;
+            payload->prep_snapshot_state = pMetaData->prep_snapshot_done_state;
+            int32_t rc = pme->processEvt(QCAMERA_SM_EVT_EVT_INTERNAL, payload);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: processEvt failed", __func__);
+                free(payload);
+                payload = NULL;
+
+            }
+        } else {
+            ALOGE("%s: No memory for qcamera_sm_internal_evt_payload_t", __func__);
+        }
+    }
+
+    stream->bufDone(frame->buf_idx);
+    free(super_frame);
+
+    ALOGV("[KPI Perf] %s : END", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : reprocess_stream_cb_routine
+ *
+ * DESCRIPTION: helper function to handle reprocess frame from reprocess stream
+ *              (after reprocess, e.g., ZSL snapshot frame after WNR if
+ *              WNR is enabled)
+ *
+ * PARAMETERS :
+ *   @super_frame : received super buffer
+ *   @stream      : stream object
+ *   @userdata    : user data ptr
+ *
+ * RETURN    : None
+ *
+ * NOTE      : caller passes the ownership of super_frame, it's our
+ *             responsibility to free super_frame once it's done. In this
+ *             case, the reprocessed frame needs to be passed to the
+ *             postprocessor for jpeg encoding.
+ *==========================================================================*/
+void QCamera2HardwareInterface::reprocess_stream_cb_routine(mm_camera_super_buf_t * super_frame,
+                                                            QCameraStream * /*stream*/,
+                                                            void * userdata)
+{
+    ALOGD("[KPI Perf] %s: E", __func__);
+    QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
+    if (pme == NULL ||
+        pme->mCameraHandle == NULL ||
+        pme->mCameraHandle->camera_handle != super_frame->camera_handle){
+        ALOGE("%s: camera obj not valid", __func__);
+        // simply free super frame
+        free(super_frame);
+        return;
+    }
+
+    pme->m_postprocessor.processPPData(super_frame);
+
+    ALOGD("[KPI Perf] %s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpFrameToFile
+ *
+ * DESCRIPTION: helper function to dump frame into file for debug purpose.
+ *
+ * PARAMETERS :
+ *    @data : data ptr
+ *    @size : length of data buffer
+ *    @index : identifier for data
+ *    @dump_type : type of the frame to be dumped. Only when such
+ *                 a dump type is enabled will the frame be
+ *                 dumped into a file.
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::dumpFrameToFile(const void *data,
+                                                uint32_t size,
+                                                int index,
+                                                int dump_type)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.dumpimg", value, "0");
+    int32_t enabled = atoi(value);
+    int frm_num = 0;
+    uint32_t skip_mode = 0;
+
+    char buf[32];
+    cam_dimension_t dim;
+    memset(buf, 0, sizeof(buf));
+    memset(&dim, 0, sizeof(dim));
+
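+    // persist.camera.dumpimg encoding: bits 0-7 select which frame types to
+    // dump, bits 8-15 give the skip interval, and bits 16-31 the frame count.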
+    if(enabled & QCAMERA_DUMP_FRM_MASK_ALL) {
+        if((enabled & dump_type) && data) {
+            frm_num = ((enabled & 0xffff0000) >> 16);
+            if(frm_num == 0) {
+                frm_num = 10; //default 10 frames
+            }
+            if(frm_num > 256) {
+                frm_num = 256; //256 buffers cycle around
+            }
+            skip_mode = ((enabled & 0x0000ff00) >> 8);
+            if(skip_mode == 0) {
+                skip_mode = 1; //no-skip
+            }
+
+            if( mDumpSkipCnt % skip_mode == 0) {
+                if((frm_num == 256) && (mDumpFrmCnt >= frm_num)) {
+                    // reset frame count if cycling
+                    mDumpFrmCnt = 0;
+                }
+                if (mDumpFrmCnt >= 0 && mDumpFrmCnt <= frm_num) {
+                    switch (dump_type) {
+                    case QCAMERA_DUMP_FRM_PREVIEW:
+                        {
+                            mParameters.getStreamDimension(CAM_STREAM_TYPE_PREVIEW, dim);
+                            snprintf(buf, sizeof(buf), "/data/%dp_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, index);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_THUMBNAIL:
+                        {
+                            mParameters.getStreamDimension(CAM_STREAM_TYPE_POSTVIEW, dim);
+                            snprintf(buf, sizeof(buf), "/data/%dt_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, index);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_SNAPSHOT:
+                        {
+                            if (mParameters.isZSLMode())
+                                mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
+                            else
+                                mParameters.getStreamDimension(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT, dim);
+                            snprintf(buf, sizeof(buf), "/data/%ds_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, index);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_VIDEO:
+                        {
+                            mParameters.getStreamDimension(CAM_STREAM_TYPE_VIDEO, dim);
+                            snprintf(buf, sizeof(buf), "/data/%dv_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, index);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_RAW:
+                        {
+                            mParameters.getStreamDimension(CAM_STREAM_TYPE_RAW, dim);
+                            snprintf(buf, sizeof(buf), "/data/%dr_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, index);
+                        }
+                        break;
+                    case QCAMERA_DUMP_FRM_JPEG:
+                        {
+                            if (mParameters.isZSLMode())
+                                mParameters.getStreamDimension(CAM_STREAM_TYPE_SNAPSHOT, dim);
+                            else
+                                mParameters.getStreamDimension(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT, dim);
+                            snprintf(buf, sizeof(buf), "/data/%dj_%dx%d_%d.yuv",
+                                     mDumpFrmCnt, dim.width, dim.height, index);
+                        }
+                        break;
+                    default:
+                        ALOGE("%s: Not supported for dumping stream type %d",
+                              __func__, dump_type);
+                        return;
+                    }
+
+                    ALOGD("dump %s size =%d, data = %p", buf, size, data);
+                    int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+                    if (file_fd > 0) {
+                        int written_len = write(file_fd, data, size);
+                        ALOGD("%s: written number of bytes %d\n", __func__, written_len);
+                        close(file_fd);
+                    } else {
+                        ALOGE("%s: failed to open file for image dumping", __func__);
+                    }
+                    mDumpFrmCnt++;
+                }
+            }
+            mDumpSkipCnt++;
+        }
+    } else {
+        mDumpFrmCnt = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : debugShowVideoFPS
+ *
+ * DESCRIPTION: helper function to log video frame FPS for debug purpose.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowVideoFPS()
+{
+    static int n_vFrameCount = 0;
+    static int n_vLastFrameCount = 0;
+    static nsecs_t n_vLastFpsTime = 0;
+    static float n_vFps = 0;
+    n_vFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - n_vLastFpsTime;
+    if (diff > ms2ns(250)) {
+        n_vFps =  ((n_vFrameCount - n_vLastFrameCount) * float(s2ns(1))) / diff;
+        ALOGE("Video Frames Per Second: %.4f", n_vFps);
+        n_vLastFpsTime = now;
+        n_vLastFrameCount = n_vFrameCount;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : debugShowPreviewFPS
+ *
+ * DESCRIPTION: helper function to log preview frame FPS for debug purpose.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera2HardwareInterface::debugShowPreviewFPS()
+{
+    static int n_pFrameCount = 0;
+    static int n_pLastFrameCount = 0;
+    static nsecs_t n_pLastFpsTime = 0;
+    static float n_pFps = 0;
+    n_pFrameCount++;
+    nsecs_t now = systemTime();
+    nsecs_t diff = now - n_pLastFpsTime;
+    if (diff > ms2ns(250)) {
+        n_pFps =  ((n_pFrameCount - n_pLastFrameCount) * float(s2ns(1))) / diff;
+        ALOGE("Preview Frames Per Second: %.4f", n_pFps);
+        n_pLastFpsTime = now;
+        n_pLastFrameCount = n_pFrameCount;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCbNotifier
+ *
+ * DESCRIPTION: Destructor for exiting the callback context.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCbNotifier::~QCameraCbNotifier()
+{
+    mProcTh.exit();
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseNotifications
+ *
+ * DESCRIPTION: callback for releasing data stored in the callback queue.
+ *
+ * PARAMETERS :
+ *   @data      : data to be released
+ *   @user_data : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::releaseNotifications(void *data, void *user_data)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+
+    if ( ( NULL != arg ) && ( NULL != user_data ) ) {
+        if ( arg->release_cb ) {
+            arg->release_cb(arg->user_data, arg->cookie);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : matchSnapshotNotifications
+ *
+ * DESCRIPTION: matches snapshot data callbacks
+ *
+ * PARAMETERS :
+ *   @data      : data to match
+ *   @user_data : context data
+ *
+ * RETURN     : bool match
+ *              true - match found
+ *              false- match not found
+ *==========================================================================*/
+bool QCameraCbNotifier::matchSnapshotNotifications(void *data,
+                                                   void */*user_data*/)
+{
+    qcamera_callback_argm_t *arg = ( qcamera_callback_argm_t * ) data;
+    if ( NULL != arg ) {
+        if ( QCAMERA_DATA_SNAPSHOT_CALLBACK == arg->cb_type ) {
+            return true;
+        }
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : cbNotifyRoutine
+ *
+ * DESCRIPTION: callback thread that dispatches queued notifications to the
+ *              upper layers based on the received commands.
+ *
+ * PARAMETERS :
+ *   @data    : context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void * QCameraCbNotifier::cbNotifyRoutine(void * data)
+{
+    int running = 1;
+    int ret;
+    QCameraCbNotifier *pme = (QCameraCbNotifier *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+    uint8_t isSnapshotActive = FALSE;
+    uint32_t numOfSnapshotExpected = 0;
+    uint32_t numOfSnapshotRcvd = 0;
+
+    ALOGV("%s: E", __func__);
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGV("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        ALOGV("%s: get cmd %d", __func__, cmd);
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            {
+                isSnapshotActive = TRUE;
+                numOfSnapshotExpected = pme->mParent->numOfSnapshotsExpected();
+                numOfSnapshotRcvd = 0;
+            }
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                pme->mDataQ.flushNodes(matchSnapshotNotifications);
+                isSnapshotActive = FALSE;
+
+                numOfSnapshotExpected = 0;
+                numOfSnapshotRcvd = 0;
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                qcamera_callback_argm_t *cb =
+                    (qcamera_callback_argm_t *)pme->mDataQ.dequeue();
+                if (NULL != cb) {
+                    ALOGV("%s: cb type %d received",
+                          __func__,
+                          cb->cb_type);
+
+                    if (pme->mParent->msgTypeEnabledWithLock(cb->msg_type)) {
+                        switch (cb->cb_type) {
+                        case QCAMERA_NOTIFY_CALLBACK:
+                            {
+                                if (cb->msg_type == CAMERA_MSG_FOCUS) {
+                                    ALOGD("[KPI Perf] %s : sending focus evt to app", __func__);
+                                }
+                                if (pme->mNotifyCb) {
+                                    pme->mNotifyCb(cb->msg_type,
+                                                  cb->ext1,
+                                                  cb->ext2,
+                                                  pme->mCallbackCookie);
+                                } else {
+                                    ALOGE("%s : notify callback not set!",
+                                          __func__);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_CALLBACK:
+                            {
+                                if (pme->mDataCb) {
+                                    pme->mDataCb(cb->msg_type,
+                                                 cb->data,
+                                                 cb->index,
+                                                 cb->metadata,
+                                                 pme->mCallbackCookie);
+                                } else {
+                                    ALOGE("%s : data callback not set!",
+                                          __func__);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_TIMESTAMP_CALLBACK:
+                            {
+                                if(pme->mDataCbTimestamp) {
+                                    pme->mDataCbTimestamp(cb->timestamp,
+                                                          cb->msg_type,
+                                                          cb->data,
+                                                          cb->index,
+                                                          pme->mCallbackCookie);
+                                } else {
+                                    ALOGE("%s:data cb with tmp not set!",
+                                          __func__);
+                                }
+                            }
+                            break;
+                        case QCAMERA_DATA_SNAPSHOT_CALLBACK:
+                            {
+                                if (TRUE == isSnapshotActive && pme->mDataCb ) {
+                                    numOfSnapshotRcvd++;
+                                    if (numOfSnapshotExpected > 0 &&
+                                        numOfSnapshotExpected == numOfSnapshotRcvd) {
+                                        // notify HWI that snapshot is done
+                                        pme->mParent->processSyncEvt(QCAMERA_SM_EVT_SNAPSHOT_DONE,
+                                                                     NULL);
+                                    }
+                                    pme->mDataCb(cb->msg_type,
+                                                 cb->data,
+                                                 cb->index,
+                                                 cb->metadata,
+                                                 pme->mCallbackCookie);
+                                }
+                            }
+                            break;
+                        default:
+                            {
+                                ALOGE("%s : invalid cb type %d",
+                                      __func__,
+                                      cb->cb_type);
+                            }
+                            break;
+                        };
+                    } else {
+                        ALOGE("%s : cb message type %d not enabled!",
+                              __func__,
+                              cb->msg_type);
+                    }
+                    if ( cb->release_cb ) {
+                        cb->release_cb(cb->user_data, cb->cookie);
+                    }
+                    delete cb;
+                } else {
+                    ALOGE("%s: invalid cb type passed", __func__);
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            {
+                pme->mDataQ.flush();
+                running = 0;
+            }
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    ALOGV("%s: X", __func__);
+
+    return NULL;
+}
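+
+/* Lifecycle note: this routine runs on the QCameraCmdThread launched from
+ * setCallbacks(). START/STOP_DATA_PROC toggle snapshot accounting; once the
+ * number of QCAMERA_DATA_SNAPSHOT_CALLBACK entries received reaches
+ * numOfSnapshotsExpected(), QCAMERA_SM_EVT_SNAPSHOT_DONE is raised towards
+ * the HWI state machine. CAMERA_CMD_TYPE_EXIT (presumably issued when the
+ * destructor calls mProcTh.exit()) flushes the queue and ends the loop.
+ */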
+
+/*===========================================================================
+ * FUNCTION   : notifyCallback
+ *
+ * DESCRIPTION: Enqueues pending callback notifications for the upper layers.
+ *
+ * PARAMETERS :
+ *   @cbArgs  : callback arguments
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::notifyCallback(qcamera_callback_argm_t &cbArgs)
+{
+    qcamera_callback_argm_t *cbArg = new qcamera_callback_argm_t();
+    if (NULL == cbArg) {
+        ALOGE("%s: no mem for qcamera_callback_argm_t", __func__);
+        return NO_MEMORY;
+    }
+    memset(cbArg, 0, sizeof(qcamera_callback_argm_t));
+    *cbArg = cbArgs;
+
+    if (mDataQ.enqueue((void *)cbArg)) {
+        mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+        ALOGE("%s: Error adding cb data into queue", __func__);
+        delete cbArg;
+        return UNKNOWN_ERROR;
+    }
+
+    return NO_ERROR;
+}
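+
+/* Usage sketch (illustrative only; the call site and values are hypothetical,
+ * field names are taken from the handling code in cbNotifyRoutine()):
+ *
+ *     qcamera_callback_argm_t cbArg;
+ *     memset(&cbArg, 0, sizeof(cbArg));
+ *     cbArg.cb_type  = QCAMERA_NOTIFY_CALLBACK;
+ *     cbArg.msg_type = CAMERA_MSG_FOCUS;
+ *     cbArg.ext1     = 1;                     // e.g. focus succeeded
+ *     cbArg.ext2     = 0;
+ *     m_cbNotifier.notifyCallback(cbArg);     // copied, queued and drained
+ *                                             // by the notifier thread
+ */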
+
+/*===========================================================================
+ * FUNCTION   : setCallbacks
+ *
+ * DESCRIPTION: Initializes the callback functions used for communication with
+ *              the upper layers and launches the callback context in which
+ *              the callbacks will occur.
+ *
+ * PARAMETERS :
+ *   @notifyCb          : notification callback
+ *   @dataCb            : data callback
+ *   @dataCbTimestamp   : data with timestamp callback
+ *   @callbackCookie    : callback context data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::setCallbacks(camera_notify_callback notifyCb,
+                                     camera_data_callback dataCb,
+                                     camera_data_timestamp_callback dataCbTimestamp,
+                                     void *callbackCookie)
+{
+    if ( ( NULL == mNotifyCb ) &&
+         ( NULL == mDataCb ) &&
+         ( NULL == mDataCbTimestamp ) &&
+         ( NULL == mCallbackCookie ) ) {
+        mNotifyCb = notifyCb;
+        mDataCb = dataCb;
+        mDataCbTimestamp = dataCbTimestamp;
+        mCallbackCookie = callbackCookie;
+        mProcTh.launch(cbNotifyRoutine, this);
+    } else {
+        ALOGE("%s : Camera callback notifier already initialized!",
+              __func__);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : startSnapshots
+ *
+ * DESCRIPTION: Enables snapshot mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCbNotifier::startSnapshots()
+{
+    return mProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, TRUE);
+}
+
+/*===========================================================================
+ * FUNCTION   : stopSnapshots
+ *
+ * DESCRIPTION: Disables snapshot processing mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraCbNotifier::stopSnapshots()
+{
+    mProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, FALSE, TRUE);
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCamera2Hal.cpp b/camera/QCamera2/HAL/QCamera2Hal.cpp
new file mode 100644
index 0000000..cfae8e5
--- /dev/null
+++ b/camera/QCamera2/HAL/QCamera2Hal.cpp
@@ -0,0 +1,52 @@
+/* Copyright (c) 2012, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "QCamera2Factory.h"
+
+static hw_module_t camera_common = {
+    tag: HARDWARE_MODULE_TAG,
+    module_api_version: CAMERA_MODULE_API_VERSION_1_0,
+    hal_api_version: HARDWARE_HAL_API_VERSION,
+    id: CAMERA_HARDWARE_MODULE_ID,
+    name: "QCamera Module",
+    author: "Qualcomm Innovation Center Inc",
+    methods: &qcamera::QCamera2Factory::mModuleMethods,
+    dso: NULL,
+    reserved:  {0},
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+    common: camera_common,
+    get_number_of_cameras: qcamera::QCamera2Factory::get_number_of_cameras,
+    get_camera_info: qcamera::QCamera2Factory::get_camera_info,
+    set_callbacks: NULL,
+    get_vendor_tag_ops: NULL,
+    open_legacy: NULL,
+    reserved: {0}
+};
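+
+/* Usage sketch (illustrative only): the camera service locates this module by
+ * CAMERA_HARDWARE_MODULE_ID and opens devices through the factory's method
+ * table. A minimal client-side lookup, assuming <hardware/hardware.h>:
+ *
+ *     const hw_module_t *mod = NULL;
+ *     if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &mod) == 0) {
+ *         const camera_module_t *cam = (const camera_module_t *)mod;
+ *         int numCams = cam->get_number_of_cameras();
+ *     }
+ */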
diff --git a/camera/QCamera2/HAL/QCameraAllocator.h b/camera/QCamera2/HAL/QCameraAllocator.h
new file mode 100644
index 0000000..ae08c7e
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraAllocator.h
@@ -0,0 +1,52 @@
+/* Copyright (c) 2012, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_ALLOCATOR__
+#define __QCAMERA_ALLOCATOR__
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCameraMemory;
+class QCameraHeapMemory;
+
+class QCameraAllocator {
+public:
+    virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t stream_type,
+                                             int size,
+                                             uint8_t &bufferCnt) = 0;
+    virtual QCameraHeapMemory *allocateStreamInfoBuf(cam_stream_type_t stream_type) = 0;
+    virtual ~QCameraAllocator() {}
+};
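+
+/* Implementation note (illustrative only): allocateStreamBuf() presumably
+ * reports, via the bufferCnt reference, how many buffers were actually
+ * allocated. A hypothetical minimal implementor would look like:
+ *
+ *     class SampleAllocator : public QCameraAllocator {
+ *     public:
+ *         virtual QCameraMemory *allocateStreamBuf(cam_stream_type_t type,
+ *                                                  int size,
+ *                                                  uint8_t &bufferCnt);
+ *         virtual QCameraHeapMemory *allocateStreamInfoBuf(
+ *                                                  cam_stream_type_t type);
+ *     };
+ */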
+
+}; /* namespace qcamera */
+#endif /* __QCAMERA_ALLOCATOR__ */
diff --git a/camera/QCamera2/HAL/QCameraChannel.cpp b/camera/QCamera2/HAL/QCameraChannel.cpp
new file mode 100644
index 0000000..944f381
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraChannel.cpp
@@ -0,0 +1,834 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraChannel"
+
+#include <utils/Errors.h>
+#include "QCameraParameters.h"
+#include "QCamera2HWI.h"
+#include "QCameraChannel.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraChannel
+ *
+ * DESCRIPTION: constructor of QCameraChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel(uint32_t cam_handle,
+                               mm_camera_ops_t *cam_ops)
+{
+    m_camHandle = cam_handle;
+    m_camOps = cam_ops;
+    m_bIsActive = false;
+
+    m_handle = 0;
+    m_numStreams = 0;
+    memset(mStreams, 0, sizeof(mStreams));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraChannel
+ *
+ * DESCRIPTION: default constructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::QCameraChannel()
+{
+    m_camHandle = 0;
+    m_camOps = NULL;
+    m_bIsActive = false;
+
+    m_handle = 0;
+    m_numStreams = 0;
+    memset(mStreams, 0, sizeof(mStreams));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraChannel
+ *
+ * DESCRIPTION: destructor of QCameraChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraChannel::~QCameraChannel()
+{
+    if (m_bIsActive) {
+        stop();
+    }
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            delete mStreams[i];
+            mStreams[i] = 0;
+        }
+    }
+    m_numStreams = 0;
+    m_camOps->delete_channel(m_camHandle, m_handle);
+    m_handle = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of channel
+ *
+ * PARAMETERS :
+ *   @attr    : channel bundle attribute setting
+ *   @dataCB  : data notify callback
+ *   @userData: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::init(mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t dataCB,
+                             void *userData)
+{
+    m_handle = m_camOps->add_channel(m_camHandle,
+                                      attr,
+                                      dataCB,
+                                      userData);
+    if (m_handle == 0) {
+        ALOGE("%s: Add channel failed", __func__);
+        return UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : addStream
+ *
+ * DESCRIPTION: add a stream into channel
+ *
+ * PARAMETERS :
+ *   @allocator      : stream related buffer allocator
+ *   @streamInfoBuf  : ptr to buf that contains stream info
+ *   @minStreamBufNum: number of stream buffers needed
+ *   @paddingInfo    : padding information
+ *   @stream_cb      : stream data notify callback
+ *   @userdata       : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::addStream(QCameraAllocator &allocator,
+                                  QCameraHeapMemory *streamInfoBuf,
+                                  uint8_t minStreamBufNum,
+                                  cam_padding_info_t *paddingInfo,
+                                  stream_cb_routine stream_cb,
+                                  void *userdata)
+{
+    int32_t rc = NO_ERROR;
+    if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
+        ALOGE("%s: stream number (%d) exceeds max limit (%d)",
+              __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
+        return BAD_VALUE;
+    }
+    QCameraStream *pStream = new QCameraStream(allocator,
+                                               m_camHandle,
+                                               m_handle,
+                                               m_camOps,
+                                               paddingInfo);
+    if (pStream == NULL) {
+        ALOGE("%s: No mem for Stream", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pStream->init(streamInfoBuf, minStreamBufNum, stream_cb, userdata);
+    if (rc == 0) {
+        mStreams[m_numStreams] = pStream;
+        m_numStreams++;
+    } else {
+        delete pStream;
+    }
+    return rc;
+}
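+
+/* Usage sketch (illustrative only; variable names and callbacks are
+ * hypothetical) -- the channel lifecycle as used in this HAL: construct,
+ * init(), addStream() once per stream, start(), then stop()/delete:
+ *
+ *     QCameraChannel *ch = new QCameraChannel(camHandle, camOps);
+ *     ch->init(NULL, channelDataCb, userData);   // NULL: no bundle attributes
+ *     ch->addStream(allocator, streamInfoBuf, bufNum,
+ *                   &paddingInfo, streamCb, userData);
+ *     ch->start();
+ *     ...
+ *     ch->stop();
+ *     delete ch;   // destructor stops an active channel and deletes streams
+ */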
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start channel, which will start all streams belonging to this channel
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::start()
+{
+    int32_t rc = NO_ERROR;
+
+    if (m_numStreams > 1) {
+        // there is more than one stream in the channel
+        // we need to notify mctl that all streams in this channel need to be bundled
+        cam_bundle_config_t bundleInfo;
+        memset(&bundleInfo, 0, sizeof(bundleInfo));
+        rc = m_camOps->get_bundle_info(m_camHandle, m_handle, &bundleInfo);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: get_bundle_info failed", __func__);
+            return rc;
+        }
+        if (bundleInfo.num_of_streams > 1) {
+            for (int i = 0; i < bundleInfo.num_of_streams; i++) {
+                QCameraStream *pStream = getStreamByServerID(bundleInfo.stream_ids[i]);
+                if (pStream != NULL) {
+                    if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                        // Skip metadata for reprocess now because PP module cannot handle meta data
+                        // May need further discussion if Imaginglib needs metadata
+                        continue;
+                    }
+
+                    cam_stream_parm_buffer_t param;
+                    memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+                    param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO;
+                    param.bundleInfo = bundleInfo;
+                    rc = pStream->setParameter(param);
+                    if (rc != NO_ERROR) {
+                        ALOGE("%s: stream setParameter for set bundle failed", __func__);
+                        return rc;
+                    }
+                }
+            }
+        }
+    }
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            mStreams[i]->start();
+        }
+    }
+    rc = m_camOps->start_channel(m_camHandle, m_handle);
+
+    if (rc != NO_ERROR) {
+        for (int i = 0; i < m_numStreams; i++) {
+            if (mStreams[i] != NULL) {
+                mStreams[i]->stop();
+            }
+        }
+    } else {
+        m_bIsActive = true;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::stop()
+{
+    int32_t rc = NO_ERROR;
+    rc = m_camOps->stop_channel(m_camHandle, m_handle);
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            mStreams[i]->stop();
+        }
+    }
+
+    m_bIsActive = false;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return a stream buf back to kernel
+ *
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+    for (int i = 0; i < recvd_frame->num_bufs; i++) {
+         if (recvd_frame->bufs[i] != NULL) {
+             for (int j = 0; j < m_numStreams; j++) {
+                 if (mStreams[j] != NULL &&
+                     mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
+                     rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
+                     break; // break loop j
+                 }
+             }
+         }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : ptr to preview window ops table, needed to set preview
+ *                    crop information
+ *   @crop_info     : crop info as a result of zoom operation
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraChannel::processZoomDone(preview_stream_ops_t *previewWindow,
+                                        cam_crop_data_t &crop_info)
+{
+    int32_t rc = NO_ERROR;
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            rc = mStreams[i]->processZoomDone(previewWindow, crop_info);
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByHandle
+ *
+ * DESCRIPTION: return stream object by stream handle
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByHandle(uint32_t streamHandle)
+{
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
+            return mStreams[i];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByServerID
+ *
+ * DESCRIPTION: return stream object by stream server ID from daemon
+ *
+ * PARAMETERS :
+ *   @serverID : stream server ID
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByServerID(uint32_t serverID)
+{
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL && mStreams[i]->getMyServerID() == serverID) {
+            return mStreams[i];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByIndex
+ *
+ * DESCRIPTION: return stream object by index of streams in the channel
+ *
+ * PARAMETERS :
+ *   @index : index of stream in the channel
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCameraStream *QCameraChannel::getStreamByIndex(uint8_t index)
+{
+    if (index < m_numStreams) {
+        return mStreams[index];
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPicChannel
+ *
+ * DESCRIPTION: constructor of QCameraPicChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel(uint32_t cam_handle,
+                                     mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraPicChannel
+ *
+ * DESCRIPTION: default constructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::QCameraPicChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraPicChannel
+ *
+ * DESCRIPTION: destructor of QCameraPicChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraPicChannel::~QCameraPicChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: send request for queued snapshot frames
+ *
+ * PARAMETERS :
+ *   @num_of_snapshot : number of snapshot frames requested
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::takePicture(uint8_t num_of_snapshot)
+{
+    int32_t rc = m_camOps->request_super_buf(m_camHandle,
+                                             m_handle,
+                                             num_of_snapshot);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cancelPicture
+ *
+ * DESCRIPTION: cancel request for queued snapshot frames
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPicChannel::cancelPicture()
+{
+    int32_t rc = m_camOps->cancel_super_buf_request(m_camHandle, m_handle);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoChannel
+ *
+ * DESCRIPTION: constructor of QCameraVideoChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel(uint32_t cam_handle,
+                                         mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoChannel
+ *
+ * DESCRIPTION: default constructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::QCameraVideoChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraVideoChannel
+ *
+ * DESCRIPTION: destructor of QCameraVideoChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoChannel::~QCameraVideoChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrame
+ *
+ * DESCRIPTION: return video frame from app
+ *
+ * PARAMETERS :
+ *   @opaque     : ptr to video frame to be returned
+ *   @isMetaData : if frame is a metadata or real frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraVideoChannel::releaseFrame(const void * opaque, bool isMetaData)
+{
+    QCameraStream *pVideoStream = NULL;
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL && mStreams[i]->isTypeOf(CAM_STREAM_TYPE_VIDEO)) {
+            pVideoStream = mStreams[i];
+            break;
+        }
+    }
+
+    if (NULL == pVideoStream) {
+        ALOGE("%s: No video stream in the channel", __func__);
+        return BAD_VALUE;
+    }
+
+    int32_t rc = pVideoStream->bufDone(opaque, isMetaData);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocessChannel
+ *
+ * DESCRIPTION: constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel(uint32_t cam_handle,
+                                                 mm_camera_ops_t *cam_ops) :
+    QCameraChannel(cam_handle, cam_ops),
+    m_pSrcChannel(NULL)
+{
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraReprocessChannel
+ *
+ * DESCRIPTION: default constructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::QCameraReprocessChannel() :
+    m_pSrcChannel(NULL)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraReprocessChannel
+ *
+ * DESCRIPTION: destructor of QCameraReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraReprocessChannel::~QCameraReprocessChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocStreamsFromSource
+ *
+ * DESCRIPTION: add reprocess streams from input source channel
+ *
+ * PARAMETERS :
+ *   @allocator      : stream related buffer allocator
+ *   @config         : pp feature configuration
+ *   @pSrcChannel    : ptr to input source channel that needs reprocess
+ *   @minStreamBufNum: number of stream buffers needed
+ *   @paddingInfo    : padding information
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::addReprocStreamsFromSource(QCameraAllocator& allocator,
+                                                            cam_pp_feature_config_t &config,
+                                                            QCameraChannel *pSrcChannel,
+                                                            uint8_t minStreamBufNum,
+                                                            cam_padding_info_t *paddingInfo)
+{
+    int32_t rc = 0;
+    QCameraStream *pStream = NULL;
+    QCameraHeapMemory *pStreamInfoBuf = NULL;
+    cam_stream_info_t *streamInfo = NULL;
+
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+
+    for (int i = 0; i < pSrcChannel->getNumOfStreams(); i++) {
+        pStream = pSrcChannel->getStreamByIndex(i);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                // Skip metadata for reprocess now because PP module cannot handle meta data
+                // May need further discussion if Imaginglib needs metadata
+                continue;
+            }
+
+            pStreamInfoBuf = allocator.allocateStreamInfoBuf(CAM_STREAM_TYPE_OFFLINE_PROC);
+            if (pStreamInfoBuf == NULL) {
+                ALOGE("%s: no mem for stream info buf", __func__);
+                rc = NO_MEMORY;
+                break;
+            }
+
+            streamInfo = (cam_stream_info_t *)pStreamInfoBuf->getPtr(0);
+            memset(streamInfo, 0, sizeof(cam_stream_info_t));
+            streamInfo->stream_type = CAM_STREAM_TYPE_OFFLINE_PROC;
+            rc = pStream->getFormat(streamInfo->fmt);
+            rc = pStream->getFrameDimension(streamInfo->dim);
+            streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+            streamInfo->num_of_burst = minStreamBufNum;
+
+            streamInfo->reprocess_config.pp_type = CAM_ONLINE_REPROCESS_TYPE;
+            streamInfo->reprocess_config.online.input_stream_id = pStream->getMyServerID();
+            streamInfo->reprocess_config.online.input_stream_type = pStream->getMyType();
+            streamInfo->reprocess_config.pp_feature_config = config;
+
+            if (!(pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isTypeOf(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT))) {
+                streamInfo->reprocess_config.pp_feature_config.feature_mask &= ~CAM_QCOM_FEATURE_CAC;
+            }
+            if (streamInfo->reprocess_config.pp_feature_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
+                if (streamInfo->reprocess_config.pp_feature_config.rotation == ROTATE_90 ||
+                    streamInfo->reprocess_config.pp_feature_config.rotation == ROTATE_270) {
+                    // rotated by 90 or 270, need to switch width and height
+                    int32_t temp = streamInfo->dim.height;
+                    streamInfo->dim.height = streamInfo->dim.width;
+                    streamInfo->dim.width = temp;
+                }
+            }
+
+            // save source stream handler
+            mSrcStreamHandles[m_numStreams] = pStream->getMyHandle();
+
+            // add reprocess stream
+            rc = addStream(allocator,
+                           pStreamInfoBuf, minStreamBufNum,
+                           paddingInfo,
+                           NULL, NULL);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: add reprocess stream failed, ret = %d", __func__, rc);
+                break;
+            }
+        }
+    }
+
+    if (rc == NO_ERROR) {
+        m_pSrcChannel = pSrcChannel;
+    }
+    return rc;
+}
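+
+/* Note: two adjustments above are easy to miss. CAC is stripped from the
+ * feature mask for any stream that is not a (ZSL or non-ZSL) snapshot type,
+ * and for 90/270 degree rotation the reprocess output dimensions are
+ * transposed, e.g. a 4208x3120 input becomes 3120x4208; the swap above is
+ * equivalent to std::swap(streamInfo->dim.width, streamInfo->dim.height).
+ */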
+
+/*===========================================================================
+ * FUNCTION   : getStreamBySrouceHandle
+ *
+ * DESCRIPTION: find reprocess stream by its source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to reprocess stream if found. NULL if not found
+ *==========================================================================*/
+QCameraStream * QCameraReprocessChannel::getStreamBySrouceHandle(uint32_t srcHandle)
+{
+    QCameraStream *pStream = NULL;
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mSrcStreamHandles[i] == srcHandle) {
+            pStream = mStreams[i];
+            break;
+        }
+    }
+
+    return pStream;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @frame   : frame on which reprocessing is to be performed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(mm_camera_super_buf_t *frame)
+{
+    int32_t rc = 0;
+    if (m_numStreams < 1) {
+        ALOGE("%s: No reprocess stream is created", __func__);
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        ALOGE("%s: No source channel for reprocess", __func__);
+        return -1;
+    }
+
+    // find meta data stream and index of meta data frame in the superbuf
+    QCameraStream *pMetaStream = NULL;
+    uint8_t meta_buf_index = 0;
+    for (int i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream = m_pSrcChannel->getStreamByHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                meta_buf_index = frame->bufs[i]->buf_idx;
+                pMetaStream = pStream;
+                break;
+            }
+        }
+    }
+
+    for (int i = 0; i < frame->num_bufs; i++) {
+        QCameraStream *pStream = getStreamBySrouceHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_METADATA)) {
+                // Skip metadata for reprocess now because PP module cannot handle meta data
+                // May need further discussion if Imaginglib needs metadata
+                continue;
+            }
+
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = frame->bufs[i]->buf_idx;
+            param.reprocess.frame_idx = frame->bufs[i]->frame_idx;
+            if (pMetaStream != NULL) {
+                // we have meta data frame bundled, sent together with reprocess frame
+                param.reprocess.meta_present = 1;
+                param.reprocess.meta_stream_handle = pMetaStream->getMyServerID();
+                param.reprocess.meta_buf_index = meta_buf_index;
+            }
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: stream setParameter for reprocess failed", __func__);
+                break;
+            }
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @buf_fd     : fd to the input buffer that needs reprocess
+ *   @buf_length : length of the input buffer
+ *   @ret_val    : result of reprocess.
+ *                 Example: could be a faceID in the case of registering a face image.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraReprocessChannel::doReprocess(int buf_fd,
+                                             uint32_t buf_length,
+                                             int32_t &ret_val)
+{
+    int32_t rc = 0;
+    if (m_numStreams < 1) {
+        ALOGE("%s: No reprocess stream is created", __func__);
+        return -1;
+    }
+
+    uint32_t buf_idx = 0;
+    for (int i = 0; i < m_numStreams; i++) {
+        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                 buf_idx, -1,
+                                 buf_fd, buf_length);
+
+        if (rc == NO_ERROR) {
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = buf_idx;
+            rc = mStreams[i]->setParameter(param);
+            if (rc == NO_ERROR) {
+                ret_val = param.reprocess.ret_val;
+            }
+            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                  buf_idx, -1);
+        }
+    }
+    return rc;
+}
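+
+/* Usage sketch (illustrative only; the call site is hypothetical): offline
+ * reprocess of a caller-owned buffer maps it as an OFFLINE_INPUT_BUF, issues
+ * a DO_REPROCESS stream parameter, returns the result (e.g. a faceID when
+ * registering a face image) in ret_val, and unmaps the buffer again:
+ *
+ *     int32_t faceId = -1;
+ *     if (pReprocChannel->doReprocess(imgFd, imgLen, faceId) == NO_ERROR) {
+ *         ALOGD("registered face image, id %d", faceId);
+ *     }
+ */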
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraChannel.h b/camera/QCamera2/HAL/QCameraChannel.h
new file mode 100644
index 0000000..465a5f6
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraChannel.h
@@ -0,0 +1,132 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CHANNEL_H__
+#define __QCAMERA_CHANNEL_H__
+
+#include <hardware/camera.h>
+#include "QCameraStream.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCameraChannel
+{
+public:
+    QCameraChannel(uint32_t cam_handle,
+                   mm_camera_ops_t *cam_ops);
+    QCameraChannel();
+    virtual ~QCameraChannel();
+    virtual int32_t init(mm_camera_channel_attr_t *attr,
+                         mm_camera_buf_notify_t dataCB, // data CB for channel data
+                         void *userData);
+    // Ownership of memory is transferred from the caller to the callee with this call.
+    virtual int32_t addStream(QCameraAllocator& allocator,
+                              QCameraHeapMemory *streamInfoBuf,
+                              uint8_t minStreamBufnum,
+                              cam_padding_info_t *paddingInfo,
+                              stream_cb_routine stream_cb,
+                              void *userdata);
+    virtual int32_t start();
+    virtual int32_t stop();
+    virtual int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+    virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+                                    cam_crop_data_t &crop_info);
+    QCameraStream *getStreamByHandle(uint32_t streamHandle);
+    uint32_t getMyHandle() const {return m_handle;};
+    uint8_t getNumOfStreams() const {return m_numStreams;};
+    QCameraStream *getStreamByIndex(uint8_t index);
+    QCameraStream *getStreamByServerID(uint32_t serverID);
+
+protected:
+    uint32_t m_camHandle;
+    mm_camera_ops_t *m_camOps;
+    bool m_bIsActive;
+
+    uint32_t m_handle;
+    uint8_t m_numStreams;
+    QCameraStream *mStreams[MAX_STREAM_NUM_IN_BUNDLE];
+    mm_camera_buf_notify_t mDataCB;
+    void *mUserData;
+};
+
+// burst pic channel: i.e. zsl burst mode
+class QCameraPicChannel : public QCameraChannel
+{
+public:
+    QCameraPicChannel(uint32_t cam_handle,
+                      mm_camera_ops_t *cam_ops);
+    QCameraPicChannel();
+    virtual ~QCameraPicChannel();
+    int32_t takePicture(uint8_t num_of_snapshot);
+    int32_t cancelPicture();
+};
+
+// video channel class
+class QCameraVideoChannel : public QCameraChannel
+{
+public:
+    QCameraVideoChannel(uint32_t cam_handle,
+                        mm_camera_ops_t *cam_ops);
+    QCameraVideoChannel();
+    virtual ~QCameraVideoChannel();
+    int32_t releaseFrame(const void *opaque, bool isMetaData);
+};
+
+// reprocess channel class
+class QCameraReprocessChannel : public QCameraChannel
+{
+public:
+    QCameraReprocessChannel(uint32_t cam_handle,
+                            mm_camera_ops_t *cam_ops);
+    QCameraReprocessChannel();
+    virtual ~QCameraReprocessChannel();
+    int32_t addReprocStreamsFromSource(QCameraAllocator& allocator,
+                                       cam_pp_feature_config_t &config,
+                                       QCameraChannel *pSrcChannel,
+                                       uint8_t minStreamBufNum,
+                                       cam_padding_info_t *paddingInfo);
+    // online reprocess
+    int32_t doReprocess(mm_camera_super_buf_t *frame);
+    // offline reprocess
+    int32_t doReprocess(int buf_fd, uint32_t buf_length, int32_t &ret_val);
+
+private:
+    QCameraStream *getStreamBySrouceHandle(uint32_t srcHandle);
+
+    uint32_t mSrcStreamHandles[MAX_STREAM_NUM_IN_BUNDLE];
+    QCameraChannel *m_pSrcChannel; // ptr to source channel for reprocess
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CHANNEL_H__ */
diff --git a/camera/QCamera2/HAL/QCameraMem.cpp b/camera/QCamera2/HAL/QCameraMem.cpp
new file mode 100644
index 0000000..f7238f4
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraMem.cpp
@@ -0,0 +1,1331 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraHWI_Mem"
+
+#include <string.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <utils/Errors.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+#include "QCamera2HWI.h"
+#include "QCameraMem.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// QCameraMemory base class
+
+/*===========================================================================
+ * FUNCTION   : QCameraMemory
+ *
+ * DESCRIPTION: default constructor of QCameraMemory
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemory::QCameraMemory(bool cached)
+    :m_bCached(cached)
+{
+    mBufferCount = 0;
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i++) {
+        mMemInfo[i].fd = 0;
+        mMemInfo[i].main_ion_fd = 0;
+        mMemInfo[i].handle = NULL;
+        mMemInfo[i].size = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraMemory
+ *
+ * DESCRIPTION: destructor of QCameraMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraMemory::~QCameraMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOpsInternal
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *   @vaddr   : ptr to the virtual address
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::cacheOpsInternal(int index, unsigned int cmd, void *vaddr)
+{
+    if (!m_bCached) {
+        // Memory is not cached, no need for cache ops
+        ALOGV("%s: No cache ops here for uncached memory", __func__);
+        return OK;
+    }
+
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = OK;
+
+    if (index >= mBufferCount) {
+        ALOGE("%s: index %d out of bound [0, %d)", __func__, index, mBufferCount);
+        return BAD_INDEX;
+    }
+
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = vaddr;
+    cache_inv_data.fd = mMemInfo[index].fd;
+    cache_inv_data.handle = mMemInfo[index].handle;
+    cache_inv_data.length = mMemInfo[index].size;
+    custom_data.cmd = cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    ALOGD("%s: addr = %p, fd = %d, handle = %p length = %d, ION Fd = %d",
+         __func__, cache_inv_data.vaddr, cache_inv_data.fd,
+         cache_inv_data.handle, cache_inv_data.length,
+         mMemInfo[index].main_ion_fd);
+    ret = ioctl(mMemInfo[index].main_ion_fd, ION_IOC_CUSTOM, &custom_data);
+    if (ret < 0)
+        ALOGE("%s: Cache Invalidate failed: %s\n", __func__, strerror(errno));
+
+    return ret;
+}
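+
+/* Note: the cmd argument is forwarded unmodified to the msm ION driver via
+ * ION_IOC_CUSTOM. Callers presumably pass one of the ION cache commands
+ * (for example ION_IOC_CLEAN_CACHES, ION_IOC_INV_CACHES or
+ * ION_IOC_CLEAN_INV_CACHES; the exact names depend on the msm_ion.h kernel
+ * headers in use). For uncached allocations the call is a no-op.
+ */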
+
+/*===========================================================================
+ * FUNCTION   : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : file descriptor
+ *==========================================================================*/
+int QCameraMemory::getFd(int index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return mMemInfo[index].fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer size
+ *==========================================================================*/
+int QCameraMemory::getSize(int index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return (int)mMemInfo[index].size;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers allocated
+ *==========================================================================*/
+int QCameraMemory::getCnt() const
+{
+    return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information
+ *
+ * PARAMETERS :
+ *   @offset  : [input] frame buffer offset
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::getBufDef(const cam_frame_len_offset_t &offset,
+        mm_camera_buf_def_t &bufDef, int index) const
+{
+    if (!mBufferCount) {
+        ALOGE("Memory not allocated");
+        return;
+    }
+    bufDef.fd = mMemInfo[index].fd;
+    bufDef.frame_len = offset.frame_len;
+    bufDef.mem_info = (void *)this;
+    bufDef.num_planes = offset.num_planes;
+    bufDef.buffer = getPtr(index);
+    bufDef.buf_idx = index;
+
+    /* Plane 0 needs to be set separately. Set other planes in a loop */
+    bufDef.planes[0].length = offset.mp[0].len;
+    bufDef.planes[0].m.userptr = mMemInfo[index].fd;
+    bufDef.planes[0].data_offset = offset.mp[0].offset;
+    bufDef.planes[0].reserved[0] = 0;
+    for (int i = 1; i < bufDef.num_planes; i++) {
+         bufDef.planes[i].length = offset.mp[i].len;
+         bufDef.planes[i].m.userptr = mMemInfo[index].fd;
+         bufDef.planes[i].data_offset = offset.mp[i].offset;
+         bufDef.planes[i].reserved[0] =
+                 bufDef.planes[i-1].reserved[0] +
+                 bufDef.planes[i-1].length;
+    }
+}
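+
+// Worked example (illustrative only): for a hypothetical two-plane YUV420SP
+// frame with offset.mp[0].len = w*h and offset.mp[1].len = w*h/2, the loop
+// above produces
+//   planes[0].reserved[0] = 0
+//   planes[1].reserved[0] = planes[0].reserved[0] + planes[0].length = w*h
+// i.e. reserved[0] carries the running byte offset of each plane inside the
+// shared buffer fd, while data_offset is taken straight from the offset table.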
+
+/*===========================================================================
+ * FUNCTION   : alloc
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @heap_id : heap id to indicate where the buffers will be allocated from
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::alloc(int count, int size, int heap_id)
+{
+    int rc = OK;
+    if (count > MM_CAMERA_MAX_NUM_FRAMES) {
+        ALOGE("Buffer count %d out of bound. Max is %d", count, MM_CAMERA_MAX_NUM_FRAMES);
+        return BAD_INDEX;
+    }
+    if (mBufferCount) {
+        ALOGE("Allocating a already allocated heap memory");
+        return INVALID_OPERATION;
+    }
+
+    for (int i = 0; i < count; i ++) {
+        rc = allocOneBuffer(mMemInfo[i], heap_id, size);
+        if (rc < 0) {
+            ALOGE("AllocateIonMemory failed");
+            for (int j = i-1; j >= 0; j--)
+                deallocOneBuffer(mMemInfo[j]);
+            break;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dealloc
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::dealloc()
+{
+    for (int i = 0; i < mBufferCount; i++)
+        deallocOneBuffer(mMemInfo[i]);
+}
+
+/*===========================================================================
+ * FUNCTION   : allocOneBuffer
+ *
+ * DESCRIPTION: impl of allocating one buffer of certain size
+ *
+ * PARAMETERS :
+ *   @memInfo : [output] reference to struct to store additional memory allocation info
+ *   @heap_id : [input] heap id to indicate where the buffers will be allocated from
+ *   @size    : [input] length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraMemory::allocOneBuffer(QCameraMemInfo &memInfo, int heap_id, int size)
+{
+    int rc = OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data alloc;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = 0;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd < 0) {
+        ALOGE("Ion dev open failed: %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095) & (~4095);
+    alloc.align = 4096;
+    if (m_bCached) {
+        alloc.flags = ION_FLAG_CACHED;
+    }
+    alloc.heap_id_mask = heap_id;
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        ALOGE("ION allocation failed: %s\n", strerror(errno));
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        ALOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    memInfo.main_ion_fd = main_ion_fd;
+    memInfo.fd = ion_info_fd.fd;
+    memInfo.handle = ion_info_fd.handle;
+    memInfo.size = alloc.len;
+    return OK;
+
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return NO_MEMORY;
+}
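+
+// Note on the legacy ION sequence above (a sketch of the pre-libion ioctl
+// interface, stated here as an assumption about the kernel ABI in use):
+// ION_IOC_ALLOC returns an ion_handle, ION_IOC_SHARE exports it as an fd the
+// caller can mmap or pass across processes, and ION_IOC_FREE drops the handle.
+// The page-size rounding is plain bit arithmetic, e.g. for a VGA NV21 frame:
+//   size = 640 * 480 * 3 / 2 = 460800
+//   (460800 + 4095) & ~4095  = 462848  (= 113 pages of 4096 bytes)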
+
+/*===========================================================================
+ * FUNCTION   : deallocOneBuffer
+ *
+ * DESCRIPTION: impl of deallocating one buffer
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraMemory::deallocOneBuffer(QCameraMemInfo &memInfo)
+{
+    struct ion_handle_data handle_data;
+
+    if (memInfo.fd > 0) {
+        close(memInfo.fd);
+        memInfo.fd = 0;
+    }
+
+    if (memInfo.main_ion_fd > 0) {
+        memset(&handle_data, 0, sizeof(handle_data));
+        handle_data.handle = memInfo.handle;
+        ioctl(memInfo.main_ion_fd, ION_IOC_FREE, &handle_data);
+        close(memInfo.main_ion_fd);
+        memInfo.main_ion_fd = 0;
+    }
+    memInfo.handle = NULL;
+    memInfo.size = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraHeapMemory
+ *
+ * DESCRIPTION: constructor of QCameraHeapMemory for ion memory used internally in HAL
+ *
+ * PARAMETERS :
+ *   @cached  : flag indicates if using cached memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraHeapMemory::QCameraHeapMemory(bool cached)
+    : QCameraMemory(cached)
+{
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+        mPtr[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraHeapMemory
+ *
+ * DESCRIPTION: destructor of QCameraHeapMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraHeapMemory::~QCameraHeapMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraHeapMemory::getPtr(int index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::allocate(int count, int size)
+{
+    int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        void *vaddr = mmap(NULL,
+                    mMemInfo[i].size,
+                    PROT_READ | PROT_WRITE,
+                    MAP_SHARED,
+                    mMemInfo[i].fd, 0);
+        if (vaddr == MAP_FAILED) {
+            rc = NO_MEMORY;
+            /* unmap the buffers mapped so far and bail out */
+            for (int j = i-1; j >= 0; j --) {
+                munmap(mPtr[j], mMemInfo[j].size);
+                mPtr[j] = NULL;
+            }
+            break;
+        } else
+            mPtr[i] = vaddr;
+    }
+    if (rc == 0)
+        mBufferCount = count;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraHeapMemory::deallocate()
+{
+    for (int i = 0; i < mBufferCount; i++) {
+        munmap(mPtr[i], mMemInfo[i].size);
+        mPtr[i] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::cacheOps(int index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mPtr[index]);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraHeapMemory::getRegFlags(uint8_t * /*regFlags*/) const
+{
+    return INVALID_OPERATION;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraHeapMemory::getMemory(
+                int /*index*/, bool /*metadata*/) const
+{
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraHeapMemory::getMatchBufIndex(const void *opaque,
+                                        bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mPtr[i] == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStreamMemory
+ *
+ * DESCRIPTION: constructor of QCameraStreamMemory
+ *              ION memory allocated directly from /dev/ion and shared with framework
+ *
+ * PARAMETERS :
+ *   @getMemory : camera memory request ops table
+ *   @cached    : flag indicates if using cached memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStreamMemory::QCameraStreamMemory(camera_request_memory getMemory,
+                                         bool cached)
+    :QCameraMemory(cached),
+     mGetMemory(getMemory)
+{
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+        mCameraMemory[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStreamMemory
+ *
+ * DESCRIPTION: destructor of QCameraStreamMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStreamMemory::~QCameraStreamMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::allocate(int count, int size)
+{
+    int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        mCameraMemory[i] = mGetMemory(mMemInfo[i].fd, mMemInfo[i].size, 1, this);
+    }
+    mBufferCount = count;
+    return NO_ERROR;
+}
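+
+// Sketch of the framework side of this call (assumption: mGetMemory is the
+// camera_request_memory callback from hardware/camera.h). A minimal test stub
+// that would satisfy allocate() could look like the following; it is not part
+// of the HAL and omits error handling:
+//
+//   static void stub_release(struct camera_memory *mem) { /* unmap + delete */ }
+//   static camera_memory_t *stub_get_memory(int fd, size_t buf_size,
+//                                           unsigned int num_bufs, void *user) {
+//       camera_memory_t *mem = new camera_memory_t;
+//       mem->size = buf_size * num_bufs;
+//       mem->data = (fd >= 0) ?
+//           mmap(NULL, mem->size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0) :
+//           malloc(mem->size);
+//       mem->handle = NULL;
+//       mem->release = stub_release;
+//       return mem;
+//   }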
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStreamMemory::deallocate()
+{
+    for (int i = 0; i < mBufferCount; i ++) {
+        mCameraMemory[i]->release(mCameraMemory[i]);
+        mCameraMemory[i] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::cacheOps(int index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraStreamMemory::getRegFlags(uint8_t *regFlags) const
+{
+    for (int i = 0; i < mBufferCount; i ++)
+        regFlags[i] = 1;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraStreamMemory::getMemory(int index, bool metadata) const
+{
+    if (index >= mBufferCount || metadata)
+        return NULL;
+    return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraStreamMemory::getMatchBufIndex(const void *opaque,
+                                          bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mCameraMemory[i]->data == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraStreamMemory::getPtr(int index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mCameraMemory[index]->data;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraVideoMemory
+ *
+ * DESCRIPTION: constructor of QCameraVideoMemory
+ *              VideoStream buffers also include metadata buffers
+ *
+ * PARAMETERS :
+ *   @getMemory : camera memory request ops table
+ *   @cached    : flag indicates if using cached ION memory
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoMemory::QCameraVideoMemory(camera_request_memory getMemory,
+                                       bool cached)
+    : QCameraStreamMemory(getMemory, cached)
+{
+    memset(mMetadata, 0, sizeof(mMetadata));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraVideoMemory
+ *
+ * DESCRIPTION: destructor of QCameraVideoMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraVideoMemory::~QCameraVideoMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraVideoMemory::allocate(int count, int size)
+{
+    int rc = QCameraStreamMemory::allocate(count, size);
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        mMetadata[i] = mGetMemory(-1,
+                sizeof(struct encoder_media_buffer_type), 1, this);
+        if (!mMetadata[i]) {
+            ALOGE("allocation of video metadata failed.");
+            for (int j = 0; j < i; j ++)
+                mMetadata[j]->release(mMetadata[j]);
+            QCameraStreamMemory::deallocate();
+            return NO_MEMORY;
+        }
+        struct encoder_media_buffer_type * packet =
+            (struct encoder_media_buffer_type *)mMetadata[i]->data;
+        packet->meta_handle = native_handle_create(1, 2); //1 fd, 1 offset and 1 size
+        packet->buffer_type = kMetadataBufferTypeCameraSource;
+        native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+        nh->data[0] = mMemInfo[i].fd;
+        nh->data[1] = 0;
+        nh->data[2] = mMemInfo[i].size;
+    }
+    mBufferCount = count;
+    return NO_ERROR;
+}
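+
+// Consumer-side sketch (illustrative; encoder_media_buffer_type and
+// kMetadataBufferTypeCameraSource come from the OMX metadata headers and may
+// differ across releases): a video encoder handed one of these metadata
+// buffers recovers the camera frame from the native handle written above.
+//
+//   encoder_media_buffer_type *md =
+//       (encoder_media_buffer_type *)metadata_buffer->data;
+//   if (md->buffer_type == kMetadataBufferTypeCameraSource) {
+//       const native_handle_t *nh = md->meta_handle;
+//       int ion_fd = nh->data[0];   // shared ION fd
+//       int offset = nh->data[1];   // always 0 here
+//       int length = nh->data[2];   // buffer size
+//   }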
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraVideoMemory::deallocate()
+{
+    for (int i = 0; i < mBufferCount; i ++) {
+        mMetadata[i]->release(mMetadata[i]);
+        mMetadata[i] = NULL;
+    }
+    QCameraStreamMemory::deallocate();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraVideoMemory::getMemory(int index, bool metadata) const
+{
+    if (index >= mBufferCount)
+        return NULL;
+    if (metadata)
+        return mMetadata[index];
+    else
+        return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraVideoMemory::getMatchBufIndex(const void *opaque,
+                                         bool metadata) const
+{
+    int index = -1;
+    for (int i = 0; i < mBufferCount; i++) {
+        if (metadata) {
+            if (mMetadata[i]->data == opaque) {
+                index = i;
+                break;
+            }
+        } else {
+            if (mCameraMemory[i]->data == opaque) {
+                index = i;
+                break;
+            }
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraGrallocMemory
+ *
+ * DESCRIPTION: constructor of QCameraGrallocMemory
+ *              preview stream buffers are allocated from gralloc native_window
+ *
+ * PARAMETERS :
+ *   @getMemory : camera memory request ops table
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraGrallocMemory::QCameraGrallocMemory(camera_request_memory getMemory)
+        : QCameraMemory(true)
+{
+    mMinUndequeuedBuffers = 0;
+    mWindow = NULL;
+    mWidth = mHeight = 0;
+    mFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+    mGetMemory = getMemory;
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++) {
+        mBufferHandle[i] = NULL;
+        mLocalFlag[i] = BUFFER_NOT_OWNED;
+        mPrivateHandle[i] = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraGrallocMemory
+ *
+ * DESCRIPTION: destructor of QCameraGrallocMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraGrallocMemory::~QCameraGrallocMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : setWindowInfo
+ *
+ * DESCRIPTION: set native window gralloc ops table
+ *
+ * PARAMETERS :
+ *   @window  : gralloc ops table ptr
+ *   @width   : width of preview frame
+ *   @height  : height of preview frame
+ *   @format  : format of preview image
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::setWindowInfo(preview_stream_ops_t *window,
+        int width, int height, int format)
+{
+    mWindow = window;
+    mWidth = width;
+    mHeight = height;
+    mFormat = format;
+}
+
+/*===========================================================================
+ * FUNCTION   : displayBuffer
+ *
+ * DESCRIPTION: send received frame to display
+ *
+ * PARAMETERS :
+ *   @index   : index of preview frame
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::displayBuffer(int index)
+{
+    int err = NO_ERROR;
+    int dequeuedIdx = BAD_INDEX;
+
+    if (BUFFER_NOT_OWNED == mLocalFlag[index]) {
+        ALOGE("%s: buffer to be enqueued is not owned", __func__);
+        return INVALID_OPERATION;
+    }
+
+    err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
+    if(err != 0) {
+        ALOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+    } else {
+        ALOGV("%s: enqueue_buffer hdl=%p", __func__, *mBufferHandle[index]);
+        mLocalFlag[index] = BUFFER_NOT_OWNED;
+    }
+
+    buffer_handle_t *buffer_handle = NULL;
+    int stride = 0;
+    err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
+    if (err == NO_ERROR && buffer_handle != NULL) {
+        int i;
+        ALOGV("%s: dequed buf hdl =%p", __func__, *buffer_handle);
+        for(i = 0; i < mBufferCount; i++) {
+            if(mBufferHandle[i] == buffer_handle) {
+                ALOGV("%s: Found buffer in idx:%d", __func__, i);
+                mLocalFlag[i] = BUFFER_OWNED;
+                dequeuedIdx = i;
+                break;
+            }
+        }
+    } else {
+        ALOGD("%s: dequeue_buffer, no free buffer from display now", __func__);
+    }
+    return dequeuedIdx;
+}
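+
+// Typical preview cycle (sketch; grallocMem and filledIdx are assumed caller
+// state): the stream callback hands a filled buffer index to displayBuffer(),
+// which enqueues it to the native window and, in the same call, tries to
+// dequeue a free buffer for the camera to refill.
+//
+//   int freeIdx = grallocMem->displayBuffer(filledIdx);
+//   if (freeIdx >= 0) {
+//       // hand buffer freeIdx back to the preview stream
+//   } else {
+//       // display kept all buffers this time; try again on the next frame
+//   }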
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::allocate(int count, int /*size*/)
+{
+    int err = 0;
+    status_t ret = NO_ERROR;
+    int gralloc_usage = 0;
+    struct ion_fd_data ion_info_fd;
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+
+    ALOGI(" %s : E ", __FUNCTION__);
+
+    if (!mWindow) {
+        ALOGE("Invalid native window");
+        return INVALID_OPERATION;
+    }
+
+    // Increment buffer count by min undequeued buffer.
+    err = mWindow->get_min_undequeued_buffer_count(mWindow,&mMinUndequeuedBuffers);
+    if (err != 0) {
+        ALOGE("get_min_undequeued_buffer_count  failed: %s (%d)",
+                strerror(-err), -err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    count += mMinUndequeuedBuffers;
+
+    err = mWindow->set_buffer_count(mWindow, count);
+    if (err != 0) {
+         ALOGE("set_buffer_count failed: %s (%d)",
+                    strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    err = mWindow->set_buffers_geometry(mWindow, mWidth, mHeight, mFormat);
+    if (err != 0) {
+         ALOGE("%s: set_buffers_geometry failed: %s (%d)",
+               __func__, strerror(-err), -err);
+         ret = UNKNOWN_ERROR;
+         goto end;
+    }
+
+    gralloc_usage = GRALLOC_USAGE_PRIVATE_IOMMU_HEAP;
+    err = mWindow->set_usage(mWindow, gralloc_usage);
+    if(err != 0) {
+        /* set_usage error out */
+        ALOGE("%s: set_usage rc = %d", __func__, err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    ALOGD("%s: usage = %d, geometry: %p, %d, %d, %d",
+          __func__, gralloc_usage, mWindow, mWidth, mHeight, mFormat);
+
+    //Allocate cnt number of buffers from native window
+    for (int cnt = 0; cnt < count; cnt++) {
+        int stride;
+        err = mWindow->dequeue_buffer(mWindow, &mBufferHandle[cnt], &stride);
+        if(!err) {
+            ALOGV("dequeue buf hdl =%p", mBufferHandle[cnt]);
+            mLocalFlag[cnt] = BUFFER_OWNED;
+        } else {
+            mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+            ALOGE("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+        }
+
+        ALOGV("%s: dequeue buf: %p\n", __func__, mBufferHandle[cnt]);
+
+        if(err != 0) {
+            ALOGE("%s: dequeue_buffer failed: %s (%d)",
+                  __func__, strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+            for(int i = 0; i < cnt; i++) {
+                if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                    err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                    ALOGD("%s: cancel_buffer: hdl =%p", __func__, (*mBufferHandle[i]));
+                }
+                mLocalFlag[i] = BUFFER_NOT_OWNED;
+                mBufferHandle[i] = NULL;
+            }
+            memset(&mMemInfo, 0, sizeof(mMemInfo));
+            goto end;
+        }
+
+        mPrivateHandle[cnt] =
+            (struct private_handle_t *)(*mBufferHandle[cnt]);
+        mMemInfo[cnt].main_ion_fd = open("/dev/ion", O_RDONLY);
+        if (mMemInfo[cnt].main_ion_fd < 0) {
+            ALOGE("%s: failed: could not open ion device", __func__);
+            for(int i = 0; i < cnt; i++) {
+                struct ion_handle_data ion_handle;
+                memset(&ion_handle, 0, sizeof(ion_handle));
+                ion_handle.handle = mMemInfo[i].handle;
+                if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                    ALOGE("%s: ion free failed", __func__);
+                }
+                close(mMemInfo[i].main_ion_fd);
+                if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                    err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                    ALOGD("%s: cancel_buffer: hdl =%p", __func__, (*mBufferHandle[i]));
+                }
+                mLocalFlag[i] = BUFFER_NOT_OWNED;
+                mBufferHandle[i] = NULL;
+            }
+            memset(&mMemInfo, 0, sizeof(mMemInfo));
+            ret = UNKNOWN_ERROR;
+            goto end;
+        } else {
+            ion_info_fd.fd = mPrivateHandle[cnt]->fd;
+            if (ioctl(mMemInfo[cnt].main_ion_fd,
+                      ION_IOC_IMPORT, &ion_info_fd) < 0) {
+                ALOGE("%s: ION import failed\n", __func__);
+                for(int i = 0; i < cnt; i++) {
+                    struct ion_handle_data ion_handle;
+                    memset(&ion_handle, 0, sizeof(ion_handle));
+                    ion_handle.handle = mMemInfo[i].handle;
+                    if (ioctl(mMemInfo[i].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+                        ALOGE("ion free failed");
+                    }
+                    close(mMemInfo[i].main_ion_fd);
+
+                    if(mLocalFlag[i] != BUFFER_NOT_OWNED) {
+                        err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+                        ALOGD("%s: cancel_buffer: hdl =%p", __func__, (*mBufferHandle[i]));
+                    }
+                    mLocalFlag[i] = BUFFER_NOT_OWNED;
+                    mBufferHandle[i] = NULL;
+                }
+                close(mMemInfo[cnt].main_ion_fd);
+                memset(&mMemInfo, 0, sizeof(mMemInfo));
+                ret = UNKNOWN_ERROR;
+                goto end;
+            }
+        }
+        mCameraMemory[cnt] =
+            mGetMemory(mPrivateHandle[cnt]->fd,
+                    mPrivateHandle[cnt]->size,
+                    1,
+                    (void *)this);
+        ALOGD("%s: idx = %d, fd = %d, size = %d, offset = %d",
+              __func__, cnt, mPrivateHandle[cnt]->fd,
+              mPrivateHandle[cnt]->size,
+              mPrivateHandle[cnt]->offset);
+        mMemInfo[cnt].fd =
+            mPrivateHandle[cnt]->fd;
+        mMemInfo[cnt].size =
+            mPrivateHandle[cnt]->size;
+        mMemInfo[cnt].handle = ion_info_fd.handle;
+    }
+    mBufferCount = count;
+
+    //Cancel min_undequeued_buffer buffers back to the window
+    for (int i = 0; i < mMinUndequeuedBuffers; i ++) {
+        err = mWindow->cancel_buffer(mWindow, mBufferHandle[i]);
+        mLocalFlag[i] = BUFFER_NOT_OWNED;
+    }
+
+end:
+    ALOGI(" %s : X ",__func__);
+    return ret;
+}
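+
+// Buffer accounting example (illustrative): if the caller requests count = 7
+// and the window reports mMinUndequeuedBuffers = 2, then 9 buffers are
+// dequeued and wrapped, mBufferCount becomes 9, and buffers 0 and 1 are
+// cancelled straight back so the display always holds its minimum.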
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraGrallocMemory::deallocate()
+{
+    ALOGI("%s: E ", __FUNCTION__);
+
+    for (int cnt = 0; cnt < mBufferCount; cnt++) {
+        mCameraMemory[cnt]->release(mCameraMemory[cnt]);
+        struct ion_handle_data ion_handle;
+        memset(&ion_handle, 0, sizeof(ion_handle));
+        ion_handle.handle = mMemInfo[cnt].handle;
+        if (ioctl(mMemInfo[cnt].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+            ALOGE("ion free failed");
+        }
+        close(mMemInfo[cnt].main_ion_fd);
+        if(mLocalFlag[cnt] != BUFFER_NOT_OWNED) {
+            if (mWindow) {
+                mWindow->cancel_buffer(mWindow, mBufferHandle[cnt]);
+                ALOGD("cancel_buffer: hdl =%p", (*mBufferHandle[cnt]));
+            } else {
+                ALOGE("Preview window is NULL, cannot cancel_buffer: hdl =%p",
+                      (*mBufferHandle[cnt]));
+            }
+        }
+        mLocalFlag[cnt] = BUFFER_NOT_OWNED;
+        ALOGD("put buffer %d successfully", cnt);
+    }
+    mBufferCount = 0;
+    ALOGI(" %s : X ",__FUNCTION__);
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::cacheOps(int index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mCameraMemory[index]->data);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraGrallocMemory::getRegFlags(uint8_t *regFlags) const
+{
+    int i = 0;
+    for (i = 0; i < mMinUndequeuedBuffers; i ++)
+        regFlags[i] = 0;
+    for (; i < mBufferCount; i ++)
+        regFlags[i] = 1;
+    return NO_ERROR;
+}
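+
+// Continuing the example from allocate(): with mMinUndequeuedBuffers = 2 and
+// mBufferCount = 9, regFlags becomes {0, 0, 1, 1, 1, 1, 1, 1, 1} -- only the
+// buffers the HAL still owns are registered with the camera backend up front.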
+
+/*===========================================================================
+ * FUNCTION   : getMemory
+ *
+ * DESCRIPTION: get camera memory
+ *
+ * PARAMETERS :
+ *   @index   : buffer index
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : camera memory ptr
+ *              NULL if not supported or failed
+ *==========================================================================*/
+camera_memory_t *QCameraGrallocMemory::getMemory(int index, bool metadata) const
+{
+    if (index >= mBufferCount || metadata)
+        return NULL;
+    return mCameraMemory[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by opaque ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *   @metadata: flag if it's metadata
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCameraGrallocMemory::getMatchBufIndex(const void *opaque,
+                                           bool metadata) const
+{
+    int index = -1;
+    if (metadata) {
+        return -1;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mCameraMemory[i]->data == opaque) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCameraGrallocMemory::getPtr(int index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mCameraMemory[index]->data;
+}
+
+}; //namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraMem.h b/camera/QCamera2/HAL/QCameraMem.h
new file mode 100644
index 0000000..2890c9d
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraMem.h
@@ -0,0 +1,182 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA2HWI_MEM_H__
+#define __QCAMERA2HWI_MEM_H__
+
+#include <hardware/camera.h>
+#include <utils/Mutex.h>
+
+extern "C" {
+#include <sys/types.h>
+#include <linux/msm_ion.h>
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+// Base class for all memory types. Abstract.
+class QCameraMemory {
+
+public:
+    int cleanCache(int index) {return cacheOps(index, ION_IOC_CLEAN_CACHES);}
+    int invalidateCache(int index) {return cacheOps(index, ION_IOC_INV_CACHES);}
+    int cleanInvalidateCache(int index) {return cacheOps(index, ION_IOC_CLEAN_INV_CACHES);}
+    int getFd(int index) const;
+    int getSize(int index) const;
+    int getCnt() const;
+
+    virtual int allocate(int count, int size) = 0;
+    virtual void deallocate() = 0;
+    virtual int cacheOps(int index, unsigned int cmd) = 0;
+    virtual int getRegFlags(uint8_t *regFlags) const = 0;
+    virtual camera_memory_t *getMemory(int index, bool metadata) const = 0;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const = 0;
+    virtual void *getPtr(int index) const = 0;
+
+    QCameraMemory(bool cached);
+    virtual ~QCameraMemory();
+
+    void getBufDef(const cam_frame_len_offset_t &offset,
+                mm_camera_buf_def_t &bufDef, int index) const;
+
+protected:
+    struct QCameraMemInfo {
+        int fd;
+        int main_ion_fd;
+        struct ion_handle *handle;
+        uint32_t size;
+    };
+
+    int alloc(int count, int size, int heap_id);
+    void dealloc();
+    int allocOneBuffer(struct QCameraMemInfo &memInfo, int heap_id, int size);
+    void deallocOneBuffer(struct QCameraMemInfo &memInfo);
+    int cacheOpsInternal(int index, unsigned int cmd, void *vaddr);
+
+    bool m_bCached;
+    int mBufferCount;
+    struct QCameraMemInfo mMemInfo[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// Internal heap memory is used for buffers used internally by the HAL.
+// They are allocated from /dev/ion.
+class QCameraHeapMemory : public QCameraMemory {
+public:
+    QCameraHeapMemory(bool cached);
+    virtual ~QCameraHeapMemory();
+
+    virtual int allocate(int count, int size);
+    virtual void deallocate();
+    virtual int cacheOps(int index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(int index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(int index) const;
+
+private:
+    void *mPtr[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// External heap memory is used for buffers shared with the
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraStreamMemory : public QCameraMemory {
+public:
+    QCameraStreamMemory(camera_request_memory getMemory, bool cached);
+    virtual ~QCameraStreamMemory();
+
+    virtual int allocate(int count, int size);
+    virtual void deallocate();
+    virtual int cacheOps(int index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(int index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(int index) const;
+
+protected:
+    camera_request_memory mGetMemory;
+    camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// External heap memory is used for buffers shared with the
+// framework. They are allocated from /dev/ion or gralloc.
+class QCameraVideoMemory : public QCameraStreamMemory {
+public:
+    QCameraVideoMemory(camera_request_memory getMemory, bool cached);
+    virtual ~QCameraVideoMemory();
+
+    virtual int allocate(int count, int size);
+    virtual void deallocate();
+    virtual camera_memory_t *getMemory(int index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+
+private:
+    camera_memory_t *mMetadata[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// Gralloc Memory is acquired from preview window
+class QCameraGrallocMemory : public QCameraMemory {
+    enum {
+        BUFFER_NOT_OWNED,
+        BUFFER_OWNED,
+    };
+public:
+    QCameraGrallocMemory(camera_request_memory getMemory);
+    void setNativeWindow(preview_stream_ops_t *anw);
+    virtual ~QCameraGrallocMemory();
+
+    virtual int allocate(int count, int size);
+    virtual void deallocate();
+    virtual int cacheOps(int index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual camera_memory_t *getMemory(int index, bool metadata) const;
+    virtual int getMatchBufIndex(const void *opaque, bool metadata) const;
+    virtual void *getPtr(int index) const;
+
+    void setWindowInfo(preview_stream_ops_t *window, int width, int height, int format);
+    // Enqueue/display buffer[index] onto the native window,
+    // and dequeue one buffer from it.
+    // Returns the buffer index of the dequeued buffer.
+    int displayBuffer(int index);
+
+private:
+    buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    int mLocalFlag[MM_CAMERA_MAX_NUM_FRAMES];
+    struct private_handle_t *mPrivateHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    preview_stream_ops_t *mWindow;
+    int mWidth, mHeight, mFormat;
+    camera_request_memory mGetMemory;
+    camera_memory_t *mCameraMemory[MM_CAMERA_MAX_NUM_FRAMES];
+    int mMinUndequeuedBuffers;
+};
+
+}; // namespace qcamera
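+
+// Quick orientation (summary of the class comments above):
+//   QCameraHeapMemory    -- HAL-internal ION buffers, never handed to the app
+//   QCameraStreamMemory  -- ION buffers wrapped in camera_memory_t for the framework
+//   QCameraVideoMemory   -- stream buffers plus per-buffer encoder metadata handles
+//   QCameraGrallocMemory -- preview buffers dequeued from the native window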
+
+#endif /* __QCAMERA2HWI_MEM_H__ */
diff --git a/camera/QCamera2/HAL/QCameraParameters.cpp b/camera/QCamera2/HAL/QCameraParameters.cpp
new file mode 100644
index 0000000..61d4bcc
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraParameters.cpp
@@ -0,0 +1,6148 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraParameters"
+
+#include <cutils/properties.h>
+#include <math.h>
+#include <utils/Errors.h>
+#include <string.h>
+#include <stdlib.h>
+#include <gralloc_priv.h>
+#include "QCamera2HWI.h"
+#include "QCameraParameters.h"
+
+#define ASPECT_TOLERANCE 0.001
+#define FLIP_V_H (FLIP_H | FLIP_V)
+
+namespace qcamera {
+// Parameter keys to communicate between camera application and driver.
+const char QCameraParameters::KEY_QC_SUPPORTED_HFR_SIZES[] = "hfr-size-values";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_MODE[] = "preview-frame-rate-mode";
+const char QCameraParameters::KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[] = "preview-frame-rate-modes";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[] = "frame-rate-auto";
+const char QCameraParameters::KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[] = "frame-rate-fixed";
+const char QCameraParameters::KEY_QC_TOUCH_AF_AEC[] = "touch-af-aec";
+const char QCameraParameters::KEY_QC_SUPPORTED_TOUCH_AF_AEC[] = "touch-af-aec-values";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AEC[] = "touch-index-aec";
+const char QCameraParameters::KEY_QC_TOUCH_INDEX_AF[] = "touch-index-af";
+const char QCameraParameters::KEY_QC_SCENE_DETECT[] = "scene-detect";
+const char QCameraParameters::KEY_QC_SUPPORTED_SCENE_DETECT[] = "scene-detect-values";
+const char QCameraParameters::KEY_QC_ISO_MODE[] = "iso";
+const char QCameraParameters::KEY_QC_SUPPORTED_ISO_MODES[] = "iso-values";
+const char QCameraParameters::KEY_QC_LENSSHADE[] = "lensshade";
+const char QCameraParameters::KEY_QC_SUPPORTED_LENSSHADE_MODES[] = "lensshade-values";
+const char QCameraParameters::KEY_QC_AUTO_EXPOSURE[] = "auto-exposure";
+const char QCameraParameters::KEY_QC_SUPPORTED_AUTO_EXPOSURE[] = "auto-exposure-values";
+const char QCameraParameters::KEY_QC_DENOISE[] = "denoise";
+const char QCameraParameters::KEY_QC_SUPPORTED_DENOISE[] = "denoise-values";
+const char QCameraParameters::KEY_QC_FOCUS_ALGO[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_SUPPORTED_FOCUS_ALGOS[] = "selectable-zone-af-values";
+const char QCameraParameters::KEY_QC_FACE_DETECTION[] = "face-detection";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_DETECTION[] = "face-detection-values";
+const char QCameraParameters::KEY_QC_FACE_RECOGNITION[] = "face-recognition";
+const char QCameraParameters::KEY_QC_SUPPORTED_FACE_RECOGNITION[] = "face-recognition-values";
+const char QCameraParameters::KEY_QC_MEMORY_COLOR_ENHANCEMENT[] = "mce";
+const char QCameraParameters::KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[] = "mce-values";
+const char QCameraParameters::KEY_QC_DIS[] = "dis";
+const char QCameraParameters::KEY_QC_SUPPORTED_DIS_MODES[] = "dis-values";
+const char QCameraParameters::KEY_QC_VIDEO_HIGH_FRAME_RATE[] = "video-hfr";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[] = "video-hfr-values";
+const char QCameraParameters::KEY_QC_REDEYE_REDUCTION[] = "redeye-reduction";
+const char QCameraParameters::KEY_QC_SUPPORTED_REDEYE_REDUCTION[] = "redeye-reduction-values";
+const char QCameraParameters::KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[] = "hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_HDR_IMAGING_MODES[] = "hdr-values";
+const char QCameraParameters::KEY_QC_ZSL[] = "zsl";
+const char QCameraParameters::KEY_QC_SUPPORTED_ZSL_MODES[] = "zsl-values";
+const char QCameraParameters::KEY_QC_ZSL_BURST_INTERVAL[] = "capture-burst-interval";
+const char QCameraParameters::KEY_QC_ZSL_BURST_LOOKBACK[] = "capture-burst-retroactive";
+const char QCameraParameters::KEY_QC_ZSL_QUEUE_DEPTH[] = "capture-burst-queue-depth";
+const char QCameraParameters::KEY_QC_CAMERA_MODE[] = "camera-mode";
+const char QCameraParameters::KEY_QC_AE_BRACKET_HDR[] = "ae-bracket-hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_AE_BRACKET_MODES[] = "ae-bracket-hdr-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_RAW_FORMATS[] = "raw-format-values";
+const char QCameraParameters::KEY_QC_RAW_FORMAT[] = "raw-format";
+const char QCameraParameters::KEY_QC_ORIENTATION[] = "orientation";
+const char QCameraParameters::KEY_QC_SELECTABLE_ZONE_AF[] = "selectable-zone-af";
+const char QCameraParameters::KEY_QC_CAPTURE_BURST_EXPOSURE[] = "capture-burst-exposures";
+const char QCameraParameters::KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[] = "num-snaps-per-shutter";
+const char QCameraParameters::KEY_QC_NO_DISPLAY_MODE[] = "no-display-mode";
+const char QCameraParameters::KEY_QC_RAW_PICUTRE_SIZE[] = "raw-size";
+const char QCameraParameters::KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] = "skinToneEnhancement-values";
+const char QCameraParameters::KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[] = "supported-live-snapshot-sizes";
+const char QCameraParameters::KEY_QC_HDR_NEED_1X[] = "hdr-need-1x";
+const char QCameraParameters::KEY_QC_PREVIEW_FLIP[] = "preview-flip";
+const char QCameraParameters::KEY_QC_VIDEO_FLIP[] = "video-flip";
+const char QCameraParameters::KEY_QC_SNAPSHOT_PICTURE_FLIP[] = "snapshot-picture-flip";
+const char QCameraParameters::KEY_QC_SUPPORTED_FLIP_MODES[] = "flip-mode-values";
+const char QCameraParameters::KEY_QC_VIDEO_HDR[] = "video-hdr";
+const char QCameraParameters::KEY_QC_SUPPORTED_VIDEO_HDR_MODES[] = "video-hdr-values";
+
+// Values for effect settings.
+const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
+const char QCameraParameters::EFFECT_SKETCH[] = "sketch";
+const char QCameraParameters::EFFECT_NEON[] = "neon";
+
+// Values for auto exposure settings.
+const char QCameraParameters::TOUCH_AF_AEC_OFF[] = "touch-off";
+const char QCameraParameters::TOUCH_AF_AEC_ON[] = "touch-on";
+
+// Values for scene mode settings.
+const char QCameraParameters::SCENE_MODE_ASD[] = "asd";   // corresponds to CAMERA_BESTSHOT_AUTO in HAL
+const char QCameraParameters::SCENE_MODE_BACKLIGHT[] = "backlight";
+const char QCameraParameters::SCENE_MODE_FLOWERS[] = "flowers";
+const char QCameraParameters::SCENE_MODE_AR[] = "AR";
+const char QCameraParameters::SCENE_MODE_HDR[] = "hdr";
+
+// Formats for setPreviewFormat and setPictureFormat.
+const char QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO[] = "yuv420sp-adreno";
+const char QCameraParameters::PIXEL_FORMAT_YV12[] = "yuv420p";
+const char QCameraParameters::PIXEL_FORMAT_NV12[] = "nv12";
+
+// Values for raw image formats
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[] = "yuv-raw8-yuyv";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[] = "yuv-raw8-yvyu";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[] = "yuv-raw8-uyvy";
+const char QCameraParameters::QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[] = "yuv-raw8-vyuy";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[] = "bayer-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[] = "bayer-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[] = "bayer-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[] = "bayer-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[] = "bayer-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[] = "bayer-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[] = "bayer-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[] = "bayer-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[] = "bayer-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[] = "bayer-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[] = "bayer-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[] = "bayer-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[] = "bayer-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[] = "bayer-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[] = "bayer-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[] = "bayer-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[] = "bayer-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[] = "bayer-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[] = "bayer-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[] = "bayer-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[] = "bayer-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[] = "bayer-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[] = "bayer-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[] = "bayer-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[] = "bayer-ideal-qcom-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[] = "bayer-ideal-qcom-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[] = "bayer-ideal-qcom-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[] = "bayer-ideal-qcom-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[] = "bayer-ideal-qcom-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[] = "bayer-ideal-qcom-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[] = "bayer-ideal-qcom-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[] = "bayer-ideal-qcom-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[] = "bayer-ideal-qcom-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[] = "bayer-ideal-qcom-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[] = "bayer-ideal-qcom-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[] = "bayer-ideal-qcom-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[] = "bayer-ideal-mipi-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[] = "bayer-ideal-mipi-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[] = "bayer-ideal-mipi-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[] = "bayer-ideal-mipi-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[] = "bayer-ideal-mipi-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[] = "bayer-ideal-mipi-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[] = "bayer-ideal-mipi-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[] = "bayer-ideal-mipi-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[] = "bayer-ideal-mipi-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[] = "bayer-ideal-mipi-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[] = "bayer-ideal-mipi-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[] = "bayer-ideal-mipi-12bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[] = "bayer-ideal-plain8-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[] = "bayer-ideal-plain8-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[] = "bayer-ideal-plain8-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[] = "bayer-ideal-plain8-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[] = "bayer-ideal-plain16-8gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[] = "bayer-ideal-plain16-8grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[] = "bayer-ideal-plain16-8rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[] = "bayer-ideal-plain16-8bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[] = "bayer-ideal-plain16-10gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[] = "bayer-ideal-plain16-10grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[] = "bayer-ideal-plain16-10rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[] = "bayer-ideal-plain16-10bggr";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[] = "bayer-ideal-plain16-12gbrg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[] = "bayer-ideal-plain16-12grbg";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[] = "bayer-ideal-plain16-12rggb";
+const char QCameraParameters::QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[] = "bayer-ideal-plain16-12bggr";
+
+// Values for ISO Settings
+const char QCameraParameters::ISO_AUTO[] = "auto";
+const char QCameraParameters::ISO_HJR[] = "ISO_HJR";
+const char QCameraParameters::ISO_100[] = "ISO100";
+const char QCameraParameters::ISO_200[] = "ISO200";
+const char QCameraParameters::ISO_400[] = "ISO400";
+const char QCameraParameters::ISO_800[] = "ISO800";
+const char QCameraParameters::ISO_1600[] = "ISO1600";
+
+// Values for auto exposure settings.
+const char QCameraParameters::AUTO_EXPOSURE_FRAME_AVG[] = "frame-average";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SMART_METERING[] = "smart-metering";
+const char QCameraParameters::AUTO_EXPOSURE_USER_METERING[] = "user-metering";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING_ADV[] = "spot-metering-adv";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[] = "center-weighted-adv";
+
+const char QCameraParameters::KEY_QC_GPS_LATITUDE_REF[] = "gps-latitude-ref";
+const char QCameraParameters::KEY_QC_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
+const char QCameraParameters::KEY_QC_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
+const char QCameraParameters::KEY_QC_GPS_STATUS[] = "gps-status";
+
+const char QCameraParameters::KEY_QC_HISTOGRAM[] = "histogram";
+const char QCameraParameters::KEY_QC_SUPPORTED_HISTOGRAM_MODES[] = "histogram-values";
+
+const char QCameraParameters::VALUE_ENABLE[] = "enable";
+const char QCameraParameters::VALUE_DISABLE[] = "disable";
+const char QCameraParameters::VALUE_OFF[] = "off";
+const char QCameraParameters::VALUE_ON[] = "on";
+const char QCameraParameters::VALUE_TRUE[] = "true";
+const char QCameraParameters::VALUE_FALSE[] = "false";
+
+const char QCameraParameters::KEY_QC_SHARPNESS[] = "sharpness";
+const char QCameraParameters::KEY_QC_MIN_SHARPNESS[] = "min-sharpness";
+const char QCameraParameters::KEY_QC_MAX_SHARPNESS[] = "max-sharpness";
+const char QCameraParameters::KEY_QC_SHARPNESS_STEP[] = "sharpness-step";
+const char QCameraParameters::KEY_QC_CONTRAST[] = "contrast";
+const char QCameraParameters::KEY_QC_MIN_CONTRAST[] = "min-contrast";
+const char QCameraParameters::KEY_QC_MAX_CONTRAST[] = "max-contrast";
+const char QCameraParameters::KEY_QC_CONTRAST_STEP[] = "contrast-step";
+const char QCameraParameters::KEY_QC_SATURATION[] = "saturation";
+const char QCameraParameters::KEY_QC_MIN_SATURATION[] = "min-saturation";
+const char QCameraParameters::KEY_QC_MAX_SATURATION[] = "max-saturation";
+const char QCameraParameters::KEY_QC_SATURATION_STEP[] = "saturation-step";
+const char QCameraParameters::KEY_QC_BRIGHTNESS[] = "luma-adaptation";
+const char QCameraParameters::KEY_QC_MIN_BRIGHTNESS[] = "min-brightness";
+const char QCameraParameters::KEY_QC_MAX_BRIGHTNESS[] = "max-brightness";
+const char QCameraParameters::KEY_QC_BRIGHTNESS_STEP[] = "brightness-step";
+const char QCameraParameters::KEY_QC_SCE_FACTOR[] = "skinToneEnhancement";
+const char QCameraParameters::KEY_QC_MIN_SCE_FACTOR[] = "min-sce-factor";
+const char QCameraParameters::KEY_QC_MAX_SCE_FACTOR[] = "max-sce-factor";
+const char QCameraParameters::KEY_QC_SCE_FACTOR_STEP[] = "sce-factor-step";
+
+const char QCameraParameters::KEY_QC_SUPPORTED_CAMERA_FEATURES[] = "qc-camera-features";
+const char QCameraParameters::KEY_QC_MAX_NUM_REQUESTED_FACES[] = "qc-max-num-requested-faces";
+
+// Values for DENOISE
+const char QCameraParameters::DENOISE_OFF[] = "denoise-off";
+const char QCameraParameters::DENOISE_ON[] = "denoise-on";
+
+// Values for selectable zone af Settings
+const char QCameraParameters::FOCUS_ALGO_AUTO[] = "auto";
+const char QCameraParameters::FOCUS_ALGO_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::FOCUS_ALGO_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::FOCUS_ALGO_FRAME_AVERAGE[] = "frame-average";
+
+// Values for HFR settings.
+const char QCameraParameters::VIDEO_HFR_OFF[] = "off";
+const char QCameraParameters::VIDEO_HFR_2X[] = "60";
+const char QCameraParameters::VIDEO_HFR_3X[] = "90";
+const char QCameraParameters::VIDEO_HFR_4X[] = "120";
+const char QCameraParameters::VIDEO_HFR_5X[] = "150";
+
+// Values for HDR Bracketing settings.
+const char QCameraParameters::AE_BRACKET_OFF[] = "Off";
+const char QCameraParameters::AE_BRACKET[] = "AE-Bracket";
+
+// Values for FLIP settings.
+const char QCameraParameters::FLIP_MODE_OFF[] = "off";
+const char QCameraParameters::FLIP_MODE_V[] = "flip-v";
+const char QCameraParameters::FLIP_MODE_H[] = "flip-h";
+const char QCameraParameters::FLIP_MODE_VH[] = "flip-vh";
+
+static const char* portrait = "portrait";
+static const char* landscape = "landscape";
+
+const cam_dimension_t QCameraParameters::THUMBNAIL_SIZES_MAP[] = {
+    { 512, 288 }, //1.777778
+    { 480, 288 }, //1.666667
+    { 256, 154 }, //1.66233
+    { 432, 288 }, //1.5
+    { 320, 240 }, //1.33333
+    { 176, 144 }, //1.222222
+    { 0, 0 }      // required by Android SDK
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::AUTO_EXPOSURE_MAP[] = {
+    { AUTO_EXPOSURE_FRAME_AVG,           CAM_AEC_MODE_FRAME_AVERAGE },
+    { AUTO_EXPOSURE_CENTER_WEIGHTED,     CAM_AEC_MODE_CENTER_WEIGHTED },
+    { AUTO_EXPOSURE_SPOT_METERING,       CAM_AEC_MODE_SPOT_METERING },
+    { AUTO_EXPOSURE_SMART_METERING,      CAM_AEC_MODE_SMART_METERING },
+    { AUTO_EXPOSURE_USER_METERING,       CAM_AEC_MODE_USER_METERING },
+    { AUTO_EXPOSURE_SPOT_METERING_ADV,   CAM_AEC_MODE_SPOT_METERING_ADV },
+    { AUTO_EXPOSURE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::PREVIEW_FORMATS_MAP[] = {
+    {PIXEL_FORMAT_YUV420SP,        CAM_FORMAT_YUV_420_NV21},
+    {PIXEL_FORMAT_YUV420P,         CAM_FORMAT_YUV_420_YV12},
+    {PIXEL_FORMAT_YUV420SP_ADRENO, CAM_FORMAT_YUV_420_NV21_ADRENO},
+    {PIXEL_FORMAT_YV12,            CAM_FORMAT_YUV_420_YV12},
+    {PIXEL_FORMAT_NV12,            CAM_FORMAT_YUV_420_NV12}
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::PICTURE_TYPES_MAP[] = {
+    {PIXEL_FORMAT_JPEG,                          CAM_FORMAT_JPEG},
+    {PIXEL_FORMAT_YUV422SP,                      CAM_FORMAT_YUV_422_NV16},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV,          CAM_FORMAT_YUV_RAW_8BIT_YUYV},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU,          CAM_FORMAT_YUV_RAW_8BIT_YVYU},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY,          CAM_FORMAT_YUV_RAW_8BIT_UYVY},
+    {QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY,          CAM_FORMAT_YUV_RAW_8BIT_VYUY},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR,       CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR,      CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR,       CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR,      CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR,     CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR,     CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR,    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR,   CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR,  CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB},
+    {QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR, CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR}
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::RAW_FORMATS_MAP[] = {
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::FOCUS_MODES_MAP[] = {
+    { FOCUS_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
+    { FOCUS_MODE_INFINITY,           CAM_FOCUS_MODE_INFINITY },
+    { FOCUS_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
+    { FOCUS_MODE_FIXED,              CAM_FOCUS_MODE_FIXED },
+    { FOCUS_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
+    { FOCUS_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
+    { FOCUS_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::EFFECT_MODES_MAP[] = {
+    { EFFECT_NONE,       CAM_EFFECT_MODE_OFF },
+    { EFFECT_MONO,       CAM_EFFECT_MODE_MONO },
+    { EFFECT_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
+    { EFFECT_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
+    { EFFECT_SEPIA,      CAM_EFFECT_MODE_SEPIA },
+    { EFFECT_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
+    { EFFECT_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
+    { EFFECT_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
+    { EFFECT_AQUA,       CAM_EFFECT_MODE_AQUA },
+    { EFFECT_EMBOSS,     CAM_EFFECT_MODE_EMBOSS },
+    { EFFECT_SKETCH,     CAM_EFFECT_MODE_SKETCH },
+    { EFFECT_NEON,       CAM_EFFECT_MODE_NEON }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::SCENE_MODES_MAP[] = {
+    { SCENE_MODE_AUTO,           CAM_SCENE_MODE_OFF },
+    { SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
+    { SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
+    { SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
+    { SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
+    { SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
+    { SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
+    { SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
+    { SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
+    { SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
+    { SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
+    { SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
+    { SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
+    { SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
+    { SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
+    { SCENE_MODE_ASD,            CAM_SCENE_MODE_AUTO },
+    { SCENE_MODE_BACKLIGHT,      CAM_SCENE_MODE_BACKLIGHT },
+    { SCENE_MODE_FLOWERS,        CAM_SCENE_MODE_FLOWERS },
+    { SCENE_MODE_AR,             CAM_SCENE_MODE_AR },
+    { SCENE_MODE_HDR,            CAM_SCENE_MODE_OFF },
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::FLASH_MODES_MAP[] = {
+    { FLASH_MODE_OFF,   CAM_FLASH_MODE_OFF },
+    { FLASH_MODE_AUTO,  CAM_FLASH_MODE_AUTO },
+    { FLASH_MODE_ON,    CAM_FLASH_MODE_ON },
+    { FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::FOCUS_ALGO_MAP[] = {
+    { FOCUS_ALGO_AUTO,            CAM_FOCUS_ALGO_AUTO },
+    { FOCUS_ALGO_SPOT_METERING,   CAM_FOCUS_ALGO_SPOT },
+    { FOCUS_ALGO_CENTER_WEIGHTED, CAM_FOCUS_ALGO_CENTER_WEIGHTED },
+    { FOCUS_ALGO_FRAME_AVERAGE,   CAM_FOCUS_ALGO_AVERAGE }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::WHITE_BALANCE_MODES_MAP[] = {
+    { WHITE_BALANCE_AUTO,            CAM_WB_MODE_AUTO },
+    { WHITE_BALANCE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
+    { WHITE_BALANCE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
+    { WHITE_BALANCE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT },
+    { WHITE_BALANCE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
+    { WHITE_BALANCE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
+    { WHITE_BALANCE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
+    { WHITE_BALANCE_SHADE,           CAM_WB_MODE_SHADE }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::ANTIBANDING_MODES_MAP[] = {
+    { ANTIBANDING_OFF,  CAM_ANTIBANDING_MODE_OFF },
+    { ANTIBANDING_50HZ, CAM_ANTIBANDING_MODE_50HZ },
+    { ANTIBANDING_60HZ, CAM_ANTIBANDING_MODE_60HZ },
+    { ANTIBANDING_AUTO, CAM_ANTIBANDING_MODE_AUTO }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::ISO_MODES_MAP[] = {
+    { ISO_AUTO,  CAM_ISO_MODE_AUTO },
+    { ISO_HJR,   CAM_ISO_MODE_DEBLUR },
+    { ISO_100,   CAM_ISO_MODE_100 },
+    { ISO_200,   CAM_ISO_MODE_200 },
+    { ISO_400,   CAM_ISO_MODE_400 },
+    { ISO_800,   CAM_ISO_MODE_800 },
+    { ISO_1600,  CAM_ISO_MODE_1600 }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::HFR_MODES_MAP[] = {
+    { VIDEO_HFR_OFF, CAM_HFR_MODE_OFF },
+    { VIDEO_HFR_2X,  CAM_HFR_MODE_60FPS },
+    { VIDEO_HFR_3X,  CAM_HFR_MODE_90FPS },
+    { VIDEO_HFR_4X,  CAM_HFR_MODE_120FPS },
+    { VIDEO_HFR_5X,  CAM_HFR_MODE_150FPS }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::BRACKETING_MODES_MAP[] = {
+    { AE_BRACKET_OFF, CAM_EXP_BRACKETING_OFF },
+    { AE_BRACKET,     CAM_EXP_BRACKETING_ON }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::ON_OFF_MODES_MAP[] = {
+    { VALUE_OFF, 0 },
+    { VALUE_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::TOUCH_AF_AEC_MODES_MAP[] = {
+    { QCameraParameters::TOUCH_AF_AEC_OFF, 0 },
+    { QCameraParameters::TOUCH_AF_AEC_ON, 1 }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::ENABLE_DISABLE_MODES_MAP[] = {
+    { VALUE_ENABLE,  1 },
+    { VALUE_DISABLE, 0 }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::DENOISE_ON_OFF_MODES_MAP[] = {
+    { DENOISE_OFF, 0 },
+    { DENOISE_ON,  1 }
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::TRUE_FALSE_MODES_MAP[] = {
+    { VALUE_FALSE, 0},
+    { VALUE_TRUE,  1}
+};
+
+const QCameraParameters::QCameraMap QCameraParameters::FLIP_MODES_MAP[] = {
+    {FLIP_MODE_OFF, 0},
+    {FLIP_MODE_V, FLIP_V},
+    {FLIP_MODE_H, FLIP_H},
+    {FLIP_MODE_VH, FLIP_V_H}
+};
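+
+// Each QCameraMap table above pairs an Android CameraParameters string value
+// with the corresponding HAL enum value; the tables are searched linearly by
+// lookupAttr() and lookupNameByValue() defined further below.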
+
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+
+/*===========================================================================
+ * FUNCTION   : QCameraParameters
+ *
+ * DESCRIPTION: default constructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::QCameraParameters()
+    : CameraParameters(),
+      m_pCapability(NULL),
+      m_pCamOpsTbl(NULL),
+      m_pParamHeap(NULL),
+      m_pParamBuf(NULL),
+      m_bZslMode(false),
+      m_bZslMode_new(false),
+      m_bRecordingHint(false),
+      m_bRecordingHint_new(false),
+      m_bHistogramEnabled(false),
+      m_nFaceProcMask(0),
+      m_bDebugFps(false),
+      mFocusMode(CAM_FOCUS_MODE_MAX),
+      mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+      mPictureFormat(CAM_FORMAT_JPEG),
+      m_bNeedRestart(false),
+      m_bNoDisplayMode(false),
+      m_bWNROn(false),
+      m_bNeedLockCAF(false),
+      m_bCAFLocked(false),
+      m_bAFRunning(false),
+      m_tempMap()
+{
+    char value[PROPERTY_VALUE_MAX];
+    // TODO: may move to parameter instead of sysprop
+    property_get("persist.debug.sf.showfps", value, "0");
+    m_bDebugFps = atoi(value) > 0 ? true : false;
+
+    // Thermal mode is configured via a system property because a system
+    // property applies to all applications, while camera parameters only
+    // apply to a specific app.
+    property_get("persist.camera.thermal.mode", value, "fps");
+    if (!strcmp(value, "frameskip")) {
+        m_ThermalMode = QCAMERA_THERMAL_ADJUST_FRAMESKIP;
+    } else {
+        if (strcmp(value, "fps"))
+            ALOGE("%s: Invalid camera thermal mode %s", __func__, value);
+        m_ThermalMode = QCAMERA_THERMAL_ADJUST_FPS;
+    }
+
+    memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraParameters
+ *
+ * DESCRIPTION: constructor of QCameraParameters
+ *
+ * PARAMETERS :
+ *   @params  : parameters in string
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::QCameraParameters(const String8 &params)
+    : CameraParameters(params),
+    m_pCapability(NULL),
+    m_pCamOpsTbl(NULL),
+    m_pParamHeap(NULL),
+    m_pParamBuf(NULL),
+    m_bZslMode(false),
+    m_bZslMode_new(false),
+    m_bRecordingHint(false),
+    m_bRecordingHint_new(false),
+    m_bHistogramEnabled(false),
+    m_nFaceProcMask(0),
+    m_bDebugFps(false),
+    mFocusMode(CAM_FOCUS_MODE_MAX),
+    mPreviewFormat(CAM_FORMAT_YUV_420_NV21),
+    mPictureFormat(CAM_FORMAT_JPEG),
+    m_bNeedRestart(false),
+    m_bNoDisplayMode(false),
+    m_bWNROn(false),
+    m_bNeedLockCAF(false),
+    m_bCAFLocked(false),
+    m_bAFRunning(false),
+    m_tempMap()
+{
+    memset(&m_LiveSnapshotSize, 0, sizeof(m_LiveSnapshotSize));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraParameters
+ *
+ * DESCRIPTION: destructor of QCameraParameters
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraParameters::~QCameraParameters()
+{
+    deinit();
+}
+
+/*===========================================================================
+ * FUNCTION   : createSizesString
+ *
+ * DESCRIPTION: create string obj containing an array of dimensions
+ *
+ * PARAMETERS :
+ *   @sizes   : array of dimensions
+ *   @len     : size of dimension array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createSizesString(const cam_dimension_t *sizes, int len)
+{
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "%dx%d", sizes[0].width, sizes[0].height);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",%dx%d", sizes[i].width, sizes[i].height);
+        str.append(buffer);
+    }
+    return str;
+}
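+
+// Illustrative output of createSizesString(): for the two dimensions
+// {1920, 1080} and {1280, 720} the returned string is "1920x1080,1280x720".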
+
+/*===========================================================================
+ * FUNCTION   : createValuesString
+ *
+ * DESCRIPTION: create string obj containing the values from the map that
+ *              match entries in the input values array
+ *
+ * PARAMETERS :
+ *   @values  : array of values
+ *   @len     : size of values array
+ *   @map     : map containing the mapping between values and enums
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createValuesString(const int *values,
+                                              int len,
+                                              const QCameraMap* map,
+                                              int map_len)
+{
+    String8 str;
+    int count = 0;
+
+    for (int i = 0; i < len; i++ ) {
+        for (int j = 0; j < map_len; j ++)
+            if (map[j].val == values[i]) {
+                if (NULL != map[j].desc) {
+                    if (count > 0) {
+                        str.append(",");
+                    }
+                    str.append(map[j].desc);
+                    count++;
+                    break; //loop j
+                }
+            }
+    }
+    return str;
+}
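+
+// Note: createValuesString() silently skips any input value that has no entry
+// in the map, so the returned list may contain fewer items than the input array.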
+
+/*===========================================================================
+ * FUNCTION   : createValuesStringFromMap
+ *
+ * DESCRIPTION: create string obj containing all values directly from the map
+ *
+ * PARAMETERS :
+ *   @map     : map containing the mapping between values and enums
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createValuesStringFromMap(const QCameraMap* map,
+                                                     int map_len)
+{
+    String8 str;
+
+    for (int i = 0; i < map_len; i++) {
+        if (NULL != map[i].desc) {
+            if (i > 0) {
+                str.append(",");
+            }
+            str.append(map[i].desc);
+        }
+    }
+    return str;
+}
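+
+// createValuesStringFromMap() can be used to advertise every value of a map,
+// e.g. passing FLIP_MODES_MAP yields "off,flip-v,flip-h,flip-vh".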
+
+/*===========================================================================
+ * FUNCTION   : createZoomRatioValuesString
+ *
+ * DESCRIPTION: create string obj containing an array of zoom ratio values
+ *
+ * PARAMETERS :
+ *   @zoomRatios : array of zoom ratios
+ *   @length     : size of the array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createZoomRatioValuesString(int *zoomRatios, int length)
+{
+    String8 str;
+    char buffer[32] = {0};
+
+    if (length > 0) {
+        snprintf(buffer, sizeof(buffer), "%d", zoomRatios[0]);
+        str.append(buffer);
+    }
+
+    for (int i = 1; i < length; i++) {
+        memset(buffer, 0, sizeof(buffer));
+        snprintf(buffer, sizeof(buffer), ",%d", zoomRatios[i]);
+        str.append(buffer);
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createHfrValuesString
+ *
+ * DESCRIPTION: create string obj containing the hfr values from the map that
+ *              match entries in the input hfr info array
+ *
+ * PARAMETERS :
+ *   @values  : array of hfr info
+ *   @len     : size of the array
+ *   @map     : map of hfr string value and enum
+ *   @map_len : size of the map
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrValuesString(
+                                const cam_hfr_info_t *values,
+                                int len,
+                                const QCameraMap* map,
+                                int map_len)
+{
+    String8 str;
+    int count = 0;
+
+    for (int i = 0; i < len; i++ ) {
+        for (int j = 0; j < map_len; j ++)
+            if (map[j].val == (int)values[i].mode) {
+                if (NULL != map[j].desc) {
+                    if (count > 0) {
+                        str.append(",");
+                    }
+                    str.append(map[j].desc);
+                    count++;
+                    break; //loop j
+                }
+            }
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : createHfrSizesString
+ *
+ * DESCRIPTION: create string obj containing an array of hfr sizes
+ *
+ * PARAMETERS :
+ *   @values  : array of hfr info
+ *   @len     : size of the array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createHfrSizesString(
+                                const cam_hfr_info_t *values,
+                                int len)
+{
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "%dx%d",
+                 values[0].dim.width, values[0].dim.height);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",%dx%d",
+                 values[i].dim.width, values[i].dim.height);
+        str.append(buffer);
+    }
+    return str;
+}
+
+/*===========================================================================
+ * FUNCTION   : compareFPSValues
+ *
+ * DESCRIPTION: helper function for fps sorting
+ *
+ * PARAMETERS :
+ *   @p1     : first array element
+ *   @p2     : second array element
+ *
+ * RETURN     : -1 - left element is greater than right
+ *              0  - elements are equal
+ *              1  - left element is less than right
+ *==========================================================================*/
+int QCameraParameters::compareFPSValues(const void *p1, const void *p2)
+{
+    if ( *( (int *) p1) > *( (int *) p2) ) {
+        return -1;
+    } else if (  *( (int *) p1) < *( (int *) p2) ) {
+        return 1;
+    }
+
+    return 0;
+}
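+
+// Returning -1 when the left element is larger makes qsort() produce a
+// descending order, which createFpsString() below relies on when it collapses
+// duplicate frame rates.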
+
+/*===========================================================================
+ * FUNCTION   : createFpsString
+ *
+ * DESCRIPTION: create string obj containing an array of FPS rates
+ *
+ * PARAMETERS :
+ *   @fps     : array of fps ranges
+ *   @len     : size of the array
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createFpsString(const cam_fps_range_t *fps, int len)
+{
+    String8 str;
+    char buffer[32];
+    int duplicate = INT_MAX;
+
+    int *fpsValues = new int[len];
+
+    for (int i = 0; i < len; i++ ) {
+        fpsValues[i] = int(fps[i].max_fps);
+    }
+
+    qsort(fpsValues, len, sizeof(int), compareFPSValues);
+
+    for (int i = 0; i < len; i++ ) {
+        if ( duplicate != fpsValues[i] ) {
+            snprintf(buffer, sizeof(buffer), "%d", fpsValues[i]);
+            str.append(buffer);
+            if (i < len-1) {
+                str.append(",");
+            }
+            duplicate = fpsValues[i];
+        }
+    }
+
+    delete [] fpsValues;
+
+    return str;
+}
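+
+// Illustrative: for max_fps values {15, 30, 30, 24} the sorted, de-duplicated
+// result is "30,24,15".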
+
+/*===========================================================================
+ * FUNCTION   : createFpsRangeString
+ *
+ * DESCRIPTION: create string obj containing an array of FPS ranges
+ *
+ * PARAMETERS :
+ *   @fps     : array of fps ranges
+ *   @len     : size of the array
+ *   @default_fps_index : reference to index of default fps range
+ *
+ * RETURN     : string obj
+ *==========================================================================*/
+String8 QCameraParameters::createFpsRangeString(const cam_fps_range_t* fps,
+                                                int len,
+                                                int &default_fps_index)
+{
+    String8 str;
+    char buffer[32];
+    int max_range = 0;
+    int min_fps, max_fps;
+
+    if (len > 0) {
+        min_fps = int(fps[0].min_fps * 1000);
+        max_fps = int(fps[0].max_fps * 1000);
+        max_range = max_fps - min_fps;
+        default_fps_index = 0;
+        snprintf(buffer, sizeof(buffer), "(%d,%d)", min_fps, max_fps);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        min_fps = int(fps[i].min_fps * 1000);
+        max_fps = int(fps[i].max_fps * 1000);
+        if (max_range < (max_fps - min_fps)) {
+            max_range = max_fps - min_fps;
+            default_fps_index = i;
+        }
+        snprintf(buffer, sizeof(buffer), ",(%d,%d)", min_fps, max_fps);
+        str.append(buffer);
+    }
+    return str;
+}
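+
+// Fps values are scaled by 1000 to match the fixed-point convention of the
+// Android preview-fps-range API (e.g. a 7.5-30 fps entry becomes "(7500,30000)");
+// default_fps_index ends up pointing at the widest supported range.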
+
+/*===========================================================================
+ * FUNCTION   : lookupAttr
+ *
+ * DESCRIPTION: lookup a value by its name
+ *
+ * PARAMETERS :
+ *   @arr     : map containing <name, value> pairs
+ *   @len     : size of the map
+ *   @name    : name to be looked up
+ *
+ * RETURN     : valid value if found
+ *              NAME_NOT_FOUND if not found
+ *==========================================================================*/
+int QCameraParameters::lookupAttr(const QCameraMap arr[], int len, const char *name)
+{
+    if (name) {
+        for (int i = 0; i < len; i++) {
+            if (!strcmp(arr[i].desc, name))
+                return arr[i].val;
+        }
+    }
+    return NAME_NOT_FOUND;
+}
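+
+// Typical call pattern, as used by the setters below:
+//   int32_t fmt = lookupAttr(PREVIEW_FORMATS_MAP,
+//                            sizeof(PREVIEW_FORMATS_MAP) / sizeof(QCameraMap),
+//                            params.getPreviewFormat());
+// The result must be compared against NAME_NOT_FOUND before it is used.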
+
+/*===========================================================================
+ * FUNCTION   : lookupNameByValue
+ *
+ * DESCRIPTION: lookup a name by its value
+ *
+ * PARAMETERS :
+ *   @arr     : map containing <name, value> pairs
+ *   @len     : size of the map
+ *   @value   : value to be looked up
+ *
+ * RETURN     : name str or NULL if not found
+ *==========================================================================*/
+const char *QCameraParameters::lookupNameByValue(const QCameraMap arr[], int len, int value)
+{
+    for (int i = 0; i < len; i++) {
+        if (arr[i].val == value) {
+            return arr[i].desc;
+        }
+    }
+    return NULL;
+}
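+
+// lookupNameByValue() is the reverse lookup; a NULL return means the enum has
+// no string mapping, so callers should NULL-check the result.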
+
+/*===========================================================================
+ * FUNCTION   : setPreviewSize
+ *
+ * DESCRIPTION: set preview size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPreviewSize(&width, &height);
+    ALOGV("Requested preview size %d x %d", width, height);
+
+    // Validate the preview size
+    for (size_t i = 0; i < m_pCapability->preview_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->preview_sizes_tbl[i].width
+           && height ==  m_pCapability->preview_sizes_tbl[i].height) {
+            // check if need to restart preview in case of preview size change
+            int old_width, old_height;
+            CameraParameters::getPreviewSize(&old_width, &old_height);
+            if (width != old_width || height != old_height) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            CameraParameters::setPreviewSize(width, height);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid preview size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureSize
+ *
+ * DESCRIPTION: set picture size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureSize(const QCameraParameters& params)
+{
+    int width, height;
+    params.getPictureSize(&width, &height);
+    ALOGV("Requested picture size %d x %d", width, height);
+
+    // Validate the picture size
+    for (size_t i = 0; i < m_pCapability->picture_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->picture_sizes_tbl[i].width
+           && height ==  m_pCapability->picture_sizes_tbl[i].height) {
+            // check if need to restart preview in case of picture size change
+            int old_width, old_height;
+            CameraParameters::getPictureSize(&old_width, &old_height);
+            if ((m_bZslMode || m_bRecordingHint) &&
+                (width != old_width || height != old_height)) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            CameraParameters::setPictureSize(width, height);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid picture size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoSize
+ *
+ * DESCRIPTION: set video size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoSize(const QCameraParameters& params)
+{
+    const char *str= NULL;
+    int width, height;
+    str = params.get(KEY_VIDEO_SIZE);
+    if(!str) {
+        //If application didn't set this parameter string, use the values from
+        //getPreviewSize() as video dimensions.
+        params.getPreviewSize(&width, &height);
+        ALOGE("No Record Size requested, use the preview dimensions");
+    } else {
+        params.getVideoSize(&width, &height);
+    }
+
+    // Validate the video size
+    for (size_t i = 0; i < m_pCapability->video_sizes_tbl_cnt; ++i) {
+        if (width ==  m_pCapability->video_sizes_tbl[i].width
+                && height ==  m_pCapability->video_sizes_tbl[i].height) {
+            // check if need to restart preview in case of video size change
+            int old_width, old_height;
+            CameraParameters::getVideoSize(&old_width, &old_height);
+            if (m_bRecordingHint &&
+               (width != old_width || height != old_height)) {
+                m_bNeedRestart = true;
+            }
+
+            // set the new value
+            CameraParameters::setVideoSize(width, height);
+            return NO_ERROR;
+        }
+    }
+
+    ALOGE("Invalid video size requested: %dx%d", width, height);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLiveSnapshotSize
+ *
+ * DESCRIPTION: set live snapshot size
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLiveSnapshotSize(const QCameraParameters& params)
+{
+    char value[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.opt.livepic", value, "1");
+    bool useOptimal = atoi(value) > 0 ? true : false;
+
+    // use picture size from user setting
+    params.getPictureSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+
+    uint8_t livesnapshot_sizes_tbl_cnt = m_pCapability->livesnapshot_sizes_tbl_cnt;
+    cam_dimension_t *livesnapshot_sizes_tbl = &m_pCapability->livesnapshot_sizes_tbl[0];
+
+    // check if HFR is enabled
+    const char *hfrStr = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
+    if (hfrStr != NULL) {
+        int32_t value = lookupAttr(HFR_MODES_MAP,
+                                   sizeof(HFR_MODES_MAP)/sizeof(QCameraMap),
+                                   hfrStr);
+        if (value != NAME_NOT_FOUND) {
+            // if HFR is enabled, change live snapshot size
+            if (value > CAM_HFR_MODE_OFF) {
+                for (int i = 0; i < m_pCapability->hfr_tbl_cnt; i++) {
+                    if (m_pCapability->hfr_tbl[i].mode == value) {
+                        livesnapshot_sizes_tbl_cnt =
+                            m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl_cnt;
+                        livesnapshot_sizes_tbl =
+                            &m_pCapability->hfr_tbl[i].livesnapshot_sizes_tbl[0];
+                        hfrMode = m_pCapability->hfr_tbl[i].mode;
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    if (useOptimal || hfrMode != CAM_HFR_MODE_OFF) {
+        bool found = false;
+
+        // first check if picture size is within the list of supported sizes
+        for (int i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+            if (m_LiveSnapshotSize.width == livesnapshot_sizes_tbl[i].width &&
+                m_LiveSnapshotSize.height == livesnapshot_sizes_tbl[i].height) {
+                found = true;
+                break;
+            }
+        }
+
+        if (!found) {
+            // use optimal live snapshot size from supported list,
+            // that has same preview aspect ratio
+            int width = 0, height = 0;
+            params.getPreviewSize(&width, &height);
+
+            double previewAspectRatio = (double)width / height;
+            for (int i = 0; i < livesnapshot_sizes_tbl_cnt; ++i) {
+                double ratio = (double)livesnapshot_sizes_tbl[i].width /
+                                livesnapshot_sizes_tbl[i].height;
+                if (fabs(previewAspectRatio - ratio) <= ASPECT_TOLERANCE) {
+                    m_LiveSnapshotSize = livesnapshot_sizes_tbl[i];
+                    found = true;
+                    break;
+                }
+            }
+
+            if (!found && hfrMode != CAM_HFR_MODE_OFF) {
+                // Cannot find matching aspect ratio from supported live snapshot list
+                // choose the max dim from preview and video size
+                ALOGI("%s: Cannot find matching aspect ratio, choose max of preview or video size", __func__);
+                params.getVideoSize(&m_LiveSnapshotSize.width, &m_LiveSnapshotSize.height);
+                if (m_LiveSnapshotSize.width < width && m_LiveSnapshotSize.height < height) {
+                    m_LiveSnapshotSize.width = width;
+                    m_LiveSnapshotSize.height = height;
+                }
+            }
+        }
+    }
+    ALOGI("%s: live snapshot size %d x %d", __func__,
+          m_LiveSnapshotSize.width, m_LiveSnapshotSize.height);
+
+    return NO_ERROR;
+}
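+
+// Summary of the logic above: when persist.camera.opt.livepic is enabled (the
+// default) or HFR is active, the user picture size is kept only if it appears
+// in the supported live-snapshot table; otherwise the first entry matching the
+// preview aspect ratio is chosen, and in HFR mode the final fallback is the
+// larger of the video and preview dimensions.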
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFormat
+ *
+ * DESCRIPTION: set preview format from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFormat(const QCameraParameters& params)
+{
+    const char *str = params.getPreviewFormat();
+    int32_t previewFormat =
+        lookupAttr(PREVIEW_FORMATS_MAP,
+                   sizeof(PREVIEW_FORMATS_MAP) / sizeof(QCameraMap),
+                   str);
+    if (previewFormat != NAME_NOT_FOUND) {
+        mPreviewFormat = (cam_format_t)previewFormat;
+
+        CameraParameters::setPreviewFormat(str);
+        ALOGV("%s: format %d\n", __func__, mPreviewFormat);
+        return NO_ERROR;
+    }
+    ALOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPictureFormat
+ *
+ * DESCRIPTION: set picture format from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPictureFormat(const QCameraParameters& params)
+{
+    const char *str = params.getPictureFormat();
+    int32_t pictureFormat =
+        lookupAttr(PICTURE_TYPES_MAP,
+                   sizeof(PICTURE_TYPES_MAP) / sizeof(QCameraMap),
+                   str);
+    if (pictureFormat != NAME_NOT_FOUND) {
+        mPictureFormat = pictureFormat;
+
+        CameraParameters::setPictureFormat(str);
+        ALOGE("%s: format %d\n", __func__, mPictureFormat);
+        return NO_ERROR;
+    }
+    ALOGE("Invalid picture format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setJpegThumbnailSize
+ *
+ * DESCRIPTION: set jpeg thumbnail size from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegThumbnailSize(const QCameraParameters& params)
+{
+    int width = params.getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+    int height = params.getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    ALOGV("requested jpeg thumbnail size %d x %d", width, height);
+
+    int sizes_cnt = sizeof(THUMBNAIL_SIZES_MAP) / sizeof(cam_dimension_t);
+
+    int pic_width = 0, pic_height = 0;
+    params.getPictureSize(&pic_width, &pic_height);
+    if (pic_height == 0) {
+        ALOGE("%s: picture size is invalid (%d x %d)", __func__, pic_width, pic_height);
+        return BAD_VALUE;
+    }
+    double picAspectRatio = (double)pic_width / pic_height;
+
+    int optimalWidth = 0, optimalHeight = 0;
+    if (width != 0 || height != 0) {
+        // If the requested jpeg thumbnail size is (0,0), no thumbnail is needed;
+        // honor this setting.
+        // Otherwise, find the optimal jpeg thumbnail size that has the same
+        // aspect ratio as the picture size.
+
+        // Try to find a size that matches the aspect ratio and has the largest width
+        for (int i = 0; i < sizes_cnt; i++) {
+            if (THUMBNAIL_SIZES_MAP[i].height == 0) {
+                // No thumbnail case, just skip
+                continue;
+            }
+            double ratio =
+                (double)THUMBNAIL_SIZES_MAP[i].width / THUMBNAIL_SIZES_MAP[i].height;
+            if (fabs(ratio - picAspectRatio) > ASPECT_TOLERANCE)  {
+                continue;
+            }
+            if (THUMBNAIL_SIZES_MAP[i].width > optimalWidth) {
+                optimalWidth = THUMBNAIL_SIZES_MAP[i].width;
+                optimalHeight = THUMBNAIL_SIZES_MAP[i].height;
+            }
+        }
+    }
+
+    set(KEY_JPEG_THUMBNAIL_WIDTH, optimalWidth);
+    set(KEY_JPEG_THUMBNAIL_HEIGHT, optimalHeight);
+    return NO_ERROR;
+}
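+
+// A requested thumbnail size of (0,0) leaves optimalWidth/optimalHeight at 0,
+// i.e. no thumbnail is generated; any other request is mapped to the widest
+// THUMBNAIL_SIZES_MAP entry whose aspect ratio matches the picture size.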
+
+/*===========================================================================
+ * FUNCTION   : setJpegQuality
+ *
+ * DESCRIPTION: set jpeg encoding quality from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setJpegQuality(const QCameraParameters& params)
+{
+    int32_t rc = NO_ERROR;
+    int quality = params.getInt(KEY_JPEG_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        set(KEY_JPEG_QUALITY, quality);
+    } else {
+        ALOGE("%s: Invalid jpeg quality=%d", __func__, quality);
+        rc = BAD_VALUE;
+    }
+
+    quality = params.getInt(KEY_JPEG_THUMBNAIL_QUALITY);
+    if (quality >= 0 && quality <= 100) {
+        set(KEY_JPEG_THUMBNAIL_QUALITY, quality);
+    } else {
+        ALOGE("%s: Invalid jpeg thumbnail quality=%d", __func__, quality);
+        rc = BAD_VALUE;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setOrientation
+ *
+ * DESCRIPTION: set orientation from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setOrientation(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_ORIENTATION);
+
+    if (str != NULL) {
+        if (strcmp(str, portrait) == 0 || strcmp(str, landscape) == 0) {
+            // Camera service needs this to decide if the preview frames and raw
+            // pictures should be rotated.
+            set(KEY_QC_ORIENTATION, str);
+        } else {
+            ALOGE("%s: Invalid orientation value: %s", __func__, str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_AUTO_EXPOSURE);
+    const char *prev_str = get(KEY_QC_AUTO_EXPOSURE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAutoExposure(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview FPS range from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(const QCameraParameters& params)
+{
+    int minFps,maxFps;
+    int prevMinFps, prevMaxFps;
+    int rc = NO_ERROR;
+    bool found = false;
+
+    CameraParameters::getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+    ALOGV("%s: Existing FpsRange Values:(%d, %d)", __func__, prevMinFps, prevMaxFps);
+    params.getPreviewFpsRange(&minFps, &maxFps);
+    ALOGV("%s: Requested FpsRange Values:(%d, %d)", __func__, minFps, maxFps);
+
+    if(minFps == prevMinFps && maxFps == prevMaxFps) {
+        ALOGV("%s: No change in FpsRange", __func__);
+        rc = NO_ERROR;
+        goto end;
+    }
+    for(int i = 0; i < m_pCapability->fps_ranges_tbl_cnt; i++) {
+        // if the value is in the supported list
+        if(minFps >= m_pCapability->fps_ranges_tbl[i].min_fps * 1000 &&
+           maxFps <= m_pCapability->fps_ranges_tbl[i].max_fps * 1000) {
+            found = true;
+            ALOGV("%s: FPS i=%d : minFps = %d, maxFps = %d ", __func__, i, minFps, maxFps);
+            setPreviewFpsRange(minFps, maxFps);
+            break;
+        }
+    }
+    if(found == false){
+        ALOGE("%s: error: FPS range value not supported", __func__);
+        rc = BAD_VALUE;
+    }
+end:
+    return rc;
+}
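+
+// The fps range supplied by the application is in fps*1000 (Android
+// convention), which is why the capability table values are multiplied by
+// 1000 in the comparison above.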
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFrameRate
+ *
+ * DESCRIPTION: set preview frame rate from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFrameRate(const QCameraParameters& params)
+{
+    uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+    ALOGV("%s: requested preview frame rate is %d", __func__, fps);
+    CameraParameters::setPreviewFrameRate(fps);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEffect
+ *
+ * DESCRIPTION: set effect value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_EFFECT);
+    const char *prev_str = get(KEY_EFFECT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setEffect(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusMode
+ *
+ * DESCRIPTION: set focus mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FOCUS_MODE);
+    const char *prev_str = get(KEY_FOCUS_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFocusMode(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBrightness
+ *
+ * DESCRIPTION: set brightness control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(const QCameraParameters& params)
+{
+    int currentBrightness = getInt(KEY_QC_BRIGHTNESS);
+    int brightness = params.getInt(KEY_QC_BRIGHTNESS);
+    if (currentBrightness !=  brightness) {
+        if (brightness >= m_pCapability->brightness_ctrl.min_value &&
+            brightness <= m_pCapability->brightness_ctrl.max_value) {
+            ALOGV(" new brightness value : %d ", brightness);
+            return setBrightness(brightness);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, brightness,
+                  m_pCapability->brightness_ctrl.min_value,
+                  m_pCapability->brightness_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGV("%s: No brightness value changed.", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(const QCameraParameters& params)
+{
+    int sharpness = params.getInt(KEY_QC_SHARPNESS);
+    int prev_sharp = getInt(KEY_QC_SHARPNESS);
+    if (prev_sharp !=  sharpness) {
+        if((sharpness >= m_pCapability->sharpness_ctrl.min_value) &&
+           (sharpness <= m_pCapability->sharpness_ctrl.max_value)) {
+            ALOGV(" new sharpness value : %d ", sharpness);
+            return setSharpness(sharpness);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, sharpness,
+                  m_pCapability->sharpness_ctrl.min_value,
+                  m_pCapability->sharpness_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGV("%s: No value change in sharpness", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement factor from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(const QCameraParameters& params)
+{
+    int sceFactor = params.getInt(KEY_QC_SCE_FACTOR);
+    int prev_sceFactor = getInt(KEY_QC_SCE_FACTOR);
+    if (prev_sceFactor !=  sceFactor) {
+        if((sceFactor >= m_pCapability->sce_ctrl.min_value) &&
+           (sceFactor <= m_pCapability->sce_ctrl.max_value)) {
+            ALOGV(" new Skintone Enhancement value : %d ", sceFactor);
+            return setSkinToneEnhancement(sceFactor);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, sceFactor,
+                  m_pCapability->sce_ctrl.min_value,
+                  m_pCapability->sce_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGV("%s: No value change in skintone enhancement factor", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setSaturation
+ *
+ * DESCRIPTION: set saturation control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(const QCameraParameters& params)
+{
+    int saturation = params.getInt(KEY_QC_SATURATION);
+    int prev_sat = getInt(KEY_QC_SATURATION);
+    if (prev_sat !=  saturation) {
+        if((saturation >= m_pCapability->saturation_ctrl.min_value) &&
+           (saturation <= m_pCapability->saturation_ctrl.max_value)) {
+            ALOGV(" new saturation value : %d ", saturation);
+            return setSaturation(saturation);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, saturation,
+                  m_pCapability->saturation_ctrl.min_value,
+                  m_pCapability->saturation_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGV("%s: No value change in saturation factor", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setContrast
+ *
+ * DESCRIPTION: set contrast control value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(const QCameraParameters& params)
+{
+    int contrast = params.getInt(KEY_QC_CONTRAST);
+    int prev_contrast = getInt(KEY_QC_CONTRAST);
+    if (prev_contrast !=  contrast) {
+        if((contrast >= m_pCapability->contrast_ctrl.min_value) &&
+           (contrast <= m_pCapability->contrast_ctrl.max_value)) {
+            ALOGV(" new contrast value : %d ", contrast);
+            int32_t rc = setContrast(contrast);
+            return rc;
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, contrast,
+                  m_pCapability->contrast_ctrl.min_value,
+                  m_pCapability->contrast_ctrl.max_value);
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGV("%s: No value change in contrast", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(const QCameraParameters & params)
+{
+    int expComp = params.getInt(KEY_EXPOSURE_COMPENSATION);
+    int prev_expComp = getInt(KEY_EXPOSURE_COMPENSATION);
+    if (prev_expComp !=  expComp) {
+        if((expComp >= m_pCapability->exposure_compensation_min) &&
+           (expComp <= m_pCapability->exposure_compensation_max)) {
+            ALOGV(" new Exposure Compensation value : %d ", expComp);
+            return setExposureCompensation(expComp);
+        } else {
+            ALOGE("%s: invalid value %d out of (%d, %d)",
+                  __func__, expComp,
+                  m_pCapability->exposure_compensation_min,
+                  m_pCapability->exposure_compensation_max);
+            return BAD_VALUE;
+        }
+    } else {
+        ALOGV("%s: No value change in Exposure Compensation", __func__);
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_WHITE_BALANCE);
+    const char *prev_str = get(KEY_WHITE_BALANCE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setWhiteBalance(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_ANTIBANDING);
+    const char *prev_str = get(KEY_ANTIBANDING);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAntibanding(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SCENE_DETECT);
+    const char *prev_str = get(KEY_QC_SCENE_DETECT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setSceneDetect(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VIDEO_HDR);
+    const char *prev_str = get(KEY_QC_VIDEO_HDR);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setVideoHDR(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_FACE_RECOGNITION);
+    const char *prev_str = get(KEY_QC_FACE_RECOGNITION);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            int maxFaces = params.getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+            return setFaceRecognition(str, maxFaces);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZoom
+ *
+ * DESCRIPTION: set zoom value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(const QCameraParameters& params)
+{
+    if ((m_pCapability->zoom_supported == 0 ||
+         m_pCapability->zoom_ratio_tbl_cnt == 0)) {
+        ALOGD("%s: no zoom support", __func__);
+        return NO_ERROR;
+    }
+
+    int zoomLevel = params.getInt(KEY_ZOOM);
+    if((zoomLevel < 0) ||
+       (zoomLevel >= m_pCapability->zoom_ratio_tbl_cnt)) {
+        ALOGE("%s: invalid value %d out of (%d, %d)",
+              __func__, zoomLevel,
+              0, m_pCapability->zoom_ratio_tbl_cnt-1);
+        return BAD_VALUE;
+    }
+
+    int prevZoomLevel = getInt(KEY_ZOOM);
+    if (prevZoomLevel == zoomLevel) {
+        ALOGV("%s: No value change in contrast", __func__);
+        return NO_ERROR;
+    }
+
+    return setZoom(zoomLevel);
+}
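+
+// Note: zoomLevel is an index into the capability zoom ratio table rather than
+// a ratio itself; the check above enforces the range [0, zoom_ratio_tbl_cnt - 1].
+// Illustrative only: with zoom_ratio_tbl_cnt == 60 an app may pass KEY_ZOOM
+// values 0..59, and the single-argument setZoom() presumably forwards the index
+// for the backend to translate into an actual zoom ratio.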
+
+/*===========================================================================
+ * FUNCTION   : setISOValue
+ *
+ * DESCRIPTION: set ISO value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setISOValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_ISO_MODE);
+    const char *prev_str = get(KEY_QC_ISO_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setISOValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRotation
+ *
+ * DESCRIPTION: set rotation value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRotation(const QCameraParameters& params)
+{
+    int rotation = params.getInt(KEY_ROTATION);
+    if (rotation != -1) {
+        if (rotation == 0 || rotation == 90 ||
+            rotation == 180 || rotation == 270) {
+            set(KEY_ROTATION, rotation);
+        } else {
+            ALOGE("Invalid rotation value: %d", rotation);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlash
+ *
+ * DESCRIPTION: set flash mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FLASH_MODE);
+    const char *prev_str = get(KEY_FLASH_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFlash(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_AUTO_EXPOSURE_LOCK);
+    const char *prev_str = get(KEY_AUTO_EXPOSURE_LOCK);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAecLock(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_AUTO_WHITEBALANCE_LOCK);
+    const char *prev_str = get(KEY_AUTO_WHITEBALANCE_LOCK);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAwbLock(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+    const char *prev_str = get(KEY_QC_MEMORY_COLOR_ENHANCEMENT);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setMCEValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDISValue
+ *
+ * DESCRIPTION: enable/disable DIS from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_DIS);
+    const char *prev_str = get(KEY_QC_DIS);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setDISValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHighFrameRate
+ *
+ * DESCRIPTION: set high frame rate value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHighFrameRate(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    const char *prev_str = get(KEY_QC_VIDEO_HIGH_FRAME_RATE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setHighFrameRate(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_LENSSHADE);
+    const char *prev_str = get(KEY_QC_LENSSHADE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setLensShadeValue(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusAreas
+ *
+ * DESCRIPTION: set focus areas from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_FOCUS_AREAS);
+    if (str != NULL) {
+        int max_num_af_areas = getInt(KEY_MAX_NUM_FOCUS_AREAS);
+        if(max_num_af_areas == 0) {
+            ALOGE("%s: max num of AF area is 0, cannot set focus areas", __func__);
+            return BAD_VALUE;
+        }
+
+        const char *prev_str = get(KEY_FOCUS_AREAS);
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setFocusAreas(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMeteringAreas
+ *
+ * DESCRIPTION: set metering areas from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_METERING_AREAS);
+    if (str != NULL) {
+        int max_num_mtr_areas = getInt(KEY_MAX_NUM_METERING_AREAS);
+        if(max_num_mtr_areas == 0) {
+            ALOGE("%s: max num of metering areas is 0, cannot set focus areas", __func__);
+            return BAD_VALUE;
+        }
+
+        const char *prev_str = get(KEY_METERING_AREAS);
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setMeteringAreas(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneMode
+ *
+ * DESCRIPTION: set scene mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_SCENE_MODE);
+    const char *prev_str = get(KEY_SCENE_MODE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            if ((strcmp(str, SCENE_MODE_HDR) == 0) ||
+                ((prev_str != NULL) && (strcmp(prev_str, SCENE_MODE_HDR) == 0))) {
+                ALOGD("%s: scene mode changed between HDR and non-HDR, need restart", __func__);
+                m_bNeedRestart = true;
+
+                // set if hdr 1x image is needed
+                const char *need_hdr_1x = params.get(KEY_QC_HDR_NEED_1X);
+                int32_t value = 0;
+                if (need_hdr_1x != NULL && strcmp(need_hdr_1x, VALUE_TRUE) == 0) {
+                    value = 1;
+                    updateParamEntry(KEY_QC_HDR_NEED_1X, need_hdr_1x);
+                    AddSetParmEntryToBatch(m_pParamBuf,
+                                           CAM_INTF_PARM_HDR_NEED_1X,
+                                           sizeof(value),
+                                           &value);
+                }
+            }
+            return setSceneMode(str);
+        }
+    }
+    return NO_ERROR;
+}
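+
+// Related behaviour: setAEBracket() below also checks SCENE_MODE_HDR and forces
+// AE bracketing back to AE_BRACKET_OFF while HDR is active, so the two features
+// are never enabled at the same time.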
+
+/*===========================================================================
+ * FUNCTION   : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone auto focus value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_SELECTABLE_ZONE_AF);
+    const char *prev_str = get(KEY_QC_SELECTABLE_ZONE_AF);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setSelectableZoneAf(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const QCameraParameters& params)
+{
+    const char *scene_mode = params.get(KEY_SCENE_MODE);
+    if (scene_mode != NULL && strcmp(scene_mode, SCENE_MODE_HDR) == 0) {
+        ALOGE("%s: scene mode is HDR, overwrite AE bracket setting to off", __func__);
+        return setAEBracket(AE_BRACKET_OFF);
+    }
+
+    const char *expStr = params.get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+    if (NULL != expStr && strlen(expStr) > 0) {
+        set(KEY_QC_CAPTURE_BURST_EXPOSURE, expStr);
+    } else {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.capture.burst.exposures", prop, "");
+        if (strlen(prop) > 0) {
+            set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+        } else {
+            remove(KEY_QC_CAPTURE_BURST_EXPOSURE);
+        }
+    }
+
+    const char *str = params.get(KEY_QC_AE_BRACKET_HDR);
+    const char *prev_str = get(KEY_QC_AE_BRACKET_HDR);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setAEBracket(str);
+        }
+    }
+    return NO_ERROR;
+}
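+
+// The burst exposure list cached above is consumed by setNumOfSnapshot(): with
+// AE bracketing on, one snapshot is announced per comma-separated entry.
+// Illustrative value only: persist.capture.burst.exposures="-12,0,12" would
+// yield three exposures per shutter press (the EV semantics of each entry are
+// left to the backend and are not defined in this file).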
+
+/*===========================================================================
+ * FUNCTION   : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction setting from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_REDEYE_REDUCTION);
+    const char *prev_str = get(KEY_QC_REDEYE_REDUCTION);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setRedeyeReduction(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setGpsLocation
+ *
+ * DESCRIPTION: set GPS location information from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setGpsLocation(const QCameraParameters& params)
+{
+    const char *method = params.get(KEY_GPS_PROCESSING_METHOD);
+    if (method) {
+        set(KEY_GPS_PROCESSING_METHOD, method);
+    } else {
+        remove(KEY_GPS_PROCESSING_METHOD);
+    }
+
+    const char *latitude = params.get(KEY_GPS_LATITUDE);
+    if (latitude) {
+        set(KEY_GPS_LATITUDE, latitude);
+    } else {
+        remove(KEY_GPS_LATITUDE);
+    }
+
+    const char *latitudeRef = params.get(KEY_QC_GPS_LATITUDE_REF);
+    if (latitudeRef) {
+        set(KEY_QC_GPS_LATITUDE_REF, latitudeRef);
+    } else {
+        remove(KEY_QC_GPS_LATITUDE_REF);
+    }
+
+    const char *longitude = params.get(KEY_GPS_LONGITUDE);
+    if (longitude) {
+        set(KEY_GPS_LONGITUDE, longitude);
+    } else {
+        remove(KEY_GPS_LONGITUDE);
+    }
+
+    const char *longitudeRef = params.get(KEY_QC_GPS_LONGITUDE_REF);
+    if (longitudeRef) {
+        set(KEY_QC_GPS_LONGITUDE_REF, longitudeRef);
+    } else {
+        remove(KEY_QC_GPS_LONGITUDE_REF);
+    }
+
+    const char *altitudeRef = params.get(KEY_QC_GPS_ALTITUDE_REF);
+    if (altitudeRef) {
+        set(KEY_QC_GPS_ALTITUDE_REF, altitudeRef);
+    } else {
+        remove(KEY_QC_GPS_ALTITUDE_REF);
+    }
+
+    const char *altitude = params.get(KEY_GPS_ALTITUDE);
+    if (altitude) {
+        set(KEY_GPS_ALTITUDE, altitude);
+    } else {
+        remove(KEY_GPS_ALTITUDE);
+    }
+
+    const char *status = params.get(KEY_QC_GPS_STATUS);
+    if (status) {
+        set(KEY_QC_GPS_STATUS, status);
+    } else {
+        remove(KEY_QC_GPS_STATUS);
+    }
+
+    const char *timestamp = params.get(KEY_GPS_TIMESTAMP);
+    if (timestamp) {
+        set(KEY_GPS_TIMESTAMP, timestamp);
+    } else {
+        remove(KEY_GPS_TIMESTAMP);
+    }
+    return NO_ERROR;
+}
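+
+// GPS keys are passed through verbatim and removed whenever the app omits them,
+// so a capture taken after location tagging is switched off does not inherit
+// stale coordinates; the cached values are presumably only consumed later when
+// the EXIF header is composed.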
+
+/*===========================================================================
+ * FUNCTION   : setNumOfSnapshot
+ *
+ * DESCRIPTION: set number of snapshots per shutter from user setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNumOfSnapshot()
+{
+    int nBurstNum = getBurstNum();
+    uint8_t nExpnum = 0;
+
+    const char *scene_mode = get(KEY_SCENE_MODE);
+    if (scene_mode != NULL && strcmp(scene_mode, SCENE_MODE_HDR) == 0) {
+        /* According to the Android SDK, only one snapshot is expected here,
+         * but an OEM build might have different requirements */
+        const char *need_hdr_1x = get(KEY_QC_HDR_NEED_1X);
+        if (need_hdr_1x != NULL && strcmp(need_hdr_1x, VALUE_TRUE) == 0) {
+            nExpnum = 2; // HDR needs both 1X and processed img
+        } else {
+            nExpnum = 1; // HDR only needs processed img
+        }
+    } else {
+        const char *bracket_str = get(KEY_QC_AE_BRACKET_HDR);
+        if (bracket_str != NULL && strlen(bracket_str) > 0) {
+            int value = lookupAttr(BRACKETING_MODES_MAP,
+                                   sizeof(BRACKETING_MODES_MAP)/sizeof(QCameraMap),
+                                   bracket_str);
+            switch (value) {
+            case CAM_EXP_BRACKETING_ON:
+                {
+                    nExpnum = 0;
+                    const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+                    if ((str_val != NULL) && (strlen(str_val) > 0)) {
+                        char prop[PROPERTY_VALUE_MAX];
+                        memset(prop, 0, sizeof(prop));
+                        strcpy(prop, str_val);
+                        char *saveptr = NULL;
+                        char *token = strtok_r(prop, ",", &saveptr);
+                        while (token != NULL) {
+                            token = strtok_r(NULL, ",", &saveptr);
+                            nExpnum++;
+                        }
+                    }
+                    if (nExpnum == 0) {
+                        nExpnum = 1;
+                    }
+                }
+                break;
+            default:
+                nExpnum = 1;
+                break;
+            }
+        }
+    }
+
+    ALOGD("%s: nBurstNum = %d, nExpnum = %d", __func__, nBurstNum, nExpnum);
+    set(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER, nBurstNum * nExpnum);
+    return NO_ERROR;
+}
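+
+// Worked example (illustrative numbers): with a burst count of 1 and HDR plus
+// KEY_QC_HDR_NEED_1X=true, nExpnum is 2 and 1 * 2 = 2 snapshots are announced
+// per shutter; with AE bracketing on and a three-entry burst exposure list,
+// nExpnum is 3 and 1 * 3 = 3 snapshots are announced.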
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHint
+ *
+ * DESCRIPTION: set recording hint value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRecordingHint(const QCameraParameters& params)
+{
+    const char * str = params.get(KEY_RECORDING_HINT);
+    const char *prev_str = get(KEY_RECORDING_HINT);
+    if (str != NULL) {
+        if (prev_str == NULL || strcmp(str, prev_str) != 0) {
+            int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP,
+                                       sizeof(TRUE_FALSE_MODES_MAP)/sizeof(QCameraMap),
+                                       str);
+            if(value != NAME_NOT_FOUND){
+                updateParamEntry(KEY_RECORDING_HINT, str);
+                setRecordingHintValue(value);
+                return NO_ERROR;
+            } else {
+                ALOGE("Invalid recording hint value: %s", str);
+                return BAD_VALUE;
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setNoDisplayMode
+ *
+ * DESCRIPTION: set no display mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setNoDisplayMode(const QCameraParameters& params)
+{
+    const char *str_val  = params.get(KEY_QC_NO_DISPLAY_MODE);
+    const char *prev_str = get(KEY_QC_NO_DISPLAY_MODE);
+    if(str_val && strlen(str_val) > 0) {
+        if (prev_str == NULL || strcmp(str_val, prev_str) != 0) {
+            m_bNoDisplayMode = atoi(str_val);
+            set(KEY_QC_NO_DISPLAY_MODE, str_val);
+            m_bNeedRestart = true;
+        }
+    } else {
+        m_bNoDisplayMode = false;
+    }
+    ALOGD("Param m_bNoDisplayMode = %d", m_bNoDisplayMode);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslMode
+ *
+ * DESCRIPTION: set ZSL mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslMode(const QCameraParameters& params)
+{
+    const char *str_val  = params.get(KEY_QC_ZSL);
+    const char *prev_val  = get(KEY_QC_ZSL);
+
+    if (str_val != NULL) {
+        if (prev_val == NULL || strcmp(str_val, prev_val) != 0) {
+            int32_t value = lookupAttr(ON_OFF_MODES_MAP,
+                                       sizeof(ON_OFF_MODES_MAP)/sizeof(QCameraMap),
+                                       str_val);
+            if (value != NAME_NOT_FOUND) {
+                set(KEY_QC_ZSL, str_val);
+                m_bZslMode_new = (value > 0)? true : false;
+
+                // ZSL mode changed, need restart preview
+                m_bNeedRestart = true;
+
+                return AddSetParmEntryToBatch(m_pParamBuf,
+                                              CAM_INTF_PARM_ZSL_MODE,
+                                              sizeof(value),
+                                              &value);
+            } else {
+                ALOGE("Invalid ZSL mode value: %s", str_val);
+                return BAD_VALUE;
+            }
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_DENOISE);
+    const char *prev_str = get(KEY_QC_DENOISE);
+    if (str != NULL) {
+        if (prev_str == NULL ||
+            strcmp(str, prev_str) != 0) {
+            return setWaveletDenoise(str);
+        }
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCameraMode
+ *
+ * DESCRIPTION: set camera mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setCameraMode(const QCameraParameters& params)
+{
+    const char *str = params.get(KEY_QC_CAMERA_MODE);
+    if (str != NULL) {
+        set(KEY_QC_CAMERA_MODE, str);
+    } else {
+        remove(KEY_QC_CAMERA_MODE);
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setZslAttributes
+ *
+ * DESCRIPTION: set ZSL related attributes from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZslAttributes(const QCameraParameters& params)
+{
+    // TODO: may switch to pure param instead of sysprop
+    char prop[PROPERTY_VALUE_MAX];
+
+    const char *str = params.get(KEY_QC_ZSL_BURST_INTERVAL);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_BURST_INTERVAL, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.interval", prop, "1");
+        set(KEY_QC_ZSL_BURST_INTERVAL, prop);
+    }
+
+    str = params.get(KEY_QC_ZSL_BURST_LOOKBACK);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_BURST_LOOKBACK, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.backlookcnt", prop, "2");
+        set(KEY_QC_ZSL_BURST_LOOKBACK, prop);
+    }
+
+    str = params.get(KEY_QC_ZSL_QUEUE_DEPTH);
+    if (str != NULL) {
+        set(KEY_QC_ZSL_QUEUE_DEPTH, str);
+    } else {
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.queuedepth", prop, "2");
+        set(KEY_QC_ZSL_QUEUE_DEPTH, prop);
+    }
+
+    return NO_ERROR;
+}
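+
+// The fallback defaults above come from system properties, so they can be tuned
+// on a development device without an application change -- e.g. an assumed
+// (not mandated here) workflow:
+//   adb shell setprop persist.camera.zsl.queuedepth 4
+// The property names are taken from the property_get() calls above.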
+
+/*===========================================================================
+ * FUNCTION   : setFlip
+ *
+ * DESCRIPTION: set preview/video/picture flip mode from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlip(const QCameraParameters& params)
+{
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) == 0) {
+        ALOGD("%s: flip is not supported.", __func__);
+        return NO_ERROR;
+    }
+
+    // check preview flip setting
+    const char *str = params.get(KEY_QC_PREVIEW_FLIP);
+    if(str != NULL){
+        int32_t value = lookupAttr(FLIP_MODES_MAP,
+                                   sizeof(FLIP_MODES_MAP)/sizeof(QCameraMap),
+                                   str);
+        if(value != NAME_NOT_FOUND){
+            set(KEY_QC_PREVIEW_FLIP, str);
+        }
+    }
+
+    // check video flip setting
+    str = params.get(KEY_QC_VIDEO_FLIP);
+    if(str != NULL){
+        int32_t value = lookupAttr(FLIP_MODES_MAP,
+                                   sizeof(FLIP_MODES_MAP)/sizeof(QCameraMap),
+                                   str);
+        if(value != NAME_NOT_FOUND){
+            set(KEY_QC_VIDEO_FLIP, str);
+        }
+    }
+
+    // check picture flip setting
+    str = params.get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+    if(str != NULL){
+        int32_t value = lookupAttr(FLIP_MODES_MAP,
+                                   sizeof(FLIP_MODES_MAP)/sizeof(QCameraMap),
+                                   str);
+        if(value != NAME_NOT_FOUND){
+            set(KEY_QC_SNAPSHOT_PICTURE_FLIP, str);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParameters
+ *
+ * DESCRIPTION: update parameters from user setting
+ *
+ * PARAMETERS :
+ *   @params  : user setting parameters
+ *   @needRestart : [output] if preview need restart upon setting changes
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParameters(QCameraParameters& params,
+                                            bool &needRestart)
+{
+    int32_t final_rc = NO_ERROR;
+    int32_t rc;
+    m_bNeedRestart = false;
+
+    if (initBatchUpdate(m_pParamBuf) < 0) {
+        ALOGE("%s: Failed to initialize group update table", __func__);
+        rc = BAD_TYPE;
+        goto UPDATE_PARAM_DONE;
+    }
+
+    if ((rc = setPreviewSize(params)))                  final_rc = rc;
+    if ((rc = setVideoSize(params)))                    final_rc = rc;
+    if ((rc = setPictureSize(params)))                  final_rc = rc;
+    if ((rc = setPreviewFormat(params)))                final_rc = rc;
+    if ((rc = setPictureFormat(params)))                final_rc = rc;
+    if ((rc = setJpegThumbnailSize(params)))            final_rc = rc;
+    if ((rc = setJpegQuality(params)))                  final_rc = rc;
+    if ((rc = setOrientation(params)))                  final_rc = rc;
+    if ((rc = setRotation(params)))                     final_rc = rc;
+    if ((rc = setNoDisplayMode(params)))                final_rc = rc;
+    if ((rc = setZslMode(params)))                      final_rc = rc;
+    if ((rc = setZslAttributes(params)))                final_rc = rc;
+    if ((rc = setCameraMode(params)))                   final_rc = rc;
+    if ((rc = setRecordingHint(params)))                final_rc = rc;
+
+    if ((rc = setPreviewFpsRange(params)))              final_rc = rc;
+    if ((rc = setPreviewFrameRate(params)))             final_rc = rc;
+    if ((rc = setAutoExposure(params)))                 final_rc = rc;
+    if ((rc = setEffect(params)))                       final_rc = rc;
+    if ((rc = setBrightness(params)))                   final_rc = rc;
+    if ((rc = setZoom(params)))                         final_rc = rc;
+    if ((rc = setSharpness(params)))                    final_rc = rc;
+    if ((rc = setSaturation(params)))                   final_rc = rc;
+    if ((rc = setContrast(params)))                     final_rc = rc;
+    if ((rc = setFocusMode(params)))                    final_rc = rc;
+    if ((rc = setISOValue(params)))                     final_rc = rc;
+    if ((rc = setSkinToneEnhancement(params)))          final_rc = rc;
+    if ((rc = setFlash(params)))                        final_rc = rc;
+    if ((rc = setAecLock(params)))                      final_rc = rc;
+    if ((rc = setAwbLock(params)))                      final_rc = rc;
+    if ((rc = setLensShadeValue(params)))               final_rc = rc;
+    if ((rc = setMCEValue(params)))                     final_rc = rc;
+    if ((rc = setDISValue(params)))                     final_rc = rc;
+    if ((rc = setHighFrameRate(params)))                final_rc = rc;
+    if ((rc = setAntibanding(params)))                  final_rc = rc;
+    if ((rc = setExposureCompensation(params)))         final_rc = rc;
+    if ((rc = setWhiteBalance(params)))                 final_rc = rc;
+    if ((rc = setSceneMode(params)))                    final_rc = rc;
+    if ((rc = setFocusAreas(params)))                   final_rc = rc;
+    if ((rc = setMeteringAreas(params)))                final_rc = rc;
+    if ((rc = setSelectableZoneAf(params)))             final_rc = rc;
+    if ((rc = setRedeyeReduction(params)))              final_rc = rc;
+    if ((rc = setAEBracket(params)))                    final_rc = rc;
+    if ((rc = setGpsLocation(params)))                  final_rc = rc;
+    if ((rc = setWaveletDenoise(params)))               final_rc = rc;
+    if ((rc = setFaceRecognition(params)))              final_rc = rc;
+    if ((rc = setFlip(params)))                         final_rc = rc;
+    if ((rc = setVideoHDR(params)))                     final_rc = rc;
+
+    // update live snapshot size after all other parameters are set
+    if ((rc = setLiveSnapshotSize(params)))             final_rc = rc;
+
+UPDATE_PARAM_DONE:
+    needRestart = m_bNeedRestart;
+    return final_rc;
+}
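+
+// Caller-side sketch (an assumption -- the actual caller lives outside this
+// file, and "mParameters" is a hypothetical member name):
+//
+//   bool needRestart = false;
+//   int32_t rc = mParameters.updateParameters(params, needRestart); // stage into m_pParamBuf
+//   if (rc == NO_ERROR) {
+//       // stop the preview first if any staged change requires a restart
+//       rc = mParameters.commitParameters();   // flush the batched settings to the backend
+//   }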
+
+/*===========================================================================
+ * FUNCTION   : commitParameters
+ *
+ * DESCRIPTION: commit parameter changes to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParameters()
+{
+    return commitSetBatch();
+}
+
+/*===========================================================================
+ * FUNCTION   : initDefaultParameters
+ *
+ * DESCRIPTION: initialize default parameters for the first time
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initDefaultParameters()
+{
+    if (initBatchUpdate(m_pParamBuf) < 0) {
+        ALOGE("%s: Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    /*************************Initialize Values******************************/
+    // Set read only parameters from camera capability
+    set(KEY_SMOOTH_ZOOM_SUPPORTED,
+        m_pCapability->smooth_zoom_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_ZOOM_SUPPORTED,
+        m_pCapability->zoom_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_VIDEO_SNAPSHOT_SUPPORTED,
+        m_pCapability->video_snapshot_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_VIDEO_STABILIZATION_SUPPORTED,
+        m_pCapability->video_stablization_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_AUTO_EXPOSURE_LOCK_SUPPORTED,
+        m_pCapability->auto_exposure_lock_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED,
+        m_pCapability->auto_wb_lock_supported? VALUE_TRUE : VALUE_FALSE);
+    set(KEY_QC_SUPPORTED_CAMERA_FEATURES,
+        m_pCapability->qcom_supported_feature_mask);
+    set(KEY_MAX_NUM_DETECTED_FACES_HW, m_pCapability->max_num_roi);
+    set(KEY_MAX_NUM_DETECTED_FACES_SW, m_pCapability->max_num_roi);
+    set(KEY_QC_MAX_NUM_REQUESTED_FACES, m_pCapability->max_num_roi);
+    // Set focal length, horizontal view angle, and vertical view angle
+    setFloat(KEY_FOCAL_LENGTH, m_pCapability->focal_length);
+    setFloat(KEY_HORIZONTAL_VIEW_ANGLE, m_pCapability->hor_view_angle);
+    setFloat(KEY_VERTICAL_VIEW_ANGLE, m_pCapability->ver_view_angle);
+
+    // Set supported preview sizes
+    if (m_pCapability->preview_sizes_tbl_cnt > 0 &&
+        m_pCapability->preview_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 previewSizeValues = createSizesString(
+                m_pCapability->preview_sizes_tbl, m_pCapability->preview_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_PREVIEW_SIZES, previewSizeValues.string());
+        ALOGD("%s: supported preview sizes: %s", __func__, previewSizeValues.string());
+        // Set default preview size
+        CameraParameters::setPreviewSize(m_pCapability->preview_sizes_tbl[0].width,
+                                         m_pCapability->preview_sizes_tbl[0].height);
+    } else {
+        ALOGE("%s: supported preview sizes cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported video sizes
+    if (m_pCapability->video_sizes_tbl_cnt > 0 &&
+        m_pCapability->video_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 videoSizeValues = createSizesString(
+                m_pCapability->video_sizes_tbl, m_pCapability->video_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_VIDEO_SIZES, videoSizeValues.string());
+        ALOGD("%s: supported video sizes: %s", __func__, videoSizeValues.string());
+        // Set default video size
+        CameraParameters::setVideoSize(m_pCapability->video_sizes_tbl[0].width,
+                                       m_pCapability->video_sizes_tbl[0].height);
+
+        //Set preferred Preview size for video
+        String8 vSize = createSizesString(&m_pCapability->video_sizes_tbl[0], 1);
+        set(KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, vSize.string());
+    } else {
+        ALOGE("%s: supported video sizes cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported picture sizes
+    if (m_pCapability->picture_sizes_tbl_cnt > 0 &&
+        m_pCapability->picture_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 pictureSizeValues = createSizesString(
+                m_pCapability->picture_sizes_tbl, m_pCapability->picture_sizes_tbl_cnt);
+        set(KEY_SUPPORTED_PICTURE_SIZES, pictureSizeValues.string());
+        ALOGD("%s: supported pic sizes: %s", __func__, pictureSizeValues.string());
+        // Set default picture size to the smallest resolution
+        CameraParameters::setPictureSize(
+           m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].width,
+           m_pCapability->picture_sizes_tbl[m_pCapability->picture_sizes_tbl_cnt-1].height);
+    } else {
+        ALOGE("%s: supported picture sizes cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported thumbnail sizes
+    String8 thumbnailSizeValues = createSizesString(
+            THUMBNAIL_SIZES_MAP,
+            sizeof(THUMBNAIL_SIZES_MAP)/sizeof(cam_dimension_t));
+    set(KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, thumbnailSizeValues.string());
+    // Set default thumbnail size
+    set(KEY_JPEG_THUMBNAIL_WIDTH, THUMBNAIL_SIZES_MAP[0].width);
+    set(KEY_JPEG_THUMBNAIL_HEIGHT, THUMBNAIL_SIZES_MAP[0].height);
+
+    // Set supported livesnapshot sizes
+    if (m_pCapability->livesnapshot_sizes_tbl_cnt > 0 &&
+        m_pCapability->livesnapshot_sizes_tbl_cnt <= MAX_SIZES_CNT) {
+        String8 liveSnapshotSizeValues = createSizesString(
+                m_pCapability->livesnapshot_sizes_tbl,
+                m_pCapability->livesnapshot_sizes_tbl_cnt);
+        set(KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES, liveSnapshotSizeValues.string());
+        ALOGI("%s: supported live snapshot sizes: %s", __func__, liveSnapshotSizeValues.string());
+        m_LiveSnapshotSize =
+            m_pCapability->livesnapshot_sizes_tbl[m_pCapability->livesnapshot_sizes_tbl_cnt-1];
+    }
+
+    // Set supported preview formats
+    String8 previewFormatValues = createValuesString(
+            (int *)m_pCapability->supported_preview_fmts,
+            m_pCapability->supported_preview_fmt_cnt,
+            PREVIEW_FORMATS_MAP,
+            sizeof(PREVIEW_FORMATS_MAP)/sizeof(QCameraMap));
+    set(KEY_SUPPORTED_PREVIEW_FORMATS, previewFormatValues.string());
+    // Set default preview format
+    CameraParameters::setPreviewFormat(PIXEL_FORMAT_YUV420SP);
+
+    // Set default Video Format
+    set(KEY_VIDEO_FRAME_FORMAT, PIXEL_FORMAT_YUV420SP);
+
+    // Set supported picture formats
+    String8 pictureTypeValues(PIXEL_FORMAT_JPEG);
+    String8 str = createValuesString(
+            (int *)m_pCapability->supported_raw_fmts,
+            m_pCapability->supported_raw_fmt_cnt,
+            PICTURE_TYPES_MAP,
+            sizeof(PICTURE_TYPES_MAP)/sizeof(QCameraMap));
+    if (str.string() != NULL) {
+        pictureTypeValues.append(",");
+        pictureTypeValues.append(str);
+    }
+
+    set(KEY_SUPPORTED_PICTURE_FORMATS, pictureTypeValues.string());
+    // Set default picture Format
+    CameraParameters::setPictureFormat(PIXEL_FORMAT_JPEG);
+    // Set raw image size
+    char raw_size_str[32];
+    snprintf(raw_size_str, sizeof(raw_size_str), "%dx%d",
+             m_pCapability->raw_dim[0].width, m_pCapability->raw_dim[0].height);
+    set(KEY_QC_RAW_PICUTRE_SIZE, raw_size_str);
+
+    //set default jpeg quality and thumbnail quality
+    set(KEY_JPEG_QUALITY, 85);
+    set(KEY_JPEG_THUMBNAIL_QUALITY, 85);
+
+    // Set FPS ranges
+    if (m_pCapability->fps_ranges_tbl_cnt > 0 &&
+        m_pCapability->fps_ranges_tbl_cnt <= MAX_SIZES_CNT) {
+        int default_fps_index = 0;
+        String8 fpsRangeValues = createFpsRangeString(m_pCapability->fps_ranges_tbl,
+                                                      m_pCapability->fps_ranges_tbl_cnt,
+                                                      default_fps_index);
+        set(KEY_SUPPORTED_PREVIEW_FPS_RANGE, fpsRangeValues.string());
+
+        int min_fps =
+            int(m_pCapability->fps_ranges_tbl[default_fps_index].min_fps * 1000);
+        int max_fps =
+            int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps * 1000);
+        setPreviewFpsRange(min_fps, max_fps);
+
+        // Set legacy preview fps
+        String8 fpsValues = createFpsString(m_pCapability->fps_ranges_tbl,
+                                            m_pCapability->fps_ranges_tbl_cnt);
+        set(KEY_SUPPORTED_PREVIEW_FRAME_RATES, fpsValues.string());
+        CameraParameters::setPreviewFrameRate(int(m_pCapability->fps_ranges_tbl[default_fps_index].max_fps));
+    } else {
+        ALOGE("%s: supported fps ranges cnt is 0 or exceeds max!!!", __func__);
+    }
+
+    // Set supported focus modes
+    if (m_pCapability->supported_focus_modes_cnt > 0) {
+        String8 focusModeValues = createValuesString(
+                (int *)m_pCapability->supported_focus_modes,
+                m_pCapability->supported_focus_modes_cnt,
+                FOCUS_MODES_MAP,
+                sizeof(FOCUS_MODES_MAP)/sizeof(QCameraMap));
+        set(KEY_SUPPORTED_FOCUS_MODES, focusModeValues);
+
+        // Set default focus mode and update corresponding parameter buf
+        const char *focusMode = lookupNameByValue(FOCUS_MODES_MAP,
+                                             sizeof(FOCUS_MODES_MAP)/sizeof(QCameraMap),
+                                             m_pCapability->supported_focus_modes[0]);
+        if (focusMode != NULL) {
+            setFocusMode(focusMode);
+        } else {
+            setFocusMode(FOCUS_MODE_FIXED);
+        }
+    } else {
+        ALOGE("%s: supported focus modes cnt is 0!!!", __func__);
+    }
+
+    // Set focus areas
+    if (m_pCapability->max_num_focus_areas > MAX_ROI) {
+        m_pCapability->max_num_focus_areas = MAX_ROI;
+    }
+    set(KEY_MAX_NUM_FOCUS_AREAS, m_pCapability->max_num_focus_areas);
+    if (m_pCapability->max_num_focus_areas > 0) {
+        setFocusAreas(DEFAULT_CAMERA_AREA);
+    }
+
+    // Set metering areas
+    if (m_pCapability->max_num_metering_areas > MAX_ROI) {
+        m_pCapability->max_num_metering_areas = MAX_ROI;
+    }
+    set(KEY_MAX_NUM_METERING_AREAS, m_pCapability->max_num_metering_areas);
+    if (m_pCapability->max_num_metering_areas > 0) {
+        setMeteringAreas(DEFAULT_CAMERA_AREA);
+    }
+
+    // Set Saturation
+    set(KEY_QC_MIN_SATURATION, m_pCapability->saturation_ctrl.min_value);
+    set(KEY_QC_MAX_SATURATION, m_pCapability->saturation_ctrl.max_value);
+    set(KEY_QC_SATURATION_STEP, m_pCapability->saturation_ctrl.step);
+    setSaturation(m_pCapability->saturation_ctrl.def_value);
+
+    // Set Sharpness
+    set(KEY_QC_MIN_SHARPNESS, m_pCapability->sharpness_ctrl.min_value);
+    set(KEY_QC_MAX_SHARPNESS, m_pCapability->sharpness_ctrl.max_value);
+    set(KEY_QC_SHARPNESS_STEP, m_pCapability->sharpness_ctrl.step);
+    setSharpness(m_pCapability->sharpness_ctrl.def_value);
+
+    // Set Contrast
+    set(KEY_QC_MIN_CONTRAST, m_pCapability->contrast_ctrl.min_value);
+    set(KEY_QC_MAX_CONTRAST, m_pCapability->contrast_ctrl.max_value);
+    set(KEY_QC_CONTRAST_STEP, m_pCapability->contrast_ctrl.step);
+    setContrast(m_pCapability->contrast_ctrl.def_value);
+
+    // Set SCE factor
+    set(KEY_QC_MIN_SCE_FACTOR, m_pCapability->sce_ctrl.min_value); // -100
+    set(KEY_QC_MAX_SCE_FACTOR, m_pCapability->sce_ctrl.max_value); // 100
+    set(KEY_QC_SCE_FACTOR_STEP, m_pCapability->sce_ctrl.step);     // 10
+    setSkinToneEnhancement(m_pCapability->sce_ctrl.def_value);     // 0
+
+    // Set Brightness
+    set(KEY_QC_MIN_BRIGHTNESS, m_pCapability->brightness_ctrl.min_value); // 0
+    set(KEY_QC_MAX_BRIGHTNESS, m_pCapability->brightness_ctrl.max_value); // 6
+    set(KEY_QC_BRIGHTNESS_STEP, m_pCapability->brightness_ctrl.step);     // 1
+    setBrightness(m_pCapability->brightness_ctrl.def_value);
+
+    // Set Auto exposure
+    String8 autoExposureValues = createValuesString(
+            (int *)m_pCapability->supported_aec_modes,
+            m_pCapability->supported_aec_modes_cnt,
+            AUTO_EXPOSURE_MAP,
+            sizeof(AUTO_EXPOSURE_MAP) / sizeof(QCameraMap));
+    set(KEY_QC_SUPPORTED_AUTO_EXPOSURE, autoExposureValues.string());
+    setAutoExposure(AUTO_EXPOSURE_FRAME_AVG);
+
+    // Set Exposure Compensation
+    set(KEY_MAX_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_max); // 12
+    set(KEY_MIN_EXPOSURE_COMPENSATION, m_pCapability->exposure_compensation_min); // -12
+    setFloat(KEY_EXPOSURE_COMPENSATION_STEP, m_pCapability->exposure_compensation_step); // 1/6
+    setExposureCompensation(m_pCapability->exposure_compensation_default); // 0
+
+    // Set Antibanding
+    String8 antibandingValues = createValuesString(
+            (int *)m_pCapability->supported_antibandings,
+            m_pCapability->supported_antibandings_cnt,
+            ANTIBANDING_MODES_MAP,
+            sizeof(ANTIBANDING_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_SUPPORTED_ANTIBANDING, antibandingValues);
+    setAntibanding(ANTIBANDING_OFF);
+
+    // Set Effect
+    String8 effectValues = createValuesString(
+            (int *)m_pCapability->supported_effects,
+            m_pCapability->supported_effects_cnt,
+            EFFECT_MODES_MAP,
+            sizeof(EFFECT_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_SUPPORTED_EFFECTS, effectValues);
+    setEffect(EFFECT_NONE);
+
+    // Set WhiteBalance
+    String8 whitebalanceValues = createValuesString(
+            (int *)m_pCapability->supported_white_balances,
+            m_pCapability->supported_white_balances_cnt,
+            WHITE_BALANCE_MODES_MAP,
+            sizeof(WHITE_BALANCE_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_SUPPORTED_WHITE_BALANCE, whitebalanceValues);
+    setWhiteBalance(WHITE_BALANCE_AUTO);
+
+    // Set Flash mode
+    String8 flashValues = createValuesString(
+            (int *)m_pCapability->supported_flash_modes,
+            m_pCapability->supported_flash_modes_cnt,
+            FLASH_MODES_MAP,
+            sizeof(FLASH_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_SUPPORTED_FLASH_MODES, flashValues);
+    setFlash(FLASH_MODE_OFF);
+
+    // Set Scene Mode
+    String8 sceneModeValues = createValuesString(
+            (int *)m_pCapability->supported_scene_modes,
+            m_pCapability->supported_scene_modes_cnt,
+            SCENE_MODES_MAP,
+            sizeof(SCENE_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_SUPPORTED_SCENE_MODES, sceneModeValues);
+    setSceneMode(SCENE_MODE_AUTO);
+
+    // Set ISO Mode
+    String8 isoValues = createValuesString(
+            (int *)m_pCapability->supported_iso_modes,
+            m_pCapability->supported_iso_modes_cnt,
+            ISO_MODES_MAP,
+            sizeof(ISO_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_QC_SUPPORTED_ISO_MODES, isoValues);
+    setISOValue(ISO_AUTO);
+
+    // Set HFR
+    String8 hfrValues = createHfrValuesString(
+            m_pCapability->hfr_tbl,
+            m_pCapability->hfr_tbl_cnt,
+            HFR_MODES_MAP,
+            sizeof(HFR_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES, hfrValues.string());
+    String8 hfrSizeValues = createHfrSizesString(
+            m_pCapability->hfr_tbl,
+            m_pCapability->hfr_tbl_cnt);
+    set(KEY_QC_SUPPORTED_HFR_SIZES, hfrSizeValues.string());
+    setHighFrameRate(VIDEO_HFR_OFF);
+
+    // Set Focus algorithms
+    String8 focusAlgoValues = createValuesString(
+            (int *)m_pCapability->supported_focus_algos,
+            m_pCapability->supported_focus_algos_cnt,
+            FOCUS_ALGO_MAP,
+            sizeof(FOCUS_ALGO_MAP) / sizeof(QCameraMap));
+    set(KEY_QC_SUPPORTED_FOCUS_ALGOS, focusAlgoValues);
+    setSelectableZoneAf(FOCUS_ALGO_AUTO);
+
+    // Set Zoom Ratios
+    if (m_pCapability->zoom_supported > 0) {
+        String8 zoomRatioValues = createZoomRatioValuesString(
+                m_pCapability->zoom_ratio_tbl,
+                m_pCapability->zoom_ratio_tbl_cnt);
+        set(KEY_ZOOM_RATIOS, zoomRatioValues);
+        set(KEY_MAX_ZOOM, m_pCapability->zoom_ratio_tbl_cnt - 1);
+        setZoom(0);
+    }
+
+    // Set Bracketing/HDR
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.capture.burst.exposures", prop, "");
+    if (strlen(prop) > 0) {
+        set(KEY_QC_CAPTURE_BURST_EXPOSURE, prop);
+    }
+    String8 bracketingValues = createValuesStringFromMap(
+            BRACKETING_MODES_MAP,
+            sizeof(BRACKETING_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_QC_SUPPORTED_AE_BRACKET_MODES, bracketingValues);
+    setAEBracket(AE_BRACKET_OFF);
+
+    // Set Denoise
+    String8 denoiseValues = createValuesStringFromMap(
+       DENOISE_ON_OFF_MODES_MAP, sizeof(DENOISE_ON_OFF_MODES_MAP) / sizeof(QCameraMap));
+    set(KEY_QC_SUPPORTED_DENOISE, denoiseValues.string());
+#ifdef DEFAULT_DENOISE_MODE_ON
+    setWaveletDenoise(DENOISE_ON);
+#else
+    setWaveletDenoise(DENOISE_OFF);
+#endif
+
+    // Set feature enable/disable
+    String8 enableDisableValues = createValuesStringFromMap(
+        ENABLE_DISABLE_MODES_MAP, sizeof(ENABLE_DISABLE_MODES_MAP) / sizeof(QCameraMap));
+
+    // Set Lens Shading
+    set(KEY_QC_SUPPORTED_LENSSHADE_MODES, enableDisableValues);
+    setLensShadeValue(VALUE_ENABLE);
+
+    // Set MCE
+    set(KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES, enableDisableValues);
+    setMCEValue(VALUE_ENABLE);
+
+    // Set DIS
+    set(KEY_QC_SUPPORTED_DIS_MODES, enableDisableValues);
+    setDISValue(VALUE_DISABLE);
+
+    // Set Histogram
+    set(KEY_QC_SUPPORTED_HISTOGRAM_MODES, enableDisableValues);
+    set(KEY_QC_HISTOGRAM, VALUE_DISABLE);
+
+    //Set Red Eye Reduction
+    set(KEY_QC_SUPPORTED_REDEYE_REDUCTION, enableDisableValues);
+    setRedeyeReduction(VALUE_DISABLE);
+
+    //Set SkinTone Enhancement
+    set(KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES, enableDisableValues);
+
+    // Set feature on/off
+    String8 onOffValues = createValuesStringFromMap(
+        ON_OFF_MODES_MAP, sizeof(ON_OFF_MODES_MAP) / sizeof(QCameraMap));
+
+    //Set Scene Detection
+    set(KEY_QC_SUPPORTED_SCENE_DETECT, onOffValues);
+    setSceneDetect(VALUE_OFF);
+
+    //Set Face Detection
+    set(KEY_QC_SUPPORTED_FACE_DETECTION, onOffValues);
+    set(KEY_QC_FACE_DETECTION, VALUE_OFF);
+
+    //Set Face Recognition
+    set(KEY_QC_SUPPORTED_FACE_RECOGNITION, onOffValues);
+    set(KEY_QC_FACE_RECOGNITION, VALUE_OFF);
+
+    //Set ZSL
+    set(KEY_QC_SUPPORTED_ZSL_MODES, onOffValues);
+#ifdef DEFAULT_ZSL_MODE_ON
+    set(KEY_QC_ZSL, VALUE_ON);
+    m_bZslMode = true;
+#else
+    set(KEY_QC_ZSL, VALUE_OFF);
+    m_bZslMode = false;
+#endif
+    m_bZslMode_new = m_bZslMode;
+
+    //Set video HDR
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_VIDEO_HDR) > 0) {
+        set(KEY_QC_SUPPORTED_VIDEO_HDR_MODES, onOffValues);
+        set(KEY_QC_VIDEO_HDR, VALUE_OFF);
+    }
+
+    //Set Touch AF/AEC
+    String8 touchValues = createValuesStringFromMap(
+       TOUCH_AF_AEC_MODES_MAP, sizeof(TOUCH_AF_AEC_MODES_MAP) / sizeof(QCameraMap));
+
+    set(KEY_QC_SUPPORTED_TOUCH_AF_AEC, touchValues);
+    set(KEY_QC_TOUCH_AF_AEC, TOUCH_AF_AEC_OFF);
+
+    //set flip mode
+    if ((m_pCapability->qcom_supported_feature_mask & CAM_QCOM_FEATURE_FLIP) > 0) {
+        String8 flipModes = createValuesStringFromMap(
+           FLIP_MODES_MAP, sizeof(FLIP_MODES_MAP) / sizeof(QCameraMap));
+        set(KEY_QC_SUPPORTED_FLIP_MODES, flipModes);
+        set(KEY_QC_PREVIEW_FLIP, FLIP_MODE_OFF);
+        set(KEY_QC_VIDEO_FLIP, FLIP_MODE_OFF);
+        set(KEY_QC_SNAPSHOT_PICTURE_FLIP, FLIP_MODE_OFF);
+    }
+
+    // Set default Auto Exposure lock value
+    setAecLock(VALUE_FALSE);
+
+    // Set default AWB_LOCK lock value
+    setAwbLock(VALUE_FALSE);
+
+    // Set default Camera mode
+    set(KEY_QC_CAMERA_MODE, 0);
+
+    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
+    m_pCapability->min_num_pp_bufs = 3;
+
+    int32_t rc = commitParameters();
+    if (rc == NO_ERROR) {
+        rc = setNumOfSnapshot();
+    }
+    return rc;
+}
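+// Note (sketch): the wavelet-denoise and ZSL defaults above are chosen at compile
+// time via the flags used in the #ifdefs. Illustratively, a device build could
+// enable them from the HAL makefile (flag names taken from this file):
+//
+//     LOCAL_CFLAGS += -DDEFAULT_DENOISE_MODE_ON -DDEFAULT_ZSL_MODE_ON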
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize parameter obj
+ *
+ * PARAMETERS :
+ *   @capabilities  : ptr to camera capabilities
+ *   @mmOps         : ptr to memory ops table for mapping/unmapping
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::init(cam_capability_t *capabilities, mm_camera_vtbl_t *mmOps)
+{
+    int32_t rc = NO_ERROR;
+
+    m_pCapability = capabilities;
+    m_pCamOpsTbl = mmOps;
+
+    //Allocate Set Param Buffer
+    m_pParamHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
+    rc = m_pParamHeap->allocate(1, sizeof(parm_buffer_t));
+    if(rc != OK) {
+        rc = NO_MEMORY;
+        ALOGE("Failed to allocate SETPARM Heap memory");
+        goto TRANS_INIT_ERROR1;
+    }
+
+    //Map memory for parameters buffer
+    rc = m_pCamOpsTbl->ops->map_buf(m_pCamOpsTbl->camera_handle,
+                             CAM_MAPPING_BUF_TYPE_PARM_BUF,
+                             m_pParamHeap->getFd(0),
+                             sizeof(parm_buffer_t));
+    if(rc < 0) {
+        ALOGE("%s:failed to map SETPARM buffer",__func__);
+        rc = FAILED_TRANSACTION;
+        goto TRANS_INIT_ERROR2;
+    }
+    m_pParamBuf = (parm_buffer_t*) DATA_PTR(m_pParamHeap,0);
+
+    initDefaultParameters();
+
+    goto TRANS_INIT_DONE;
+
+TRANS_INIT_ERROR2:
+    m_pParamHeap->deallocate();
+
+TRANS_INIT_ERROR1:
+    delete m_pParamHeap;
+    m_pParamHeap = NULL;
+
+TRANS_INIT_DONE:
+    return rc;
+}
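+// Typical lifecycle (sketch, caller names hypothetical): the owning HAL object
+// pairs init() with deinit(); the capability and ops tables are borrowed, not owned:
+//
+//     QCameraParameters params;
+//     if (params.init(cap, camOpsTbl) == NO_ERROR) {  // cap/camOpsTbl owned by caller
+//         // ... update parameters, commitParameters(), etc. ...
+//         params.deinit();
+//     }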
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: deinitialize
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::deinit()
+{
+    //clear all entries in the map
+    String8 emptyStr;
+    QCameraParameters::unflatten(emptyStr);
+
+    if (NULL != m_pCamOpsTbl) {
+        m_pCamOpsTbl->ops->unmap_buf(
+                             m_pCamOpsTbl->camera_handle,
+                             CAM_MAPPING_BUF_TYPE_PARM_BUF);
+        m_pCamOpsTbl = NULL;
+    }
+    m_pCapability = NULL;
+    if (NULL != m_pParamHeap) {
+        m_pParamHeap->deallocate();
+        delete m_pParamHeap;
+        m_pParamHeap = NULL;
+        m_pParamBuf = NULL;
+    }
+
+    m_tempMap.clear();
+}
+
+/*===========================================================================
+ * FUNCTION   : parse_pair
+ *
+ * DESCRIPTION: helper function to parse a string like "640x480" or "10000,20000"
+ *
+ * PARAMETERS :
+ *   @str     : input string to be parsed
+ *   @first   : [output] first value of the pair
+ *   @second  : [output] second value of the pair
+ *   @delim   : [input] delimiter to separate the pair
+ *   @endptr  : [output] ptr to the end of the pair string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parse_pair(const char *str,
+                                      int *first,
+                                      int *second,
+                                      char delim,
+                                      char **endptr = NULL)
+{
+    // Find the first integer.
+    char *end;
+    int w = (int)strtol(str, &end, 10);
+    // If a delimiter does not immediately follow, give up.
+    if (*end != delim) {
+        ALOGE("Cannot find delimiter (%c) in str=%s", delim, str);
+        return BAD_VALUE;
+    }
+
+    // Find the second integer, immediately after the delimiter.
+    int h = (int)strtol(end+1, &end, 10);
+
+    *first = w;
+    *second = h;
+
+    if (endptr) {
+        *endptr = end;
+    }
+
+    return NO_ERROR;
+}
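+// Usage sketch (hypothetical caller): parse_pair() consumes one "WxH" or
+// "min,max" token and reports where parsing stopped:
+//
+//     int w = 0, h = 0;
+//     char *next = NULL;
+//     if (parse_pair("640x480", &w, &h, 'x', &next) == NO_ERROR) {
+//         // w == 640, h == 480, next points at the terminating '\0'
+//     }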
+
+/*===========================================================================
+ * FUNCTION   : parseSizesList
+ *
+ * DESCRIPTION: helper function to parse a string containing sizes
+ *
+ * PARAMETERS :
+ *   @sizesStr: [input] input string to be parsed
+ *   @sizes   : [output] reference to store parsed sizes
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::parseSizesList(const char *sizesStr, Vector<Size> &sizes)
+{
+    if (sizesStr == 0) {
+        return;
+    }
+
+    char *sizeStartPtr = (char *)sizesStr;
+
+    while (true) {
+        int width, height;
+        int success = parse_pair(sizeStartPtr, &width, &height, 'x',
+                                 &sizeStartPtr);
+        if (success != NO_ERROR || (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) {
+            ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+            return;
+        }
+        sizes.push(Size(width, height));
+
+        if (*sizeStartPtr == '\0') {
+            return;
+        }
+        sizeStartPtr++;
+    }
+}
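+// Example (illustrative): a comma-separated list such as "1920x1080,1280x720"
+// yields two entries in the output vector:
+//
+//     Vector<Size> sizes;
+//     parseSizesList("1920x1080,1280x720", sizes);
+//     // sizes[0] = 1920x1080, sizes[1] = 1280x720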
+
+/*===========================================================================
+ * FUNCTION   : getSupportedHfrSizes
+ *
+ * DESCRIPTION: return supported HFR sizes
+ *
+ * PARAMETERS :
+ *   @sizes  : [output] reference to a vector storing supported HFR sizes
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getSupportedHfrSizes(Vector<Size> &sizes)
+{
+    const char *hfrSizesStr = get(KEY_QC_SUPPORTED_HFR_SIZES);
+    parseSizesList(hfrSizesStr, sizes);
+}
+
+/*===========================================================================
+ * FUNCTION   : adjustPreviewFpsRange
+ *
+ * DESCRIPTION: adjust preview FPS range
+ *              according to external events
+ *
+ * PARAMETERS :
+ *   @fpsRange : ptr to the FPS range (min/max) to be applied
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::adjustPreviewFpsRange(cam_fps_range_t *fpsRange)
+{
+    if ( fpsRange == NULL ) {
+        return BAD_VALUE;
+    }
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    int32_t rc = initBatchUpdate(m_pParamBuf);
+    if ( rc != NO_ERROR ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return rc;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_FPS_RANGE,
+                                  sizeof(cam_fps_range_t),
+                                  fpsRange);
+    if ( rc != NO_ERROR ) {
+        ALOGE("%s: Parameters batch failed",__func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if ( rc != NO_ERROR ) {
+        ALOGE("%s:Failed to commit batch parameters", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFpsRange
+ *
+ * DESCRIPTION: set preview FPS range
+ *
+ * PARAMETERS :
+ *   @minFPS  : min FPS value (in thousandths of a frame per second)
+ *   @maxFPS  : max FPS value (in thousandths of a frame per second)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setPreviewFpsRange(int minFPS, int maxFPS)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%d,%d", minFPS, maxFPS);
+    ALOGD("%s: Setting preview fps range %s", __func__, str);
+    updateParamEntry(KEY_PREVIEW_FPS_RANGE, str);
+    cam_fps_range_t fps_range = {(float)(minFPS / 1000.0), (float)(maxFPS / 1000.0)};
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_FPS_RANGE,
+                                  sizeof(cam_fps_range_t),
+                                  &fps_range);
+}
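+// Units note (sketch): the framework passes the range in thousandths of fps;
+// the values are converted to floats before being batched to the backend:
+//
+//     setPreviewFpsRange(15000, 30000);  // 15.0 - 30.0 fps via CAM_INTF_PARM_FPS_RANGE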
+
+/*===========================================================================
+ * FUNCTION   : setAutoExposure
+ *
+ * DESCRIPTION: set auto exposure
+ *
+ * PARAMETERS :
+ *   @autoExp : auto exposure value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAutoExposure(const char *autoExp)
+{
+    if (autoExp != NULL) {
+        int32_t value = lookupAttr(AUTO_EXPOSURE_MAP,
+                                   sizeof(AUTO_EXPOSURE_MAP)/sizeof(AUTO_EXPOSURE_MAP[0]),
+                                   autoExp);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting auto exposure %s", __func__, autoExp);
+            updateParamEntry(KEY_QC_AUTO_EXPOSURE, autoExp);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_AEC_ALGO_TYPE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid auto exposure value: %s", (autoExp == NULL) ? "NULL" : autoExp);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setEffect
+ *
+ * DESCRIPTION: set effect
+ *
+ * PARAMETERS :
+ *   @effect  : effect value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setEffect(const char *effect)
+{
+    if (effect != NULL) {
+        int32_t value = lookupAttr(EFFECT_MODES_MAP,
+                                   sizeof(EFFECT_MODES_MAP)/sizeof(QCameraMap),
+                                   effect);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting effect %s", __func__, effect);
+            updateParamEntry(KEY_EFFECT, effect);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_EFFECT,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid effect value: %s", (effect == NULL) ? "NULL" : effect);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setBrightness
+ *
+ * DESCRIPTION: set brightness control value
+ *
+ * PARAMETERS :
+ *   @brightness  : brightness control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setBrightness(int brightness)
+{
+    char val[16];
+    sprintf(val, "%d", brightness);
+    updateParamEntry(KEY_QC_BRIGHTNESS, val);
+
+    int32_t value = brightness;
+    ALOGD("%s: Setting brightness %s", __func__, val);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_BRIGHTNESS,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusMode
+ *
+ * DESCRIPTION: set focus mode
+ *
+ * PARAMETERS :
+ *   @focusMode  : focus mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusMode(const char *focusMode)
+{
+    if (focusMode != NULL) {
+        int32_t value = lookupAttr(FOCUS_MODES_MAP,
+                                   sizeof(FOCUS_MODES_MAP)/sizeof(QCameraMap),
+                                   focusMode);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting focus mode %s", __func__, focusMode);
+            mFocusMode = (cam_focus_mode_type)value;
+
+            // reset need lock CAF flag
+            m_bNeedLockCAF = false;
+            m_bCAFLocked = false;
+            m_bAFRunning = false;
+
+            updateParamEntry(KEY_FOCUS_MODE, focusMode);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_FOCUS_MODE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid focus mode value: %s", (focusMode == NULL) ? "NULL" : focusMode);
+    return BAD_VALUE;
+}
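+// Note (sketch): changing the focus mode resets the CAF bookkeeping flags, so a
+// previously locked continuous-AF state does not carry over, e.g.
+//
+//     setFocusMode(FOCUS_MODE_CONTINUOUS_PICTURE);  // m_bNeedLockCAF/m_bCAFLocked/m_bAFRunning cleared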
+
+/*===========================================================================
+ * FUNCTION   : setSharpness
+ *
+ * DESCRIPTION: set sharpness control value
+ *
+ * PARAMETERS :
+ *   @sharpness  : sharpness control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSharpness(int sharpness)
+{
+    char val[16];
+    sprintf(val, "%d", sharpness);
+    updateParamEntry(KEY_QC_SHARPNESS, val);
+    ALOGD("%s: Setting sharpness %s", __func__, val);
+
+    int32_t value = sharpness;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SHARPNESS,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSkinToneEnhancement
+ *
+ * DESCRIPTION: set skin tone enhancement value
+ *
+ * PARAMETERS :
+ *   @sceFactor  : skin tone enhancement factor value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSkinToneEnhancement(int sceFactor)
+{
+    char val[16];
+    sprintf(val, "%d", sceFactor);
+    updateParamEntry(KEY_QC_SCE_FACTOR, val);
+    ALOGD("%s: Setting skintone enhancement %s", __func__, val);
+
+    int32_t value = sceFactor;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SCE_FACTOR,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSaturation
+ *
+ * DESCRIPTION: set saturation control value
+ *
+ * PARAMETERS :
+ *   @saturation : saturation control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSaturation(int saturation)
+{
+    char val[16];
+    sprintf(val, "%d", saturation);
+    updateParamEntry(KEY_QC_SATURATION, val);
+    ALOGD("%s: Setting saturation %s", __func__, val);
+
+    int32_t value = saturation;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_SATURATION,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setContrast
+ *
+ * DESCRIPTION: set contrast control value
+ *
+ * PARAMETERS :
+ *   @contrast : contrast control value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setContrast(int contrast)
+{
+    char val[16];
+    sprintf(val, "%d", contrast);
+    updateParamEntry(KEY_QC_CONTRAST, val);
+    ALOGD("%s: Setting contrast %s", __func__, val);
+
+    int32_t value = contrast;
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_CONTRAST,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneDetect
+ *
+ * DESCRIPTION: set scene detect value
+ *
+ * PARAMETERS :
+ *   @sceneDetect  : scene detect value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneDetect(const char *sceneDetect)
+{
+    if (sceneDetect != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP,
+                                   sizeof(ON_OFF_MODES_MAP)/sizeof(QCameraMap),
+                                   sceneDetect);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting Scene Detect %s", __func__, sceneDetect);
+            updateParamEntry(KEY_QC_SCENE_DETECT, sceneDetect);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ASD_ENABLE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid Scene Detect value: %s",
+          (sceneDetect == NULL) ? "NULL" : sceneDetect);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setVideoHDR
+ *
+ * DESCRIPTION: set video HDR value
+ *
+ * PARAMETERS :
+ *   @videoHDR  : video HDR value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setVideoHDR(const char *videoHDR)
+{
+    if (videoHDR != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP,
+                                   sizeof(ON_OFF_MODES_MAP)/sizeof(QCameraMap),
+                                   videoHDR);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting Video HDR %s", __func__, videoHDR);
+            updateParamEntry(KEY_QC_VIDEO_HDR, videoHDR);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_VIDEO_HDR,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid Video HDR value: %s",
+          (videoHDR == NULL) ? "NULL" : videoHDR);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceRecognition
+ *
+ * DESCRIPTION: set face recognition value
+ *
+ * PARAMETERS :
+ *   @faceRecog  : face recognition value string
+ *   @maxFaces   : number of max faces to be detected/recognized
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceRecognition(const char *faceRecog, int maxFaces)
+{
+    if (faceRecog != NULL) {
+        int32_t value = lookupAttr(ON_OFF_MODES_MAP,
+                                   sizeof(ON_OFF_MODES_MAP)/sizeof(QCameraMap),
+                                   faceRecog);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting face recognition %s", __func__, faceRecog);
+            updateParamEntry(KEY_QC_FACE_RECOGNITION, faceRecog);
+
+            int faceProcMask = m_nFaceProcMask;
+            if (value > 0) {
+                faceProcMask |= CAM_FACE_PROCESS_MASK_RECOGNITION;
+            } else {
+                faceProcMask &= ~CAM_FACE_PROCESS_MASK_RECOGNITION;
+            }
+
+            if(m_nFaceProcMask == faceProcMask) {
+                ALOGD("%s: face process mask not changed, no ops here", __func__);
+                return NO_ERROR;
+            }
+            m_nFaceProcMask = faceProcMask;
+            ALOGD("%s: FaceProcMask -> %d", __func__, m_nFaceProcMask);
+
+            // set parm for face process
+            cam_fd_set_parm_t fd_set_parm;
+            memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+            fd_set_parm.fd_mode = m_nFaceProcMask;
+            fd_set_parm.num_fd = maxFaces;
+
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                        CAM_INTF_PARM_FD,
+                                        sizeof(fd_set_parm),
+                                        &fd_set_parm);
+        }
+    }
+    ALOGE("Invalid face recognition value: %s", (faceRecog == NULL) ? "NULL" : faceRecog);
+    return BAD_VALUE;
+}
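+// Behaviour sketch: the recognition bit is OR-ed into / cleared from the running
+// face-process mask, so other face-processing bits are left untouched:
+//
+//     setFaceRecognition(VALUE_ON, 5);   // adds CAM_FACE_PROCESS_MASK_RECOGNITION, num_fd = 5
+//     setFaceRecognition(VALUE_OFF, 5);  // clears only the recognition bit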
+
+/*===========================================================================
+ * FUNCTION   : setZoom
+ *
+ * DESCRIPTION: set zoom level
+ *
+ * PARAMETERS :
+ *   @zoom_level : zoom level
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setZoom(int zoom_level)
+{
+    char val[16];
+    sprintf(val, "%d", zoom_level);
+    updateParamEntry(KEY_ZOOM, val);
+
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_ZOOM,
+                                  sizeof(zoom_level),
+                                  &zoom_level);
+}
+
+/*===========================================================================
+ * FUNCTION   : setISOValue
+ *
+ * DESCRIPTION: set ISO value
+ *
+ * PARAMETERS :
+ *   @isoValue : ISO value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t  QCameraParameters::setISOValue(const char *isoValue)
+{
+    if (isoValue != NULL) {
+        int32_t value = lookupAttr(ISO_MODES_MAP,
+                                   sizeof(ISO_MODES_MAP)/sizeof(QCameraMap),
+                                   isoValue);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting ISO value %s", __func__, isoValue);
+            updateParamEntry(KEY_QC_ISO_MODE, isoValue);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ISO,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid ISO value: %s",
+          (isoValue == NULL) ? "NULL" : isoValue);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFlash
+ *
+ * DESCRIPTION: set flash mode
+ *
+ * PARAMETERS :
+ *   @flashStr : LED flash mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFlash(const char *flashStr)
+{
+    if (flashStr != NULL) {
+        int32_t value = lookupAttr(FLASH_MODES_MAP,
+                                   sizeof(FLASH_MODES_MAP)/sizeof(QCameraMap),
+                                   flashStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting Flash value %s", __func__, flashStr);
+            updateParamEntry(KEY_FLASH_MODE, flashStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_LED_MODE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid flash value: %s", (flashStr == NULL) ? "NULL" : flashStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAecLock
+ *
+ * DESCRIPTION: set AEC lock value
+ *
+ * PARAMETERS :
+ *   @aecLockStr : AEC lock value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAecLock(const char *aecLockStr)
+{
+    if (aecLockStr != NULL) {
+        int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP,
+                                   sizeof(TRUE_FALSE_MODES_MAP)/sizeof(QCameraMap),
+                                   aecLockStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting AECLock value %s", __func__, aecLockStr);
+            updateParamEntry(KEY_AUTO_EXPOSURE_LOCK, aecLockStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_AEC_LOCK,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid AECLock value: %s", (aecLockStr == NULL) ? "NULL" : aecLockStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAwbLock
+ *
+ * DESCRIPTION: set AWB lock value
+ *
+ * PARAMETERS :
+ *   @awbLockStr : AWB lock value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAwbLock(const char *awbLockStr)
+{
+    if (awbLockStr != NULL) {
+        int32_t value = lookupAttr(TRUE_FALSE_MODES_MAP,
+                                   sizeof(TRUE_FALSE_MODES_MAP)/sizeof(QCameraMap),
+                                   awbLockStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting AWBLock value %s", __func__, awbLockStr);
+            updateParamEntry(KEY_AUTO_WHITEBALANCE_LOCK, awbLockStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_AWB_LOCK,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid AWBLock value: %s", (awbLockStr == NULL) ? "NULL" : awbLockStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setMCEValue
+ *
+ * DESCRIPTION: set memory color enhancement value
+ *
+ * PARAMETERS :
+ *   @mceStr : MCE value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMCEValue(const char *mceStr)
+{
+    if (mceStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                                   sizeof(ENABLE_DISABLE_MODES_MAP)/sizeof(QCameraMap),
+                                   mceStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting AWBLock value %s", __func__, mceStr);
+            updateParamEntry(KEY_QC_MEMORY_COLOR_ENHANCEMENT, mceStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_MCE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid MCE value: %s", (mceStr == NULL) ? "NULL" : mceStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setDISValue
+ *
+ * DESCRIPTION: set DIS value
+ *
+ * PARAMETERS :
+ *   @disStr : DIS value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setDISValue(const char *disStr)
+{
+    if (disStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                                   sizeof(ENABLE_DISABLE_MODES_MAP)/sizeof(QCameraMap),
+                                   disStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting DIS value %s", __func__, disStr);
+            updateParamEntry(KEY_QC_DIS, disStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_DIS_ENABLE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid DIS value: %s", (disStr == NULL) ? "NULL" : disStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHighFrameRate
+ *
+ * DESCRIPTION: set high frame rate
+ *
+ * PARAMETERS :
+ *   @hfrStr : HFR value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHighFrameRate(const char *hfrStr)
+{
+    if (hfrStr != NULL) {
+        int32_t value = lookupAttr(HFR_MODES_MAP,
+                                   sizeof(HFR_MODES_MAP)/sizeof(QCameraMap),
+                                   hfrStr);
+        if (value != NAME_NOT_FOUND) {
+            // HFR value changed, need to restart preview
+            m_bNeedRestart = true;
+            // Set HFR value
+            ALOGD("%s: Setting HFR value %s", __func__, hfrStr);
+            updateParamEntry(KEY_QC_VIDEO_HIGH_FRAME_RATE, hfrStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_HFR,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid HFR value: %s", (hfrStr == NULL) ? "NULL" : hfrStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLensShadeValue
+ *
+ * DESCRIPTION: set lens shade value
+ *
+ * PARAMETERS :
+ *   @lensShadeStr : lens shade value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLensShadeValue(const char *lensShadeStr)
+{
+    if (lensShadeStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                                   sizeof(ENABLE_DISABLE_MODES_MAP)/sizeof(QCameraMap),
+                                   lensShadeStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting LensShade value %s", __func__, lensShadeStr);
+            updateParamEntry(KEY_QC_LENSSHADE, lensShadeStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ROLLOFF,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid LensShade value: %s",
+          (lensShadeStr == NULL) ? "NULL" : lensShadeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setExposureCompensation
+ *
+ * DESCRIPTION: set exposure compensation value
+ *
+ * PARAMETERS :
+ *   @expComp : exposure compensation value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setExposureCompensation(int expComp)
+{
+    char val[16];
+    sprintf(val, "%d", expComp);
+    updateParamEntry(KEY_EXPOSURE_COMPENSATION, val);
+
+    // Don't need to pass step as part of setParameter because
+    // camera daemon is already aware of it.
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_EV,
+                                  sizeof(expComp),
+                                  &expComp);
+}
+
+/*===========================================================================
+ * FUNCTION   : setWhiteBalance
+ *
+ * DESCRIPTION: set white balance mode
+ *
+ * PARAMETERS :
+ *   @wbStr   : white balance mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWhiteBalance(const char *wbStr)
+{
+    if (wbStr != NULL) {
+        int32_t value = lookupAttr(WHITE_BALANCE_MODES_MAP,
+                                   sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(QCameraMap),
+                                   wbStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting WhiteBalance value %s", __func__, wbStr);
+            updateParamEntry(KEY_WHITE_BALANCE, wbStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_WHITE_BALANCE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid WhiteBalance value: %s", (wbStr == NULL) ? "NULL" : wbStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAntibanding
+ *
+ * DESCRIPTION: set antibanding value
+ *
+ * PARAMETERS :
+ *   @antiBandingStr : antibanding value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAntibanding(const char *antiBandingStr)
+{
+    if (antiBandingStr != NULL) {
+        int32_t value = lookupAttr(ANTIBANDING_MODES_MAP,
+                                   sizeof(ANTIBANDING_MODES_MAP)/sizeof(QCameraMap),
+                                   antiBandingStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGD("%s: Setting AntiBanding value %s", __func__, antiBandingStr);
+            updateParamEntry(KEY_ANTIBANDING, antiBandingStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_ANTIBANDING,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("Invalid AntiBanding value: %s",
+          (antiBandingStr == NULL) ? "NULL" : antiBandingStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFocusAreas
+ *
+ * DESCRIPTION: set focus areas
+ *
+ * PARAMETERS :
+ *   @focusAreasStr : focus areas value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFocusAreas(const char *focusAreasStr)
+{
+    if (m_pCapability->max_num_focus_areas == 0 ||
+        focusAreasStr == NULL) {
+        ALOGI("%s: Parameter string is null", __func__);
+        return NO_ERROR;
+    }
+
+    cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+    if (NULL == areas) {
+        ALOGE("%s: No memory for areas", __func__);
+        return NO_MEMORY;
+    }
+    memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_focus_areas);
+    int num_areas_found = 0;
+    if (parseCameraAreaString(focusAreasStr,
+                              m_pCapability->max_num_focus_areas,
+                              areas,
+                              num_areas_found) != NO_ERROR) {
+        ALOGE("%s: Failed to parse the string: %s", __func__, focusAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    if (validateCameraAreas(areas, num_areas_found) == false) {
+        ALOGE("%s: invalid areas specified : %s", __func__, focusAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_FOCUS_AREAS, focusAreasStr);
+
+    //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+    //so no action is taken by the lower layer
+    if (num_areas_found == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        num_areas_found = 0;
+    }
+
+    int previewWidth, previewHeight;
+    getPreviewSize(&previewWidth, &previewHeight);
+    cam_roi_info_t af_roi_value;
+    memset(&af_roi_value, 0, sizeof(cam_roi_info_t));
+    af_roi_value.num_roi = num_areas_found;
+    for (int i = 0; i < num_areas_found; i++) {
+        ALOGD("%s: FocusArea[%d] = (%d, %d, %d, %d)",
+              __func__, i, (areas[i].rect.top), (areas[i].rect.left),
+              (areas[i].rect.width), (areas[i].rect.height));
+
+        //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+        af_roi_value.roi[i].left = (int32_t)((areas[i].rect.left + 1000.0f) * (previewWidth / 2000.0f));
+        af_roi_value.roi[i].top = (int32_t)((areas[i].rect.top + 1000.0f) * (previewHeight / 2000.0f));
+        af_roi_value.roi[i].width = (int32_t)(areas[i].rect.width * previewWidth / 2000.0f);
+        af_roi_value.roi[i].height = (int32_t)(areas[i].rect.height * previewHeight / 2000.0f);
+        af_roi_value.weight[i] = areas[i].weight;
+    }
+    free(areas);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_AF_ROI,
+                                  sizeof(af_roi_value),
+                                  &af_roi_value);
+}
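+// Example (illustrative, standard Android area string): a single centred focus
+// area with weight 1; coordinates are remapped from (-1000,1000) to preview pixels
+// before being sent as CAM_INTF_PARM_AF_ROI, and "(0,0,0,0,0)" clears the area:
+//
+//     setFocusAreas("(-250,-250,250,250,1)");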
+
+/*===========================================================================
+ * FUNCTION   : setMeteringAreas
+ *
+ * DESCRIPTION: set metering areas value
+ *
+ * PARAMETERS :
+ *   @meteringAreasStr : metering areas value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setMeteringAreas(const char *meteringAreasStr)
+{
+    if (m_pCapability->max_num_metering_areas == 0 ||
+        meteringAreasStr == NULL) {
+        ALOGI("%s: Parameter string is null", __func__);
+        return NO_ERROR;
+    }
+
+    cam_area_t *areas = (cam_area_t *)malloc(sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+    if (NULL == areas) {
+        ALOGE("%s: No memory for areas", __func__);
+        return NO_MEMORY;
+    }
+    memset(areas, 0, sizeof(cam_area_t) * m_pCapability->max_num_metering_areas);
+    int num_areas_found = 0;
+    if (parseCameraAreaString(meteringAreasStr,
+                              m_pCapability->max_num_metering_areas,
+                              areas,
+                              num_areas_found) < 0) {
+        ALOGE("%s: Failed to parse the string: %s", __func__, meteringAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    if (validateCameraAreas(areas, num_areas_found) == false) {
+        ALOGE("%s: invalid areas specified : %s", __func__, meteringAreasStr);
+        free(areas);
+        return BAD_VALUE;
+    }
+
+    updateParamEntry(KEY_METERING_AREAS, meteringAreasStr);
+
+    //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+    //so no action is taken by the lower layer
+    if (num_areas_found == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        num_areas_found = 0;
+    }
+    cam_set_aec_roi_t aec_roi_value;
+    int previewWidth, previewHeight;
+    getPreviewSize(&previewWidth, &previewHeight);
+
+    memset(&aec_roi_value, 0, sizeof(cam_set_aec_roi_t));
+    if (num_areas_found > 0) {
+        aec_roi_value.aec_roi_enable = CAM_AEC_ROI_ON;
+        aec_roi_value.aec_roi_type = CAM_AEC_ROI_BY_COORDINATE;
+
+        for (int i = 0; i < num_areas_found; i++) {
+            ALOGD("%s: MeteringArea[%d] = (%d, %d, %d, %d)",
+                  __func__, i, (areas[i].rect.top), (areas[i].rect.left),
+                  (areas[i].rect.width), (areas[i].rect.height));
+
+            //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+            aec_roi_value.cam_aec_roi_position.coordinate[i].x =
+                (uint32_t)(((areas[i].rect.left + areas[i].rect.width / 2) + 1000.0f) * previewWidth / 2000.0f) ;
+            aec_roi_value.cam_aec_roi_position.coordinate[i].y =
+                (uint32_t)(((areas[i].rect.top + areas[i].rect.height / 2) + 1000.0f) * previewHeight / 2000.0f) ;
+        }
+    } else {
+        aec_roi_value.aec_roi_enable = CAM_AEC_ROI_OFF;
+    }
+    free(areas);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_AEC_ROI,
+                                  sizeof(aec_roi_value),
+                                  &aec_roi_value);
+}
+
+/*===========================================================================
+ * FUNCTION   : setSceneMode
+ *
+ * DESCRIPTION: set scene mode
+ *
+ * PARAMETERS :
+ *   @sceneModeStr : scene mode value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSceneMode(const char *sceneModeStr)
+{
+    if (sceneModeStr != NULL) {
+        int32_t value = lookupAttr(SCENE_MODES_MAP,
+                                   sizeof(SCENE_MODES_MAP)/sizeof(QCameraMap),
+                                   sceneModeStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGV("%s: Setting SceneMode %s", __func__, sceneModeStr);
+            updateParamEntry(KEY_SCENE_MODE, sceneModeStr);
+            int32_t rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                                CAM_INTF_PARM_BESTSHOT_MODE,
+                                                sizeof(value),
+                                                &value);
+            return rc;
+        }
+    }
+    ALOGE("%s: Invalid Secene Mode: %s",
+          __func__, (sceneModeStr == NULL) ? "NULL" : sceneModeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setSelectableZoneAf
+ *
+ * DESCRIPTION: set selectable zone AF algorithm
+ *
+ * PARAMETERS :
+ *   @selZoneAFStr : selectable zone AF algorithm value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setSelectableZoneAf(const char *selZoneAFStr)
+{
+    if (selZoneAFStr != NULL) {
+        int32_t value = lookupAttr(FOCUS_ALGO_MAP,
+                                   sizeof(FOCUS_ALGO_MAP)/sizeof(QCameraMap),
+                                   selZoneAFStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGV("%s: Setting Selectable Zone AF value %s", __func__, selZoneAFStr);
+            updateParamEntry(KEY_QC_SELECTABLE_ZONE_AF, selZoneAFStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_FOCUS_ALGO_TYPE,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("%s: Invalid selectable zone af value: %s",
+          __func__, (selZoneAFStr == NULL) ? "NULL" : selZoneAFStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setAEBracket
+ *
+ * DESCRIPTION: set AE bracket value
+ *
+ * PARAMETERS :
+ *   @aecBracketStr : AE bracket value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setAEBracket(const char *aecBracketStr)
+{
+    if (aecBracketStr == NULL) {
+        ALOGI("%s: setAEBracket with NULL value", __func__);
+        return NO_ERROR;
+    }
+
+    cam_exp_bracketing_t expBracket;
+    memset(&expBracket, 0, sizeof(expBracket));
+
+    int value = lookupAttr(BRACKETING_MODES_MAP,
+                           sizeof(BRACKETING_MODES_MAP)/sizeof(QCameraMap),
+                           aecBracketStr);
+    switch (value) {
+    case CAM_EXP_BRACKETING_ON:
+        {
+            ALOGV("%s, EXP_BRACKETING_ON", __func__);
+            const char *str_val = get(KEY_QC_CAPTURE_BURST_EXPOSURE);
+            if ((str_val != NULL) && (strlen(str_val)>0)) {
+                expBracket.mode = CAM_EXP_BRACKETING_ON;
+                strlcpy(expBracket.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+                ALOGI("%s: setting Exposure Bracketing value of %s",
+                      __func__, expBracket.values);
+            }
+            else {
+                /* App did not set capture-burst-exposures; fall back to bracketing off mode */
+                ALOGI("%s: capture-burst-exposures not set, back to HDR OFF mode", __func__);
+                expBracket.mode = CAM_EXP_BRACKETING_OFF;
+            }
+        }
+        break;
+    default:
+        {
+            ALOGD("%s, EXP_BRACKETING_OFF", __func__);
+            expBracket.mode = CAM_EXP_BRACKETING_OFF;
+        }
+        break;
+    }
+
+    /* save the value*/
+    updateParamEntry(KEY_QC_AE_BRACKET_HDR, aecBracketStr);
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_HDR,
+                                  sizeof(expBracket),
+                                  &expBracket);
+}
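+// Behaviour sketch: bracketing only engages when a non-empty
+// capture-burst-exposures value is present (set by the app, or seeded from the
+// persist.capture.burst.exposures property in initDefaultParameters), e.g.
+//
+//     adb shell setprop persist.capture.burst.exposures <exposure-list>   (value format defined by the backend)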
+
+/*===========================================================================
+ * FUNCTION   : setRedeyeReduction
+ *
+ * DESCRIPTION: set red eye reduction value
+ *
+ * PARAMETERS :
+ *   @redeyeStr : red eye reduction value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setRedeyeReduction(const char *redeyeStr)
+{
+    if (redeyeStr != NULL) {
+        int32_t value = lookupAttr(ENABLE_DISABLE_MODES_MAP,
+                                   sizeof(ENABLE_DISABLE_MODES_MAP)/sizeof(QCameraMap),
+                                   redeyeStr);
+        if (value != NAME_NOT_FOUND) {
+            ALOGV("%s: Setting RedEye Reduce value %s", __func__, redeyeStr);
+            updateParamEntry(KEY_QC_REDEYE_REDUCTION, redeyeStr);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_REDEYE_REDUCTION,
+                                          sizeof(value),
+                                          &value);
+        }
+    }
+    ALOGE("%s: Invalid RedEye Reduce value: %s",
+          __func__, (redeyeStr == NULL) ? "NULL" : redeyeStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : getWaveletDenoiseProcessPlate
+ *
+ * DESCRIPTION: query wavelet denoise process plate
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : WNR process plate value
+ *==========================================================================*/
+cam_denoise_process_type_t QCameraParameters::getWaveletDenoiseProcessPlate()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.denoise.process.plates", prop, "0");
+    int processPlate = atoi(prop);
+    switch(processPlate) {
+    case 0:
+        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
+    case 1:
+        return CAM_WAVELET_DENOISE_CBCR_ONLY;
+    case 2:
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    case 3:
+        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
+    default:
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    }
+}
+
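+// Illustrative note: the denoise plate is selected purely by the system property
+// read above, e.g. (assuming a rooted/eng device)
+//   adb shell setprop persist.denoise.process.plates 1
+// selects CAM_WAVELET_DENOISE_CBCR_ONLY; any unrecognized value falls back to
+// CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.
+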
+/*===========================================================================
+ * FUNCTION   : setWaveletDenoise
+ *
+ * DESCRIPTION: set wavelet denoise value
+ *
+ * PARAMETERS :
+ *   @wnrStr : wavelet denoise value string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setWaveletDenoise(const char *wnrStr)
+{
+    if (wnrStr != NULL) {
+        int value = lookupAttr(DENOISE_ON_OFF_MODES_MAP,
+                               sizeof(DENOISE_ON_OFF_MODES_MAP)/sizeof(QCameraMap),
+                               wnrStr);
+        if (value != NAME_NOT_FOUND) {
+            updateParamEntry(KEY_QC_DENOISE, wnrStr);
+
+            cam_denoise_param_t temp;
+            memset(&temp, 0, sizeof(temp));
+            temp.denoise_enable = value;
+            m_bWNROn = (value != 0);
+            if (m_bWNROn) {
+                temp.process_plates = getWaveletDenoiseProcessPlate();
+            }
+            ALOGI("%s: Denoise enable=%d, plates=%d",
+                  __func__, temp.denoise_enable, temp.process_plates);
+            return AddSetParmEntryToBatch(m_pParamBuf,
+                                          CAM_INTF_PARM_WAVELET_DENOISE,
+                                          sizeof(temp),
+                                          &temp);
+        }
+    }
+    ALOGE("%s: Invalid Denoise value: %s", __func__, (wnrStr == NULL) ? "NULL" : wnrStr);
+    return BAD_VALUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : setPreviewFrameRateMode
+ *
+ * DESCRIPTION: set preview frame rate mode
+ *
+ * PARAMETERS :
+ *   @mode    : preview frame rate mode
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setPreviewFrameRateMode(const char *mode)
+{
+    set(KEY_QC_PREVIEW_FRAME_RATE_MODE, mode);
+}
+
+/*===========================================================================
+ * FUNCTION   : getPreviewFrameRateMode
+ *
+ * DESCRIPTION: get preview frame rate mode
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : preview frame rate mode string
+ *==========================================================================*/
+const char *QCameraParameters::getPreviewFrameRateMode() const
+{
+    return get(KEY_QC_PREVIEW_FRAME_RATE_MODE);
+}
+
+/*===========================================================================
+ * FUNCTION   : setTouchIndexAec
+ *
+ * DESCRIPTION: set touch index AEC
+ *
+ * PARAMETERS :
+ *   @x,y     : touch index coordinates for AEC
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setTouchIndexAec(int x, int y)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%dx%d", x, y);
+    set(KEY_QC_TOUCH_INDEX_AEC, str);
+}
+
+/*===========================================================================
+ * FUNCTION   : getTouchIndexAec
+ *
+ * DESCRIPTION: get touch index AEC
+ *
+ * PARAMETERS :
+ *   @x,y     : [output] ptrs to return the AEC touch index coordinates
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getTouchIndexAec(int *x, int *y)
+{
+    *x = -1;
+    *y = -1;
+
+    // Get the current string; if it doesn't exist, leave the coordinates at -1x-1
+    const char *p = get(KEY_QC_TOUCH_INDEX_AEC);
+    if (p == 0)
+        return;
+
+    int tempX, tempY;
+    if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+        *x = tempX;
+        *y = tempY;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setTouchIndexAf
+ *
+ * DESCRIPTION: set touch index AF
+ *
+ * PARAMETERS :
+ *   @x,y     : touch index coordinates for AF
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::setTouchIndexAf(int x, int y)
+{
+    char str[32];
+    snprintf(str, sizeof(str), "%dx%d", x, y);
+    set(KEY_QC_TOUCH_INDEX_AF, str);
+}
+
+/*===========================================================================
+ * FUNCTION   : getTouchIndexAf
+ *
+ * DESCRIPTION: get touch index AF
+ *
+ * PARAMETERS :
+ *   @x,y     : [output] ptrs to return the AF touch index coordinates
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getTouchIndexAf(int *x, int *y)
+{
+    *x = -1;
+    *y = -1;
+
+    // Get the current string; if it doesn't exist, leave the coordinates at -1x-1
+    const char *p = get(KEY_QC_TOUCH_INDEX_AF);
+    if (p == 0)
+        return;
+
+    int tempX, tempY;
+    if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+        *x = tempX;
+        *y = tempY;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamFormat
+ *
+ * DESCRIPTION: get stream format by its type
+ *
+ * PARAMETERS :
+ *   @streamType : [input] stream type
+ *   @format     : [output] stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamFormat(cam_stream_type_t streamType,
+                                            cam_format_t &format)
+{
+    int32_t ret = NO_ERROR;
+
+    format = CAM_FORMAT_MAX;
+    switch (streamType) {
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_POSTVIEW:
+        format = mPreviewFormat;
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+        if ( mPictureFormat == CAM_FORMAT_YUV_422_NV16 ) {
+            format = CAM_FORMAT_YUV_422_NV16;
+        } else {
+            char prop[PROPERTY_VALUE_MAX];
+            int snapshotFormat;
+            memset(prop, 0, sizeof(prop));
+            property_get("persist.camera.snap.format", prop, "0");
+            snapshotFormat = atoi(prop);
+            if(snapshotFormat == 1) {
+                format = CAM_FORMAT_YUV_422_NV61;
+            } else {
+                format = CAM_FORMAT_YUV_420_NV21;
+            }
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        format = CAM_FORMAT_YUV_420_NV12;
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        if (mPictureFormat >= CAM_FORMAT_YUV_RAW_8BIT_YUYV) {
+            format = (cam_format_t)mPictureFormat;
+        } else {
+            ALOGE("%s: invalid raw picture format: %d", __func__, mPictureFormat);
+            ret = BAD_VALUE;
+        }
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        break;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFlipMode
+ *
+ * DESCRIPTION: get flip mode
+ *
+ * PARAMETERS :
+ *   @type : [input] stream type
+ *
+ * RETURN     : int type of flip mode
+ *              0 - no flip
+ *              1 - FLIP_H
+ *              2 - FLIP_V
+ *              3 - FLIP_H | FLIP_V
+ *==========================================================================*/
+int QCameraParameters::getFlipMode(cam_stream_type_t type)
+{
+    const char *str = NULL;
+    int flipMode = 0; // no flip
+
+    switch(type){
+    case CAM_STREAM_TYPE_PREVIEW:
+        str = get(KEY_QC_PREVIEW_FLIP);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        str = get(KEY_QC_VIDEO_FLIP);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+        str = get(KEY_QC_SNAPSHOT_PICTURE_FLIP);
+        break;
+    default:
+        ALOGI("%s: No flip mode for stream type %d", __func__, type);
+        break;
+    }
+
+    if (str != NULL) {
+        // Map the flip mode string to the corresponding flip value
+        int value = lookupAttr(FLIP_MODES_MAP,
+                sizeof(FLIP_MODES_MAP)/sizeof(QCameraMap), str);
+        if (value != NAME_NOT_FOUND) {
+            flipMode = value;
+        }
+    }
+
+    ALOGD("%s: the flip mode of stream type %d is %d.", __func__, type, flipMode);
+    return flipMode;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamDimension
+ *
+ * DESCRIPTION: get stream dimension by its type
+ *
+ * PARAMETERS :
+ *   @streamType : [input] stream type
+ *   @dim        : [output] stream dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getStreamDimension(cam_stream_type_t streamType,
+                                               cam_dimension_t &dim)
+{
+    int32_t ret = NO_ERROR;
+    memset(&dim, 0, sizeof(cam_dimension_t));
+
+    switch (streamType) {
+    case CAM_STREAM_TYPE_PREVIEW:
+        getPreviewSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_POSTVIEW:
+        getPreviewSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+        if (getRecordingHintValue() == true) {
+            // live snapshot
+            getLiveSnapshotSize(dim);
+        } else {
+            getPictureSize(&dim.width, &dim.height);
+        }
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        getVideoSize(&dim.width, &dim.height);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        dim = m_pCapability->raw_dim[0];
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        dim.width = sizeof(cam_metadata_info_t);
+        dim.height = 1;
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        break;
+    case CAM_STREAM_TYPE_DEFAULT:
+    default:
+        ALOGE("%s: no dimension for unsupported stream type %d",
+              __func__, streamType);
+        ret = BAD_VALUE;
+        break;
+    }
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPreviewHalPixelFormat
+ *
+ * DESCRIPTION: get preview HAL pixel format
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : HAL pixel format
+ *==========================================================================*/
+int QCameraParameters::getPreviewHalPixelFormat() const
+{
+    int32_t halPixelFormat;
+
+    switch (mPreviewFormat) {
+    case CAM_FORMAT_YUV_420_NV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+    default:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    }
+    ALOGE("%s: format %d\n", __func__, halPixelFormat);
+    return halPixelFormat;
+}
+
+/*===========================================================================
+ * FUNCTION   : getThumbnailSize
+ *
+ * DESCRIPTION: get thumbnail size
+ *
+ * PARAMETERS :
+ *   @width, height : [output] thumbnail width and height
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraParameters::getThumbnailSize(int *width, int *height) const
+{
+    *width = getInt(KEY_JPEG_THUMBNAIL_WIDTH);
+    *height = getInt(KEY_JPEG_THUMBNAIL_HEIGHT);
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLBurstInterval
+ *
+ * DESCRIPTION: get ZSL burst interval setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL burst interval value
+ *==========================================================================*/
+int QCameraParameters::getZSLBurstInterval()
+{
+    int interval = getInt(KEY_QC_ZSL_BURST_INTERVAL);
+    if (interval < 0) {
+        interval = 1;
+    }
+    return interval;
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLQueueDepth
+ *
+ * DESCRIPTION: get ZSL queue depth
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL queue depth value
+ *==========================================================================*/
+int QCameraParameters::getZSLQueueDepth()
+{
+    int qdepth = getInt(KEY_QC_ZSL_QUEUE_DEPTH);
+    if (qdepth < 0) {
+        qdepth = 2;
+    }
+    return qdepth;
+}
+
+/*===========================================================================
+ * FUNCTION   : getZSLBackLookCount
+ *
+ * DESCRIPTION: get ZSL backlook count setting
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ZSL backlook count value
+ *==========================================================================*/
+int QCameraParameters::getZSLBackLookCount()
+{
+    int look_back = getInt(KEY_QC_ZSL_BURST_LOOKBACK);
+    if (look_back < 0) {
+        look_back = 2;
+    }
+    return look_back;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMaxUnmatchedFramesInQueue
+ *
+ * DESCRIPTION: get the max number of unmatched frames allowed in the ZSL queue
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : max number of unmatched frames allowed in the queue
+ *==========================================================================*/
+int QCameraParameters::getMaxUnmatchedFramesInQueue()
+{
+    return m_pCapability->min_num_pp_bufs;
+}
+
+/*===========================================================================
+ * FUNCTION   : setRecordingHintValue
+ *
+ * DESCRIPTION: set recording hint
+ *
+ * PARAMETERS :
+ *   @value   : video hint value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraParameters::setRecordingHintValue(int32_t value)
+{
+    ALOGD("%s: VideoHint = %d", __func__, value);
+    bool newValue = (value > 0)? true : false;
+
+    if ( m_bRecordingHint != newValue ) {
+        m_bNeedRestart = true;
+        m_bRecordingHint_new = newValue;
+    } else {
+        m_bRecordingHint_new = m_bRecordingHint;
+    }
+    return AddSetParmEntryToBatch(m_pParamBuf,
+                                  CAM_INTF_PARM_RECORDING_HINT,
+                                  sizeof(value),
+                                  &value);
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfSnapshots
+ *
+ * DESCRIPTION: get number of snapshot per shutter
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of snapshot per shutter
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfSnapshots()
+{
+    int numOfSnapshot = getInt(KEY_QC_NUM_SNAPSHOT_PER_SHUTTER);
+    if (numOfSnapshot <= 0) {
+        numOfSnapshot = 1; // set to default value
+    }
+    return (uint8_t)numOfSnapshot;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfExtraHDRBufsIfNeeded
+ *
+ * DESCRIPTION: get number of extra buffers needed by HDR if HDR is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of extra buffers needed by HDR; 0 if HDR is not enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfExtraHDRBufsIfNeeded()
+{
+    uint8_t numOfBufs = 0;
+    const char *scene_mode = get(KEY_SCENE_MODE);
+    if (scene_mode != NULL && strcmp(scene_mode, SCENE_MODE_HDR) == 0) {
+        // HDR mode
+        numOfBufs = getBurstNum() * m_pCapability->min_num_hdr_bufs;
+    }
+    return numOfBufs;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumOfHDRBufsIfNeeded
+ *
+ * DESCRIPTION: get number of buffers needed by HDR if HDR is enabled
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers needed by HDR; 0 if HDR is not enabled
+ *==========================================================================*/
+uint8_t QCameraParameters::getNumOfHDRBufsIfNeeded()
+{
+    uint8_t numOfBufs = 0;
+    const char *scene_mode = get(KEY_SCENE_MODE);
+    if (scene_mode != NULL && strcmp(scene_mode, SCENE_MODE_HDR) == 0) {
+        // HDR mode
+        const char *need_hdr_1x = get(KEY_QC_HDR_NEED_1X);
+        if (need_hdr_1x != NULL && strcmp(need_hdr_1x, VALUE_TRUE) == 0) {
+            numOfBufs = 2; // HDR needs both 1X and processed img
+        } else {
+            numOfBufs = 1; // HDR only needs processed img
+        }
+
+        numOfBufs += m_pCapability->min_num_hdr_bufs;
+    }
+    return numOfBufs;
+}
+
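+// Worked example (illustrative numbers): with scene mode set to "hdr",
+// hdr-need-1x set to "true" and min_num_hdr_bufs assumed to be 3 in the
+// capability data, this returns 2 (1X frame + processed frame) + 3 = 5;
+// outside of HDR scene mode it always returns 0.
+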
+/*===========================================================================
+ * FUNCTION   : getBurstNum
+ *
+ * DESCRIPTION: get burst number of snapshot
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of burst
+ *==========================================================================*/
+int QCameraParameters::getBurstNum()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    int nBurstNum = atoi(prop);
+    if (nBurstNum > 0) {
+        ALOGD("%s: Reading burst number = %d from properties",
+              __func__, nBurstNum);
+    } else {
+        nBurstNum = 1;
+    }
+    return nBurstNum;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegQuality
+ *
+ * DESCRIPTION: get jpeg encoding quality
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : jpeg encoding quality
+ *==========================================================================*/
+int QCameraParameters::getJpegQuality()
+{
+    int quality = getInt(KEY_JPEG_QUALITY);
+    if (quality < 0) {
+        quality = 85; // set to default quality value
+    }
+    return quality;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegRotation
+ *
+ * DESCRIPTION: get rotation value
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : rotation value
+ *==========================================================================*/
+int QCameraParameters::getJpegRotation() {
+    int rotation = getInt(KEY_ROTATION);
+    if (rotation < 0) {
+        rotation = 0;
+    }
+    return rotation;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseGPSCoordinate
+ *
+ * DESCRIPTION: parse GPS coordinate string
+ *
+ * PARAMETERS :
+ *   @coord_str : [input] coordinate string
+ *   @coord     : [output]  ptr to struct to store coordinate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCameraParameters::parseGPSCoordinate(const char *coord_str, rat_t* coord)
+{
+    if(coord == NULL) {
+        ALOGE("%s: error, invalid argument coord == NULL", __func__);
+        return BAD_VALUE;
+    }
+    float degF = atof(coord_str);
+    if (degF < 0) {
+        degF = -degF;
+    }
+    float minF = (degF - (int) degF) * 60;
+    float secF = (minF - (int) minF) * 60;
+
+    getRational(&coord[0], (int)degF, 1);
+    getRational(&coord[1], (int)minF, 1);
+    getRational(&coord[2], (int)(secF * 10000), 10000);
+    return NO_ERROR;
+}
+
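+// Worked example (illustrative input): a coord_str of "37.7749" is split as
+//   degrees = 37/1, minutes = 46/1, seconds ~= 29.64 (stored as x/10000),
+// i.e. the fractional degrees are folded into minutes/seconds; the sign is
+// dropped here because the N/S (or E/W) reference is stored separately.
+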
+/*===========================================================================
+ * FUNCTION   : getExifDateTime
+ *
+ * DESCRIPTION: query exif date time
+ *
+ * PARAMETERS :
+ *   @dateTime : string to store exif date time
+ *   @count    : length of the dateTime string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifDateTime(char *dateTime, uint32_t &count)
+{
+    //get time and date from system
+    time_t rawtime;
+    struct tm * timeinfo = NULL;
+    memset(&rawtime, 0, sizeof(rawtime));
+    time(&rawtime);
+    timeinfo = localtime (&rawtime);
+    if (timeinfo != NULL && count >= 20) {
+        //Write datetime according to EXIF Spec
+        //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
+        snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
+                 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+                 timeinfo->tm_mday, timeinfo->tm_hour,
+                 timeinfo->tm_min, timeinfo->tm_sec);
+        count = 20;
+        return NO_ERROR;
+    }
+    return UNKNOWN_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getRational
+ *
+ * DESCRIPTION: compose rational struct
+ *
+ * PARAMETERS :
+ *   @rat     : ptr to struct to store rational info
+ *   @num     : numerator of the rational
+ *   @denom   : denominator of the rational
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getRational(rat_t *rat, int num, int denom)
+{
+    if (NULL == rat) {
+        ALOGE("%s: NULL rat input", __func__);
+        return BAD_VALUE;
+    }
+    rat->num = num;
+    rat->denom = denom;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifFocalLength
+ *
+ * DESCRIPTION: get exif focal length
+ *
+ * PARAMETERS :
+ *   @focalLength : ptr to rational struct to store focal length
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifFocalLength(rat_t *focalLength)
+{
+    int focalLengthValue =
+        (int)(getFloat(QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISION);
+    return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifIsoSpeed
+ *
+ * DESCRIPTION: get exif ISO speed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : ISO speed value
+ *==========================================================================*/
+uint16_t QCameraParameters::getExifIsoSpeed()
+{
+    uint16_t isoSpeed = 0;
+    const char *iso_str = get(QCameraParameters::KEY_QC_ISO_MODE);
+    int iso_index = lookupAttr(ISO_MODES_MAP,
+        sizeof(ISO_MODES_MAP)/sizeof(ISO_MODES_MAP[0]), iso_str);
+    switch (iso_index) {
+    case CAM_ISO_MODE_AUTO:
+        isoSpeed = 0;
+        break;
+    case CAM_ISO_MODE_DEBLUR:
+        isoSpeed = 1;
+        break;
+    case CAM_ISO_MODE_100:
+        isoSpeed = 100;
+        break;
+    case CAM_ISO_MODE_200:
+        isoSpeed = 200;
+        break;
+    case CAM_ISO_MODE_400:
+        isoSpeed = 400;
+        break;
+    case CAM_ISO_MODE_800:
+        isoSpeed = 800;
+        break;
+    case CAM_ISO_MODE_1600:
+        isoSpeed = 1600;
+        break;
+    }
+    return isoSpeed;
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsProcessingMethod
+ *
+ * DESCRIPTION: get GPS processing method
+ *
+ * PARAMETERS :
+ *   @gpsProcessingMethod : string to store GPS process method
+ *   @count               : length of the string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsProcessingMethod(char *gpsProcessingMethod,
+                                                      uint32_t &count)
+{
+    const char *str = get(KEY_GPS_PROCESSING_METHOD);
+    if(str != NULL) {
+        memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        count = EXIF_ASCII_PREFIX_SIZE;
+        strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str, strlen(str));
+        count += strlen(str);
+        gpsProcessingMethod[count++] = '\0'; // account for the terminating NUL char
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLatitude
+ *
+ * DESCRIPTION: get exif latitude
+ *
+ * PARAMETERS :
+ *   @latitude : ptr to rational struct to store latitude info
+ *   @latRef   : character to indicate latitude reference
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLatitude(rat_t *latitude,
+                                           char *latRef)
+{
+    const char *str = get(KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, latitude);
+
+        //set Latitude Ref
+        float latitudeValue = getFloat(KEY_GPS_LATITUDE);
+        if(latitudeValue < 0.0f) {
+            latRef[0] = 'S';
+        } else {
+            latRef[0] = 'N';
+        }
+        latRef[1] = '\0';
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLongitude
+ *
+ * DESCRIPTION: get exif longitude
+ *
+ * PARAMETERS :
+ *   @longitude : ptr to rational struct to store longitude info
+ *   @lonRef    : character to indicate longitude reference
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifLongitude(rat_t *longitude,
+                                            char *lonRef)
+{
+    const char *str = get(KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, longitude);
+
+        //set Longitude Ref
+        float longitudeValue = getFloat(KEY_GPS_LONGITUDE);
+        if(longitudeValue < 0.0f) {
+            lonRef[0] = 'W';
+        } else {
+            lonRef[0] = 'E';
+        }
+        lonRef[1] = '\0';
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifAltitude
+ *
+ * DESCRIPTION: get exif altitude
+ *
+ * PARAMETERS :
+ *   @altitude : ptr to rational struct to store altitude info
+ *   @altRef   : character to indicate altitude reference
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifAltitude(rat_t *altitude,
+                                           char *altRef)
+{
+    const char *str = get(KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        *altRef = 0;
+        if(value < 0){
+            *altRef = 1;
+            value = -value;
+        }
+        return getRational(altitude, value*1000, 1000);
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsDateTimeStamp
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ *   @gpsDateStamp : GPS date time stamp string
+ *   @bufLen       : length of the string
+ *   @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::getExifGpsDateTimeStamp(char *gpsDateStamp,
+                                                   uint32_t bufLen,
+                                                   rat_t *gpsTimeStamp)
+{
+    const char *str = get(KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+        time_t unixTime = (time_t)atol(str);
+        struct tm *UTCTimestamp = gmtime(&unixTime);
+
+        strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
+
+        getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
+        getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
+        getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
+
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
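+// Worked example (illustrative input): a KEY_GPS_TIMESTAMP value of
+// "1388534400" (2014-01-01 00:00:00 UTC) yields gpsDateStamp "2014:01:01"
+// and gpsTimeStamp rationals 0/1, 0/1, 0/1 for hour, minute and second.
+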
+/*===========================================================================
+ * FUNCTION   : updateFocusDistances
+ *
+ * DESCRIPTION: update focus distances
+ *
+ * PARAMETERS :
+ *   @focusDistances : ptr to focus distance info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateFocusDistances(cam_focus_distances_info_t *focusDistances)
+{
+    String8 str;
+    char buffer[32] = {0};
+    //set all distances to infinity if focus mode is infinity
+    if(mFocusMode == CAM_FOCUS_MODE_INFINITY) {
+        str.append("Infinity,Infinity,Infinity");
+    } else {
+        snprintf(buffer, sizeof(buffer), "%f", focusDistances->focus_distance[0]);
+        str.append(buffer);
+        snprintf(buffer, sizeof(buffer), ",%f", focusDistances->focus_distance[1]);
+        str.append(buffer);
+        snprintf(buffer, sizeof(buffer), ",%f", focusDistances->focus_distance[2]);
+        str.append(buffer);
+    }
+    ALOGD("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+    set(QCameraParameters::KEY_FOCUS_DISTANCES, str.string());
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : updateRecordingHintValue
+ *
+ * DESCRIPTION: update recording hint locally and to daemon
+ *
+ * PARAMETERS :
+ *   @value   : video hint value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateRecordingHintValue(int32_t value)
+{
+    int32_t rc = NO_ERROR;
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = setRecordingHintValue(value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update recording hint", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setHistogram
+ *
+ * DESCRIPTION: set histogram
+ *
+ * PARAMETERS :
+ *   @enabled : if histogram is enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setHistogram(bool enabled)
+{
+    if(m_bHistogramEnabled == enabled) {
+        ALOGD("%s: histogram flag not changed, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    // set parm for histogram
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    int32_t value = enabled;
+    int32_t rc = NO_ERROR;
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_HISTOGRAM,
+                                sizeof(value),
+                                &value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set histogram", __func__);
+        return rc;
+    }
+
+    m_bHistogramEnabled = enabled;
+
+    ALOGD(" Histogram -> %s", m_bHistogramEnabled ? "Enabled" : "Disabled");
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setFaceDetection
+ *
+ * DESCRIPTION: set face detection
+ *
+ * PARAMETERS :
+ *   @enabled : if face detection is enabled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFaceDetection(bool enabled)
+{
+    int faceProcMask = m_nFaceProcMask;
+    // set face detection mask
+    if (enabled) {
+        faceProcMask |= CAM_FACE_PROCESS_MASK_DETECTION;
+    } else {
+        faceProcMask &= ~CAM_FACE_PROCESS_MASK_DETECTION;
+    }
+
+    if(m_nFaceProcMask == faceProcMask) {
+        ALOGD("%s: face process mask not changed, no ops here", __func__);
+        return NO_ERROR;
+    }
+
+    // set parm for face detection
+    int requested_faces = getInt(KEY_QC_MAX_NUM_REQUESTED_FACES);
+    cam_fd_set_parm_t fd_set_parm;
+    memset(&fd_set_parm, 0, sizeof(cam_fd_set_parm_t));
+    fd_set_parm.fd_mode = faceProcMask;
+    fd_set_parm.num_fd = requested_faces;
+
+    ALOGD("[KPI Perf] %s: Face detection value = %d num_fd = %d",
+          __func__, faceProcMask,requested_faces);
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+    int32_t rc = NO_ERROR;
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_FD,
+                                sizeof(fd_set_parm),
+                                &fd_set_parm);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set face detection parm", __func__);
+        return rc;
+    }
+
+    m_nFaceProcMask = faceProcMask;
+    ALOGD("%s: FaceProcMask -> %d", __func__, m_nFaceProcMask);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setLockCAF
+ *
+ * DESCRIPTION: Lock CAF
+ *
+ * PARAMETERS :
+ *   @bLock : if CAF needs to be locked
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setLockCAF(bool bLock)
+{
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+    int32_t rc = NO_ERROR;
+    int32_t value = bLock;
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_LOCK_CAF,
+                                sizeof(value),
+                                &value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set lock CAF parm", __func__);
+        return rc;
+    } else {
+        m_bCAFLocked = bLock;
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : setFrameSkip
+ *
+ * DESCRIPTION: send ISP frame skip pattern to camera daemon
+ *
+ * PARAMETERS :
+ *   @pattern : skip pattern for ISP
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::setFrameSkip(enum msm_vfe_frame_skip_pattern pattern)
+{
+    int32_t rc = NO_ERROR;
+    int32_t value = (int32_t)pattern;
+
+    if ( m_pParamBuf == NULL ) {
+        return NO_INIT;
+    }
+
+    if(initBatchUpdate(m_pParamBuf) < 0 ) {
+        ALOGE("%s:Failed to initialize group update table", __func__);
+        return BAD_TYPE;
+    }
+
+    rc = AddSetParmEntryToBatch(m_pParamBuf,
+                                CAM_INTF_PARM_FRAMESKIP,
+                                sizeof(value),
+                                &value);
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to update table", __func__);
+        return rc;
+    }
+
+    rc = commitSetBatch();
+    if (rc != NO_ERROR) {
+        ALOGE("%s:Failed to set frameskip info parm", __func__);
+        return rc;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseNDimVector
+ *
+ * DESCRIPTION: helper function to parse a string like "(1, 2, 3, 4, ..., N)"
+ *              into N-dimension vector
+ *
+ * PARAMETERS :
+ *   @str     : string to be parsed
+ *   @num     : output array of size N to store vector element values
+ *   @N       : number of dimension
+ *   @delim   : delimiter used to separate values in the string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if (num == NULL) {
+        ALOGE("%s: Invalid output array (num == NULL)", __func__);
+        return BAD_VALUE;
+    }
+
+    //check if string starts and ends with parentheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        ALOGE("%s: Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)",
+              __func__, str);
+        return BAD_VALUE;
+    }
+    start = (char*) str;
+    start++;
+    for(int i=0; i<N; i++) {
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            ALOGE("%s: Cannot find delimiter '%c' in string \"%s\". end = %c",
+                  __func__, delim, str, *end);
+            return -1;
+        }
+        start = end+1;
+    }
+    return NO_ERROR;
+}
+
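+// Worked example (illustrative input): parseNDimVector("(100, 200, 300, 400, 1)",
+// values, 5) fills values with {100, 200, 300, 400, 1}; the string must be
+// wrapped in parentheses and use the given delimiter (',' by default).
+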
+/*===========================================================================
+ * FUNCTION   : parseCameraAreaString
+ *
+ * DESCRIPTION: helper function to parse a string of camera areas like
+ *              "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+ *
+ * PARAMETERS :
+ *   @str             : string to be parsed
+ *   @max_num_areas   : max number of areas
+ *   @pAreas          : ptr to struct to store areas
+ *   @num_areas_found : number of areas found
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::parseCameraAreaString(const char *str,
+                                                 int max_num_areas,
+                                                 cam_area_t *pAreas,
+                                                 int& num_areas_found)
+{
+    char area_str[32];
+    const char *start, *end, *p;
+    start = str; end = NULL;
+    int values[5], index=0;
+    num_areas_found = 0;
+
+    memset(values, 0, sizeof(values));
+    while(start != NULL) {
+       if(*start != '(') {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return BAD_VALUE;
+       }
+       end = strchr(start, ')');
+       if(end == NULL) {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return BAD_VALUE;
+       }
+       int i;
+       for (i=0,p=start; p<=end; p++, i++) {
+           area_str[i] = *p;
+       }
+       area_str[i] = '\0';
+       if(parseNDimVector(area_str, values, 5) < 0){
+            ALOGE("%s: error: Failed to parse the area string: %s", __func__, area_str);
+            return BAD_VALUE;
+       }
+       // no more areas than max_num_areas are accepted.
+       if(index >= max_num_areas) {
+            ALOGE("%s: error: too many areas specified %s", __func__, str);
+            return BAD_VALUE;
+       }
+       pAreas[index].rect.left = values[0];
+       pAreas[index].rect.top = values[1];
+       pAreas[index].rect.width = values[2] - values[0];
+       pAreas[index].rect.height = values[3] - values[1];
+       pAreas[index].weight = values[4];
+
+       index++;
+       start = strchr(end, '('); // search for next '('
+    }
+    num_areas_found = index;
+    return 0;
+}
+
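+// Worked example (illustrative input): the area string "(-200, -200, 200, 200, 500)"
+// produces one area with rect.left = -200, rect.top = -200, rect.width = 400,
+// rect.height = 400 and weight = 500, i.e. the (left, top, right, bottom, weight)
+// tuple supplied by the app is converted to a left/top/width/height rectangle.
+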
+/*===========================================================================
+ * FUNCTION   : validateCameraAreas
+ *
+ * DESCRIPTION: helper function to validate camera areas within [-1000, 1000]
+ *
+ * PARAMETERS :
+ *   @areas     : ptr to array of areas
+ *   @num_areas : number of areas
+ *
+ * RETURN     : true --  area is in valid range
+ *              false -- not valid
+ *==========================================================================*/
+bool QCameraParameters::validateCameraAreas(cam_area_t *areas, int num_areas)
+{
+    // special case: default area
+    if (num_areas == 1 &&
+        areas[0].rect.left == 0 &&
+        areas[0].rect.top == 0 &&
+        areas[0].rect.width == 0 &&
+        areas[0].rect.height == 0 &&
+        areas[0].weight == 0) {
+        return true;
+    }
+
+    for(int i = 0; i < num_areas; i++) {
+        // left should be >= -1000
+        if(areas[i].rect.left < -1000) {
+            return false;
+        }
+
+        // top  should be >= -1000
+        if(areas[i].rect.top < -1000) {
+            return false;
+        }
+
+        // width or height should be > 0
+        if (areas[i].rect.width <= 0 || areas[i].rect.height <= 0) {
+            return false;
+        }
+
+        // right  should be <= 1000
+        if(areas[i].rect.left + areas[i].rect.width > 1000) {
+            return false;
+        }
+
+        // bottom should be <= 1000
+        if(areas[i].rect.top + areas[i].rect.height > 1000) {
+            return false;
+        }
+
+        // weight should be within [1, 1000]
+        if (areas[i].weight < 1 || areas[i].weight > 1000) {
+            return false;
+        }
+    }
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : initBatchUpdate
+ *
+ * DESCRIPTION: init camera parameters buf entries
+ *
+ * PARAMETERS :
+ *   @p_table : ptr to parameter buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::initBatchUpdate(parm_buffer_t *p_table)
+{
+    int32_t hal_version = CAM_HAL_V1;
+    m_tempMap.clear();
+
+    memset(p_table, 0, sizeof(parm_buffer_t));
+    p_table->first_flagged_entry = CAM_INTF_PARM_MAX;
+    AddSetParmEntryToBatch(p_table, CAM_INTF_PARM_HAL_VERSION,
+                sizeof(hal_version), &hal_version);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : AddSetParmEntryToBatch
+ *
+ * DESCRIPTION: add set parameter entry into batch
+ *
+ * PARAMETERS :
+ *   @p_table     : ptr to parameter buffer
+ *   @paramType   : parameter type
+ *   @paramLength : length of parameter value
+ *   @paramValue  : ptr to parameter value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::AddSetParmEntryToBatch(parm_buffer_t *p_table,
+                                                  cam_intf_parm_type_t paramType,
+                                                  uint32_t paramLength,
+                                                  void *paramValue)
+{
+    int position = paramType;
+    int current, next;
+
+    /*************************************************************************
+    *                 Code to take care of linking next flags                *
+    *************************************************************************/
+    current = GET_FIRST_PARAM_ID(p_table);
+    if (position == current){
+        //DO NOTHING
+    } else if (position < current){
+        SET_NEXT_PARAM_ID(position, p_table, current);
+        SET_FIRST_PARAM_ID(p_table, position);
+    } else {
+        /* Search for the position in the linked list where we need to slot in*/
+        while (position > GET_NEXT_PARAM_ID(current, p_table))
+            current = GET_NEXT_PARAM_ID(current, p_table);
+
+        /*If node already exists no need to alter linking*/
+        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
+            next = GET_NEXT_PARAM_ID(current, p_table);
+            SET_NEXT_PARAM_ID(current, p_table, position);
+            SET_NEXT_PARAM_ID(position, p_table, next);
+        }
+    }
+
+    /*************************************************************************
+    *                   Copy contents into entry                             *
+    *************************************************************************/
+
+    if (paramLength > sizeof(parm_type_t)) {
+        ALOGE("%s:Size of input larger than max entry size",__func__);
+        return BAD_VALUE;
+    }
+    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
+    return NO_ERROR;
+}
+
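+// Note (illustrative): p_table acts as a sparse array plus a singly linked list
+// ordered by parameter ID; a new ID is spliced in before, between or after the
+// already-flagged entries, so commitSetBatch() only walks the parameters that
+// were actually set, in ascending ID order.
+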
+/*===========================================================================
+ * FUNCTION   : AddGetParmEntryToBatch
+ *
+ * DESCRIPTION: add get parameter entry into batch
+ *
+ * PARAMETERS :
+ *   @p_table     : ptr to parameter buffer
+ *   @paramType   : parameter type
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::AddGetParmEntryToBatch(parm_buffer_t *p_table,
+                                                  cam_intf_parm_type_t paramType)
+{
+    int position = paramType;
+    int current, next;
+
+    /*************************************************************************
+    *                 Code to take care of linking next flags                *
+    *************************************************************************/
+    current = GET_FIRST_PARAM_ID(p_table);
+    if (position == current){
+        //DO NOTHING
+    } else if (position < current){
+        SET_NEXT_PARAM_ID(position, p_table, current);
+        SET_FIRST_PARAM_ID(p_table, position);
+    } else {
+        /* Search for the position in the linked list where we need to slot in*/
+        while (position > GET_NEXT_PARAM_ID(current, p_table))
+            current = GET_NEXT_PARAM_ID(current, p_table);
+
+        /*If node already exists no need to alter linking*/
+        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
+            next=GET_NEXT_PARAM_ID(current, p_table);
+            SET_NEXT_PARAM_ID(current, p_table, position);
+            SET_NEXT_PARAM_ID(position, p_table, next);
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitSetBatch
+ *
+ * DESCRIPTION: commit all set parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitSetBatch()
+{
+    int32_t rc = NO_ERROR;
+    if (m_pParamBuf->first_flagged_entry < CAM_INTF_PARM_MAX) {
+        rc = m_pCamOpsTbl->ops->set_parms(m_pCamOpsTbl->camera_handle, m_pParamBuf);
+    }
+    if (rc == NO_ERROR) {
+        // commit change from temp storage into param map
+        rc = commitParamChanges();
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitGetBatch
+ *
+ * DESCRIPTION: commit all get parameters in the batch work to backend
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitGetBatch()
+{
+    if (m_pParamBuf->first_flagged_entry < CAM_INTF_PARM_MAX) {
+        return m_pCamOpsTbl->ops->get_parms(m_pCamOpsTbl->camera_handle, m_pParamBuf);
+    } else {
+        return NO_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : updateParamEntry
+ *
+ * DESCRIPTION: update a parameter entry in the local temp map obj
+ *
+ * PARAMETERS :
+ *   @key     : key of the entry
+ *   @value   : value of the entry
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::updateParamEntry(const char *key, const char *value)
+{
+    m_tempMap.replaceValueFor(String8(key), String8(value));
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : commitParamChanges
+ *
+ * DESCRIPTION: commit all changes in local temp map obj into parameter obj
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraParameters::commitParamChanges()
+{
+    size_t size = m_tempMap.size();
+    for (size_t i = 0; i < size; i++) {
+        String8 k, v;
+        k = m_tempMap.keyAt(i);
+        v = m_tempMap.valueAt(i);
+        set(k, v);
+    }
+    m_tempMap.clear();
+
+    // update local changes
+    m_bRecordingHint = m_bRecordingHint_new;
+    m_bZslMode = m_bZslMode_new;
+
+    return NO_ERROR;
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraParameters.h b/camera/QCamera2/HAL/QCameraParameters.h
new file mode 100644
index 0000000..e41db78
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraParameters.h
@@ -0,0 +1,596 @@
+/*
+**
+** Copyright 2008, The Android Open Source Project
+** Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+** Not a Contribution. Apache license notifications and license are
+** retained for attribution purposes only.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+#include <camera/CameraParameters.h>
+#include <cutils/properties.h>
+#include <hardware/camera.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+#include "cam_intf.h"
+#include "QCameraMem.h"
+#include "QCameraThermalAdapter.h"
+
+extern "C" {
+#include <mm_jpeg_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
+
+#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
+#define FOCAL_LENGTH_DECIMAL_PRECISION   100
+
+class QCameraParameters: public CameraParameters
+{
+public:
+    QCameraParameters();
+    QCameraParameters(const String8 &params);
+    ~QCameraParameters();
+
+    // Supported preview/recording sizes for high frame rate recording, in pixels.
+    // Example value: "800x480,432x320". Read only.
+    static const char KEY_QC_SUPPORTED_HFR_SIZES[];
+    // The mode of preview frame rate.
+    // Example value: "frame-rate-auto, frame-rate-fixed".
+    static const char KEY_QC_PREVIEW_FRAME_RATE_MODE[];
+    static const char KEY_QC_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+    static const char KEY_QC_PREVIEW_FRAME_RATE_AUTO_MODE[];
+    static const char KEY_QC_PREVIEW_FRAME_RATE_FIXED_MODE[];
+    static const char KEY_QC_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[];
+
+    // Supported live snapshot sizes
+    static const char KEY_QC_SUPPORTED_LIVESNAPSHOT_SIZES[];
+
+    // Supported Raw formats
+    static const char KEY_QC_SUPPORTED_RAW_FORMATS[];
+    static const char KEY_QC_RAW_FORMAT[];
+
+    //Touch Af/AEC settings.
+    static const char KEY_QC_TOUCH_AF_AEC[];
+    static const char KEY_QC_SUPPORTED_TOUCH_AF_AEC[];
+    //Touch Index for AEC.
+    static const char KEY_QC_TOUCH_INDEX_AEC[];
+    //Touch Index for AF.
+    static const char KEY_QC_TOUCH_INDEX_AF[];
+    // Current auto scene detection mode.
+    // Example value: "off" or "on" constants. Read/write.
+    static const char KEY_QC_SCENE_DETECT[];
+    // Supported auto scene detection settings.
+    // Example value: "off,on". Read only.
+    static const char KEY_QC_SUPPORTED_SCENE_DETECT[];
+    static const char KEY_QC_SELECTABLE_ZONE_AF[];
+
+    static const char KEY_QC_ISO_MODE[];
+    static const char KEY_QC_SUPPORTED_ISO_MODES[];
+    static const char KEY_QC_LENSSHADE[];
+    static const char KEY_QC_SUPPORTED_LENSSHADE_MODES[];
+    static const char KEY_QC_AUTO_EXPOSURE[];
+    static const char KEY_QC_SUPPORTED_AUTO_EXPOSURE[];
+
+    static const char KEY_QC_GPS_LATITUDE_REF[];
+    static const char KEY_QC_GPS_LONGITUDE_REF[];
+    static const char KEY_QC_GPS_ALTITUDE_REF[];
+    static const char KEY_QC_GPS_STATUS[];
+    static const char KEY_QC_MEMORY_COLOR_ENHANCEMENT[];
+    static const char KEY_QC_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+    static const char KEY_QC_DIS[];
+    static const char KEY_QC_SUPPORTED_DIS_MODES[];
+
+    static const char KEY_QC_ZSL[];
+    static const char KEY_QC_SUPPORTED_ZSL_MODES[];
+    static const char KEY_QC_ZSL_BURST_INTERVAL[];
+    static const char KEY_QC_ZSL_BURST_LOOKBACK[];
+    static const char KEY_QC_ZSL_QUEUE_DEPTH[];
+
+    static const char KEY_QC_CAMERA_MODE[];
+    static const char KEY_QC_ORIENTATION[];
+
+    static const char KEY_QC_VIDEO_HIGH_FRAME_RATE[];
+    static const char KEY_QC_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+    static const char KEY_QC_HIGH_DYNAMIC_RANGE_IMAGING[];
+    static const char KEY_QC_SUPPORTED_HDR_IMAGING_MODES[];
+    static const char KEY_QC_AE_BRACKET_HDR[];
+    static const char KEY_QC_SUPPORTED_AE_BRACKET_MODES[];
+    static const char KEY_QC_CAPTURE_BURST_EXPOSURE[];
+    static const char KEY_QC_NUM_SNAPSHOT_PER_SHUTTER[];
+    static const char KEY_QC_NO_DISPLAY_MODE[];
+    static const char KEY_QC_RAW_PICUTRE_SIZE[];
+
+    // DENOISE
+    static const char KEY_QC_DENOISE[];
+    static const char KEY_QC_SUPPORTED_DENOISE[];
+
+    //Selectable zone AF.
+    static const char KEY_QC_FOCUS_ALGO[];
+    static const char KEY_QC_SUPPORTED_FOCUS_ALGOS[];
+
+    //Face Detection
+    static const char KEY_QC_FACE_DETECTION[];
+    static const char KEY_QC_SUPPORTED_FACE_DETECTION[];
+
+    //Face Recognition
+    static const char KEY_QC_FACE_RECOGNITION[];
+    static const char KEY_QC_SUPPORTED_FACE_RECOGNITION[];
+
+    // supported camera features to be queried by Snapdragon SDK
+    //Read only
+    static const char KEY_QC_SUPPORTED_CAMERA_FEATURES[];
+
+    //Indicates the number of faces requested by the application.
+    //This value will be rejected if the requested number of faces
+    //is greater than what the hardware supports.
+    //Write only.
+    static const char KEY_QC_MAX_NUM_REQUESTED_FACES[];
+
+    //preview flip
+    static const char KEY_QC_PREVIEW_FLIP[];
+    //video flip
+    static const char KEY_QC_VIDEO_FLIP[];
+    //snapshot picture flip
+    static const char KEY_QC_SNAPSHOT_PICTURE_FLIP[];
+
+    static const char KEY_QC_SUPPORTED_FLIP_MODES[];
+
+    //Redeye Reduction
+    static const char KEY_QC_REDEYE_REDUCTION[];
+    static const char KEY_QC_SUPPORTED_REDEYE_REDUCTION[];
+    static const char EFFECT_EMBOSS[];
+    static const char EFFECT_SKETCH[];
+    static const char EFFECT_NEON[];
+
+    // Values for Touch AF/AEC
+    static const char TOUCH_AF_AEC_OFF[];
+    static const char TOUCH_AF_AEC_ON[];
+
+    // Values for Scene mode
+    static const char SCENE_MODE_ASD[];
+    static const char SCENE_MODE_BACKLIGHT[];
+    static const char SCENE_MODE_FLOWERS[];
+    static const char SCENE_MODE_AR[];
+    static const char SCENE_MODE_HDR[];
+    static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+    static const char PIXEL_FORMAT_YV12[]; // YV12
+    static const char PIXEL_FORMAT_NV12[]; //NV12
+
+    // Values for raw picture format
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YUYV[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_YVYU[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_UYVY[];
+    static const char QC_PIXEL_FORMAT_YUV_RAW_8BIT_VYUY[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_QCOM_RAW_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_MIPI_RAW_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_QCOM_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_MIPI_12BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN8_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_8BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_10BGGR[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GBRG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12GRBG[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12RGGB[];
+    static const char QC_PIXEL_FORMAT_BAYER_IDEAL_PLAIN16_12BGGR[];
+
+    // ISO values
+    static const char ISO_AUTO[];
+    static const char ISO_HJR[];
+    static const char ISO_100[];
+    static const char ISO_200[];
+    static const char ISO_400[];
+    static const char ISO_800[];
+    static const char ISO_1600[];
+
+    // Values for auto exposure settings.
+    static const char AUTO_EXPOSURE_FRAME_AVG[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+    static const char AUTO_EXPOSURE_SPOT_METERING[];
+    static const char AUTO_EXPOSURE_SMART_METERING[];
+    static const char AUTO_EXPOSURE_USER_METERING[];
+    static const char AUTO_EXPOSURE_SPOT_METERING_ADV[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED_ADV[];
+
+    static const char KEY_QC_SHARPNESS[];
+    static const char KEY_QC_MIN_SHARPNESS[];
+    static const char KEY_QC_MAX_SHARPNESS[];
+    static const char KEY_QC_SHARPNESS_STEP[];
+    static const char KEY_QC_CONTRAST[];
+    static const char KEY_QC_MIN_CONTRAST[];
+    static const char KEY_QC_MAX_CONTRAST[];
+    static const char KEY_QC_CONTRAST_STEP[];
+    static const char KEY_QC_SATURATION[];
+    static const char KEY_QC_MIN_SATURATION[];
+    static const char KEY_QC_MAX_SATURATION[];
+    static const char KEY_QC_SATURATION_STEP[];
+    static const char KEY_QC_BRIGHTNESS[];
+    static const char KEY_QC_MIN_BRIGHTNESS[];
+    static const char KEY_QC_MAX_BRIGHTNESS[];
+    static const char KEY_QC_BRIGHTNESS_STEP[];
+    static const char KEY_QC_SCE_FACTOR[];
+    static const char KEY_QC_MIN_SCE_FACTOR[];
+    static const char KEY_QC_MAX_SCE_FACTOR[];
+    static const char KEY_QC_SCE_FACTOR_STEP[];
+
+    static const char KEY_QC_HISTOGRAM[];
+    static const char KEY_QC_SUPPORTED_HISTOGRAM_MODES[];
+    static const char KEY_QC_HDR_NEED_1X[];
+    static const char KEY_QC_VIDEO_HDR[];
+    static const char KEY_QC_SUPPORTED_VIDEO_HDR_MODES[];
+
+    // Values for SKIN TONE ENHANCEMENT
+    static const char SKIN_TONE_ENHANCEMENT_ENABLE[];
+    static const char SKIN_TONE_ENHANCEMENT_DISABLE[];
+
+    // Values for Denoise
+    static const char DENOISE_OFF[];
+    static const char DENOISE_ON[];
+
+    // Values for focus algorithm (selectable zone AF) settings.
+    static const char FOCUS_ALGO_AUTO[];
+    static const char FOCUS_ALGO_SPOT_METERING[];
+    static const char FOCUS_ALGO_CENTER_WEIGHTED[];
+    static const char FOCUS_ALGO_FRAME_AVERAGE[];
+
+    // Values for AE Bracketing settings.
+    static const char AE_BRACKET_OFF[];
+    static const char AE_BRACKET[];
+
+    // Values for HFR settings.
+    static const char VIDEO_HFR_OFF[];
+    static const char VIDEO_HFR_2X[];
+    static const char VIDEO_HFR_3X[];
+    static const char VIDEO_HFR_4X[];
+    static const char VIDEO_HFR_5X[];
+
+    // Values for feature on/off settings.
+    static const char VALUE_OFF[];
+    static const char VALUE_ON[];
+
+    // Values for feature enable/disable settings.
+    static const char VALUE_ENABLE[];
+    static const char VALUE_DISABLE[];
+
+    // Values for feature true/false settings.
+    static const char VALUE_FALSE[];
+    static const char VALUE_TRUE[];
+
+    //Values for flip settings
+    static const char FLIP_MODE_OFF[];
+    static const char FLIP_MODE_V[];
+    static const char FLIP_MODE_H[];
+    static const char FLIP_MODE_VH[];
+
+    enum {
+        CAMERA_ORIENTATION_UNKNOWN = 0,
+        CAMERA_ORIENTATION_PORTRAIT = 1,
+        CAMERA_ORIENTATION_LANDSCAPE = 2,
+    };
+    typedef struct {
+        const char *const desc;
+        int val;
+    } QCameraMap;
+
+    void getSupportedHfrSizes(Vector<Size> &sizes);
+    void setPreviewFrameRateMode(const char *mode);
+    const char *getPreviewFrameRateMode() const;
+    void setTouchIndexAec(int x, int y);
+    void getTouchIndexAec(int *x, int *y);
+    void setTouchIndexAf(int x, int y);
+    void getTouchIndexAf(int *x, int *y);
+
+    int32_t init(cam_capability_t *, mm_camera_vtbl_t *);
+    void deinit();
+    int32_t assign(QCameraParameters& params);
+    int32_t initDefaultParameters();
+    int32_t updateParameters(QCameraParameters&, bool &needRestart);
+    int32_t commitParameters();
+    int getPreviewHalPixelFormat() const;
+    int32_t getStreamFormat(cam_stream_type_t streamType,
+                             cam_format_t &format);
+    int32_t getStreamDimension(cam_stream_type_t streamType,
+                                cam_dimension_t &dim);
+    void getThumbnailSize(int *width, int *height) const;
+
+    int getZSLBurstInterval();
+    int getZSLQueueDepth();
+    int getZSLBackLookCount();
+    int getMaxUnmatchedFramesInQueue();
+    bool isZSLMode() {return m_bZslMode;};
+    bool isNoDisplayMode() {return m_bNoDisplayMode;};
+    bool isWNREnabled() {return m_bWNROn;};
+    uint8_t getNumOfSnapshots();
+    uint8_t getNumOfExtraHDRBufsIfNeeded();
+    uint8_t getNumOfHDRBufsIfNeeded();
+    int getBurstNum();
+    bool getRecordingHintValue() {return m_bRecordingHint;}; // return local copy of video hint
+    int setRecordingHintValue(int32_t value); // set local copy of video hint and send to server
+                                              // without changing the parameter value itself
+    int getJpegQuality();
+    int getJpegRotation();
+
+    int32_t getExifDateTime(char *dateTime, uint32_t &count);
+    int32_t getExifFocalLength(rat_t *focalLength);
+    uint16_t getExifIsoSpeed();
+    int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod, uint32_t &count);
+    int32_t getExifLatitude(rat_t *latitude, char *latRef);
+    int32_t getExifLongitude(rat_t *longitude, char *lonRef);
+    int32_t getExifAltitude(rat_t *altitude, char *altRef);
+    int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, uint32_t bufLen, rat_t *gpsTimeStamp);
+    int32_t updateFocusDistances(cam_focus_distances_info_t *focusDistances);
+
+    bool isFpsDebugEnabled() {return m_bDebugFps;};
+    bool isHistogramEnabled() {return m_bHistogramEnabled;};
+    bool isFaceDetectionEnabled() {return ((m_nFaceProcMask & CAM_FACE_PROCESS_MASK_DETECTION) != 0);};
+    int32_t setHistogram(bool enabled);
+    int32_t setFaceDetection(bool enabled);
+    int32_t setLockCAF(bool bLock);
+    int32_t setFrameSkip(enum msm_vfe_frame_skip_pattern pattern);
+    qcamera_thermal_mode getThermalMode() {return m_ThermalMode;};
+    int32_t updateRecordingHintValue(int32_t value);
+
+    cam_focus_mode_type getFocusMode() const {return mFocusMode;};
+    int32_t setNumOfSnapshot();
+    int32_t adjustPreviewFpsRange(cam_fps_range_t *fpsRange);
+    bool isJpegPictureFormat() {return (mPictureFormat == CAM_FORMAT_JPEG);};
+    bool isNV16PictureFormat() {return (mPictureFormat == CAM_FORMAT_YUV_422_NV16);};
+    cam_denoise_process_type_t getWaveletDenoiseProcessPlate();
+    int32_t getLiveSnapshotSize(cam_dimension_t &dim) {dim = m_LiveSnapshotSize; return NO_ERROR;};
+    int getFlipMode(cam_stream_type_t streamType);
+
+    void setLockCAFNeeded(bool bNeedflag) {m_bNeedLockCAF = bNeedflag;};
+    bool isLockCAFNeeded() {return m_bNeedLockCAF;};
+    bool isCAFLocked() {return m_bCAFLocked;};
+    void setAFRunning(bool bflag) {m_bAFRunning = bflag;};
+    bool isAFRunning() {return m_bAFRunning;};
+
+private:
+    int32_t setPreviewSize(const QCameraParameters& );
+    int32_t setVideoSize(const QCameraParameters& );
+    int32_t setPictureSize(const QCameraParameters& );
+    int32_t setLiveSnapshotSize(const QCameraParameters& );
+    int32_t setPreviewFormat(const QCameraParameters& );
+    int32_t setPictureFormat(const QCameraParameters& );
+    int32_t setOrientation(const QCameraParameters& );
+    int32_t setJpegThumbnailSize(const QCameraParameters& );
+    int32_t setJpegQuality(const QCameraParameters& );
+    int32_t setPreviewFpsRange(const QCameraParameters& );
+    int32_t setPreviewFrameRate(const QCameraParameters& );
+    int32_t setAutoExposure(const QCameraParameters& );
+    int32_t setEffect(const QCameraParameters& );
+    int32_t setBrightness(const QCameraParameters& );
+    int32_t setFocusMode(const QCameraParameters& );
+    int32_t setSharpness(const QCameraParameters& );
+    int32_t setSaturation(const QCameraParameters& );
+    int32_t setContrast(const QCameraParameters& );
+    int32_t setSkinToneEnhancement(const QCameraParameters& );
+    int32_t setSceneDetect(const QCameraParameters& );
+    int32_t setVideoHDR(const QCameraParameters& );
+    int32_t setZoom(const QCameraParameters& );
+    int32_t setISOValue(const QCameraParameters& );
+    int32_t setRotation(const QCameraParameters& );
+    int32_t setFlash(const QCameraParameters& );
+    int32_t setAecLock(const QCameraParameters& );
+    int32_t setAwbLock(const QCameraParameters& );
+    int32_t setMCEValue(const QCameraParameters& );
+    int32_t setDISValue(const QCameraParameters& params);
+    int32_t setHighFrameRate(const QCameraParameters& );
+    int32_t setLensShadeValue(const QCameraParameters& );
+    int32_t setExposureCompensation(const QCameraParameters& );
+    int32_t setWhiteBalance(const QCameraParameters& );
+    int32_t setAntibanding(const QCameraParameters& );
+    int32_t setFocusAreas(const QCameraParameters& );
+    int32_t setMeteringAreas(const QCameraParameters& );
+    int32_t setSceneMode(const QCameraParameters& );
+    int32_t setSelectableZoneAf(const QCameraParameters& );
+    int32_t setAEBracket(const QCameraParameters& );
+    int32_t setRedeyeReduction(const QCameraParameters& );
+    int32_t setGpsLocation(const QCameraParameters& );
+    int32_t setRecordingHint(const QCameraParameters& );
+    int32_t setNoDisplayMode(const QCameraParameters& );
+    int32_t setWaveletDenoise(const QCameraParameters& );
+    int32_t setZslMode(const QCameraParameters& );
+    int32_t setZslAttributes(const QCameraParameters& );
+    int32_t setCameraMode(const QCameraParameters& );
+    int32_t setFaceRecognition(const QCameraParameters& );
+    int32_t setFlip(const QCameraParameters& );
+
+    int32_t setAutoExposure(const char *autoExp);
+    int32_t setPreviewFpsRange(int minFPS,int maxFPS);
+    int32_t setEffect(const char *effect);
+    int32_t setBrightness(int brightness);
+    int32_t setFocusMode(const char *focusMode);
+    int32_t setSharpness(int sharpness);
+    int32_t setSaturation(int saturation);
+    int32_t setContrast(int contrast);
+    int32_t setSkinToneEnhancement(int sceFactor);
+    int32_t setSceneDetect(const char *sceneDetect);
+    int32_t setVideoHDR(const char *videoHDR);
+    int32_t setZoom(int zoom_level);
+    int32_t setISOValue(const char *isoValue);
+    int32_t setFlash(const char *flashStr);
+    int32_t setAecLock(const char *aecStr);
+    int32_t setAwbLock(const char *awbStr);
+    int32_t setMCEValue(const char *mceStr);
+    int32_t setDISValue(const char *disStr);
+    int32_t setHighFrameRate(const char *hfrStr);
+    int32_t setLensShadeValue(const char *lensShadeStr);
+    int32_t setExposureCompensation(int expComp);
+    int32_t setWhiteBalance(const char *wbStr);
+    int32_t setAntibanding(const char *antiBandingStr);
+    int32_t setFocusAreas(const char *focusAreasStr);
+    int32_t setMeteringAreas(const char *meteringAreasStr);
+    int32_t setSceneMode(const char *sceneModeStr);
+    int32_t setSelectableZoneAf(const char *selZoneAFStr);
+    int32_t setAEBracket(const char *aecBracketStr);
+    int32_t setRedeyeReduction(const char *redeyeStr);
+    int32_t setWaveletDenoise(const char *wnrStr);
+    int32_t setFaceRecognition(const char *faceRecog, int maxFaces);
+
+    int32_t parse_pair(const char *str, int *first, int *second,
+                       char delim, char **endptr);
+    void parseSizesList(const char *sizesStr, Vector<Size> &sizes);
+    int32_t parseNDimVector(const char *str, int *num, int N, char delim);
+    int32_t parseCameraAreaString(const char *str, int max_num_areas,
+                                  cam_area_t *pAreas, int& num_areas_found);
+    bool validateCameraAreas(cam_area_t *areas, int num_areas);
+    int parseGPSCoordinate(const char *coord_str, rat_t *coord);
+    int32_t getRational(rat_t *rat, int num, int denom);
+    String8 createSizesString(const cam_dimension_t *sizes, int len);
+    String8 createValuesString(const int *values, int len,
+                               const QCameraMap *map, int map_len);
+    String8 createValuesStringFromMap(const QCameraMap *map,
+                                      int map_len);
+    String8 createHfrValuesString(const cam_hfr_info_t *values, int len,
+                                  const QCameraMap *map, int map_len);
+    String8 createHfrSizesString(const cam_hfr_info_t *values, int len);
+    String8 createFpsRangeString(const cam_fps_range_t *fps,
+                                 int len,
+                                 int &default_fps_index);
+    static int compareFPSValues(const void *p1, const void *p2);
+    String8 createFpsString(const cam_fps_range_t *fps, int len);
+    String8 createZoomRatioValuesString(int *zoomRatios, int length);
+    int lookupAttr(const QCameraMap arr[], int len, const char *name);
+    const char *lookupNameByValue(const QCameraMap arr[], int len, int value);
+
+    // ops for batch set/get params with server
+    int32_t initBatchUpdate(parm_buffer_t *p_table);
+    int32_t AddSetParmEntryToBatch(parm_buffer_t *p_table,
+                                   cam_intf_parm_type_t paramType,
+                                   uint32_t paramLength,
+                                   void *paramValue);
+    int32_t commitSetBatch();
+    int32_t AddGetParmEntryToBatch(parm_buffer_t *p_table,
+                                   cam_intf_parm_type_t paramType);
+    int32_t commitGetBatch();
+
+    // ops to temporarily update parameter entries and commit
+    int32_t updateParamEntry(const char *key, const char *value);
+    int32_t commitParamChanges();
+
+    // Map from strings to values
+    static const cam_dimension_t THUMBNAIL_SIZES_MAP[];
+    static const QCameraMap AUTO_EXPOSURE_MAP[];
+    static const QCameraMap PREVIEW_FORMATS_MAP[];
+    static const QCameraMap PICTURE_TYPES_MAP[];
+    static const QCameraMap RAW_FORMATS_MAP[];
+    static const QCameraMap FOCUS_MODES_MAP[];
+    static const QCameraMap EFFECT_MODES_MAP[];
+    static const QCameraMap SCENE_MODES_MAP[];
+    static const QCameraMap FLASH_MODES_MAP[];
+    static const QCameraMap FOCUS_ALGO_MAP[];
+    static const QCameraMap WHITE_BALANCE_MODES_MAP[];
+    static const QCameraMap ANTIBANDING_MODES_MAP[];
+    static const QCameraMap ISO_MODES_MAP[];
+    static const QCameraMap HFR_MODES_MAP[];
+    static const QCameraMap BRACKETING_MODES_MAP[];
+    static const QCameraMap ON_OFF_MODES_MAP[];
+    static const QCameraMap ENABLE_DISABLE_MODES_MAP[];
+    static const QCameraMap DENOISE_ON_OFF_MODES_MAP[];
+    static const QCameraMap TRUE_FALSE_MODES_MAP[];
+    static const QCameraMap TOUCH_AF_AEC_MODES_MAP[];
+    static const QCameraMap FLIP_MODES_MAP[];
+
+    cam_capability_t *m_pCapability;
+    mm_camera_vtbl_t *m_pCamOpsTbl;
+    QCameraHeapMemory *m_pParamHeap;
+    parm_buffer_t     *m_pParamBuf;  // ptr to param buf in m_pParamHeap
+
+    bool m_bZslMode;                // if ZSL is enabled
+    bool m_bZslMode_new;
+    bool m_bRecordingHint;          // local copy of recording hint
+    bool m_bRecordingHint_new;
+    bool m_bHistogramEnabled;       // if histogram is enabled
+    int  m_nFaceProcMask;           // face process mask
+    bool m_bDebugFps;               // if FPS need to be logged
+    cam_focus_mode_type mFocusMode;
+    cam_format_t mPreviewFormat;
+    int32_t mPictureFormat;         // could be CAMERA_PICTURE_TYPE_JPEG or cam_format_t
+    bool m_bNeedRestart;            // if preview needs restart after parameters updated
+    bool m_bNoDisplayMode;
+    bool m_bWNROn;
+    bool m_bNeedLockCAF;
+    bool m_bCAFLocked;
+    bool m_bAFRunning;
+    qcamera_thermal_mode m_ThermalMode; // adjust fps vs adjust frameskip
+    cam_dimension_t m_LiveSnapshotSize; // live snapshot size
+
+    DefaultKeyedVector<String8,String8> m_tempMap; // map to temporarily store parameters to be set
+};
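
The QCameraMap typedef above is what ties the string constants exposed through the Android parameter interface to the backend enum values, and the lookup helpers declared in the private section scan such tables. A minimal illustration of the idea, not taken from this change, with CAM_ISO_MODE_* standing in for whatever backend ids the real tables use:

    // Illustration only; the real tables (ISO_MODES_MAP etc.) are defined in QCameraParameters.cpp.
    #include <string.h>
    #include <utils/Errors.h>   // for android::NAME_NOT_FOUND

    static const QCameraParameters::QCameraMap ISO_MAP_EXAMPLE[] = {
        { QCameraParameters::ISO_AUTO, CAM_ISO_MODE_AUTO },  // backend ids assumed
        { QCameraParameters::ISO_100,  CAM_ISO_MODE_100  },
        { QCameraParameters::ISO_200,  CAM_ISO_MODE_200  },
    };

    // In the spirit of lookupAttr(): return the backend value for a parameter string.
    static int lookupIsoExample(const QCameraParameters::QCameraMap *map, int len, const char *name)
    {
        for (int i = 0; i < len; i++) {
            if (strcmp(map[i].desc, name) == 0) {
                return map[i].val;
            }
        }
        return android::NAME_NOT_FOUND;
    }
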
+
+}; // namespace qcamera
+
+#endif
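
One note on the batch set/get helpers declared in the private section above: each individual setter presumably stages a single entry into m_pParamBuf and the whole table is then pushed to the backend. A minimal sketch of that pattern, assuming a CAM_INTF_PARM_BRIGHTNESS id exists in the camera interface headers (the real setter bodies, and whether the commit happens per setter or once per updateParameters() pass, are not shown in this change):

    // Sketch of the assumed batch-update pattern; not taken from this patch.
    int32_t QCameraParameters::setBrightness(int brightness)
    {
        int32_t rc = initBatchUpdate(m_pParamBuf);            // reset the shared param table
        if (rc != NO_ERROR) {
            return rc;
        }
        rc = AddSetParmEntryToBatch(m_pParamBuf,
                                    CAM_INTF_PARM_BRIGHTNESS, // assumed cam_intf_parm_type_t id
                                    sizeof(brightness),
                                    &brightness);             // stage one entry
        if (rc != NO_ERROR) {
            return rc;
        }
        return commitSetBatch();                              // flush the staged table to the server
    }
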
diff --git a/camera/QCamera2/HAL/QCameraPostProc.cpp b/camera/QCamera2/HAL/QCameraPostProc.cpp
new file mode 100644
index 0000000..c301bde
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraPostProc.cpp
@@ -0,0 +1,1700 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraPostProc"
+
+#include <stdlib.h>
+#include <utils/Errors.h>
+
+#include "QCamera2HWI.h"
+#include "QCameraPostProc.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraPostProcessor
+ *
+ * DESCRIPTION: constructor of QCameraPostProcessor.
+ *
+ * PARAMETERS :
+ *   @cam_ctrl : ptr to HWI object
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraPostProcessor::QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl)
+    : m_parent(cam_ctrl),
+      mJpegCB(NULL),
+      mJpegUserData(NULL),
+      mJpegClientHandle(0),
+      mJpegSessionId(0),
+      m_pJpegOutputMem(NULL),
+      m_pJpegExifObj(NULL),
+      m_bThumbnailNeeded(TRUE),
+      m_pReprocChannel(NULL),
+      m_inputPPQ(releasePPInputData, this),
+      m_ongoingPPQ(releaseOngoingPPData, this),
+      m_inputJpegQ(releaseJpegData, this),
+      m_ongoingJpegQ(releaseJpegData, this),
+      m_inputRawQ(releasePPInputData, this)
+{
+    memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraPostProcessor
+ *
+ * DESCRIPTION: destructor of QCameraPostProcessor.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraPostProcessor::~QCameraPostProcessor()
+{
+    if (m_pJpegOutputMem != NULL) {
+        m_pJpegOutputMem->deallocate();
+        delete m_pJpegOutputMem;
+        m_pJpegOutputMem = NULL;
+    }
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    if (m_pReprocChannel != NULL) {
+        m_pReprocChannel->stop();
+        delete m_pReprocChannel;
+        m_pReprocChannel = NULL;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of postprocessor
+ *
+ * PARAMETERS :
+ *   @jpeg_cb      : callback to handle jpeg event from mm-camera-interface
+ *   @user_data    : user data ptr for jpeg callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::init(jpeg_encode_callback_t jpeg_cb, void *user_data)
+{
+    mJpegCB = jpeg_cb;
+    mJpegUserData = user_data;
+
+    mJpegClientHandle = jpeg_open(&mJpegHandle);
+    if(!mJpegClientHandle) {
+        ALOGE("%s : jpeg_open did not work", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    m_dataProcTh.launch(dataProcessRoutine, this);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: de-initialization of postprocessor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::deinit()
+{
+    m_dataProcTh.exit();
+
+    if(mJpegClientHandle > 0) {
+        int rc = mJpegHandle.close(mJpegClientHandle);
+        ALOGE("%s: Jpeg closed, rc = %d, mJpegClientHandle = %x",
+              __func__, rc, mJpegClientHandle);
+        mJpegClientHandle = 0;
+        memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    }
+
+    return NO_ERROR;
+}
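
Together with start()/stop() below, the init()/deinit() pair above implies roughly the following call order from the owning QCamera2HardwareInterface (a sketch of assumed usage; jpegEvtHandle and pZslChannel are placeholder names, not taken from this change):

    // Assumed lifecycle: init once, start/stop around captures, deinit at teardown.
    QCameraPostProcessor postproc(this);              // constructed with the HWI pointer
    int32_t rc = postproc.init(jpegEvtHandle, this);  // opens mm-jpeg, launches the data proc thread
    if (rc == NO_ERROR) {
        rc = postproc.start(pZslChannel);             // adds/starts a reprocess channel if needReprocess()
        // ... frames arrive via processData() / processRawData() / processPPData() ...
        postproc.stop();                              // stops snapshot callbacks; STOP is sent as a sync cmd
    }
    postproc.deinit();                                // exits the data proc thread, closes mm-jpeg
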
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start postprocessor. Data process thread and data notify thread
+ *              will be launched.
+ *
+ * PARAMETERS :
+ *   @pSrcChannel : source channel obj ptr that possibly needs reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : if any reprocess is needed, a reprocess channel/stream
+ *              will be started.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::start(QCameraChannel *pSrcChannel)
+{
+    int32_t rc = NO_ERROR;
+    if (m_parent->needReprocess()) {
+        if (m_pReprocChannel != NULL) {
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+        }
+        // if reprocess is needed, start reprocess channel
+        m_pReprocChannel = m_parent->addOnlineReprocChannel(pSrcChannel);
+        if (m_pReprocChannel == NULL) {
+            ALOGE("%s: cannot add reprocess channel", __func__);
+            return UNKNOWN_ERROR;
+        }
+
+        rc = m_pReprocChannel->start();
+        if (rc != 0) {
+            ALOGE("%s: cannot start reprocess channel", __func__);
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+            return rc;
+        }
+    }
+
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+    m_parent->m_cbNotifier.startSnapshots();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop postprocessor. Data process and notify thread will be stopped.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : reprocess channel will be stopped and deleted if there is any
+ *==========================================================================*/
+int32_t QCameraPostProcessor::stop()
+{
+    m_parent->m_cbNotifier.stopSnapshots();
+    // the dataProc thread needs to process "stop" as a sync call because aborting a jpeg job must be synchronous
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegEncodingConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ *   @encode_parm   : param to be filled with encoding configuration
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+                                                    QCameraStream *main_stream,
+                                                    QCameraStream *thumb_stream)
+{
+    ALOGV("%s : E", __func__);
+    int32_t ret = NO_ERROR;
+    camera_memory_t *jpeg_mem = NULL;
+
+    encode_parm.jpeg_cb = mJpegCB;
+    encode_parm.userdata = mJpegUserData;
+
+    m_bThumbnailNeeded = TRUE; // thumbnail encoding is needed by default
+    cam_dimension_t thumbnailSize;
+    memset(&thumbnailSize, 0, sizeof(cam_dimension_t));
+    m_parent->getThumbnailSize(thumbnailSize);
+    if (thumbnailSize.width == 0 || thumbnailSize.height == 0) {
+        // (0,0) means no thumbnail
+        m_bThumbnailNeeded = FALSE;
+    }
+    encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+    // get color format
+    cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;
+    main_stream->getFormat(img_fmt);
+    encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+    // get jpeg quality
+    encode_parm.quality = m_parent->getJpegQuality();
+    if (encode_parm.quality <= 0) {
+        encode_parm.quality = 85;
+    }
+
+    // get exif data
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    m_pJpegExifObj = m_parent->getExifData();
+    if (m_pJpegExifObj != NULL) {
+        encode_parm.exif_info.exif_data = m_pJpegExifObj->getEntries();
+        encode_parm.exif_info.numOfEntries = m_pJpegExifObj->getNumOfEntries();
+    }
+
+    cam_frame_len_offset_t main_offset;
+    memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
+    main_stream->getFrameOffset(main_offset);
+
+    // src buf config
+    QCameraMemory *pStreamMem = main_stream->getStreamBufs();
+    if (pStreamMem == NULL) {
+        ALOGE("%s: cannot get stream bufs from main stream", __func__);
+        ret = BAD_VALUE;
+        goto on_error;
+    }
+    encode_parm.num_src_bufs = pStreamMem->getCnt();
+    for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
+        camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+        if (stream_mem != NULL) {
+            encode_parm.src_main_buf[i].index = i;
+            encode_parm.src_main_buf[i].buf_size = stream_mem->size;
+            encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+            encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
+            encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
+            encode_parm.src_main_buf[i].offset = main_offset;
+        }
+    }
+
+    if (m_bThumbnailNeeded == TRUE) {
+        if (thumb_stream == NULL) {
+            thumb_stream = main_stream;
+        }
+        pStreamMem = thumb_stream->getStreamBufs();
+        if (pStreamMem == NULL) {
+            ALOGE("%s: cannot get stream bufs from thumb stream", __func__);
+            ret = BAD_VALUE;
+            goto on_error;
+        }
+        cam_frame_len_offset_t thumb_offset;
+        memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
+        thumb_stream->getFrameOffset(thumb_offset);
+        encode_parm.num_tmb_bufs =  pStreamMem->getCnt();
+        for (int i = 0; i < pStreamMem->getCnt(); i++) {
+            camera_memory_t *stream_mem = pStreamMem->getMemory(i, false);
+            if (stream_mem != NULL) {
+                encode_parm.src_thumb_buf[i].index = i;
+                encode_parm.src_thumb_buf[i].buf_size = stream_mem->size;
+                encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)stream_mem->data;
+                encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
+                encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+                encode_parm.src_thumb_buf[i].offset = thumb_offset;
+            }
+        }
+    }
+
+    // allocate output buf for jpeg encoding
+    if (m_pJpegOutputMem != NULL) {
+        m_pJpegOutputMem->deallocate();
+        delete m_pJpegOutputMem;
+        m_pJpegOutputMem = NULL;
+    }
+    m_pJpegOutputMem = new QCameraStreamMemory(m_parent->mGetMemory,
+                                               QCAMERA_ION_USE_CACHE);
+    if (NULL == m_pJpegOutputMem) {
+        ret = NO_MEMORY;
+        ALOGE("%s : No memory for m_pJpegOutputMem", __func__);
+        goto on_error;
+    }
+    ret = m_pJpegOutputMem->allocate(1, main_offset.frame_len);
+    if(ret != OK) {
+        ret = NO_MEMORY;
+        ALOGE("%s : No memory for m_pJpegOutputMem", __func__);
+        goto on_error;
+    }
+    jpeg_mem = m_pJpegOutputMem->getMemory(0, false);
+    if (NULL == jpeg_mem) {
+        ret = NO_MEMORY;
+        ALOGE("%s : initHeapMem for jpeg, ret = NO_MEMORY", __func__);
+        goto on_error;
+    }
+    encode_parm.num_dst_bufs = 1;
+    encode_parm.dest_buf[0].index = 0;
+    encode_parm.dest_buf[0].buf_size = jpeg_mem->size;
+    encode_parm.dest_buf[0].buf_vaddr = (uint8_t *)jpeg_mem->data;
+    encode_parm.dest_buf[0].fd = m_pJpegOutputMem->getFd(0);
+    encode_parm.dest_buf[0].format = MM_JPEG_FMT_YUV;
+    encode_parm.dest_buf[0].offset = main_offset;
+
+    ALOGV("%s : X", __func__);
+    return NO_ERROR;
+
+on_error:
+    if (m_pJpegOutputMem != NULL) {
+        m_pJpegOutputMem->deallocate();
+        delete m_pJpegOutputMem;
+        m_pJpegOutputMem = NULL;
+    }
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    ALOGV("%s : X with error %d", __func__, ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendEvtNotify
+ *
+ * DESCRIPTION: send event notify through notify callback registered by upper layer
+ *
+ * PARAMETERS :
+ *   @msg_type: msg type of notify
+ *   @ext1    : extension
+ *   @ext2    : extension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::sendEvtNotify(int32_t msg_type,
+                                            int32_t ext1,
+                                            int32_t ext2)
+{
+    return m_parent->sendEvtNotify(msg_type, ext1, ext2);
+}
+
+/*===========================================================================
+ * FUNCTION   : sendDataNotify
+ *
+ * DESCRIPTION: enqueue data into dataNotify thread
+ *
+ * PARAMETERS :
+ *   @msg_type: data callback msg type
+ *   @data    : ptr to data memory struct
+ *   @index   : index to data buffer
+ *   @metadata: ptr to meta data buffer if there is any
+ *   @release_data : ptr to struct indicating if data need to be released
+ *                   after notify
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::sendDataNotify(int32_t msg_type,
+                                             camera_memory_t *data,
+                                             uint8_t index,
+                                             camera_frame_metadata_t *metadata,
+                                             qcamera_release_data_t *release_data)
+{
+    qcamera_data_argm_t *data_cb = (qcamera_data_argm_t *)malloc(sizeof(qcamera_data_argm_t));
+    if (NULL == data_cb) {
+        ALOGE("%s: no mem for acamera_data_argm_t", __func__);
+        return NO_MEMORY;
+    }
+    memset(data_cb, 0, sizeof(qcamera_data_argm_t));
+    data_cb->msg_type = msg_type;
+    data_cb->data = data;
+    data_cb->index = index;
+    data_cb->metadata = metadata;
+    if (release_data != NULL) {
+        data_cb->release_data = *release_data;
+    }
+
+    qcamera_callback_argm_t cbArg;
+    memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+    cbArg.cb_type = QCAMERA_DATA_SNAPSHOT_CALLBACK;
+    cbArg.msg_type = msg_type;
+    cbArg.data = data;
+    cbArg.metadata = metadata;
+    cbArg.user_data = data_cb;
+    cbArg.cookie = this;
+    cbArg.release_cb = releaseNotifyData;
+    int rc = m_parent->m_cbNotifier.notifyCallback(cbArg);
+    if ( NO_ERROR != rc ) {
+        ALOGE("%s: Error enqueuing jpeg data into notify queue", __func__);
+        free(data_cb);
+        return UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : depending on whether offline reprocess is needed, the received frame
+ *              will be sent either to the postprocess input queue or to jpeg encoding
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processData(mm_camera_super_buf_t *frame)
+{
+    if (m_parent->needReprocess()) {
+        ALOGD("%s: need reprocess", __func__);
+        // enqueue to post proc input queue
+        m_inputPPQ.enqueue((void *)frame);
+    } else if (m_parent->mParameters.isNV16PictureFormat()) {
+        processRawData(frame);
+    } else {
+        ALOGD("%s: no need offline reprocess, sending to jpeg encoding", __func__);
+        qcamera_jpeg_data_t *jpeg_job =
+            (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+        if (jpeg_job == NULL) {
+            ALOGE("%s: No memory for jpeg job", __func__);
+            return NO_MEMORY;
+        }
+
+        memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+        jpeg_job->src_frame = frame;
+
+        // enqueue to jpeg input queue
+        m_inputJpegQ.enqueue((void *)jpeg_job);
+    }
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return NO_ERROR;
+}
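
Note that processData() keeps the super buffer pointer it is handed and releaseJpegJobData() later free()s it, so the capture path presumably passes a heap copy that the post processor may own. A hedged sketch of that caller side (the callback signature and the m_postprocessor member name are assumptions, not shown in this change):

    // Hypothetical capture-channel callback on the QCamera2HardwareInterface side.
    static void snapshot_channel_cb(mm_camera_super_buf_t *recvd_frame, void *userdata)
    {
        QCamera2HardwareInterface *pme = (QCamera2HardwareInterface *)userdata;
        // heap-copy the super buffer so the post processor can keep and later free it
        mm_camera_super_buf_t *frame =
            (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
        if (frame == NULL) {
            ALOGE("%s: no memory for super buf copy", __func__);
            return;
        }
        *frame = *recvd_frame;
        pme->m_postprocessor.processData(frame);   // member name is an assumption
    }
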
+
+/*===========================================================================
+ * FUNCTION   : processRawData
+ *
+ * DESCRIPTION: enqueue raw data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawData(mm_camera_super_buf_t *frame)
+{
+    // enqueue to raw input queue
+    m_inputRawQ.enqueue((void *)frame);
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegEvt
+ *
+ * DESCRIPTION: process jpeg event from mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ *   @evt     : payload of jpeg event, including information about jpeg encoding
+ *              status, jpeg size and so on.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : This event also triggers the DataProc thread to move on to the next
+ *              job (i.e., send a new jpeg encoding job to mm-jpeg-interface
+ *              if there is any pending job in the jpeg input queue)
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processJpegEvt(qcamera_jpeg_evt_payload_t *evt)
+{
+    int32_t rc = NO_ERROR;
+    camera_memory_t *jpeg_mem = NULL;
+
+    // find job by jobId
+    qcamera_jpeg_data_t *job = findJpegJobByJobId(evt->jobId);
+
+    if (job == NULL) {
+        ALOGE("%s: Cannot find jpeg job by jobId(%d)", __func__, evt->jobId);
+        rc = BAD_VALUE;
+        goto end;
+    }
+
+    ALOGD("[KPI Perf] %s : jpeg job %d", __func__, evt->jobId);
+
+    if (m_parent->mDataCb == NULL ||
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) == 0 ) {
+        ALOGD("%s: No dataCB or CAMERA_MSG_COMPRESSED_IMAGE not enabled",
+              __func__);
+        rc = NO_ERROR;
+        goto end;
+    }
+
+    if(evt->status == JPEG_JOB_STATUS_ERROR) {
+        ALOGE("%s: Error event handled from jpeg, status = %d",
+              __func__, evt->status);
+        rc = FAILED_TRANSACTION;
+        goto end;
+    }
+
+    m_parent->dumpFrameToFile(evt->out_data.buf_vaddr,
+                              evt->out_data.buf_filled_len,
+                              evt->jobId,
+                              QCAMERA_DUMP_FRM_JPEG);
+    ALOGD("%s: Dump jpeg_size=%d", __func__, evt->out_data.buf_filled_len);
+
+    // alloc jpeg memory to pass to upper layer
+    jpeg_mem = m_parent->mGetMemory(-1, evt->out_data.buf_filled_len, 1, m_parent->mCallbackCookie);
+    if (NULL == jpeg_mem) {
+        rc = NO_MEMORY;
+        ALOGE("%s : getMemory for jpeg, ret = NO_MEMORY", __func__);
+        goto end;
+    }
+    memcpy(jpeg_mem->data, evt->out_data.buf_vaddr, evt->out_data.buf_filled_len);
+
+    ALOGE("%s : Calling upperlayer callback to store JPEG image", __func__);
+    qcamera_release_data_t release_data;
+    memset(&release_data, 0, sizeof(qcamera_release_data_t));
+    release_data.data = jpeg_mem;
+    rc = sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                        jpeg_mem,
+                        0,
+                        NULL,
+                        &release_data);
+
+end:
+    if (rc != NO_ERROR) {
+        // send error msg to upper layer
+        sendEvtNotify(CAMERA_MSG_ERROR,
+                      UNKNOWN_ERROR,
+                      0);
+
+        if (NULL != jpeg_mem) {
+            jpeg_mem->release(jpeg_mem);
+            jpeg_mem = NULL;
+        }
+    }
+
+    // release internal data for jpeg job
+    if (job != NULL) {
+        releaseJpegJobData(job);
+        free(job);
+    }
+
+    // wake up data proc thread to do the next job,
+    // if the previous request was blocked due to an ongoing jpeg job
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPPData
+ *
+ * DESCRIPTION: process received frame after reprocess.
+ *
+ * PARAMETERS :
+ *   @frame   : received frame from reprocess channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : The frame after reprocess needs to be sent to jpeg encoding.
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processPPData(mm_camera_super_buf_t *frame)
+{
+    qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
+
+    if (job == NULL || job->src_frame == NULL) {
+        ALOGE("%s: Cannot find reprocess job", __func__);
+        return BAD_VALUE;
+    }
+
+    if (m_parent->mParameters.isNV16PictureFormat()) {
+        releaseSuperBuf(job->src_frame);
+        free(job->src_frame);
+        free(job);
+        return processRawData(frame);
+    }
+
+    qcamera_jpeg_data_t *jpeg_job =
+        (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+    if (jpeg_job == NULL) {
+        ALOGE("%s: No memory for jpeg job", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+    jpeg_job->src_frame = frame;
+    jpeg_job->src_reproc_frame = job->src_frame;
+
+    // free pp job buf
+    free(job);
+
+    // enqueue reprocessed frame to jpeg input queue
+    m_inputJpegQ.enqueue((void *)jpeg_job);
+
+    // wake up data proc thread
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : findJpegJobByJobId
+ *
+ * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
+ *
+ * PARAMETERS :
+ *   @jobId   : job Id of the job
+ *
+ * RETURN     : ptr to a jpeg job struct. NULL if not found.
+ *
+ * NOTE       : Currently only one job at a time is sent to mm-jpeg-interface for
+ *              jpeg encoding. Therefore simply dequeuing from the ongoing Jpeg Queue
+ *              is enough to find the jpeg job.
+ *==========================================================================*/
+qcamera_jpeg_data_t *QCameraPostProcessor::findJpegJobByJobId(uint32_t jobId)
+{
+    qcamera_jpeg_data_t * job = NULL;
+    if (jobId == 0) {
+        ALOGE("%s: not a valid jpeg jobId", __func__);
+        return NULL;
+    }
+
+    // currently only one jpeg job ongoing, so simply dequeue the head
+    job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+    return job;
+}
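
Since the NOTE above leans on there being a single outstanding encode, a slightly more defensive variant could verify that the dequeued entry actually matches the requested id (a sketch, not part of this change; it assumes qcamera_jpeg_data_t records the job id returned when the encode was started):

    // Illustrative variant of findJpegJobByJobId().
    qcamera_jpeg_data_t *job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
    if (job != NULL && job->jobId != jobId) {          // jobId member is assumed
        ALOGE("%s: dequeued jpeg job %d does not match requested %d",
              __func__, job->jobId, jobId);
        // with more than one concurrent encode, a keyed lookup in the queue
        // would be needed instead of simply dequeuing the head
    }
    return job;
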
+
+/*===========================================================================
+ * FUNCTION   : releasePPInputData
+ *
+ * DESCRIPTION: callback function to release post process input data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releasePPInputData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        pme->releaseSuperBuf((mm_camera_super_buf_t *)data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegData
+ *
+ * DESCRIPTION: callback function to release jpeg job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing jpeg job data
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOngoingPPData
+ *
+ * DESCRIPTION: callback function to release ongoing postprocess job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing postprocess job
+ *   @user_data : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseOngoingPPData(void *data, void *user_data)
+{
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)user_data;
+    if (NULL != pme) {
+        qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
+        if (NULL != pp_job->src_frame) {
+            pme->releaseSuperBuf(pp_job->src_frame);
+            free(pp_job->src_frame);
+            pp_job->src_frame = NULL;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseNotifyData
+ *
+ * DESCRIPTION: function to release internal resources in notify data struct
+ *
+ * PARAMETERS :
+ *   @user_data  : ptr user data
+ *   @cookie     : callback cookie
+ *
+ * RETURN     : None
+ *
+ * NOTE       : deallocate jpeg heap memory if it's not NULL
+ *==========================================================================*/
+void QCameraPostProcessor::releaseNotifyData(void *user_data, void *cookie)
+{
+    qcamera_data_argm_t *app_cb = ( qcamera_data_argm_t * ) user_data;
+    QCameraPostProcessor *postProc = ( QCameraPostProcessor * ) cookie;
+    if ( ( NULL != app_cb ) && ( NULL != postProc ) ) {
+        if (app_cb && NULL != app_cb->release_data.data) {
+            app_cb->release_data.data->release(app_cb->release_data.data);
+            app_cb->release_data.data = NULL;
+        }
+        if (app_cb && NULL != app_cb->release_data.frame) {
+            postProc->releaseSuperBuf(app_cb->release_data.frame);
+            free(app_cb->release_data.frame);
+            app_cb->release_data.frame = NULL;
+        }
+        free(app_cb);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSuperBuf
+ *
+ * DESCRIPTION: function to release a superbuf frame by returning it back to the kernel
+ *
+ * PARAMETERS :
+ *   @super_buf : ptr to the superbuf frame
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraPostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
+{
+    QCameraChannel *pChannel = NULL;
+
+    if (NULL != super_buf) {
+        pChannel = m_parent->getChannelByHandle(super_buf->ch_id);
+
+        if ( NULL == pChannel ) {
+            if (m_pReprocChannel != NULL &&
+                m_pReprocChannel->getMyHandle() == super_buf->ch_id) {
+                pChannel = m_pReprocChannel;
+            }
+        }
+
+        if (pChannel != NULL) {
+            pChannel->bufDone(super_buf);
+        } else {
+            ALOGE(" %s : Channel id %d not found!!",
+                  __func__,
+                  super_buf->ch_id);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegJobData
+ *
+ * DESCRIPTION: function to release internal resources in jpeg job struct
+ *
+ * PARAMETERS :
+ *   @job     : ptr to jpeg job struct
+ *
+ * RETURN     : None
+ *
+ * NOTE       : the original source frame needs to be queued back to the kernel for
+ *              future use. The output buf of the jpeg job needs to be released since
+ *              it is allocated for each job. The Exif object needs to be deleted.
+ *==========================================================================*/
+void QCameraPostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
+{
+    ALOGV("%s: E", __func__);
+    if (NULL != job) {
+        if (NULL != job->src_reproc_frame) {
+            releaseSuperBuf(job->src_reproc_frame);
+            free(job->src_reproc_frame);
+            job->src_reproc_frame = NULL;
+        }
+
+        if (NULL != job->src_frame) {
+            releaseSuperBuf(job->src_frame);
+            free(job->src_frame);
+            job->src_frame = NULL;
+        }
+    }
+    ALOGV("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : getColorfmtFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg color format based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : jpeg color format that can be understood by the omx lib
+ *==========================================================================*/
+mm_jpeg_color_format QCameraPostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
+{
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    case CAM_FORMAT_YUV_420_NV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_420_YV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_422_NV61:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
+    default:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegImgTypeFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg encode image type based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : return jpeg source image format (YUV or Bitstream)
+ *==========================================================================*/
+mm_jpeg_format_t QCameraPostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
+{
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_YV12:
+    case CAM_FORMAT_YUV_422_NV61:
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_FMT_YUV;
+    default:
+        return MM_JPEG_FMT_YUV;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ *              mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ *   @jpeg_job_data : ptr to a struct saving job related information
+ *   @needNewSess   : flag to indicate if a new jpeg encoding session need
+ *                    to be created. After creation, this flag will be toggled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                                         uint8_t &needNewSess)
+{
+    ALOGV("%s : E", __func__);
+    int32_t ret = NO_ERROR;
+    mm_jpeg_job_t jpg_job;
+    uint32_t jobId = 0;
+    QCameraStream *main_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    QCameraStream *thumb_stream = NULL;
+    mm_camera_buf_def_t *thumb_frame = NULL;
+    mm_camera_super_buf_t *recvd_frame = jpeg_job_data->src_frame;
+
+    // find channel
+    QCameraChannel *pChannel = m_parent->getChannelByHandle(recvd_frame->ch_id);
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        }
+    }
+    if (pChannel == NULL) {
+        ALOGE("%s: No corresponding channel (ch_id = %d) exist, return here",
+              __func__, recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame and thumbnail frame
+    for (int i = 0; i < recvd_frame->num_bufs; i++) {
+        QCameraStream *pStream =
+            pChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            if (pStream->isTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isTypeOf(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_SNAPSHOT) ||
+                pStream->isOrignalTypeOf(CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT)) {
+                main_stream = pStream;
+                main_frame = recvd_frame->bufs[i];
+            } else if (pStream->isTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                       pStream->isTypeOf(CAM_STREAM_TYPE_POSTVIEW) ||
+                       pStream->isOrignalTypeOf(CAM_STREAM_TYPE_PREVIEW) ||
+                       pStream->isOrignalTypeOf(CAM_STREAM_TYPE_POSTVIEW)) {
+                thumb_stream = pStream;
+                thumb_frame = recvd_frame->bufs[i];
+            }
+        }
+    }
+
+    if (NULL == main_frame) {
+        ALOGE("%s : Main frame is NULL", __func__);
+        return BAD_VALUE;
+    }
+
+    QCameraMemory *memObj = (QCameraMemory *)main_frame->mem_info;
+    if (NULL == memObj) {
+        ALOGE("%s : Memeory Obj of main frame is NULL", __func__);
+        return NO_MEMORY;
+    }
+
+    // dump snapshot frame if enabled
+    m_parent->dumpFrameToFile(main_frame->buffer, main_frame->frame_len,
+                              main_frame->frame_idx, QCAMERA_DUMP_FRM_SNAPSHOT);
+
+    // send upper layer callback for raw image
+    camera_memory_t *mem = memObj->getMemory(main_frame->buf_idx, false);
+    if (NULL != m_parent->mDataCb &&
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+        cbArg.data = mem;
+        cbArg.index = 1;
+        m_parent->m_cbNotifier.notifyCallback(cbArg);
+    }
+    if (NULL != m_parent->mNotifyCb &&
+        m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+        qcamera_callback_argm_t cbArg;
+        memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+        cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+        cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+        cbArg.ext1 = 0;
+        cbArg.ext2 = 0;
+        m_parent->m_cbNotifier.notifyCallback(cbArg);
+    }
+
+    if (thumb_frame != NULL) {
+        // dump thumbnail frame if enabled
+        m_parent->dumpFrameToFile(thumb_frame->buffer, thumb_frame->frame_len,
+                                  thumb_frame->frame_idx, QCAMERA_DUMP_FRM_THUMBNAIL);
+    }
+
+    if (mJpegClientHandle <= 0) {
+        ALOGE("%s: Error: bug here, mJpegClientHandle is 0", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    if (needNewSess) {
+        // create jpeg encoding session
+        mm_jpeg_encode_params_t encodeParam;
+        memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+        getJpegEncodingConfig(encodeParam, main_stream, thumb_stream);
+        ALOGD("[KPI Perf] %s : call jpeg create_session", __func__);
+        ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+        if (ret != NO_ERROR) {
+            ALOGE("%s: error creating a new jpeg encoding session", __func__);
+            return ret;
+        }
+        needNewSess = FALSE;
+    }
+
+    // Fill in new job
+    memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+    jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpg_job.encode_job.session_id = mJpegSessionId;
+    jpg_job.encode_job.src_index = main_frame->buf_idx;
+    jpg_job.encode_job.dst_index = 0;
+
+    cam_rect_t crop;
+    memset(&crop, 0, sizeof(cam_rect_t));
+    main_stream->getCropInfo(crop);
+
+    cam_dimension_t src_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    // main dim
+    jpg_job.encode_job.main_dim.src_dim = src_dim;
+    jpg_job.encode_job.main_dim.dst_dim = src_dim;
+    jpg_job.encode_job.main_dim.crop = crop;
+
+    // thumbnail dim
+    if (m_bThumbnailNeeded == TRUE) {
+        if (thumb_stream == NULL) {
+            // need jpeg thumbnail, but no postview/preview stream exists
+            // we use the main stream/frame to encode thumbnail
+            thumb_stream = main_stream;
+            thumb_frame = main_frame;
+        }
+        memset(&crop, 0, sizeof(cam_rect_t));
+        thumb_stream->getCropInfo(crop);
+        memset(&src_dim, 0, sizeof(cam_dimension_t));
+        thumb_stream->getFrameDimension(src_dim);
+        jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+        m_parent->getThumbnailSize(jpg_job.encode_job.thumb_dim.dst_dim);
+        int rotation = m_parent->getJpegRotation();
+        if (rotation == 90 || rotation == 270) {
+            // swap dimension if rotation is 90 or 270
+            int32_t temp = jpg_job.encode_job.thumb_dim.dst_dim.height;
+            jpg_job.encode_job.thumb_dim.dst_dim.height =
+                jpg_job.encode_job.thumb_dim.dst_dim.width;
+            jpg_job.encode_job.thumb_dim.dst_dim.width = temp;
+        }
+        jpg_job.encode_job.thumb_dim.crop = crop;
+        jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
+    }
+
+    // set rotation only when no online rotation or offline pp rotation is done before
+    if (!m_parent->needRotationReprocess()) {
+        jpg_job.encode_job.rotation = m_parent->getJpegRotation();
+    }
+    ALOGV("%s: jpeg rotation is set to %d", __func__, jpg_job.encode_job.rotation);
+
+    // find meta data frame
+    mm_camera_buf_def_t *meta_frame = NULL;
+    for (int i = 0; i < jpeg_job_data->src_frame->num_bufs; i++) {
+        // look through input superbuf
+        if (jpeg_job_data->src_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+            meta_frame = jpeg_job_data->src_frame->bufs[i];
+            break;
+        }
+    }
+    if (meta_frame == NULL && jpeg_job_data->src_reproc_frame != NULL) {
+        // look through reprocess source superbuf
+        for (int i = 0; i < jpeg_job_data->src_reproc_frame->num_bufs; i++) {
+            if (jpeg_job_data->src_reproc_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_METADATA) {
+                meta_frame = jpeg_job_data->src_reproc_frame->bufs[i];
+                break;
+            }
+        }
+    }
+    if (meta_frame != NULL) {
+        // fill in meta data frame ptr
+        jpg_job.encode_job.p_metadata_v1 = (cam_metadata_info_t *)meta_frame->buffer;
+    }
+
+    ALOGD("[KPI Perf] %s : call jpeg start_job", __func__);
+    ret = mJpegHandle.start_job(&jpg_job, &jobId);
+    if (ret == NO_ERROR) {
+        // remember job info
+        jpeg_job_data->jobId = jobId;
+    }
+
+    return ret;
+}
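+// Illustrative call pattern (a sketch only -- the real driver is
+// dataProcessRoutine() below). The caller owns the needNewSess flag so a single
+// jpeg session is reused across consecutive jobs until data processing stops:
+//
+//   uint8_t needNewSess = TRUE;
+//   pme->m_ongoingJpegQ.enqueue((void *)jpeg_job);
+//   if (pme->encodeData(jpeg_job, needNewSess) != NO_ERROR) {
+//       pme->m_ongoingJpegQ.dequeue(false);   // roll back on failure
+//   }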
+
+/*===========================================================================
+ * FUNCTION   : processRawImageImpl
+ *
+ * DESCRIPTION: function to send raw image to upper layer
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : frame to be encoded
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::processRawImageImpl(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+
+    mm_camera_buf_def_t *frame = NULL;
+    for (int i = 0; i < recvd_frame->num_bufs; i++) {
+        if (recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_SNAPSHOT ||
+            recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT ||
+            recvd_frame->bufs[i]->stream_type == CAM_STREAM_TYPE_RAW) {
+            frame = recvd_frame->bufs[i];
+            break;
+        }
+    }
+    if (NULL == frame) {
+        ALOGE("%s: No valid raw buffer", __func__);
+        return BAD_VALUE;
+    }
+
+    QCameraMemory *rawMemObj = (QCameraMemory *)frame->mem_info;
+    camera_memory_t *raw_mem = NULL;
+
+    if (rawMemObj != NULL) {
+        raw_mem = rawMemObj->getMemory(frame->buf_idx, false);
+    }
+
+    if (NULL != rawMemObj && NULL != raw_mem) {
+        // dump frame into file
+        m_parent->dumpFrameToFile(frame->buffer, frame->frame_len,
+                                  frame->frame_idx, QCAMERA_DUMP_FRM_RAW);
+
+        // send data callback / notify for RAW_IMAGE
+        if (NULL != m_parent->mDataCb &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_DATA_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_RAW_IMAGE;
+            cbArg.data = raw_mem;
+            cbArg.index = 0;
+            m_parent->m_cbNotifier.notifyCallback(cbArg);
+        }
+        if (NULL != m_parent->mNotifyCb &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_RAW_IMAGE_NOTIFY) > 0) {
+            qcamera_callback_argm_t cbArg;
+            memset(&cbArg, 0, sizeof(qcamera_callback_argm_t));
+            cbArg.cb_type = QCAMERA_NOTIFY_CALLBACK;
+            cbArg.msg_type = CAMERA_MSG_RAW_IMAGE_NOTIFY;
+            cbArg.ext1 = 0;
+            cbArg.ext2 = 0;
+            m_parent->m_cbNotifier.notifyCallback(cbArg);
+        }
+
+        if ((m_parent->mDataCb != NULL) &&
+            m_parent->msgTypeEnabledWithLock(CAMERA_MSG_COMPRESSED_IMAGE) > 0) {
+            qcamera_release_data_t release_data;
+            memset(&release_data, 0, sizeof(qcamera_release_data_t));
+            release_data.frame = recvd_frame;
+            sendDataNotify(CAMERA_MSG_COMPRESSED_IMAGE,
+                           raw_mem,
+                           0,
+                           NULL,
+                           &release_data);
+        }
+    } else {
+        ALOGE("%s: Cannot get raw mem", __func__);
+        rc = UNKNOWN_ERROR;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcessRoutine
+ *
+ * DESCRIPTION: data process routine that handles input data either from input
+ *              Jpeg Queue to do jpeg encoding, or from input PP Queue to do
+ *              reprocessing.
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCameraPostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCameraPostProcessor::dataProcessRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    uint8_t needNewSess = TRUE;
+    QCameraPostProcessor *pme = (QCameraPostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
+
+    ALOGD("%s: E", __func__);
+    do {
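+        // block until a command is posted to this thread; a failed wait exits
+        // the routine unless errno is EINVAL, in which case the wait is retried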
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            ALOGD("%s: start data proc", __func__);
+            is_active = TRUE;
+            needNewSess = TRUE;
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                ALOGD("%s: stop data proc", __func__);
+                is_active = FALSE;
+
+                // cancel all ongoing jpeg jobs
+                qcamera_jpeg_data_t *jpeg_job =
+                    (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                while (jpeg_job != NULL) {
+                    pme->mJpegHandle.abort_job(jpeg_job->jobId);
+
+                    pme->releaseJpegJobData(jpeg_job);
+                    free(jpeg_job);
+
+                    jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                }
+
+                // destroy jpeg encoding session
+                if ( 0 < pme->mJpegSessionId ) {
+                    pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
+                    pme->mJpegSessionId = 0;
+                }
+
+                // free jpeg out buf and exif obj
+                if (pme->m_pJpegOutputMem != NULL) {
+                    pme->m_pJpegOutputMem->deallocate();
+                    delete pme->m_pJpegOutputMem;
+                    pme->m_pJpegOutputMem = NULL;
+                }
+                if (pme->m_pJpegExifObj != NULL) {
+                    delete pme->m_pJpegExifObj;
+                    pme->m_pJpegExifObj = NULL;
+                }
+                needNewSess = TRUE;
+
+                // stop reproc channel if exists
+                if (pme->m_pReprocChannel != NULL) {
+                    pme->m_pReprocChannel->stop();
+                    delete pme->m_pReprocChannel;
+                    pme->m_pReprocChannel = NULL;
+                }
+
+                // flush ongoing postproc Queue
+                pme->m_ongoingPPQ.flush();
+
+                // flush input jpeg Queue
+                pme->m_inputJpegQ.flush();
+
+                // flush input Postproc Queue
+                pme->m_inputPPQ.flush();
+
+                // flush input raw Queue
+                pme->m_inputRawQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                ALOGD("%s: Do next job, active is %d", __func__, is_active);
+                if (is_active == TRUE) {
+                    // check if there is any ongoing jpeg jobs
+                    if (pme->m_ongoingJpegQ.isEmpty()) {
+                        // no ongoing jpeg job, we are fine to send jpeg encoding job
+                        qcamera_jpeg_data_t *jpeg_job =
+                            (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+
+                        if (NULL != jpeg_job) {
+                            //play shutter sound
+                            pme->m_parent->playShutter();
+
+                            // add into ongoing jpeg job Q
+                            pme->m_ongoingJpegQ.enqueue((void *)jpeg_job);
+                            ret = pme->encodeData(jpeg_job, needNewSess);
+                            if (NO_ERROR != ret) {
+                                // dequeue the last one
+                                pme->m_ongoingJpegQ.dequeue(false);
+
+                                pme->releaseJpegJobData(jpeg_job);
+                                free(jpeg_job);
+                                pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                            }
+                        }
+                    }
+
+                    // process raw data if any
+                    mm_camera_super_buf_t *super_buf =
+                        (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+
+                    if (NULL != super_buf) {
+                        //play shutter sound
+                        pme->m_parent->playShutter();
+                        ret = pme->processRawImageImpl(super_buf);
+                        if (NO_ERROR != ret) {
+                            pme->releaseSuperBuf(super_buf);
+                            free(super_buf);
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                        }
+                    }
+
+                    mm_camera_super_buf_t *pp_frame =
+                        (mm_camera_super_buf_t *)pme->m_inputPPQ.dequeue();
+                    if (NULL != pp_frame) {
+                        qcamera_pp_data_t *pp_job =
+                            (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
+                        if (pp_job != NULL) {
+                            memset(pp_job, 0, sizeof(qcamera_pp_data_t));
+                            if (pme->m_pReprocChannel != NULL) {
+                                // add into ongoing PP job Q
+                                pp_job->src_frame = pp_frame;
+                                pme->m_ongoingPPQ.enqueue((void *)pp_job);
+                                ret = pme->m_pReprocChannel->doReprocess(pp_frame);
+                                if (NO_ERROR != ret) {
+                                    // remove from ongoing PP job Q
+                                    pme->m_ongoingPPQ.dequeue(false);
+                                }
+                            } else {
+                                ALOGE("%s: Reprocess channel is NULL", __func__);
+                                ret = -1;
+                            }
+                        } else {
+                            ALOGE("%s: no mem for qcamera_pp_data_t", __func__);
+                            ret = -1;
+                        }
+
+                        if (0 != ret) {
+                            // free pp_job
+                            if (pp_job != NULL) {
+                                free(pp_job);
+                            }
+                            // free frame
+                            if (pp_frame != NULL) {
+                                pme->releaseSuperBuf(pp_frame);
+                                free(pp_frame);
+                            }
+                            // send error notify
+                            pme->sendEvtNotify(CAMERA_MSG_ERROR, UNKNOWN_ERROR, 0);
+                        }
+                    }
+                } else {
+                    // not active, simply return buf and do no op
+                    qcamera_jpeg_data_t *jpeg_data =
+                        (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+                    if (NULL != jpeg_data) {
+                        pme->releaseJpegJobData(jpeg_data);
+                        free(jpeg_data);
+                    }
+                    mm_camera_super_buf_t *super_buf =
+                        (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                    super_buf = (mm_camera_super_buf_t *)pme->m_inputPPQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    ALOGD("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegPaddingReq
+ *
+ * DESCRIPTION: function to get the padding requirements for JPEG encoding
+ *
+ * PARAMETERS :
+ *   @padding_info : jpeg specific padding requirement
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraPostProcessor::getJpegPaddingReq(cam_padding_info_t &padding_info)
+{
+    // TODO: hardcoded for now; should be queried from mm-jpeg-interface
+    padding_info.width_padding  = CAM_PAD_NONE;
+    padding_info.height_padding  = CAM_PAD_TO_16;
+    padding_info.plane_padding  = CAM_PAD_TO_WORD;
+    return NO_ERROR;
+}
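+// Worked example of the values above (a sketch, assuming CAM_PAD_TO_16 and
+// CAM_PAD_TO_WORD carry their literal meanings): a 1920x1080 snapshot keeps
+// its width, has its height rounded up to 1088, and has each plane length
+// padded to a word boundary.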
+
+/*===========================================================================
+ * FUNCTION   : QCameraExif
+ *
+ * DESCRIPTION: constructor of QCameraExif
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraExif::QCameraExif()
+    : m_nNumEntries(0)
+{
+    memset(m_Entries, 0, sizeof(m_Entries));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraExif
+ *
+ * DESCRIPTION: destructor of QCameraExif. Will release internally allocated memory.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraExif::~QCameraExif()
+{
+    for (uint32_t i = 0; i < m_nNumEntries; i++) {
+        switch (m_Entries[i].tag_entry.type) {
+        case EXIF_BYTE:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._bytes != NULL) {
+                    free(m_Entries[i].tag_entry.data._bytes);
+                    m_Entries[i].tag_entry.data._bytes = NULL;
+                }
+            }
+            break;
+        case EXIF_ASCII:
+            {
+                if (m_Entries[i].tag_entry.data._ascii != NULL) {
+                    free(m_Entries[i].tag_entry.data._ascii);
+                    m_Entries[i].tag_entry.data._ascii = NULL;
+                }
+            }
+            break;
+        case EXIF_SHORT:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._shorts != NULL) {
+                    free(m_Entries[i].tag_entry.data._shorts);
+                    m_Entries[i].tag_entry.data._shorts = NULL;
+                }
+            }
+            break;
+        case EXIF_LONG:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._longs != NULL) {
+                    free(m_Entries[i].tag_entry.data._longs);
+                    m_Entries[i].tag_entry.data._longs = NULL;
+                }
+            }
+            break;
+        case EXIF_RATIONAL:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._rats != NULL) {
+                    free(m_Entries[i].tag_entry.data._rats);
+                    m_Entries[i].tag_entry.data._rats = NULL;
+                }
+            }
+            break;
+        case EXIF_UNDEFINED:
+            {
+                if (m_Entries[i].tag_entry.data._undefined != NULL) {
+                    free(m_Entries[i].tag_entry.data._undefined);
+                    m_Entries[i].tag_entry.data._undefined = NULL;
+                }
+            }
+            break;
+        case EXIF_SLONG:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._slongs != NULL) {
+                    free(m_Entries[i].tag_entry.data._slongs);
+                    m_Entries[i].tag_entry.data._slongs = NULL;
+                }
+            }
+            break;
+        case EXIF_SRATIONAL:
+            {
+                if (m_Entries[i].tag_entry.count > 1 &&
+                    m_Entries[i].tag_entry.data._srats != NULL) {
+                    free(m_Entries[i].tag_entry.data._srats);
+                    m_Entries[i].tag_entry.data._srats = NULL;
+                }
+            }
+            break;
+        }
+    }
+}
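+// Only heap-allocated payloads are freed above: ASCII and UNDEFINED entries are
+// always allocated in addEntry(), while the other types only allocate when
+// count > 1 (single values are stored inline in the tag entry).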
+
+/*===========================================================================
+ * FUNCTION   : addEntry
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data elements, in units of its type
+ *   @data    : input data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraExif::addEntry(exif_tag_id_t tagid,
+                              exif_tag_type_t type,
+                              uint32_t count,
+                              void *data)
+{
+    int32_t rc = NO_ERROR;
+    if (m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return NO_MEMORY;
+    }
+
+    m_Entries[m_nNumEntries].tag_id = tagid;
+    m_Entries[m_nNumEntries].tag_entry.type = type;
+    m_Entries[m_nNumEntries].tag_entry.count = count;
+    m_Entries[m_nNumEntries].tag_entry.copy = 1;
+    switch (type) {
+    case EXIF_BYTE:
+        {
+            if (count > 1) {
+                uint8_t *values = (uint8_t *)malloc(count);
+                if (values == NULL) {
+                    ALOGE("%s: No memory for byte array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._byte = *(uint8_t *)data;
+            }
+        }
+        break;
+    case EXIF_ASCII:
+        {
+            char *str = NULL;
+            str = (char *)malloc(count + 1);
+            if (str == NULL) {
+                ALOGE("%s: No memory for ascii string", __func__);
+                rc = NO_MEMORY;
+            } else {
+                memset(str, 0, count + 1);
+                memcpy(str, data, count);
+                m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
+            }
+        }
+        break;
+    case EXIF_SHORT:
+        {
+            if (count > 1) {
+                uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for short array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(uint16_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._short = *(uint16_t *)data;
+            }
+        }
+        break;
+    case EXIF_LONG:
+        {
+            if (count > 1) {
+                uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for long array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(uint32_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._longs = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._long = *(uint32_t *)data;
+            }
+        }
+        break;
+    case EXIF_RATIONAL:
+        {
+            if (count > 1) {
+                rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for rational array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(rat_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._rats = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._rat = *(rat_t *)data;
+            }
+        }
+        break;
+    case EXIF_UNDEFINED:
+        {
+            uint8_t *values = (uint8_t *)malloc(count);
+            if (values == NULL) {
+                ALOGE("%s: No memory for undefined array", __func__);
+                rc = NO_MEMORY;
+            } else {
+                memcpy(values, data, count);
+                m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
+            }
+        }
+        break;
+    case EXIF_SLONG:
+        {
+            if (count > 1) {
+                int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for signed long array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(int32_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._slong = *(int32_t *)data;
+            }
+        }
+        break;
+    case EXIF_SRATIONAL:
+        {
+            if (count > 1) {
+                srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+                if (values == NULL) {
+                    ALOGE("%s: No memory for signed rational array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count * sizeof(srat_t));
+                    m_Entries[m_nNumEntries].tag_entry.data._srats = values;
+                }
+            } else {
+                m_Entries[m_nNumEntries].tag_entry.data._srat = *(srat_t *)data;
+            }
+        }
+        break;
+    }
+
+    // Increase number of entries
+    m_nNumEntries++;
+    return rc;
+}
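+// Hypothetical usage sketch -- the tag ID constant below is an assumption
+// (expected to come from the exif headers), not something defined in this file:
+//
+//   QCameraExif exif;
+//   uint16_t orientation = 1;   // EXIF "top-left"
+//   exif.addEntry(EXIFTAGID_ORIENTATION, EXIF_SHORT, 1, &orientation);
+//   // entries are later handed to the encoder via getEntries()/getNumOfEntries()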
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraPostProc.h b/camera/QCamera2/HAL/QCameraPostProc.h
new file mode 100644
index 0000000..180c3d7
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraPostProc.h
@@ -0,0 +1,162 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_POSTPROC_H__
+#define __QCAMERA_POSTPROC_H__
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+#include "QCamera2HWI.h"
+
+namespace qcamera {
+
+class QCameraExif;
+
+typedef struct {
+    uint32_t jobId;                  // job ID
+    uint32_t client_hdl;             // handle of jpeg client (obtained when jpeg is opened)
+    mm_camera_super_buf_t *src_frame;// source frame (needs to be returned to kernel when done)
+    mm_camera_super_buf_t *src_reproc_frame; // original source frame for reproc if not NULL
+} qcamera_jpeg_data_t;
+
+typedef struct {
+    uint32_t jobId;                  // job ID
+    mm_camera_super_buf_t *src_frame;// source frame (needs to be returned to kernel when done)
+} qcamera_pp_data_t;
+
+typedef struct {
+    mm_camera_super_buf_t *frame;    // source frame that needs post process
+} qcamera_pp_request_t;
+
+typedef struct {
+    uint32_t jobId;                  // job ID (obtained when start_jpeg_job)
+    jpeg_job_status_t status;        // jpeg encoding status
+    mm_jpeg_output_t out_data;         // jpeg output buffer info
+} qcamera_jpeg_evt_payload_t;
+
+typedef struct {
+    camera_memory_t *        data;     // ptr to data memory struct
+    mm_camera_super_buf_t *  frame;    // ptr to frame
+} qcamera_release_data_t;
+
+typedef struct {
+    int32_t                  msg_type; // msg type of data notify
+    camera_memory_t *        data;     // ptr to data memory struct
+    unsigned int             index;    // index of the buf in the whole buffer
+    camera_frame_metadata_t *metadata; // ptr to meta data
+    qcamera_release_data_t   release_data; // any data that needs to be released after notify
+} qcamera_data_argm_t;
+
+#define MAX_EXIF_TABLE_ENTRIES 17
+class QCameraExif
+{
+public:
+    QCameraExif();
+    virtual ~QCameraExif();
+
+    int32_t addEntry(exif_tag_id_t tagid,
+                     exif_tag_type_t type,
+                     uint32_t count,
+                     void *data);
+    uint32_t getNumOfEntries() {return m_nNumEntries;};
+    QEXIF_INFO_DATA *getEntries() {return m_Entries;};
+
+private:
+    QEXIF_INFO_DATA m_Entries[MAX_EXIF_TABLE_ENTRIES];  // exif tags for JPEG encoder
+    uint32_t  m_nNumEntries;                            // number of valid entries
+};
+
+class QCameraPostProcessor
+{
+public:
+    QCameraPostProcessor(QCamera2HardwareInterface *cam_ctrl);
+    virtual ~QCameraPostProcessor();
+
+    int32_t init(jpeg_encode_callback_t jpeg_cb, void *user_data);
+    int32_t deinit();
+    int32_t start(QCameraChannel *pSrcChannel);
+    int32_t stop();
+    int32_t processData(mm_camera_super_buf_t *frame);
+    int32_t processRawData(mm_camera_super_buf_t *frame);
+    int32_t processPPData(mm_camera_super_buf_t *frame);
+    int32_t processJpegEvt(qcamera_jpeg_evt_payload_t *evt);
+    int32_t getJpegPaddingReq(cam_padding_info_t &padding_info);
+
+private:
+    int32_t sendDataNotify(int32_t msg_type,
+                           camera_memory_t *data,
+                           uint8_t index,
+                           camera_frame_metadata_t *metadata,
+                           qcamera_release_data_t *release_data);
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    qcamera_jpeg_data_t *findJpegJobByJobId(uint32_t jobId);
+    mm_jpeg_color_format getColorfmtFromImgFmt(cam_format_t img_fmt);
+    mm_jpeg_format_t getJpegImgTypeFromImgFmt(cam_format_t img_fmt);
+    int32_t getJpegEncodingConfig(mm_jpeg_encode_params_t& encode_parm,
+                                  QCameraStream *main_stream,
+                                  QCameraStream *thumb_stream);
+    int32_t encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                       uint8_t &needNewSess);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    static void releaseNotifyData(void *user_data, void *cookie);
+    void releaseJpegJobData(qcamera_jpeg_data_t *job);
+    int32_t processRawImageImpl(mm_camera_super_buf_t *recvd_frame);
+
+    static void releaseJpegData(void *data, void *user_data);
+    static void releasePPInputData(void *data, void *user_data);
+    static void releaseOngoingPPData(void *data, void *user_data);
+
+    static void *dataProcessRoutine(void *data);
+
+private:
+    QCamera2HardwareInterface *m_parent;
+    jpeg_encode_callback_t     mJpegCB;
+    void *                     mJpegUserData;
+    mm_jpeg_ops_t              mJpegHandle;
+    uint32_t                   mJpegClientHandle;
+    uint32_t                   mJpegSessionId;
+
+    QCameraStreamMemory *      m_pJpegOutputMem;
+    QCameraExif *              m_pJpegExifObj;
+    int8_t                     m_bThumbnailNeeded;
+    QCameraReprocessChannel *  m_pReprocChannel;
+
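+    // Data flow (see dataProcessRoutine): frames enter through m_inputJpegQ,
+    // m_inputRawQ and m_inputPPQ and are drained by m_dataProcTh; jobs in
+    // flight are tracked in m_ongoingJpegQ and m_ongoingPPQ.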
+    QCameraQueue m_inputPPQ;            // input queue for postproc
+    QCameraQueue m_ongoingPPQ;          // ongoing postproc queue
+    QCameraQueue m_inputJpegQ;          // input jpeg job queue
+    QCameraQueue m_ongoingJpegQ;        // ongoing jpeg job queue
+    QCameraQueue m_inputRawQ;           // input raw job queue
+    QCameraCmdThread m_dataProcTh;      // thread for data processing
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_POSTPROC_H__ */
diff --git a/camera/QCamera2/HAL/QCameraStateMachine.cpp b/camera/QCamera2/HAL/QCameraStateMachine.cpp
new file mode 100644
index 0000000..44ff872
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStateMachine.cpp
@@ -0,0 +1,2586 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStateMachine"
+
+#include <utils/Errors.h>
+#include "QCamera2HWI.h"
+#include "QCameraStateMachine.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : smEvtProcRoutine
+ *
+ * DESCRIPTION: State machine process thread routine to handle events
+ *              in different states.
+ *
+ * PARAMETERS :
+ *   @data    : ptr to QCameraStateMachine object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStateMachine::smEvtProcRoutine(void *data)
+{
+    int running = 1, ret;
+    QCameraStateMachine *pme = (QCameraStateMachine *)data;
+
+    ALOGD("%s: E", __func__);
+    do {
+        do {
+            ret = cam_sem_wait(&pme->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        // first check API cmd queue
+        qcamera_sm_cmd_t *node = (qcamera_sm_cmd_t *)pme->api_queue.dequeue();
+        if (node == NULL) {
+            // no API cmd, then check evt cmd queue
+            node = (qcamera_sm_cmd_t *)pme->evt_queue.dequeue();
+        }
+        if (node != NULL) {
+            switch (node->cmd) {
+            case QCAMERA_SM_CMD_TYPE_API:
+                pme->stateMachine(node->evt, node->evt_payload);
+                // API calls are effectively synchronous, so evt_payload is managed by HWI;
+                // no need to free the payload for API
+                break;
+            case QCAMERA_SM_CMD_TYPE_EVT:
+                pme->stateMachine(node->evt, node->evt_payload);
+
+                // EVT is an async call, so the payload needs to be freed after use
+                free(node->evt_payload);
+                node->evt_payload = NULL;
+                break;
+            case QCAMERA_SM_CMD_TYPE_EXIT:
+                running = 0;
+                break;
+            default:
+                break;
+            }
+            free(node);
+            node = NULL;
+        }
+    } while (running);
+    ALOGD("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStateMachine
+ *
+ * DESCRIPTION: constructor of QCameraStateMachine. Will start the process thread.
+ *
+ * PARAMETERS :
+ *   @ctrl    : ptr to HWI object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStateMachine::QCameraStateMachine(QCamera2HardwareInterface *ctrl) :
+    api_queue(),
+    evt_queue()
+{
+    m_parent = ctrl;
+    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+    cmd_pid = 0;
+    cam_sem_init(&cmd_sem, 0);
+    pthread_create(&cmd_pid,
+                   NULL,
+                   smEvtProcRoutine,
+                   this);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStateMachine
+ *
+ * DESCRIPTION: destructor of QCameraStateMachine. Will stop the process thread.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCameraStateMachine::~QCameraStateMachine()
+{
+    if (cmd_pid != 0) {
+        qcamera_sm_cmd_t *node =
+            (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(qcamera_sm_cmd_t));
+            node->cmd = QCAMERA_SM_CMD_TYPE_EXIT;
+
+            api_queue.enqueue((void *)node);
+            cam_sem_post(&cmd_sem);
+
+            /* wait until cmd thread exits */
+            if (pthread_join(cmd_pid, NULL) != 0) {
+                ALOGD("%s: pthread dead already\n", __func__);
+            }
+        }
+        cmd_pid = 0;
+    }
+    cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION   : procAPI
+ *
+ * DESCRIPTION: process incoming API request from framework layer.
+ *
+ * PARAMETERS :
+ *   @evt          : event to be processed
+ *   @api_payload  : API payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procAPI(qcamera_sm_evt_enum_t evt,
+                                     void *api_payload)
+{
+    qcamera_sm_cmd_t *node =
+        (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+    if (NULL == node) {
+        ALOGE("%s: No memory for qcamera_sm_cmd_t", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(node, 0, sizeof(qcamera_sm_cmd_t));
+    node->cmd = QCAMERA_SM_CMD_TYPE_API;
+    node->evt = evt;
+    node->evt_payload = api_payload;
+    if (api_queue.enqueue((void *)node)) {
+        cam_sem_post(&cmd_sem);
+        return NO_ERROR;
+    } else {
+        free(node);
+        return UNKNOWN_ERROR;
+    }
+}
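+// Usage note (an assumption based on the payload-ownership comments in
+// smEvtProcRoutine(), not on code in this file): the HWI layer treats procAPI()
+// as a synchronous request -- it enqueues the event here and then waits for the
+// matching signalAPIResult() from the state handler.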
+
+/*===========================================================================
+ * FUNCTION   : procEvt
+ *
+ * DESCRIPTION: process incoming event from mm-camera-interface and
+ *              mm-jpeg-interface.
+ *
+ * PARAMETERS :
+ *   @evt          : event to be processed
+ *   @evt_payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvt(qcamera_sm_evt_enum_t evt,
+                                     void *evt_payload)
+{
+    qcamera_sm_cmd_t *node =
+        (qcamera_sm_cmd_t *)malloc(sizeof(qcamera_sm_cmd_t));
+    if (NULL == node) {
+        ALOGE("%s: No memory for qcamera_sm_cmd_t", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(node, 0, sizeof(qcamera_sm_cmd_t));
+    node->cmd = QCAMERA_SM_CMD_TYPE_EVT;
+    node->evt = evt;
+    node->evt_payload = evt_payload;
+    if (evt_queue.enqueue((void *)node)) {
+        cam_sem_post(&cmd_sem);
+        return NO_ERROR;
+    } else {
+        free(node);
+        return UNKNOWN_ERROR;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : stateMachine
+ *
+ * DESCRIPTION: finite state machine entry function. Depends on state,
+ *              incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::stateMachine(qcamera_sm_evt_enum_t evt, void *payload)
+{
+    int32_t rc = NO_ERROR;
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEW_STOPPED:
+        rc = procEvtPreviewStoppedState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREVIEW_READY:
+        rc = procEvtPreviewReadyState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREVIEWING:
+        rc = procEvtPreviewingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+        rc = procEvtPrepareSnapshotState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PIC_TAKING:
+        rc = procEvtPicTakingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_RECORDING:
+        rc = procEvtRecordingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+        rc = procEvtVideoPicTakingState(evt, payload);
+        break;
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+        rc = procEvtPreviewPicTakingState(evt, payload);
+        break;
+    default:
+        break;
+    }
+
+    return rc;
+}
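+// Each procEvt*State() handler below decides the next state; for example, in
+// PREVIEW_STOPPED a start-preview request moves to PREVIEW_READY when no
+// preview window is set yet, or to PREVIEWING once preview actually starts.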
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewStoppedState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_STOPPED.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt,
+                                                        void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            rc = m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                rc = m_parent->commitParameterChanges();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+        {
+            if (m_parent->mPreviewWindow == NULL) {
+                rc = m_parent->preparePreview();
+                if(rc == NO_ERROR) {
+                    // preview window is not set yet, move to previewReady state
+                    m_state = QCAMERA_SM_STATE_PREVIEW_READY;
+                } else {
+                    ALOGE("%s: preparePreview failed",__func__);
+                }
+            } else {
+                rc = m_parent->preparePreview();
+                if (rc == NO_ERROR) {
+                    rc = m_parent->startPreview();
+                    if (rc != NO_ERROR) {
+                        m_parent->unpreparePreview();
+                    } else {
+                        // start preview success, move to previewing state
+                        m_state = QCAMERA_SM_STATE_PREVIEWING;
+                    }
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            rc = m_parent->preparePreview();
+            if (rc == NO_ERROR) {
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // no op needed here
+            ALOGD("%s: already in preview stopped state, do nothing", __func__);
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            rc = m_parent->release();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            // no op needed here
+            ALOGD("%s: No ops for evt(%d) in state(%d)", __func__, evt, m_state);
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *(qcamera_thermal_level_enum_t *)&payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewReadyState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_READY.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
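+// A valid preview window delivered via QCAMERA_SM_EVT_SET_PREVIEW_WINDOW is
+// what starts preview from this state and moves the machine to PREVIEWING
+// (or back to PREVIEW_STOPPED if startPreview() fails); recording and
+// snapshot requests are rejected below with INVALID_OPERATION.
+// Rough call flow, as a sketch only (procAPI() and the state machine thread
+// are declared elsewhere in this class and are not part of this hunk):
+//   set_preview_window(dev, win)
+//     -> procAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, win)      // queued
+//     -> state machine thread -> procEvtPreviewReadyState(evt, payload)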
+int32_t QCameraStateMachine::procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt,
+                                                      void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            m_parent->setPreviewWindow((struct preview_stream_ops *)payload);
+            if (m_parent->mPreviewWindow != NULL) {
+                rc = m_parent->startPreview();
+                if (rc != NO_ERROR) {
+                    m_parent->unpreparePreview();
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                } else {
+                    m_state = QCAMERA_SM_STATE_PREVIEWING;
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // need to restart preview for parameters to take effect
+                    m_parent->unpreparePreview();
+                    // commit parameter changes to server
+                    m_parent->commitParameterChanges();
+                    // prepare preview again
+                    rc = m_parent->preparePreview();
+                    if (rc != NO_ERROR) {
+                        m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                    }
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+        {
+            // no ops here
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            m_parent->unpreparePreview();
+            rc = NO_ERROR;
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEWING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
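+// In PREVIEWING the preview stream is live. Parameter updates flagged as
+// needing a restart stop and restart the preview, QCAMERA_SM_EVT_START_RECORDING
+// moves the machine to RECORDING, and QCAMERA_SM_EVT_TAKE_PICTURE either takes
+// a regular/ZSL snapshot or a live snapshot depending on the recording hint
+// (see the comment at that case below).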
+int32_t QCameraStateMachine::procEvtPreviewingState(qcamera_sm_evt_enum_t evt,
+                                                    void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Error setting preview window during previewing
+            ALOGE("Cannot set preview window when preview is running");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // need to restart preview for parameters to take effect
+                    // stop preview
+                    m_parent->stopPreview();
+                    // commit parameter changes to server
+                    m_parent->commitParameterChanges();
+                    // start preview again
+                    rc = m_parent->preparePreview();
+                    if (rc == NO_ERROR) {
+                        rc = m_parent->startPreview();
+                        if (rc != NO_ERROR) {
+                            m_parent->unpreparePreview();
+                        }
+                    }
+                    if (rc != NO_ERROR) {
+                        m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                    }
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+        {
+            // no ops here
+            ALOGD("%s: Already in previewing, no ops here to start preview", __func__);
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            rc = m_parent->stopPreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            rc = m_parent->startRecording();
+            if (rc == NO_ERROR) {
+                // move state to recording state
+                m_state = QCAMERA_SM_STATE_RECORDING;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+            rc = m_parent->prepareHardwareForSnapshot(FALSE);
+            if (rc == NO_ERROR) {
+                // Do not signal API result in this case.
+                // Need to wait for snapshot done in metadata.
+                m_state = QCAMERA_SM_STATE_PREPARE_SNAPSHOT;
+            } else {
+                // Do not change state in this case.
+                ALOGE("%s: prepareHardwareForSnapshot failed %d",
+                    __func__, rc);
+
+                result.status = rc;
+                result.request_api = evt;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
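+        // takePicture() covers the normal and ZSL capture paths; when the
+        // recording hint is set (camcorder use case) a live snapshot is taken
+        // instead so the preview keeps running, and the state moves to
+        // PREVIEW_PIC_TAKING.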
+        {
+            if (m_parent->mParameters.getRecordingHintValue() == false) {
+                rc = m_parent->takePicture();
+                if (rc == NO_ERROR) {
+                    // move state to picture taking state
+                    if (m_parent->isZSLMode()) {
+                        m_state = QCAMERA_SM_STATE_PREVIEW_PIC_TAKING;
+                    } else {
+                        m_state = QCAMERA_SM_STATE_PIC_TAKING;
+                    }
+                } else {
+                    // move state to preview stopped state
+                    m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+                }
+                result.status = rc;
+                result.request_api = evt;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+            } else {
+                rc = m_parent->takeLiveSnapshot();
+                if (rc == NO_ERROR) {
+                    m_state = QCAMERA_SM_STATE_PREVIEW_PIC_TAKING;
+                    result.status = rc;
+                    result.request_api = evt;
+                    result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                    m_parent->signalAPIResult(&result);
+                }
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, internal_evt->evt_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGD("%s: no handling for server evt (%d) at this state",
+                      __func__, cam_evt->server_event_type);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *(qcamera_thermal_level_enum_t *)&payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPrepareSnapshotState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREPARE_SNAPSHOT.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
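+// PREPARE_SNAPSHOT is a transient state entered from PREVIEWING while
+// prepareHardwareForSnapshot() completes. All application APIs are rejected
+// here; the machine returns to PREVIEWING only when the internal
+// QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE event arrives, at which point the
+// deferred QCAMERA_SM_EVT_PREPARE_SNAPSHOT result is finally signalled.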
+int32_t QCameraStateMachine::procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt,
+                                                    void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+    case QCAMERA_SM_EVT_SET_PARAMS:
+    case QCAMERA_SM_EVT_GET_PARAMS:
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+    case QCAMERA_SM_EVT_DUMP:
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE:
+                ALOGI("%s: Received QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE event",
+                    __func__);
+                m_parent->processPrepSnapshotDoneEvent(internal_evt->prep_snapshot_state);
+                m_state = QCAMERA_SM_STATE_PREVIEWING;
+
+                result.status = NO_ERROR;
+                result.request_api = QCAMERA_SM_EVT_PREPARE_SNAPSHOT;
+                result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+                m_parent->signalAPIResult(&result);
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, internal_evt->evt_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
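+// PIC_TAKING is the non-ZSL still-capture state (preview is reported as
+// disabled here). Stopping preview or cancelling the picture aborts the
+// capture and moves to PREVIEW_STOPPED; JPEG and snapshot-done notifications
+// from the backend are consumed in this state.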
+int32_t QCameraStateMachine::procEvtPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                   void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Setting the preview window is not allowed while a still capture is in progress
+            ALOGE("Cannot set preview window while taking a picture");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                rc = m_parent->commitParameterChanges();
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            // cancel picture first
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGD("%s: no handling for server evt (%d) at this state",
+                      __func__, cam_evt->server_event_type);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            rc = m_parent->cancelPicture();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *(qcamera_thermal_level_enum_t *)&payload);
+        }
+        break;
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtRecordingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_RECORDING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
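+// In RECORDING, video frames are being delivered to the client/encoder.
+// Parameters that would require a preview restart are rejected with BAD_VALUE,
+// QCAMERA_SM_EVT_TAKE_PICTURE starts a live (video) snapshot and moves to
+// VIDEO_PIC_TAKING, and QCAMERA_SM_EVT_STOP_RECORDING returns to PREVIEWING.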
+int32_t QCameraStateMachine::procEvtRecordingState(qcamera_sm_evt_enum_t evt,
+                                                   void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Setting the preview window is not allowed while recording
+            ALOGE("Cannot set preview window while recording");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // cannot set parameters that require a restart during recording
+                    ALOGE("%s: Cannot set parameters that require restart during recording",
+                          __func__);
+                    rc = BAD_VALUE;
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+        {
+            m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+            rc = m_parent->takeLiveSnapshot();
+            if (rc != NO_ERROR) {
+                m_state = QCAMERA_SM_STATE_RECORDING;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            // no ops here
+            ALOGD("%s: already in recording state, no ops for start_recording", __func__);
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+        {
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREPARE_SNAPSHOT:
+        {
+            // In video snapshot, preparing the hardware is a no-op.
+            result.status = NO_ERROR;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid internal event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *(qcamera_thermal_level_enum_t *)&payload);
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtVideoPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_VIDEO_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
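+// VIDEO_PIC_TAKING means a live snapshot is in progress while recording
+// continues. Cancelling the picture returns to RECORDING; stopping the
+// recording while the snapshot is still pending moves to PREVIEW_PIC_TAKING.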
+int32_t QCameraStateMachine::procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                        void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+        {
+            // Setting the preview window is not allowed during a video snapshot
+            ALOGE("Cannot set preview window during video snapshot");
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // cannot set parameters that require a restart during recording
+                    ALOGE("%s: Cannot set parameters that require restart during recording",
+                          __func__);
+                    rc = BAD_VALUE;
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+        {
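+            // a live snapshot is still in progress, so stopping the video
+            // moves us to taking a picture over preview only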
+            rc = m_parent->stopRecording();
+            m_state = QCAMERA_SM_STATE_PREVIEW_PIC_TAKING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid server event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            rc = m_parent->cancelLiveSnapshot();
+            m_state = QCAMERA_SM_STATE_RECORDING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
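+            // the payload pointer value itself carries the thermal level,
+            // so reinterpret the pointer rather than dereference it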
+            rc = m_parent->updateThermalLevel(
+                    *(qcamera_thermal_level_enum_t *)&payload);
+        }
+        break;
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : procEvtPreviewPicTakingState
+ *
+ * DESCRIPTION: finite state machine function to handle event in state of
+ *              QCAMERA_SM_STATE_PREVIEW_PIC_TAKING.
+ *
+ * PARAMETERS :
+ *   @evt      : event to be processed
+ *   @payload  : event payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStateMachine::procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt,
+                                                          void *payload)
+{
+    int32_t rc = NO_ERROR;
+    qcamera_api_result_t result;
+    memset(&result, 0, sizeof(qcamera_api_result_t));
+
+    switch (evt) {
+    case QCAMERA_SM_EVT_SET_CALLBACKS:
+        {
+            qcamera_sm_evt_setcb_payload_t *setcbs =
+                (qcamera_sm_evt_setcb_payload_t *)payload;
+            rc = m_parent->setCallBacks(setcbs->notify_cb,
+                                        setcbs->data_cb,
+                                        setcbs->data_cb_timestamp,
+                                        setcbs->get_memory,
+                                        setcbs->user);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_ENABLE_MSG_TYPE:
+        {
+            rc = m_parent->enableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DISABLE_MSG_TYPE:
+        {
+            rc = m_parent->disableMsgType(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_MSG_TYPE_ENABLED:
+        {
+            int enabled = m_parent->msgTypeEnabled(int32_t(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = enabled;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SET_PARAMS:
+        {
+            bool needRestart = false;
+            rc = m_parent->updateParameters((char*)payload, needRestart);
+            if (rc == NO_ERROR) {
+                if (needRestart) {
+                    // need restart preview for parameters to take effect
+                    // stop preview
+                    m_parent->stopPreview();
+                    // commit parameter changes to server
+                    m_parent->commitParameterChanges();
+                    // start preview again
+                    rc = m_parent->preparePreview();
+                    if (rc == NO_ERROR) {
+                        rc = m_parent->startPreview();
+                        if (rc != NO_ERROR) {
+                            m_parent->unpreparePreview();
+                        }
+                    }
+                    if (rc != NO_ERROR) {
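+                        // preview could not be restarted; fall back to the
+                        // preview-stopped picture-taking state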
+                        m_state = QCAMERA_SM_STATE_PIC_TAKING;
+                    }
+                } else {
+                    rc = m_parent->commitParameterChanges();
+                }
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_GET_PARAMS:
+        {
+            result.params = m_parent->getParameters();
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_PARAMS;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PUT_PARAMS:
+        {
+            rc = m_parent->putParameters((char*)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_PREVIEW_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 1;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RECORDING_ENABLED:
+        {
+            rc = NO_ERROR;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_ENABLE_FLAG;
+            result.enabled = 0;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS:
+        {
+            rc = m_parent->storeMetaDataInBuffers(int(payload));
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_DUMP:
+        {
+            rc = m_parent->dump((int)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_AUTO_FOCUS:
+        {
+            rc = m_parent->autoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_AUTO_FOCUS:
+        {
+            rc = m_parent->cancelAutoFocus();
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_SEND_COMMAND:
+        {
+            qcamera_sm_evt_command_payload_t *cmd_payload =
+                (qcamera_sm_evt_command_payload_t *)payload;
+            rc = m_parent->sendCommand(cmd_payload->cmd,
+                                       cmd_payload->arg1,
+                                       cmd_payload->arg2);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME:
+        {
+            rc = m_parent->releaseRecordingFrame((const void *)payload);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_CANCEL_PICTURE:
+        {
+            if (m_parent->isZSLMode()) {
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+            }
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_PREVIEW:
+        {
+            if (m_parent->isZSLMode()) {
+                // cancel picture first
+                rc = m_parent->cancelPicture();
+                m_parent->stopChannel(QCAMERA_CH_TYPE_ZSL);
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+                m_parent->stopChannel(QCAMERA_CH_TYPE_PREVIEW);
+            }
+            // unprepare preview
+            m_parent->unpreparePreview();
+            m_state = QCAMERA_SM_STATE_PREVIEW_STOPPED;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_START_RECORDING:
+        {
+            rc = m_parent->startRecording();
+            if (rc == NO_ERROR) {
+                m_state = QCAMERA_SM_STATE_VIDEO_PIC_TAKING;
+            }
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_REG_FACE_IMAGE:
+        {
+            int32_t faceID = 0;
+            qcamera_sm_evt_reg_face_payload_t *reg_payload =
+                (qcamera_sm_evt_reg_face_payload_t *)payload;
+            rc = m_parent->registerFaceImage(reg_payload->img_ptr,
+                                             reg_payload->config,
+                                             faceID);
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_HANDLE;
+            result.handle = faceID;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_STOP_RECORDING:
+    case QCAMERA_SM_EVT_START_PREVIEW:
+    case QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW:
+    case QCAMERA_SM_EVT_TAKE_PICTURE:
+    case QCAMERA_SM_EVT_SET_PREVIEW_WINDOW:
+    case QCAMERA_SM_EVT_RELEASE:
+        {
+            ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+            rc = INVALID_OPERATION;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalAPIResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_INTERNAL:
+        {
+            qcamera_sm_internal_evt_payload_t *internal_evt =
+                (qcamera_sm_internal_evt_payload_t *)payload;
+            switch (internal_evt->evt_type) {
+            case QCAMERA_INTERNAL_EVT_FOCUS_UPDATE:
+                rc = m_parent->processAutoFocusEvent(internal_evt->focus_data);
+                break;
+            default:
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_EVT_NOTIFY:
+        {
+            mm_camera_event_t *cam_evt = (mm_camera_event_t *)payload;
+            switch (cam_evt->server_event_type) {
+            case CAM_EVENT_TYPE_DAEMON_DIED:
+                {
+                    m_parent->sendEvtNotify(CAMERA_MSG_ERROR,
+                                            CAMERA_ERROR_SERVER_DIED,
+                                            0);
+                }
+                break;
+            default:
+                ALOGE("%s: Invalid server event %d in state(%d)",
+                            __func__, cam_evt->server_event_type, m_state);
+                break;
+            }
+        }
+        break;
+    case QCAMERA_SM_EVT_JPEG_EVT_NOTIFY:
+        {
+            qcamera_jpeg_evt_payload_t *jpeg_job =
+                (qcamera_jpeg_evt_payload_t *)payload;
+            rc = m_parent->processJpegNotify(jpeg_job);
+        }
+        break;
+    case QCAMERA_SM_EVT_SNAPSHOT_DONE:
+        {
+            if (m_parent->isZSLMode()) {
+                rc = m_parent->cancelPicture();
+            } else {
+                rc = m_parent->cancelLiveSnapshot();
+            }
+            m_state = QCAMERA_SM_STATE_PREVIEWING;
+            result.status = rc;
+            result.request_api = evt;
+            result.result_type = QCAMERA_API_RESULT_TYPE_DEF;
+            m_parent->signalEvtResult(&result);
+        }
+        break;
+    case QCAMERA_SM_EVT_THERMAL_NOTIFY:
+        {
+            rc = m_parent->updateThermalLevel(
+                    *(qcamera_thermal_level_enum_t *)&payload);
+        }
+        break;
+    default:
+        ALOGE("%s: cannot handle evt(%d) in state(%d)", __func__, evt, m_state);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : isPreviewRunning
+ *
+ * DESCRIPTION: check if preview is in progress.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- preview running
+ *              false -- preview stopped
+ *==========================================================================*/
+bool QCameraStateMachine::isPreviewRunning()
+{
+    switch (m_state) {
+    case QCAMERA_SM_STATE_PREVIEWING:
+    case QCAMERA_SM_STATE_RECORDING:
+    case QCAMERA_SM_STATE_VIDEO_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREVIEW_PIC_TAKING:
+    case QCAMERA_SM_STATE_PREPARE_SNAPSHOT:
+        return true;
+    default:
+        return false;
+    }
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraStateMachine.h b/camera/QCamera2/HAL/QCameraStateMachine.h
new file mode 100644
index 0000000..66eb63b
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStateMachine.h
@@ -0,0 +1,207 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STATEMACHINE_H__
+#define __QCAMERA_STATEMACHINE_H__
+
+#include <pthread.h>
+
+#include <cam_semaphore.h>
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+#include "QCameraQueue.h"
+#include "QCameraChannel.h"
+
+namespace qcamera {
+
+class QCamera2HardwareInterface;
+
+typedef enum {
+    /*******BEGIN OF: API EVT*********/
+    QCAMERA_SM_EVT_SET_PREVIEW_WINDOW = 1,   // set preview window
+    QCAMERA_SM_EVT_SET_CALLBACKS,            // set callbacks
+    QCAMERA_SM_EVT_ENABLE_MSG_TYPE,          // enable msg type
+    QCAMERA_SM_EVT_DISABLE_MSG_TYPE,         // disable msg type
+    QCAMERA_SM_EVT_MSG_TYPE_ENABLED,         // query if a certain msg type is enabled
+
+    QCAMERA_SM_EVT_SET_PARAMS,               // set parameters
+    QCAMERA_SM_EVT_GET_PARAMS,               // get parameters
+    QCAMERA_SM_EVT_PUT_PARAMS,               // put parameters, release param buf
+
+    QCAMERA_SM_EVT_START_PREVIEW,            // start preview (zsl, camera mode, camcorder mode)
+    QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW,  // start no display preview (zsl, camera mode, camcorder mode)
+    QCAMERA_SM_EVT_STOP_PREVIEW,             // stop preview (zsl, camera mode, camcorder mode)
+    QCAMERA_SM_EVT_PREVIEW_ENABLED,          // query if preview is running
+
+    QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS,   // request to store meta data in video buffers
+    QCAMERA_SM_EVT_START_RECORDING,          // start recording
+    QCAMERA_SM_EVT_STOP_RECORDING,           // stop recording
+    QCAMERA_SM_EVT_RECORDING_ENABLED,        // query if recording is running
+    QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME,  // release recording frame
+
+    QCAMERA_SM_EVT_PREPARE_SNAPSHOT,         // prepare snapshot in case LED needs to be flashed
+    QCAMERA_SM_EVT_TAKE_PICTURE,             // take picture (zsl, regular capture, live snapshot)
+    QCAMERA_SM_EVT_CANCEL_PICTURE,           // cancel picture
+
+    QCAMERA_SM_EVT_START_AUTO_FOCUS,         // start auto focus
+    QCAMERA_SM_EVT_STOP_AUTO_FOCUS,          // stop auto focus
+    QCAMERA_SM_EVT_SEND_COMMAND,             // send command
+
+    QCAMERA_SM_EVT_RELEASE,                  // release camera resource
+    QCAMERA_SM_EVT_DUMP,                     // dump
+    QCAMERA_SM_EVT_REG_FACE_IMAGE,           // register a face image in imaging lib
+    /*******END OF: API EVT*********/
+
+    QCAMERA_SM_EVT_EVT_INTERNAL,             // internal evt notify
+    QCAMERA_SM_EVT_EVT_NOTIFY,               // evt notify from server
+    QCAMERA_SM_EVT_JPEG_EVT_NOTIFY,          // evt notify from jpeg
+    QCAMERA_SM_EVT_SNAPSHOT_DONE,            // internal evt that snapshot is done
+    QCAMERA_SM_EVT_THERMAL_NOTIFY,           // evt notify from thermal daemon
+    QCAMERA_SM_EVT_MAX
+} qcamera_sm_evt_enum_t;
+
+typedef enum {
+    QCAMERA_API_RESULT_TYPE_DEF,             // default type, no additional info
+    QCAMERA_API_RESULT_TYPE_ENABLE_FLAG,     // msg_enabled, preview_enabled, recording_enabled
+    QCAMERA_API_RESULT_TYPE_PARAMS,          // returned parameters in string
+    QCAMERA_API_RESULT_TYPE_HANDLE,          // returned handle in int
+    QCAMERA_API_RESULT_TYPE_MAX
+} qcamera_api_result_type_t;
+
+typedef struct {
+    int32_t status;                          // api call status
+    qcamera_sm_evt_enum_t request_api;       // api evt requested
+    qcamera_api_result_type_t result_type;   // result type
+    union {
+        int enabled;                          // result_type == QCAMERA_API_RESULT_TYPE_ENABLE_FLAG
+        char *params;                         // result_type == QCAMERA_API_RESULT_TYPE_PARAMS
+        int handle;                           // result_type == QCAMERA_API_RESULT_TYPE_HANDLE
+    };
+} qcamera_api_result_t;
+
+// definition for payload type of setting callback
+typedef struct {
+    camera_notify_callback notify_cb;
+    camera_data_callback data_cb;
+    camera_data_timestamp_callback data_cb_timestamp;
+    camera_request_memory get_memory;
+    void *user;
+} qcamera_sm_evt_setcb_payload_t;
+
+// definition for payload type of sending command
+typedef struct {
+    int32_t cmd;
+    int32_t arg1;
+    int32_t arg2;
+} qcamera_sm_evt_command_payload_t;
+
+// definition for payload type of registering a face image
+typedef struct {
+    void *img_ptr;
+    cam_pp_offline_src_config_t *config;
+} qcamera_sm_evt_reg_face_payload_t;
+
+typedef enum {
+    QCAMERA_INTERNAL_EVT_FOCUS_UPDATE,       // focus updating result
+    QCAMERA_INTERNAL_EVT_PREP_SNAPSHOT_DONE, // prepare snapshot done
+    QCAMERA_INTERNAL_EVT_MAX
+} qcamera_internal_evt_type_t;
+
+typedef struct {
+    qcamera_internal_evt_type_t evt_type;
+    union {
+        cam_auto_focus_data_t focus_data;
+        cam_prep_snapshot_state_t prep_snapshot_state;
+    };
+} qcamera_sm_internal_evt_payload_t;
+
+class QCameraStateMachine
+{
+public:
+    QCameraStateMachine(QCamera2HardwareInterface *ctrl);
+    virtual ~QCameraStateMachine();
+    int32_t procAPI(qcamera_sm_evt_enum_t evt, void *api_payload);
+    int32_t procEvt(qcamera_sm_evt_enum_t evt, void *evt_payload);
+
+    bool isPreviewRunning(); // check if preview is running
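+
+    // procAPI() enqueues HAL API calls and procEvt() enqueues backend
+    // (mm-camera-intf / mm-jpeg-intf) events; both are drained serially by
+    // the cmd thread (smEvtProcRoutine) and dispatched to the per-state
+    // handlers declared below.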
+
+private:
+    typedef enum {
+        QCAMERA_SM_STATE_PREVIEW_STOPPED,          // preview is stopped
+        QCAMERA_SM_STATE_PREVIEW_READY,            // preview started but preview window is not set yet
+        QCAMERA_SM_STATE_PREVIEWING,               // previewing
+        QCAMERA_SM_STATE_PREPARE_SNAPSHOT,         // prepare snapshot in case aec estimation is
+                                                   // needed for LED flash
+        QCAMERA_SM_STATE_PIC_TAKING,               // taking picture (preview stopped)
+        QCAMERA_SM_STATE_RECORDING,                // recording (preview running)
+        QCAMERA_SM_STATE_VIDEO_PIC_TAKING,         // taking live snapshot during recording (preview running)
+        QCAMERA_SM_STATE_PREVIEW_PIC_TAKING        // taking ZSL/live snapshot (recording stopped but preview running)
+    } qcamera_state_enum_t;
+
+    typedef enum
+    {
+        QCAMERA_SM_CMD_TYPE_API,                   // cmd from API
+        QCAMERA_SM_CMD_TYPE_EVT,                   // cmd from mm-camera-interface/mm-jpeg-interface event
+        QCAMERA_SM_CMD_TYPE_EXIT,                  // cmd for exiting statemachine cmdThread
+        QCAMERA_SM_CMD_TYPE_MAX
+    } qcamera_sm_cmd_type_t;
+
+    typedef struct {
+        qcamera_sm_cmd_type_t cmd;                  // cmd type (where it comes from)
+        qcamera_sm_evt_enum_t evt;                  // event type
+        void *evt_payload;                          // ptr to payload
+    } qcamera_sm_cmd_t;
+
+    int32_t stateMachine(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewStoppedState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewReadyState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPrepareSnapshotState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtRecordingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtVideoPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+    int32_t procEvtPreviewPicTakingState(qcamera_sm_evt_enum_t evt, void *payload);
+
+    // main statemachine process routine
+    static void *smEvtProcRoutine(void *data);
+
+    QCamera2HardwareInterface *m_parent;  // ptr to HWI
+    qcamera_state_enum_t m_state;         // statemachine state
+    QCameraQueue api_queue;               // cmd queue for APIs
+    QCameraQueue evt_queue;               // cmd queue for evt from mm-camera-intf/mm-jpeg-intf
+    pthread_t cmd_pid;                    // cmd thread ID
+    cam_semaphore_t cmd_sem;              // semaphore for cmd thread
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STATEMACHINE_H__ */
diff --git a/camera/QCamera2/HAL/QCameraStream.cpp b/camera/QCamera2/HAL/QCameraStream.cpp
new file mode 100644
index 0000000..7acff1d
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStream.cpp
@@ -0,0 +1,943 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCameraStream"
+
+#include <utils/Errors.h>
+#include "QCamera2HWI.h"
+#include "QCameraStream.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : get_bufs
+ *
+ * DESCRIPTION: static function entry to allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      with the kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("getBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->getBufs(offset, num_bufs, initial_reg_flag, bufs, ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs
+ *
+ * DESCRIPTION: static function entry to deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("putBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->putBufs(ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidate_buf
+ *
+ * DESCRIPTION: static function entry to invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidate_buf(int index, void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->invalidateBuf(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : clean_invalidate_buf
+ *
+ * DESCRIPTION: static function entry to clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to clean invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::clean_invalidate_buf(int index, void *user_data)
+{
+    QCameraStream *stream = reinterpret_cast<QCameraStream *>(user_data);
+    if (!stream) {
+        ALOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->cleanInvalidateBuf(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraStream
+ *
+ * DESCRIPTION: constructor of QCameraStream
+ *
+ * PARAMETERS :
+ *   @allocator  : memory allocator obj
+ *   @camHandle  : camera handle
+ *   @chId       : channel handle
+ *   @camOps     : ptr to camera ops table
+ *   @paddingInfo: ptr to padding info
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraStream::QCameraStream(QCameraAllocator &allocator,
+                             uint32_t camHandle,
+                             uint32_t chId,
+                             mm_camera_ops_t *camOps,
+                             cam_padding_info_t *paddingInfo) :
+        mCamHandle(camHandle),
+        mChannelHandle(chId),
+        mHandle(0),
+        mCamOps(camOps),
+        mStreamInfo(NULL),
+        mNumBufs(0),
+        mDataCB(NULL),
+        mStreamInfoBuf(NULL),
+        mStreamBufs(NULL),
+        mAllocator(allocator),
+        mBufDefs(NULL)
+{
+    mMemVtbl.user_data = this;
+    mMemVtbl.get_bufs = get_bufs;
+    mMemVtbl.put_bufs = put_bufs;
+    mMemVtbl.invalidate_buf = invalidate_buf;
+    mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
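+    // mMemVtbl lets mm-camera-interface call back into this object for
+    // buffer allocation/release and cache ops; the static trampolines above
+    // recover 'this' from user_data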
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
+    memset(&mCropInfo, 0, sizeof(cam_rect_t));
+    pthread_mutex_init(&mCropLock, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraStream
+ *
+ * DESCRIPTION: destructor of QCameraStream
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraStream::~QCameraStream()
+{
+    pthread_mutex_destroy(&mCropLock);
+
+    if (mStreamInfoBuf != NULL) {
+        int rc = mCamOps->unmap_stream_buf(mCamHandle,
+                    mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
+        if (rc < 0) {
+            ALOGE("Failed to unmap stream info buffer");
+        }
+        mStreamInfoBuf->deallocate();
+        delete mStreamInfoBuf;
+        mStreamInfoBuf = NULL;
+    }
+
+    // delete stream
+    if (mHandle > 0) {
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+        mHandle = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize stream obj
+ *
+ * PARAMETERS :
+ *   @streamInfoBuf: ptr to buf that contains stream info
+ *   @minNumBuffers: minimum number of stream buffers required
+ *   @stream_cb    : stream data notify callback. Can be NULL if not needed
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::init(QCameraHeapMemory *streamInfoBuf,
+                            uint8_t minNumBuffers,
+                            stream_cb_routine stream_cb,
+                            void *userdata)
+{
+    int32_t rc = OK;
+    mm_camera_stream_config_t stream_config;
+
+    mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
+    if (!mHandle) {
+        ALOGE("add_stream failed");
+        rc = UNKNOWN_ERROR;
+        goto done;
+    }
+
+    // assign and map stream info memory
+    mStreamInfoBuf = streamInfoBuf;
+    mStreamInfo = reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
+    mNumBufs = minNumBuffers;
+
+    rc = mCamOps->map_stream_buf(mCamHandle,
+                mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                0, -1, mStreamInfoBuf->getFd(0), mStreamInfoBuf->getSize(0));
+    if (rc < 0) {
+        ALOGE("Failed to map stream info buffer");
+        goto err1;
+    }
+
+    // Configure the stream
+    stream_config.stream_info = mStreamInfo;
+    stream_config.mem_vtbl = mMemVtbl;
+    stream_config.stream_cb = dataNotifyCB;
+    stream_config.padding_info = mPaddingInfo;
+    stream_config.userdata = this;
+    rc = mCamOps->config_stream(mCamHandle,
+                mChannelHandle, mHandle, &stream_config);
+    if (rc < 0) {
+        ALOGE("Failed to config stream, rc = %d", rc);
+        goto err2;
+    }
+
+    mDataCB = stream_cb;
+    mUserData = userdata;
+    return 0;
+
+err2:
+    mCamOps->unmap_stream_buf(mCamHandle,
+                mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
+err1:
+    mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    mHandle = 0;
+    mStreamInfoBuf = NULL;
+    mStreamInfo = NULL;
+    mNumBufs = 0;
+done:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start stream. Will start main stream thread to handle stream
+ *              related ops.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::start()
+{
+    int32_t rc = 0;
+    rc = mProcTh.launch(dataProcRoutine, this);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop stream. Will stop main stream thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::stop()
+{
+    int32_t rc = 0;
+    rc = mProcTh.exit();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processZoomDone
+ *
+ * DESCRIPTION: process zoom done event
+ *
+ * PARAMETERS :
+ *   @previewWindow : preview window ops table to set preview crop window
+ *   @crop_info     : crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processZoomDone(preview_stream_ops_t *previewWindow,
+                                       cam_crop_data_t &crop_info)
+{
+    int32_t rc = 0;
+
+    // get stream param for crop info
+    for (int i = 0; i < crop_info.num_of_streams; i++) {
+        if (crop_info.crop_info[i].stream_id == mStreamInfo->stream_svr_id) {
+            pthread_mutex_lock(&mCropLock);
+            mCropInfo = crop_info.crop_info[i].crop;
+            pthread_mutex_unlock(&mCropLock);
+
+            // update preview window crop if it's preview/postview stream
+            if ( (previewWindow != NULL) &&
+                 (mStreamInfo->stream_type == CAM_STREAM_TYPE_PREVIEW ||
+                  mStreamInfo->stream_type == CAM_STREAM_TYPE_POSTVIEW) ) {
+                rc = previewWindow->set_crop(previewWindow,
+                                             mCropInfo.left,
+                                             mCropInfo.top,
+                                             mCropInfo.width,
+                                             mCropInfo.height);
+            }
+            break;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processDataNotify
+ *
+ * DESCRIPTION: process stream data notify
+ *
+ * PARAMETERS :
+ *   @frame   : stream frame received
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::processDataNotify(mm_camera_super_buf_t *frame)
+{
+    ALOGI("%s:\n", __func__);
+    mDataQ.enqueue((void *)frame);
+    return mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+}
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: callback for data notify. This function is registered with
+ *              mm-camera-interface to handle data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   @userdata      : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCameraStream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    ALOGI("%s:\n", __func__);
+    QCameraStream* stream = (QCameraStream *)userdata;
+    if (stream == NULL ||
+        recvd_frame == NULL ||
+        recvd_frame->bufs[0] == NULL ||
+        recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+        ALOGE("%s: Not a valid stream to handle buf", __func__);
+        return;
+    }
+
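+    // copy the superbuf to the heap so it outlives this callback; ownership
+    // passes to the stream's proc thread via mDataQ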
+    mm_camera_super_buf_t *frame =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: No mem for mm_camera_super_buf_t", __func__);
+        stream->bufDone(recvd_frame->bufs[0]->buf_idx);
+        return;
+    }
+    *frame = *recvd_frame;
+    stream->processDataNotify(frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcRoutine
+ *
+ * DESCRIPTION: function to process data in the main stream thread
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCameraStream::dataProcRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    QCameraStream *pme = (QCameraStream *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+
+    ALOGI("%s: E", __func__);
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                      __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                ALOGD("%s: Do next job", __func__);
+                mm_camera_super_buf_t *frame =
+                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
+                if (NULL != frame) {
+                    if (pme->mDataCB != NULL) {
+                        pme->mDataCB(frame, pme, pme->mUserData);
+                    } else {
+                        // no data cb routine, return buf here
+                        pme->bufDone(frame->bufs[0]->buf_idx);
+                        free(frame);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            ALOGD("%s: Exit", __func__);
+            /* flush data buf queue */
+            pme->mDataQ.flush();
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    ALOGD("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(int index)
+{
+    int32_t rc = NO_ERROR;
+
+    if (index >= mNumBufs || mBufDefs == NULL)
+        return BAD_INDEX;
+
+    rc = mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);
+    if (rc < 0)
+        return rc;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @opaque    : stream frame/metadata buf to be returned
+ *   @isMetaData: flag if returned opaque is a metadatabuf or the real frame ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::bufDone(const void *opaque, bool isMetaData)
+{
+    int32_t rc = NO_ERROR;
+
+    int index = mStreamBufs->getMatchBufIndex(opaque, isMetaData);
+    if (index == -1 || index >= mNumBufs || mBufDefs == NULL) {
+        ALOGE("%s: Cannot find buf for opaque data = %p", __func__, opaque);
+        return BAD_INDEX;
+    }
+    ALOGD("%s: Buffer Index = %d, Frame Idx = %d", __func__, index, mBufDefs[index].frame_idx);
+    rc = bufDone(index);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufs
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      with the kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+
+    if (!ops_tbl) {
+        ALOGE("%s: ops_tbl is NULL", __func__);
+        return INVALID_OPERATION;
+    }
+
+    mFrameLenOffset = *offset;
+
+    // Allocate stream buffers
+    mStreamBufs = mAllocator.allocateStreamBuf(mStreamInfo->stream_type,
+                                               mFrameLenOffset.frame_len,
+                                               mNumBufs);
+    if (!mStreamBufs) {
+        ALOGE("%s: Failed to allocate stream buffers", __func__);
+        return NO_MEMORY;
+    }
+
+    for (int i = 0; i < mNumBufs; i++) {
+        rc = ops_tbl->map_ops(i, -1, mStreamBufs->getFd(i),
+                mStreamBufs->getSize(i), ops_tbl->userdata);
+        if (rc < 0) {
+            ALOGE("%s: map_stream_buf failed: %d", __func__, rc);
+            for (int j = 0; j < i; j++) {
+                ops_tbl->unmap_ops(j, -1, ops_tbl->userdata);
+            }
+            mStreamBufs->deallocate();
+            delete mStreamBufs;
+            mStreamBufs = NULL;
+            return INVALID_OPERATION;
+        }
+    }
+
+    //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!regFlags) {
+        ALOGE("%s: Out of memory", __func__);
+        for (int i = 0; i < mNumBufs; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        return NO_MEMORY;
+    }
+
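+    // mBufDefs is handed back to mm-camera-interface through *bufs and is
+    // also kept locally so bufDone() can qbuf frames back by index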
+    mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+    if (mBufDefs == NULL) {
+        ALOGE("%s: Failed to allocate mm_camera_buf_def_t array", __func__);
+        for (int i = 0; i < mNumBufs; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    for (int i = 0; i < mNumBufs; i++) {
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+    }
+
+    rc = mStreamBufs->getRegFlags(regFlags);
+    if (rc < 0) {
+        ALOGE("%s: getRegFlags failed %d", __func__, rc);
+        for (int i = 0; i < mNumBufs; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        mStreamBufs->deallocate();
+        delete mStreamBufs;
+        mStreamBufs = NULL;
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    *num_bufs = mNumBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBufDefs;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : putBufs
+ *
+ * DESCRIPTION: deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    for (int i = 0; i < mNumBufs; i++) {
+        rc = ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        if (rc < 0) {
+            ALOGE("%s: unmap_stream_buf failed: %d", __func__, rc);
+        }
+    }
+    mBufDefs = NULL; // mBufDefs just keeps a ptr to the buffer defs
+                     // mm-camera-interface owns them, so no need to free here
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    mStreamBufs->deallocate();
+    delete mStreamBufs;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidateBuf
+ *
+ * DESCRIPTION: invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::invalidateBuf(int index)
+{
+    return mStreamBufs->invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanInvalidateBuf
+ *
+ * DESCRIPTION: clean invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to clean invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::cleanInvalidateBuf(int index)
+{
+    return mStreamBufs->cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : isTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the stream is of the queried type
+ *
+ * PARAMETERS :
+ *   @type    : stream type being queried
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+bool QCameraStream::isTypeOf(cam_stream_type_t type)
+{
+    if (mStreamInfo != NULL && (mStreamInfo->stream_type == type)) {
+        return true;
+    } else {
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : isOrignalTypeOf
+ *
+ * DESCRIPTION: helper function to determine if the original stream is of the
+ *              queried type, in case this is a reproc stream
+ *
+ * PARAMETERS :
+ *   @type    : stream type being queried
+ *
+ * RETURN     : true/false
+ *==========================================================================*/
+bool QCameraStream::isOrignalTypeOf(cam_stream_type_t type)
+{
+    if (mStreamInfo != NULL &&
+        mStreamInfo->stream_type == CAM_STREAM_TYPE_OFFLINE_PROC &&
+        mStreamInfo->reprocess_config.pp_type == CAM_ONLINE_REPROCESS_TYPE &&
+        mStreamInfo->reprocess_config.online.input_stream_type == type) {
+        return true;
+    } else {
+        return false;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyType
+ *
+ * DESCRIPTION: return stream type
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : stream type
+ *==========================================================================*/
+cam_stream_type_t QCameraStream::getMyType()
+{
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->stream_type;
+    } else {
+        return CAM_STREAM_TYPE_DEFAULT;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameOffset
+ *
+ * DESCRIPTION: query stream buffer frame offset info
+ *
+ * PARAMETERS :
+ *   @offset  : reference to struct to store the queried frame offset info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameOffset(cam_frame_len_offset_t &offset)
+{
+    offset = mFrameLenOffset;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCropInfo
+ *
+ * DESCRIPTION: query crop info of the stream
+ *
+ * PARAMETERS :
+ *   @crop    : reference to struct to store the queried crop info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getCropInfo(cam_rect_t &crop)
+{
+    pthread_mutex_lock(&mCropLock);
+    crop = mCropInfo;
+    pthread_mutex_unlock(&mCropLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameDimension
+ *
+ * DESCRIPTION: query stream frame dimension info
+ *
+ * PARAMETERS :
+ *   @dim     : reference to struct to store the queried frame dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFrameDimension(cam_dimension_t &dim)
+{
+    if (mStreamInfo != NULL) {
+        dim = mStreamInfo->dim;
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFormat
+ *
+ * DESCRIPTION: query stream format
+ *
+ * PARAMETERS :
+ *   @fmt     : reference to stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::getFormat(cam_format_t &fmt)
+{
+    if (mStreamInfo != NULL) {
+        fmt = mStreamInfo->fmt;
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyServerID
+ *
+ * DESCRIPTION: query server stream ID
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : stream ID from server
+ *==========================================================================*/
+uint32_t QCameraStream::getMyServerID() {
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->stream_svr_id;
+    } else {
+        return 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBuf
+ *
+ * DESCRIPTION: map stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *   @fd       : fd of the buffer
+ *   @size     : length of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::mapBuf(uint8_t buf_type,
+                              uint32_t buf_idx,
+                              int32_t plane_idx,
+                              int fd,
+                              uint32_t size)
+{
+    return mCamOps->map_stream_buf(mCamHandle, mChannelHandle,
+                                   mHandle, buf_type,
+                                   buf_idx, plane_idx,
+                                   fd, size);
+
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapBuf
+ *
+ * DESCRIPTION: unmap stream related buffer from backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx)
+{
+    return mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle,
+                                     mHandle, buf_type,
+                                     buf_idx, plane_idx);
+
+}
+
+/*===========================================================================
+ * FUNCTION   : setParameter
+ *
+ * DESCRIPTION: set stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : ptr to parameters to be set
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraStream::setParameter(cam_stream_parm_buffer_t &param)
+{
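+    // Stream parameters are staged in the stream info buffer shared with the
+    // backend; on success the (possibly updated) values are copied back to
+    // the caller's buffer.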
+    int32_t rc = NO_ERROR;
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->set_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        param = mStreamInfo->parm_buf;
+    }
+    return rc;
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraStream.h b/camera/QCamera2/HAL/QCameraStream.h
new file mode 100644
index 0000000..ab2e2f7
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraStream.h
@@ -0,0 +1,137 @@
+/* Copyright (c) 2012, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_STREAM_H__
+#define __QCAMERA_STREAM_H__
+
+#include <hardware/camera.h>
+#include "QCameraCmdThread.h"
+#include "QCameraMem.h"
+#include "QCameraAllocator.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCameraStream;
+typedef void (*stream_cb_routine)(mm_camera_super_buf_t *frame,
+                                  QCameraStream *stream,
+                                  void *userdata);
+
+class QCameraStream
+{
+public:
+    QCameraStream(QCameraAllocator &allocator,
+                  uint32_t camHandle,
+                  uint32_t chId,
+                  mm_camera_ops_t *camOps,
+                  cam_padding_info_t *paddingInfo);
+    virtual ~QCameraStream();
+    virtual int32_t init(QCameraHeapMemory *streamInfoBuf,
+                         uint8_t minStreamBufNum,
+                         stream_cb_routine stream_cb,
+                         void *userdata);
+    virtual int32_t processZoomDone(preview_stream_ops_t *previewWindow,
+                                    cam_crop_data_t &crop_info);
+    virtual int32_t bufDone(int index);
+    virtual int32_t bufDone(const void *opaque, bool isMetaData);
+    virtual int32_t processDataNotify(mm_camera_super_buf_t *bufs);
+    virtual int32_t start();
+    virtual int32_t stop();
+
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void *dataProcRoutine(void *data);
+    uint32_t getMyHandle() const {return mHandle;}
+    bool isTypeOf(cam_stream_type_t type);
+    bool isOrignalTypeOf(cam_stream_type_t type);
+    int32_t getFrameOffset(cam_frame_len_offset_t &offset);
+    int32_t getCropInfo(cam_rect_t &crop);
+    int32_t getFrameDimension(cam_dimension_t &dim);
+    int32_t getFormat(cam_format_t &fmt);
+    QCameraMemory *getStreamBufs() {return mStreamBufs;};
+    uint32_t getMyServerID();
+    cam_stream_type_t getMyType();
+
+    int32_t mapBuf(uint8_t buf_type, uint32_t buf_idx,
+                   int32_t plane_idx, int fd, uint32_t size);
+    int32_t unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx);
+    int32_t setParameter(cam_stream_parm_buffer_t &param);
+
+private:
+    uint32_t mCamHandle;
+    uint32_t mChannelHandle;
+    uint32_t mHandle; // stream handle from mm-camera-interface
+    mm_camera_ops_t *mCamOps;
+    cam_stream_info_t *mStreamInfo; // ptr to stream info buf
+    mm_camera_stream_mem_vtbl_t mMemVtbl;
+    uint8_t mNumBufs;
+    stream_cb_routine mDataCB;
+    void *mUserData;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh; // thread for dataCB
+
+    QCameraHeapMemory *mStreamInfoBuf;
+    QCameraMemory *mStreamBufs;
+    QCameraAllocator &mAllocator;
+    mm_camera_buf_def_t *mBufDefs;
+    cam_frame_len_offset_t mFrameLenOffset;
+    cam_padding_info_t mPaddingInfo;
+    cam_rect_t mCropInfo;
+    pthread_mutex_t mCropLock; // lock to protect crop info
+
+    static int32_t get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+    static int32_t put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+    static int32_t invalidate_buf(int index, void *user_data);
+    static int32_t clean_invalidate_buf(int index, void *user_data);
+
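+    // The static callbacks above recover the QCameraStream instance from
+    // user_data and forward to the member implementations declared below.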
+    int32_t getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t invalidateBuf(int index);
+    int32_t cleanInvalidateBuf(int index);
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_STREAM_H__ */
diff --git a/camera/QCamera2/HAL/QCameraThermalAdapter.cpp b/camera/QCamera2/HAL/QCameraThermalAdapter.cpp
new file mode 100644
index 0000000..abdae3e
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraThermalAdapter.cpp
@@ -0,0 +1,162 @@
+/* Copyright (c) 2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCameraThermalAdapter"
+
+#include <dlfcn.h>
+#include <stdlib.h>
+#include <utils/Errors.h>
+
+#include "QCamera2HWI.h"
+#include "QCameraThermalAdapter.h"
+
+using namespace android;
+
+namespace qcamera {
+
+
+QCameraThermalAdapter& QCameraThermalAdapter::getInstance()
+{
+    static QCameraThermalAdapter instance;
+    return instance;
+}
+
+QCameraThermalAdapter::QCameraThermalAdapter() :
+                                        mCallback(NULL),
+                                        mHandle(NULL),
+                                        mRegister(NULL),
+                                        mUnregister(NULL),
+                                        mCameraHandle(0),
+                                        mCamcorderHandle(0)
+{
+}
+
+int QCameraThermalAdapter::init(QCameraThermalCallback *thermalCb)
+{
+    const char *error = NULL;
+    int rc = NO_ERROR;
+
+    ALOGV("%s E", __func__);
+    mHandle = dlopen("/vendor/lib/libthermalclient.so", RTLD_NOW);
+    if (!mHandle) {
+        error = dlerror();
+        ALOGE("%s: dlopen failed with error %s",
+                    __func__, error ? error : "");
+        rc = UNKNOWN_ERROR;
+        goto error;
+    }
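+    // Assign dlsym()'s void* result through a void** cast to sidestep the
+    // object-pointer to function-pointer conversion warning.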
+    *(void **)&mRegister = dlsym(mHandle, "thermal_client_register_callback");
+    if (!mRegister) {
+        error = dlerror();
+        ALOGE("%s: dlsym failed with error code %s",
+                    __func__, error ? error: "");
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+    *(void **)&mUnregister = dlsym(mHandle, "thermal_client_unregister_callback");
+    if (!mUnregister) {
+        error = dlerror();
+        ALOGE("%s: dlsym failed with error code %s",
+                    __func__, error ? error: "");
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+
+    // Register camera and camcorder callbacks
+    mCameraHandle = mRegister(mStrCamera, thermalCallback, NULL);
+    if (mCameraHandle < 0) {
+        ALOGE("%s: thermal_client_register_callback failed %d",
+                        __func__, mCameraHandle);
+        rc = UNKNOWN_ERROR;
+        goto error2;
+    }
+    mCamcorderHandle = mRegister(mStrCamcorder, thermalCallback, NULL);
+    if (mCamcorderHandle < 0) {
+        ALOGE("%s: thermal_client_register_callback failed %d",
+                        __func__, mCamcorderHandle);
+        rc = UNKNOWN_ERROR;
+        goto error3;
+    }
+
+    mCallback = thermalCb;
+    ALOGV("%s X", __func__);
+    return rc;
+
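+// Error labels fall through, undoing the earlier steps in reverse order:
+// the camera registration first, then the dlopen handle.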
+error3:
+    mCamcorderHandle = 0;
+    mUnregister(mCameraHandle);
+error2:
+    mCameraHandle = 0;
+    dlclose(mHandle);
+    mHandle = NULL;
+error:
+    ALOGV("%s X", __func__);
+    return rc;
+}
+
+void QCameraThermalAdapter::deinit()
+{
+    ALOGV("%s E", __func__);
+    if (mUnregister) {
+        if (mCameraHandle) {
+            mUnregister(mCameraHandle);
+            mCameraHandle = 0;
+        }
+        if (mCamcorderHandle) {
+            mUnregister(mCamcorderHandle);
+            mCamcorderHandle = 0;
+        }
+    }
+    if (mHandle)
+        dlclose(mHandle);
+
+    mHandle = NULL;
+    mRegister = NULL;
+    mUnregister = NULL;
+    mCallback = NULL;
+    ALOGV("%s X", __func__);
+}
+
+char QCameraThermalAdapter::mStrCamera[] = "camera";
+char QCameraThermalAdapter::mStrCamcorder[] = "camcorder";
+
+int QCameraThermalAdapter::thermalCallback(int level,
+                void *userdata, void *data)
+{
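+    // Trampoline from the C thermal client callback into the registered
+    // QCameraThermalCallback, if one is set.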
+    int rc = 0;
+    ALOGV("%s E", __func__);
+    QCameraThermalAdapter& instance = getInstance();
+    qcamera_thermal_level_enum_t lvl = (qcamera_thermal_level_enum_t) level;
+    if (instance.mCallback)
+        rc = instance.mCallback->thermalEvtHandle(lvl, userdata, data);
+    ALOGV("%s X", __func__);
+    return rc;
+}
+
+}; //namespace qcamera
diff --git a/camera/QCamera2/HAL/QCameraThermalAdapter.h b/camera/QCamera2/HAL/QCameraThermalAdapter.h
new file mode 100644
index 0000000..80711a0
--- /dev/null
+++ b/camera/QCamera2/HAL/QCameraThermalAdapter.h
@@ -0,0 +1,85 @@
+/* Copyright (c) 2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_THERMAL_ADAPTER__
+#define __QCAMERA_THERMAL_ADAPTER__
+
+namespace qcamera {
+
+typedef enum {
+    QCAMERA_THERMAL_NO_ADJUSTMENT = 0,
+    QCAMERA_THERMAL_SLIGHT_ADJUSTMENT,
+    QCAMERA_THERMAL_BIG_ADJUSTMENT,
+    QCAMERA_THERMAL_SHUTDOWN
+} qcamera_thermal_level_enum_t;
+
+typedef enum {
+    QCAMERA_THERMAL_ADJUST_FPS,
+    QCAMERA_THERMAL_ADJUST_FRAMESKIP,
+} qcamera_thermal_mode;
+
+class QCameraThermalCallback
+{
+public:
+    virtual int thermalEvtHandle(qcamera_thermal_level_enum_t level,
+            void *userdata, void *data) = 0;
+    virtual ~QCameraThermalCallback() {}
+};
+
+class QCameraThermalAdapter
+{
+public:
+    static QCameraThermalAdapter& getInstance();
+
+    int init(QCameraThermalCallback *thermalCb);
+    void deinit();
+
+private:
+    static char mStrCamera[];
+    static char mStrCamcorder[];
+
+    static int thermalCallback(int level, void *userdata, void *data);
+
+    QCameraThermalCallback *mCallback;
+    void *mHandle;
+    int (*mRegister)(char *name,
+            int (*callback)(int, void *userdata, void *data), void *data);
+    int (*mUnregister)(int handle);
+    int mCameraHandle;
+    int mCamcorderHandle;
+
+    QCameraThermalAdapter();
+    QCameraThermalAdapter(QCameraThermalAdapter const& copy); // not implemented
+    QCameraThermalAdapter& operator=(QCameraThermalAdapter const& copy); // not implemented
+
+};
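+
+/* Illustrative usage (a sketch, not part of the interface contract): an HWI
+ * object implementing QCameraThermalCallback would typically call
+ *     QCameraThermalAdapter::getInstance().init(this);
+ * when the camera is opened, and
+ *     QCameraThermalAdapter::getInstance().deinit();
+ * when it is closed.
+ */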
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_THERMAL_ADAPTER__ */
diff --git a/camera/QCamera2/HAL/test/Android.mk b/camera/QCamera2/HAL/test/Android.mk
new file mode 100644
index 0000000..f7fa575
--- /dev/null
+++ b/camera/QCamera2/HAL/test/Android.mk
@@ -0,0 +1,31 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+    qcamera_test.cpp \
+
+LOCAL_SHARED_LIBRARIES:= \
+    libdl \
+    libui \
+    libutils \
+    libcutils \
+    libbinder \
+    libmedia \
+    libmedia_native \
+    libui \
+    libgui \
+    libcamera_client
+
+LOCAL_C_INCLUDES += \
+    frameworks/base/include/ui \
+    frameworks/base/include/surfaceflinger \
+    frameworks/base/include/camera \
+    frameworks/base/include/media \
+
+LOCAL_MODULE:= camera_test
+LOCAL_MODULE_TAGS:= tests
+
+LOCAL_CFLAGS += -Wall -fno-short-enums -O0
+
+include $(BUILD_EXECUTABLE)
diff --git a/camera/QCamera2/HAL/test/qcamera_test.cpp b/camera/QCamera2/HAL/test/qcamera_test.cpp
new file mode 100644
index 0000000..52c5415
--- /dev/null
+++ b/camera/QCamera2/HAL/test/qcamera_test.cpp
@@ -0,0 +1,985 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <stdlib.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <time.h>
+#include <semaphore.h>
+#include <pthread.h>
+
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/wait.h>
+
+#include <ui/DisplayInfo.h>
+#include <gui/Surface.h>
+#include <gui/SurfaceComposerClient.h>
+#include <gui/ISurfaceComposer.h>
+
+#include <system/camera.h>
+#include <camera/Camera.h>
+#include <camera/ICamera.h>
+#include <camera/CameraParameters.h>
+
+#include <utils/RefBase.h>
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+#include <binder/IServiceManager.h>
+#include <cutils/properties.h>
+#include <cutils/memory.h>
+
+#include "qcamera_test.h"
+
+namespace qcamera {
+
+using namespace android;
+
+int CameraContext::JpegIdx = 0;
+
+/*===========================================================================
+ * FUNCTION   : previewCallback
+ *
+ * DESCRIPTION: preview callback, invoked when preview messages are enabled
+ *
+ * PARAMETERS :
+ *   @mem : preview buffer
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::previewCallback(const sp<IMemory>& mem)
+{
+    printf("PREVIEW Callback 0x%x", ( unsigned int ) mem->pointer());
+    uint8_t *ptr = (uint8_t*) mem->pointer();
+    printf("PRV_CB: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
+           ptr[0],
+           ptr[1],
+           ptr[2],
+           ptr[3],
+           ptr[4],
+           ptr[5],
+           ptr[6],
+           ptr[7],
+           ptr[8],
+           ptr[9]);
+}
+
+/*===========================================================================
+ * FUNCTION   : saveFile
+ *
+ * DESCRIPTION: helper function for saving buffers on filesystem
+ *
+ * PARAMETERS :
+ *   @mem : buffer to save to filesystem
+ *   @path: File path
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::saveFile(const sp<IMemory>& mem, String8 path)
+{
+    unsigned char *buff = NULL;
+    int size;
+    int fd = -1;
+
+    if (mem == NULL) {
+        return BAD_VALUE;
+    }
+
+    fd = open(path, O_CREAT | O_WRONLY | O_TRUNC, 0655);
+    if(fd < 0) {
+        printf("Unable to open file %s %s\n", path.string(), strerror(errno));
+        return -errno;
+    }
+
+    size = mem->size();
+    if (size <= 0) {
+        printf("IMemory object is of zero size\n");
+        close(fd);
+        return BAD_VALUE;
+    }
+
+    buff = (unsigned char *)mem->pointer();
+    if (!buff) {
+        printf("Buffer pointer is invalid\n");
+        close(fd);
+        return BAD_VALUE;
+    }
+
+    if ( size != write(fd, buff, size) ) {
+        printf("Bad Write error (%d)%s\n",
+               errno,
+               strerror(errno));
+        close(fd);
+        return INVALID_OPERATION;
+    }
+
+    printf("%s: buffer=%08X, size=%d stored at %s\n",
+           __FUNCTION__, (int)buff, size, path.string());
+
+    if (fd >= 0)
+        close(fd);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : notify
+ *
+ * DESCRIPTION: notify callback
+ *
+ * PARAMETERS :
+ *   @msgType : type of callback
+ *   @ext1: extended parameters
+ *   @ext2: extended parameters
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::notify(int32_t msgType, int32_t ext1, int32_t ext2)
+{
+    printf("Notify cb: %d %d %d\n", msgType, ext1, ext2);
+
+    if ( msgType & CAMERA_MSG_FOCUS ) {
+        printf("AutoFocus %s \n",
+               (ext1) ? "OK" : "FAIL");
+    }
+
+    if ( msgType & CAMERA_MSG_SHUTTER ) {
+        printf("Shutter done \n");
+    }
+
+    if ( msgType & CAMERA_MSG_ERROR) {
+        printf("Camera Test CAMERA_MSG_ERROR\n");
+        stopPreview();
+        closeCamera();
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : postData
+ *
+ * DESCRIPTION: handles data callbacks
+ *
+ * PARAMETERS :
+ *   @msgType : type of callback
+ *   @dataPtr: buffer data
+ *   @metadata: additional metadata where available
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::postData(int32_t msgType,
+                             const sp<IMemory>& dataPtr,
+                             camera_frame_metadata_t *metadata)
+{
+    printf("Data cb: %d\n", msgType);
+
+    if ( msgType & CAMERA_MSG_PREVIEW_FRAME ) {
+        previewCallback(dataPtr);
+    }
+
+    if ( msgType & CAMERA_MSG_RAW_IMAGE ) {
+        printf("RAW done \n");
+    }
+
+    if (msgType & CAMERA_MSG_POSTVIEW_FRAME) {
+        printf("Postview frame \n");
+    }
+
+    if (msgType & CAMERA_MSG_COMPRESSED_IMAGE ) {
+        printf("JPEG done\n");
+        String8 jpegPath;
+        jpegPath = jpegPath.format("/sdcard/img_%d.jpg", JpegIdx);
+        saveFile(dataPtr, jpegPath);
+        JpegIdx++;
+    }
+
+    if ( ( msgType & CAMERA_MSG_PREVIEW_METADATA ) &&
+         ( NULL != metadata ) ) {
+        printf("Face detected %d \n", metadata->number_of_faces);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : postDataTimestamp
+ *
+ * DESCRIPTION: handles recording callbacks
+ *
+ * PARAMETERS :
+ *   @timestamp : timestamp of buffer
+ *   @msgType : type of buffer
+ *   @dataPtr : buffer data
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::postDataTimestamp(nsecs_t timestamp,
+                                      int32_t msgType,
+                                      const sp<IMemory>& dataPtr)
+{
+    printf("Recording cb: %d %lld %p\n", msgType, timestamp, dataPtr.get());
+}
+
+/*===========================================================================
+ * FUNCTION   : printSupportedParams
+ *
+ * DESCRIPTION: dump common supported parameters
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void CameraContext::printSupportedParams()
+{
+    printf("\n\r\tSupported Cameras: %s",
+           mParams.get("camera-indexes")? mParams.get("camera-indexes") : "NULL");
+    printf("\n\r\tSupported Picture Sizes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES) : "NULL");
+    printf("\n\r\tSupported Picture Formats: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS) : "NULL");
+    printf("\n\r\tSupported Preview Sizes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES) : "NULL");
+    printf("\n\r\tSupported Preview Formats: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS) : "NULL");
+    printf("\n\r\tSupported Preview Frame Rates: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES) : "NULL");
+    printf("\n\r\tSupported Thumbnail Sizes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES) : "NULL");
+    printf("\n\r\tSupported Whitebalance Modes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE) : "NULL");
+    printf("\n\r\tSupported Effects: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_EFFECTS) : "NULL");
+    printf("\n\r\tSupported Scene Modes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_SCENE_MODES) : "NULL");
+    printf("\n\r\tSupported Focus Modes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES) : "NULL");
+    printf("\n\r\tSupported Antibanding Options: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_ANTIBANDING) : "NULL");
+    printf("\n\r\tSupported Flash Modes: %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES) : "NULL");
+    printf("\n\r\tSupported Focus Areas: %d",
+           mParams.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+    printf("\n\r\tSupported FPS ranges : %s",
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)?
+           mParams.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE) : "NULL");
+    printf("\n\r\tFocus Distances: %s \n",
+           mParams.get(CameraParameters::KEY_FOCUS_DISTANCES)?
+           mParams.get(CameraParameters::KEY_FOCUS_DISTANCES) : "NULL");
+}
+
+/*===========================================================================
+ * FUNCTION   : createPreviewSurface
+ *
+ * DESCRIPTION: helper function for creating preview surfaces
+ *
+ * PARAMETERS :
+ *   @width : preview width
+ *   @height: preview height
+ *   @pixFormat : surface pixelformat
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::createPreviewSurface(unsigned int width,
+                                             unsigned int height,
+                                             int32_t pixFormat)
+{
+    int ret = NO_ERROR;
+    DisplayInfo dinfo;
+    sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
+                        ISurfaceComposer::eDisplayIdMain));
+    SurfaceComposerClient::getDisplayInfo(display, &dinfo);
+    unsigned int previewWidth, previewHeight;
+
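+    // Clamp the requested dimensions to the panel size reported by
+    // SurfaceFlinger so the test surface always fits on screen.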
+    if ( dinfo.w < width ) {
+        previewWidth = dinfo.w;
+    } else {
+        previewWidth = width;
+    }
+
+    if ( dinfo.h < height ) {
+        previewHeight = dinfo.h;
+    } else {
+        previewHeight = height;
+    }
+
+    mClient = new SurfaceComposerClient();
+
+    if ( NULL == mClient.get() ) {
+        printf("Unable to establish connection to Surface Composer \n");
+        return NO_INIT;
+    }
+
+    mSurfaceControl = mClient->createSurface(String8("QCamera_Test"),
+                                             previewWidth,
+                                             previewHeight,
+                                             pixFormat,
+                                             0);
+    if ( NULL == mSurfaceControl.get() ) {
+        printf("Unable to create preview surface \n");
+        return NO_INIT;
+    }
+
+    mPreviewSurface = mSurfaceControl->getSurface();
+    if ( NULL != mPreviewSurface.get() ) {
+        mClient->openGlobalTransaction();
+        ret |= mSurfaceControl->setLayer(0x7fffffff);
+        ret |= mSurfaceControl->setPosition(0, 0);
+        ret |= mSurfaceControl->setSize(previewWidth, previewHeight);
+        ret |= mSurfaceControl->show();
+        mClient->closeGlobalTransaction();
+
+        if ( NO_ERROR != ret ) {
+            printf("Preview surface configuration failed! \n");
+        }
+    } else {
+        ret = NO_INIT;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : destroyPreviewSurface
+ *
+ * DESCRIPTION: closes the previously opened preview surface
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::destroyPreviewSurface()
+{
+    if ( NULL != mPreviewSurface.get() ) {
+        mPreviewSurface.clear();
+    }
+
+    if ( NULL != mSurfaceControl.get() ) {
+        mSurfaceControl->clear();
+        mSurfaceControl.clear();
+    }
+
+    if ( NULL != mClient.get() ) {
+        mClient->dispose();
+        mClient.clear();
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~CameraContext
+ *
+ * DESCRIPTION: camera context destructor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+CameraContext::~CameraContext()
+{
+    stopPreview();
+    closeCamera();
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: connects to and initializes camera
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t  CameraContext::openCamera()
+{
+    if ( NULL != mCamera.get() ) {
+        printf("Camera already open! \n");
+        return NO_ERROR;
+    }
+
+    printf("openCamera(camera_index=%d)\n", mCameraIndex);
+    mCamera = Camera::connect(mCameraIndex);
+
+    if ( NULL == mCamera.get() ) {
+        printf("Unable to connect to CameraService\n");
+        return NO_INIT;
+    }
+
+    mParams = mCamera->getParameters();
+    mParams.getSupportedPreviewSizes(mSupportedPreviewSizes);
+    mParams.getSupportedPictureSizes(mSupportedPictureSizes);
+    mCurrentPictureSizeIdx = mSupportedPictureSizes.size() / 2;
+    mCurrentPreviewSizeIdx = mSupportedPreviewSizes.size() / 2;
+
+    mCamera->setListener(this);
+    mHardwareActive = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: returns the number of cameras supported by the system
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : supported camera count
+ *==========================================================================*/
+int CameraContext::getNumberOfCameras()
+{
+    int ret = -1;
+
+    if ( NULL != mCamera.get() ) {
+        ret = mCamera->getNumberOfCameras();
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: closes a previously initialized camera reference
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::closeCamera()
+{
+    if ( NULL == mCamera.get() ) {
+        return NO_INIT;
+    }
+
+    mCamera->disconnect();
+    mCamera.clear();
+    mHardwareActive = false;
+    mPreviewRunning = false;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : startPreview
+ *
+ * DESCRIPTION: starts camera preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::startPreview()
+{
+    int ret = NO_ERROR;
+    int previewWidth, previewHeight;
+    Size currentPreviewSize = mSupportedPreviewSizes.itemAt(mCurrentPreviewSizeIdx);
+    Size currentPictureSize = mSupportedPictureSizes.itemAt(mCurrentPictureSizeIdx);
+
+    if ( mPreviewRunning || !mHardwareActive ) {
+        printf("Preview already running or camera not active! \n");
+        return NO_INIT;
+    }
+
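+    // Only (re)create the preview surface when a resize is pending
+    // (initial start, size change, or after a stop).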
+    if (mResizePreview) {
+        previewWidth = currentPreviewSize.width;
+        previewHeight = currentPreviewSize.height;
+
+        ret = createPreviewSurface(previewWidth,
+                                   previewHeight,
+                                   HAL_PIXEL_FORMAT_YCrCb_420_SP);
+        if (  NO_ERROR != ret ) {
+            printf("Error while creating preview surface\n");
+            return ret;
+        }
+
+        mParams.setPreviewSize(previewWidth, previewHeight);
+        mParams.setPictureSize(currentPictureSize.width, currentPictureSize.height);
+
+        ret |= mCamera->setParameters(mParams.flatten());
+        ret |= mCamera->setPreviewDisplay(mPreviewSurface);
+        ret |= mCamera->startPreview();
+        if ( NO_ERROR != ret ) {
+            printf("Preview start failed! \n");
+            return ret;
+        }
+
+        mPreviewRunning = true;
+        mResizePreview = false;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : autoFocus
+ *
+ * DESCRIPTION: Triggers autofocus
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::autoFocus()
+{
+    status_t ret = NO_ERROR;
+
+    if ( mPreviewRunning ) {
+        ret = mCamera->autoFocus();
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : enablePreviewCallbacks
+ *
+ * DESCRIPTION: Enables preview callback messages
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::enablePreviewCallbacks()
+{
+    if ( mHardwareActive ) {
+        mCamera->setPreviewCallbackFlags(CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : takePicture
+ *
+ * DESCRIPTION: triggers image capture
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::takePicture()
+{
+    status_t ret = NO_ERROR;
+
+    if ( mPreviewRunning ) {
+        ret = mCamera->takePicture(CAMERA_MSG_COMPRESSED_IMAGE|CAMERA_MSG_RAW_IMAGE);
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopPreview
+ *
+ * DESCRIPTION: stops camera preview
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::stopPreview()
+{
+    status_t ret = NO_ERROR;
+
+    if ( mHardwareActive ) {
+        mCamera->stopPreview();
+        ret = destroyPreviewSurface();
+    }
+
+    mPreviewRunning  = false;
+    mResizePreview = true;
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : resumePreview
+ *
+ * DESCRIPTION: resumes camera preview after image capture
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/
+status_t CameraContext::resumePreview()
+{
+    status_t ret = NO_ERROR;
+
+    if ( mHardwareActive ) {
+        ret = mCamera->startPreview();
+    } else {
+        ret = NO_INIT;
+    }
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : nextPreviewSize
+ *
+ * DESCRIPTION: Iterates through all supported preview sizes.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextPreviewSize()
+{
+    if ( mHardwareActive ) {
+        mCurrentPreviewSizeIdx += 1;
+        mCurrentPreviewSizeIdx %= mSupportedPreviewSizes.size();
+        Size previewSize = mSupportedPreviewSizes.itemAt(mCurrentPreviewSizeIdx);
+        mParams.setPreviewSize(previewSize.width,
+                               previewSize.height);
+        mResizePreview = true;
+
+        if ( mPreviewRunning ) {
+            mCamera->stopPreview();
+            mCamera->setParameters(mParams.flatten());
+            mCamera->startPreview();
+        } else {
+            mCamera->setParameters(mParams.flatten());
+        }
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCurrentPreviewSize
+ *
+ * DESCRIPTION: queries the currently configured preview size
+ *
+ * PARAMETERS :
+ *  @previewSize : preview size currently configured
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentPreviewSize(Size &previewSize)
+{
+    if ( mHardwareActive ) {
+        previewSize = mSupportedPreviewSizes.itemAt(mCurrentPreviewSizeIdx);
+    }
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : nextPictureSize
+ *
+ * DESCRIPTION: Iterates through all supported picture sizes.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::nextPictureSize()
+{
+    if ( mHardwareActive ) {
+        mCurrentPictureSizeIdx += 1;
+        mCurrentPictureSizeIdx %= mSupportedPictureSizes.size();
+        Size pictureSize = mSupportedPictureSizes.itemAt(mCurrentPictureSizeIdx);
+        mParams.setPictureSize(pictureSize.width,
+                               pictureSize.height);
+        mCamera->setParameters(mParams.flatten());
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCurrentPictureSize
+ *
+ * DESCRIPTION: queries the currently configured picture size
+ *
+ * PARAMETERS :
+ *  @pictureSize : picture size currently configured
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+status_t CameraContext::getCurrentPictureSize(Size &pictureSize)
+{
+    if ( mHardwareActive ) {
+        pictureSize = mSupportedPictureSizes.itemAt(mCurrentPictureSizeIdx);
+    }
+
+    return NO_ERROR;
+}
+
+}; //namespace qcamera ends here
+
+using namespace qcamera;
+
+/*===========================================================================
+ * FUNCTION   : printMenu
+ *
+ * DESCRIPTION: prints the available camera options
+ *
+ * PARAMETERS :
+ *  @currentCamera : camera context currently being used
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void printMenu(sp<CameraContext> currentCamera)
+{
+    Size currentPictureSize, currentPreviewSize;
+
+    assert(currentCamera.get());
+
+    currentCamera->getCurrentPictureSize(currentPictureSize);
+    currentCamera->getCurrentPreviewSize(currentPreviewSize);
+
+    printf("\n\n=========== FUNCTIONAL TEST MENU ===================\n\n");
+
+    printf(" \n\nSTART / STOP / GENERAL SERVICES \n");
+    printf(" -----------------------------\n");
+    printf("   %c. Switch camera - Current Index: %d\n",
+            SWITCH_CAMERA_CMD,
+            currentCamera->getCameraIndex());
+    printf("   %c. Resume Preview after capture \n",
+            RESUME_PREVIEW_CMD);
+    printf("   %c. Quit \n",
+            EXIT_CMD);
+    printf("   %c. Camera Capability Dump",
+            DUMP_CAPS_CMD);
+
+    printf(" \n\n PREVIEW SUB MENU \n");
+    printf(" -----------------------------\n");
+    printf("   %c. Start Preview\n",
+            START_PREVIEW_CMD);
+    printf("   %c. Stop Preview\n",
+            STOP_PREVIEW_CMD);
+    printf("   %c. Preview size:  %dx%d\n",
+           CHANGE_PREVIEW_SIZE_CMD,
+           currentPreviewSize.width,
+           currentPreviewSize.height);
+    printf("   %c. Enable preview frames\n",
+            ENABLE_PRV_CALLBACKS_CMD);
+    printf("   %c. Trigger autofocus \n",
+            AUTOFOCUS_CMD);
+
+    printf(" \n\n IMAGE CAPTURE SUB MENU \n");
+    printf(" -----------------------------\n");
+    printf("   %c. Take picture/Full Press\n",
+            TAKEPICTURE_CMD);
+    printf("   %c. Picture size:  %dx%d\n",
+           CHANGE_PICTURE_SIZE_CMD,
+           currentPictureSize.width,
+           currentPictureSize.height);
+
+    printf("\n");
+    printf("   Choice: ");
+}
+
+/*===========================================================================
+ * FUNCTION   : functionalTest
+ *
+ * DESCRIPTION: queries and executes client supplied commands for testing a
+ *              particular camera.
+ *
+ * PARAMETERS :
+ *  @availableCameras : List with all cameras supported
+ *
+ * RETURN     : status_t type of status
+ *              NO_ERROR  -- continue testing
+ *              non-zero  -- quit test
+ *==========================================================================*/
+status_t functionalTest(Vector< sp<CameraContext> > &availableCameras)
+{
+    int cmd;
+    status_t stat = NO_ERROR;
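+    // Static so that the selected camera index persists across successive
+    // calls from the main menu loop.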
+    static int currentCameraIdx = 0;
+
+    assert(availableCameras.size());
+
+    sp<CameraContext> currentCamera = availableCameras.itemAt(currentCameraIdx);
+    printMenu(currentCamera);
+    cmd = getchar();
+
+    switch (cmd) {
+    case SWITCH_CAMERA_CMD:
+        {
+            currentCameraIdx++;
+            currentCameraIdx %= availableCameras.size();
+            currentCamera = availableCameras.itemAt(currentCameraIdx);
+        }
+        break;
+
+    case RESUME_PREVIEW_CMD:
+        {
+            stat = currentCamera->resumePreview();
+        }
+        break;
+
+    case START_PREVIEW_CMD:
+        {
+            stat = currentCamera->startPreview();
+        }
+        break;
+
+    case STOP_PREVIEW_CMD:
+        {
+            stat = currentCamera->stopPreview();
+        }
+        break;
+
+    case CHANGE_PREVIEW_SIZE_CMD:
+        {
+            stat = currentCamera->nextPreviewSize();
+        }
+        break;
+
+    case CHANGE_PICTURE_SIZE_CMD:
+        {
+            stat = currentCamera->nextPictureSize();
+        }
+        break;
+
+    case DUMP_CAPS_CMD:
+        {
+            currentCamera->printSupportedParams();
+        }
+        break;
+
+    case AUTOFOCUS_CMD:
+        {
+            stat = currentCamera->autoFocus();
+        }
+        break;
+
+    case TAKEPICTURE_CMD:
+        {
+            stat = currentCamera->takePicture();
+        }
+        break;
+
+    case ENABLE_PRV_CALLBACKS_CMD:
+        {
+            stat = currentCamera->enablePreviewCallbacks();
+        }
+        break;
+
+    case EXIT_CMD:
+        {
+            currentCamera->stopPreview();
+            return -1;
+        }
+
+        break;
+
+    default:
+        {
+        }
+        break;
+    }
+    printf("Command status 0x%x \n", stat);
+
+    return NO_ERROR;
+}
+
+int main()
+{
+    sp<ProcessState> proc(ProcessState::self());
+    Vector< sp<CameraContext> > availableCameras;
+    sp<CameraContext> camera;
+    int i = 0;
+
+    ProcessState::self()->startThreadPool();
+
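+    // Open every camera reported by the service, keeping one CameraContext
+    // per index.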
+    do {
+        camera = new CameraContext(i);
+        if ( NULL == camera.get() ) {
+            return NO_INIT;
+        }
+
+        status_t stat = camera->openCamera();
+        if ( NO_ERROR != stat ) {
+            printf("Error encountered during camera open \n");
+            return stat;
+        }
+
+        availableCameras.add(camera);
+        i++;
+    } while ( i < camera->getNumberOfCameras() ) ;
+
+    while ( true ) {
+        if ( NO_ERROR != functionalTest(availableCameras) ) {
+            break;
+        }
+    };
+
+    for ( size_t j = 0 ; j < availableCameras.size() ; j++ ) {
+        camera = availableCameras.itemAt(j);
+        camera->closeCamera();
+        camera.clear();
+    }
+
+    availableCameras.clear();
+
+    return 0;
+}
diff --git a/camera/QCamera2/HAL/test/qcamera_test.h b/camera/QCamera2/HAL/test/qcamera_test.h
new file mode 100644
index 0000000..5bb8f67
--- /dev/null
+++ b/camera/QCamera2/HAL/test/qcamera_test.h
@@ -0,0 +1,126 @@
+/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef QCAMERA_TEST_H
+#define QCAMERA_TEST_H
+
+namespace qcamera {
+
+using namespace android;
+
+typedef enum qcamera_test_cmds_t {
+    SWITCH_CAMERA_CMD = 'A',
+    RESUME_PREVIEW_CMD = '[',
+    START_PREVIEW_CMD = '1',
+    STOP_PREVIEW_CMD = '2',
+    CHANGE_PREVIEW_SIZE_CMD = '4',
+    CHANGE_PICTURE_SIZE_CMD = '5',
+    DUMP_CAPS_CMD = 'E',
+    AUTOFOCUS_CMD = 'f',
+    TAKEPICTURE_CMD = 'p',
+    ENABLE_PRV_CALLBACKS_CMD = '&',
+    EXIT_CMD = 'q'
+} qcamera_test_cmds;
+
+class CameraContext : public CameraListener {
+public:
+
+    CameraContext(int cameraIndex) :
+        mCameraIndex(cameraIndex),
+        mResizePreview(true),
+        mHardwareActive(false),
+        mPreviewRunning(false),
+        mCamera(NULL),
+        mClient(NULL),
+        mSurfaceControl(NULL),
+        mPreviewSurface(NULL) {}
+
+    status_t openCamera();
+    status_t closeCamera();
+
+    status_t startPreview();
+    status_t stopPreview();
+    status_t resumePreview();
+    status_t autoFocus();
+    status_t enablePreviewCallbacks();
+    status_t takePicture();
+
+    status_t nextPreviewSize();
+    status_t getCurrentPreviewSize(Size &previewSize);
+
+    status_t nextPictureSize();
+    status_t getCurrentPictureSize(Size &pictureSize);
+
+    void printSupportedParams();
+
+    int getCameraIndex() { return mCameraIndex; }
+    int getNumberOfCameras();
+
+    virtual ~CameraContext();
+
+    virtual void notify(int32_t msgType, int32_t ext1, int32_t ext2);
+    virtual void postData(int32_t msgType,
+                          const sp<IMemory>& dataPtr,
+                          camera_frame_metadata_t *metadata);
+
+    virtual void postDataTimestamp(nsecs_t timestamp,
+                                   int32_t msgType,
+                                   const sp<IMemory>& dataPtr);
+
+private:
+
+    status_t createPreviewSurface(unsigned int width,
+                                  unsigned int height,
+                                  int32_t pixFormat);
+    status_t destroyPreviewSurface();
+
+    status_t saveFile(const sp<IMemory>& mem, String8 path);
+    void previewCallback(const sp<IMemory>& mem);
+
+    static int JpegIdx;
+    int mCameraIndex;
+    bool mResizePreview;
+    bool mHardwareActive;
+    bool mPreviewRunning;
+
+    sp<Camera> mCamera;
+    sp<SurfaceComposerClient> mClient;
+    sp<SurfaceControl> mSurfaceControl;
+    sp<Surface> mPreviewSurface;
+    CameraParameters mParams;
+
+    int mCurrentPreviewSizeIdx;
+    int mCurrentPictureSizeIdx;
+    Vector<Size> mSupportedPreviewSizes;
+    Vector<Size> mSupportedPictureSizes;
+};
+
+}; //namespace qcamera
+
+#endif
diff --git a/camera/QCamera2/HAL3/Android.mk b/camera/QCamera2/HAL3/Android.mk
new file mode 100644
index 0000000..d233fa0
--- /dev/null
+++ b/camera/QCamera2/HAL3/Android.mk
@@ -0,0 +1,44 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+        QCamera3Factory.cpp \
+        QCamera3Hal.cpp \
+        QCamera3HWI.cpp \
+        QCamera3Mem.cpp \
+        QCamera3Stream.cpp \
+        QCamera3Channel.cpp \
+        QCamera3PostProc.cpp \
+        QCamera3VendorTags.cpp \
+        ../util/QCameraCmdThread.cpp \
+        ../util/QCameraQueue.cpp
+
+LOCAL_CFLAGS := -Wall -Werror
+LOCAL_CFLAGS += -DHAS_MULTIMEDIA_HINTS
+
+LOCAL_C_INCLUDES := \
+        $(LOCAL_PATH)/../stack/common \
+        frameworks/native/include/media/openmax \
+        frameworks/native/include \
+        frameworks/av/include \
+        hardware/qcom/media/libstagefrighthw \
+        system/media/camera/include \
+        $(LOCAL_PATH)/../../mm-image-codec/qexif \
+        $(LOCAL_PATH)/../../mm-image-codec/qomx_core \
+        $(LOCAL_PATH)/../util
+
+LOCAL_C_INCLUDES += \
+        hardware/qcom/display/msm8974/libgralloc
+
+LOCAL_SHARED_LIBRARIES := libcamera_client liblog libhardware libutils libcutils libdl
+LOCAL_SHARED_LIBRARIES += libmmcamera_interface libmmjpeg_interface libui libcamera_metadata
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+#LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE := camera.$(TARGET_DEVICE)
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+#include $(LOCAL_PATH)/test/Android.mk
diff --git a/camera/QCamera2/HAL3/QCamera3Channel.cpp b/camera/QCamera2/HAL3/QCamera3Channel.cpp
new file mode 100644
index 0000000..7c99bca
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Channel.cpp
@@ -0,0 +1,2577 @@
+/* Copyright (c) 2012-2014, The Linux Foundataion. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3Channel"
+//#define LOG_NDEBUG 0
+#include <fcntl.h>
+#include <stdlib.h>
+#include <cstdlib>
+#include <stdio.h>
+#include <string.h>
+#include <hardware/camera3.h>
+#include <system/camera_metadata.h>
+#include <gralloc_priv.h>
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <cutils/properties.h>
+#include "QCamera3Channel.h"
+
+using namespace android;
+
+#define MIN_STREAMING_BUFFER_NUM (7 + 11)
+
+namespace qcamera {
+static const char ExifAsciiPrefix[] =
+    { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };          // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] =
+    { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 };   // "\0\0\0\0\0\0\0\0"
+
+#define EXIF_ASCII_PREFIX_SIZE           8   //(sizeof(ExifAsciiPrefix))
+#define FOCAL_LENGTH_DECIMAL_PRECISION   1000
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Channel
+ *
+ * DESCRIPTION: constructor of QCamera3Channel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *   @cb_routine : callback routine to handle the frame data
+ *   @paddingInfo: padding information
+ *   @userData   : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
+                               mm_camera_ops_t *cam_ops,
+                               channel_cb_routine cb_routine,
+                               cam_padding_info_t *paddingInfo,
+                               void *userData)
+{
+    m_camHandle = cam_handle;
+    m_camOps = cam_ops;
+    m_bIsActive = false;
+
+    m_handle = 0;
+    m_numStreams = 0;
+    memset(mStreams, 0, sizeof(mStreams));
+    mUserData = userData;
+
+    mStreamInfoBuf = NULL;
+    mChannelCB = cb_routine;
+    mPaddingInfo = paddingInfo;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Channel
+ *
+ * DESCRIPTION: default constructor of QCamera3Channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3Channel::QCamera3Channel()
+{
+    m_camHandle = 0;
+    m_camOps = NULL;
+    m_bIsActive = false;
+
+    m_handle = 0;
+    m_numStreams = 0;
+    memset(mStreams, 0, sizeof(mStreams));
+    mUserData = NULL;
+
+    mStreamInfoBuf = NULL;
+    mChannelCB = NULL;
+    mPaddingInfo = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Channel
+ *
+ * DESCRIPTION: destructor of QCamera3Channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3Channel::~QCamera3Channel()
+{
+    if (m_bIsActive)
+        stop();
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            delete mStreams[i];
+            mStreams[i] = 0;
+        }
+    }
+    if (m_handle) {
+        m_camOps->delete_channel(m_camHandle, m_handle);
+        ALOGE("%s: deleting channel %d", __func__, m_handle);
+        m_handle = 0;
+    }
+    m_numStreams = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of channel
+ *
+ * PARAMETERS :
+ *   @attr    : channel bundle attribute setting
+ *   @dataCB  : data notify callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t dataCB)
+{
+    m_handle = m_camOps->add_channel(m_camHandle,
+                                      attr,
+                                      dataCB,
+                                      this);
+    if (m_handle == 0) {
+        ALOGE("%s: Add channel failed", __func__);
+        return UNKNOWN_ERROR;
+    }
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : addStream
+ *
+ * DESCRIPTION: add a stream into channel
+ *
+ * PARAMETERS :
+ *   @streamType     : type of stream to add
+ *   @streamFormat   : format of the stream
+ *   @streamDim      : dimensions of the stream
+ *   @minStreamBufNum: number of stream buffers needed
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
+                                  cam_format_t streamFormat,
+                                  cam_dimension_t streamDim,
+                                  uint8_t minStreamBufNum)
+{
+    int32_t rc = NO_ERROR;
+
+    if (m_numStreams >= 1) {
+        ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__);
+        return BAD_VALUE;
+    }
+
+    if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
+        ALOGE("%s: stream number (%d) exceeds max limit (%d)",
+              __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
+        return BAD_VALUE;
+    }
+    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
+                                               m_handle,
+                                               m_camOps,
+                                               mPaddingInfo,
+                                               this);
+    if (pStream == NULL) {
+        ALOGE("%s: No mem for Stream", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pStream->init(streamType, streamFormat, streamDim, NULL, minStreamBufNum,
+                                                    streamCbRoutine, this);
+    if (rc == 0) {
+        mStreams[m_numStreams] = pStream;
+        m_numStreams++;
+    } else {
+        delete pStream;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start channel, which will start all streams belonging to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::start()
+{
+    int32_t rc = NO_ERROR;
+
+    if (m_numStreams > 1) {
+        ALOGE("%s: bundle not supported", __func__);
+    } else if (m_numStreams == 0) {
+        return NO_INIT;
+    }
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            mStreams[i]->start();
+        }
+    }
+    rc = m_camOps->start_channel(m_camHandle, m_handle);
+
+    if (rc != NO_ERROR) {
+        for (int i = 0; i < m_numStreams; i++) {
+            if (mStreams[i] != NULL) {
+                mStreams[i]->stop();
+            }
+        }
+    } else {
+        m_bIsActive = true;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams belonging to this channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::stop()
+{
+    int32_t rc = NO_ERROR;
+    if(!m_bIsActive) {
+        ALOGE("%s: Attempt to stop inactive channel",__func__);
+        return rc;
+    }
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL) {
+            mStreams[i]->stop();
+        }
+    }
+
+    rc = m_camOps->stop_channel(m_camHandle, m_handle);
+
+    m_bIsActive = false;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return a stream buf back to kernel
+ *
+ * PARAMETERS :
+ *   @recvd_frame  : stream buf frame to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame)
+{
+    int32_t rc = NO_ERROR;
+    for (int i = 0; i < recvd_frame->num_bufs; i++) {
+         if (recvd_frame->bufs[i] != NULL) {
+             for (int j = 0; j < m_numStreams; j++) {
+                 if (mStreams[j] != NULL &&
+                     mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) {
+                     rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx);
+                     break; // break loop j
+                 }
+             }
+         }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamTypeMask
+ *
+ * DESCRIPTION: Get bit mask of all stream types in this channel
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : Bit mask of all stream types in this channel
+ *==========================================================================*/
+uint32_t QCamera3Channel::getStreamTypeMask()
+{
+    uint32_t mask = 0;
+    for (int i = 0; i < m_numStreams; i++) {
+       mask |= (0x1 << mStreams[i]->getMyType());
+    }
+    return mask;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamID
+ *
+ * DESCRIPTION: Get StreamID of requested stream type
+ *
+ * PARAMETERS : streamMask
+ *
+ * RETURN     : Stream ID
+ *==========================================================================*/
+uint32_t QCamera3Channel::getStreamID(uint32_t streamMask)
+{
+    uint32_t streamID = 0;
+    for (int i = 0; i < m_numStreams; i++) {
+        if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) {
+            streamID = mStreams[i]->getMyServerID();
+            break;
+        }
+    }
+    return streamID;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByHandle
+ *
+ * DESCRIPTION: return stream object by stream handle
+ *
+ * PARAMETERS :
+ *   @streamHandle : stream handle
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
+{
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
+            return mStreams[i];
+        }
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamByIndex
+ *
+ * DESCRIPTION: return stream object by index
+ *
+ * PARAMETERS :
+ *   @index : index of the stream within the channel
+ *
+ * RETURN     : stream object. NULL if not found
+ *==========================================================================*/
+QCamera3Stream *QCamera3Channel::getStreamByIndex(uint8_t index)
+{
+    if (index < m_numStreams) {
+        return mStreams[index];
+    }
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION: callback routine for stream
+ *
+ * PARAMETERS :
+ *   @super_frame : the super buffer frame received from mm-camera-interface
+ *   @stream      : stream object the frame belongs to
+ *   @userdata    : user data ptr (the QCamera3Channel object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                QCamera3Stream *stream, void *userdata)
+{
+    QCamera3Channel *channel = (QCamera3Channel *)userdata;
+    if (channel == NULL) {
+        ALOGE("%s: invalid channel pointer", __func__);
+        return;
+    }
+    channel->streamCbRoutine(super_frame, stream);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3RegularChannel
+ *
+ * DESCRIPTION: constructor of QCamera3RegularChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle : camera handle
+ *   @cam_ops    : ptr to camera ops table
+ *   @cb_routine : callback routine to frame aggregator
+ *   @stream     : camera3_stream_t structure
+ *   @stream_type: Channel stream type
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    cam_stream_type_t stream_type) :
+                        QCamera3Channel(cam_handle, cam_ops, cb_routine,
+                                                paddingInfo, userData),
+                        mCamera3Stream(stream),
+                        mNumBufs(0),
+                        mStreamType(stream_type)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3RegularChannel
+ *
+ * DESCRIPTION: destructor of QCamera3RegularChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3RegularChannel::~QCamera3RegularChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Initialize and add camera channel & stream
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+
+int32_t QCamera3RegularChannel::initialize()
+{
+    int32_t rc = NO_ERROR;
+    cam_format_t streamFormat;
+    cam_dimension_t streamDim;
+
+    if (NULL == mCamera3Stream) {
+        ALOGE("%s: Camera stream uninitialized", __func__);
+        return NO_INIT;
+    }
+
+    if (1 <= m_numStreams) {
+        // Only one stream per channel supported in v3 Hal
+        return NO_ERROR;
+    }
+
+    rc = init(NULL, NULL);
+    if (rc < 0) {
+        ALOGE("%s: init failed", __func__);
+        return rc;
+    }
+
+    mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
+
+    if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
+        if (mStreamType ==  CAM_STREAM_TYPE_VIDEO) {
+            streamFormat = CAM_FORMAT_YUV_420_NV12;
+        } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
+            streamFormat = CAM_FORMAT_YUV_420_NV21;
+        } else {
+            //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs
+            // to be properly aligned and padded.
+            streamFormat = CAM_FORMAT_YUV_420_NV21;
+        }
+    } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
+         streamFormat = CAM_FORMAT_YUV_420_NV21;
+    } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
+            mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) {
+        // Bayer pattern doesn't matter here.
+        // All CAMIF raw format uses 10bit.
+        streamFormat = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
+    } else {
+
+        //TODO: Fail for other types of streams for now
+        ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
+        return -EINVAL;
+    }
+
+    streamDim.width = mCamera3Stream->width;
+    streamDim.height = mCamera3Stream->height;
+
+    rc = QCamera3Channel::addStream(mStreamType,
+            streamFormat,
+            streamDim,
+            mNumBufs);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start a regular channel
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3RegularChannel::start()
+{
+    int32_t rc = NO_ERROR;
+
+    if (0 < mMemory.getCnt()) {
+        rc = QCamera3Channel::start();
+    }
+
+    return rc;
+}
+/*===========================================================================
+ * FUNCTION   : getInternalFormatBuffer
+ *
+ * DESCRIPTION: return buffer in the internal format structure
+ *
+ * PARAMETERS :
+ *   @buffer : gralloc buffer handle from the framework
+ *
+ * RETURN     : buffer in mm-camera-interface format. NULL if not found
+ *==========================================================================*/
+mm_camera_buf_def_t* QCamera3RegularChannel::getInternalFormatBuffer(
+                                            buffer_handle_t * buffer)
+{
+    int32_t index;
+    if(buffer == NULL)
+        return NULL;
+    index = mMemory.getMatchBufIndex((void*)buffer);
+    if(index < 0) {
+        ALOGE("%s: Could not find object among registered buffers",__func__);
+        return NULL;
+    }
+    return mStreams[0]->getInternalFormatBuffer(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : request
+ *
+ * DESCRIPTION: process a request from camera service. Stream on if necessary.
+ *
+ * PARAMETERS :
+ *   @buffer  : buffer to be filled for this request
+ *
+ * RETURN     : 0 on a success start of capture
+ *              -EINVAL on invalid input
+ *              -ENODEV on serious error
+ *==========================================================================*/
+int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber)
+{
+    //FIX ME: Return buffer back in case of failures below.
+
+    int32_t rc = NO_ERROR;
+    int index;
+
+    if (NULL == buffer) {
+        ALOGE("%s: Invalid buffer in channel request", __func__);
+        return BAD_VALUE;
+    }
+
+    if(!m_bIsActive) {
+        rc = registerBuffer(buffer);
+        if (NO_ERROR != rc) {
+            ALOGE("%s: On-the-fly buffer registration failed %d",
+                    __func__, rc);
+            return rc;
+        }
+
+        rc = start();
+        if (NO_ERROR != rc) {
+            return rc;
+        }
+    } else {
+        ALOGV("%s: Request on an existing stream",__func__);
+    }
+
+    index = mMemory.getMatchBufIndex((void*)buffer);
+    if(index < 0) {
+        rc = registerBuffer(buffer);
+        if (NO_ERROR != rc) {
+            ALOGE("%s: On-the-fly buffer registration failed %d",
+                    __func__, rc);
+            return rc;
+        }
+
+        index = mMemory.getMatchBufIndex((void*)buffer);
+        if (index < 0) {
+            ALOGE("%s: Could not find object among registered buffers",
+                    __func__);
+            return DEAD_OBJECT;
+        }
+    }
+
+    rc = mStreams[0]->bufDone(index);
+    if(rc != NO_ERROR) {
+        ALOGE("%s: Failed to Q new buffer to stream",__func__);
+        return rc;
+    }
+
+    rc = mMemory.markFrameNumber(index, frameNumber);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: register streaming buffer to the channel object
+ *
+ * PARAMETERS :
+ *   @buffer     : buffer to be registered
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer)
+{
+    int rc = 0;
+
+    if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
+        ALOGE("%s: Trying to register more buffers than initially requested",
+                __func__);
+        return BAD_VALUE;
+    }
+
+    if (0 == m_numStreams) {
+        rc = initialize();
+        if (rc != NO_ERROR) {
+            ALOGE("%s: Couldn't initialize camera stream %d",
+                    __func__, rc);
+            return rc;
+        }
+    }
+
+    rc = mMemory.registerBuffer(buffer);
+    if (ALREADY_EXISTS == rc) {
+        return NO_ERROR;
+    } else if (NO_ERROR != rc) {
+        ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
+        return rc;
+    }
+
+    return rc;
+}
+
+void QCamera3RegularChannel::streamCbRoutine(
+                            mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream)
+{
+    //FIXME Q Buf back in case of error?
+    uint8_t frameIndex;
+    buffer_handle_t *resultBuffer;
+    int32_t resultFrameNumber;
+    camera3_stream_buffer_t result;
+
+    if(!super_frame) {
+         ALOGE("%s: Invalid Super buffer",__func__);
+         return;
+    }
+
+    if(super_frame->num_bufs != 1) {
+         ALOGE("%s: Multiple streams are not supported",__func__);
+         return;
+    }
+    if(super_frame->bufs[0] == NULL ) {
+         ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
+                  __func__);
+         return;
+    }
+
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+    if(frameIndex >= mNumBufs) {
+         ALOGE("%s: Error, Invalid index for buffer",__func__);
+         if(stream) {
+             stream->bufDone(frameIndex);
+         }
+         return;
+    }
+
+    // Use the data below to issue the framework callback
+    resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
+    resultFrameNumber = mMemory.getFrameNumber(frameIndex);
+
+    result.stream = mCamera3Stream;
+    result.buffer = resultBuffer;
+    result.status = CAMERA3_BUFFER_STATUS_OK;
+    result.acquire_fence = -1;
+    result.release_fence = -1;
+
+    mChannelCB(NULL, &result, resultFrameNumber, mUserData);
+    free(super_frame);
+    return;
+}
+
+QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/)
+{
+    return &mMemory;
+}
+
+void QCamera3RegularChannel::putStreamBufs()
+{
+    mMemory.unregisterBuffers();
+}
+
+int QCamera3RegularChannel::kMaxBuffers = 7;
+
+QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData) :
+                        QCamera3Channel(cam_handle, cam_ops,
+                                cb_routine, paddingInfo, userData),
+                        mMemory(NULL)
+{
+}
+
+QCamera3MetadataChannel::~QCamera3MetadataChannel()
+{
+    if (m_bIsActive)
+        stop();
+
+    if (mMemory) {
+        mMemory->deallocate();
+        delete mMemory;
+        mMemory = NULL;
+    }
+}
+
+int32_t QCamera3MetadataChannel::initialize()
+{
+    int32_t rc;
+    cam_dimension_t streamDim;
+
+    if (mMemory || m_numStreams > 0) {
+        ALOGE("%s: metadata channel already initialized", __func__);
+        return -EINVAL;
+    }
+
+    rc = init(NULL, NULL);
+    if (rc < 0) {
+        ALOGE("%s: init failed", __func__);
+        return rc;
+    }
+
+    streamDim.width = sizeof(metadata_buffer_t);
+    streamDim.height = 1;
+    rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX,
+        streamDim, MIN_STREAMING_BUFFER_NUM);
+    if (rc < 0) {
+        ALOGE("%s: addStream failed", __func__);
+    }
+    return rc;
+}
+
+int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/,
+                                                uint32_t /*frameNumber*/)
+{
+    if (!m_bIsActive) {
+        return start();
+    }
+    else
+        return 0;
+}
+
+void QCamera3MetadataChannel::streamCbRoutine(
+                        mm_camera_super_buf_t *super_frame,
+                        QCamera3Stream * /*stream*/)
+{
+    uint32_t requestNumber = 0;
+    if (super_frame == NULL || super_frame->num_bufs != 1) {
+        ALOGE("%s: super_frame is not valid", __func__);
+        return;
+    }
+    mChannelCB(super_frame, NULL, requestNumber, mUserData);
+}
+
+QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len)
+{
+    int rc;
+    if (len < sizeof(metadata_buffer_t)) {
+        ALOGE("%s: size doesn't match %d vs %d", __func__,
+                len, sizeof(metadata_buffer_t));
+        return NULL;
+    }
+    mMemory = new QCamera3HeapMemory();
+    if (!mMemory) {
+        ALOGE("%s: unable to create metadata memory", __func__);
+        return NULL;
+    }
+    rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
+    if (rc < 0) {
+        ALOGE("%s: unable to allocate metadata memory", __func__);
+        delete mMemory;
+        mMemory = NULL;
+        return NULL;
+    }
+    memset(mMemory->getPtr(0), 0, sizeof(metadata_buffer_t));
+    return mMemory;
+}
+
+void QCamera3MetadataChannel::putStreamBufs()
+{
+    mMemory->deallocate();
+    delete mMemory;
+    mMemory = NULL;
+}
+/*************************************************************************************/
+// RAW Channel related functions
+int QCamera3RawChannel::kMaxBuffers = 7;
+
+QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    bool raw_16) :
+                        QCamera3RegularChannel(cam_handle, cam_ops,
+                                cb_routine, paddingInfo, userData, stream,
+                                CAM_STREAM_TYPE_RAW),
+                        mIsRaw16(raw_16)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    property_get("persist.camera.raw.dump", prop, "0");
+    mRawDump = atoi(prop);
+}
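+
+/* Illustrative usage note (editor's sketch, not part of the original HAL code):
+ * raw frame dumping is gated by the persist.camera.raw.dump property read in
+ * the constructor above, so with sufficient permissions (e.g. on a userdebug
+ * build) something like
+ *
+ *     adb shell setprop persist.camera.raw.dump 1
+ *
+ * would make streamCbRoutine() below call dumpRawSnapshot() for each frame.
+ */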
+
+QCamera3RawChannel::~QCamera3RawChannel()
+{
+}
+
+void QCamera3RawChannel::streamCbRoutine(
+                        mm_camera_super_buf_t *super_frame,
+                        QCamera3Stream * stream)
+{
+    /* Move this back down once verified */
+    if (mRawDump)
+        dumpRawSnapshot(super_frame->bufs[0]);
+
+    if (mIsRaw16)
+        convertToRaw16(super_frame->bufs[0]);
+
+    // Ensure cache coherency, since extra CPU processing was done on this buffer
+    mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx);
+
+    QCamera3RegularChannel::streamCbRoutine(super_frame, stream);
+    return;
+}
+
+void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame)
+{
+   QCamera3Stream *stream = getStreamByIndex(0);
+   char buf[32];
+   memset(buf, 0, sizeof(buf));
+   cam_dimension_t dim;
+   memset(&dim, 0, sizeof(dim));
+   stream->getFrameDimension(dim);
+
+   cam_frame_len_offset_t offset;
+   memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+   stream->getFrameOffset(offset);
+   snprintf(buf, sizeof(buf), "/data/r_%d_%dx%d.raw",
+            frame->frame_idx, dim.width, dim.height);
+
+   int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+   if (file_fd >= 0) {
+      int written_len = write(file_fd, frame->buffer, offset.frame_len);
+      ALOGE("%s: written number of bytes %d", __func__, written_len);
+      close(file_fd);
+   } else {
+      ALOGE("%s: failed to open file to dump image", __func__);
+   }
+
+}
+
+void QCamera3RawChannel::convertToRaw16(mm_camera_buf_def_t *frame)
+{
+    // Convert image buffer from Opaque raw format to RAW16 format
+    // 10bit Opaque raw is stored in the format of:
+    // 0000 - p5 - p4 - p3 - p2 - p1 - p0
+    // where p0 to p5 are 6 pixels (each is 10 bits) and the most significant
+    // 4 bits are 0s. Each 64-bit word contains 6 pixels.
+
+    QCamera3Stream *stream = getStreamByIndex(0);
+    cam_dimension_t dim;
+    memset(&dim, 0, sizeof(dim));
+    stream->getFrameDimension(dim);
+
+    cam_frame_len_offset_t offset;
+    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
+    stream->getFrameOffset(offset);
+
+    uint32_t raw16_stride = (dim.width + 15) & ~15;
+    uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
+
+    // In-place format conversion.
+    // Raw16 format always occupy more memory than opaque raw10.
+    // Convert to Raw16 by iterating through all pixels from bottom-right
+    // to top-left of the image.
+    // Special notes:
+    // 1. Cross-platform raw16's stride is 16 pixels.
+    // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes.
+    for (int y = dim.height-1; y >= 0; y--) {
+        uint64_t* row_start = (uint64_t *)frame->buffer +
+            y * offset.mp[0].stride / 8;
+        for (int x = dim.width-1;  x >= 0; x--) {
+            uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6)));
+            raw16_buffer[y*raw16_stride+x] = raw16_pixel;
+        }
+    }
+}
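+
+/* Editor's sketch of the unpacking math above (illustration only; the helper
+ * name is hypothetical and not part of this HAL):
+ *
+ *     static inline uint16_t unpackRaw10Pixel(const uint64_t *row_start, int x)
+ *     {
+ *         // Each 64-bit word packs 6 pixels of 10 bits; pixel x lives in
+ *         // word x/6 at bit offset 10*(x%6). Masking with 0x3FF keeps 10 bits.
+ *         return (uint16_t)(0x3FF & (row_start[x / 6] >> (10 * (x % 6))));
+ *     }
+ *
+ * For example, pixel x = 7 is read from word 7/6 = 1 at bit offset
+ * 10*(7%6) = 10, which matches the inner loop of convertToRaw16().
+ */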
+
+/*************************************************************************************/
+
+/*===========================================================================
+ * FUNCTION   : jpegEvtHandle
+ *
+ * DESCRIPTION: Function registered to mm-jpeg-interface to handle jpeg events.
+                Construct result payload and call mChannelCb to deliver buffer
+                to framework.
+ *
+ * PARAMETERS :
+ *   @status    : status of jpeg job
+ *   @client_hdl: jpeg client handle
+ *   @jobId     : jpeg job Id
+ *   @p_output  : ptr to jpeg output result struct
+ *   @userdata  : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
+                                              uint32_t /*client_hdl*/,
+                                              uint32_t jobId,
+                                              mm_jpeg_output_t *p_output,
+                                              void *userdata)
+{
+    buffer_handle_t *resultBuffer, *jpegBufferHandle;
+    int32_t resultFrameNumber;
+    int resultStatus = CAMERA3_BUFFER_STATUS_OK;
+    camera3_stream_buffer_t result;
+    camera3_jpeg_blob_t jpegHeader;
+    char* jpeg_eof = 0;
+    int maxJpegSize;
+    QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
+    if (obj) {
+
+        //Release any cached metabuffer information
+        if (obj->mMetaFrame != NULL && obj->m_pMetaChannel != NULL) {
+            ((QCamera3MetadataChannel*)(obj->m_pMetaChannel))->bufDone(obj->mMetaFrame);
+            obj->mMetaFrame = NULL;
+            obj->m_pMetaChannel = NULL;
+        } else {
+            ALOGE("%s: Meta frame was NULL", __func__);
+        }
+        //Construct payload for process_capture_result. Call mChannelCb
+
+        qcamera_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);
+
+        if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
+            ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
+            resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
+        }
+
+        //Construct jpeg transient header of type camera3_jpeg_blob_t
+        //Append at the end of jpeg image of buf_filled_len size
+
+        jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+        jpegHeader.jpeg_size = p_output->buf_filled_len;
+
+
+        char* jpeg_buf = (char *)p_output->buf_vaddr;
+
+        // Gralloc buffer may have additional padding for 4K page size
+        // Follow size guidelines based on spec since framework relies
+        // on that to reach end of buffer and with it the header
+
+        // Same handle as resultBuffer; kept in a separate variable for readability
+        jpegBufferHandle =
+            (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex);
+
+        maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
+        if (maxJpegSize > obj->mMemory.getSize(obj->mCurrentBufIndex)) {
+            maxJpegSize = obj->mMemory.getSize(obj->mCurrentBufIndex);
+        }
+
+        jpeg_eof = &jpeg_buf[maxJpegSize-sizeof(jpegHeader)];
+        memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
+        obj->mMemory.cleanInvalidateCache(obj->mCurrentBufIndex);
+
+        // Use the data below to issue the framework callback
+        resultBuffer = (buffer_handle_t *)obj->mMemory.getBufferHandle(obj->mCurrentBufIndex);
+        resultFrameNumber = obj->mMemory.getFrameNumber(obj->mCurrentBufIndex);
+
+        result.stream = obj->mCamera3Stream;
+        result.buffer = resultBuffer;
+        result.status = resultStatus;
+        result.acquire_fence = -1;
+        result.release_fence = -1;
+
+        ALOGV("%s: Issue Callback", __func__);
+        obj->mChannelCB(NULL, &result, resultFrameNumber, obj->mUserData);
+
+        // release internal data for jpeg job
+        if (job != NULL) {
+            obj->m_postprocessor.releaseJpegJobData(job);
+            free(job);
+        }
+        return;
+        // }
+    } else {
+        ALOGE("%s: Null userdata in jpeg callback", __func__);
+    }
+}
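+
+/* Editor's sketch (illustration only): the camera3_jpeg_blob_t trailer written
+ * above is what lets a consumer recover the real JPEG size from a gralloc
+ * buffer that is larger than the encoded image. Assuming the blob sits at
+ * maxJpegSize - sizeof(camera3_jpeg_blob_t), as written above, a reader could
+ * do roughly:
+ *
+ *     camera3_jpeg_blob_t blob;
+ *     memcpy(&blob, buf + maxJpegSize - sizeof(blob), sizeof(blob));
+ *     size_t jpegSize = (blob.jpeg_blob_id == CAMERA3_JPEG_BLOB_ID) ?
+ *             blob.jpeg_size : maxJpegSize;
+ *
+ * where buf and maxJpegSize correspond to jpeg_buf and maxJpegSize above.
+ */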
+
+QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream) :
+                        QCamera3Channel(cam_handle, cam_ops, cb_routine,
+                        paddingInfo, userData),
+                        m_postprocessor(this),
+                        mCamera3Stream(stream),
+                        mNumBufs(0),
+                        mCurrentBufIndex(-1),
+                        mYuvMemory(NULL),
+                        mMetaFrame(NULL)
+{
+    mYuvWidth = stream->width;
+    mYuvHeight = stream->height;
+    int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, this);
+    if (rc != 0) {
+        ALOGE("Init Postprocessor failed");
+    }
+}
+
+QCamera3PicChannel::~QCamera3PicChannel()
+{
+    int32_t rc = m_postprocessor.deinit();
+    if (rc != 0) {
+        ALOGE("De-init Postprocessor failed");
+    }
+}
+
+int32_t QCamera3PicChannel::initialize()
+{
+    int32_t rc = NO_ERROR;
+    cam_dimension_t streamDim;
+    cam_stream_type_t streamType;
+    cam_format_t streamFormat;
+    mm_camera_channel_attr_t attr;
+
+    if (NULL == mCamera3Stream) {
+        ALOGE("%s: Camera stream uninitialized", __func__);
+        return NO_INIT;
+    }
+
+    if (1 <= m_numStreams) {
+        // Only one stream per channel supported in v3 Hal
+        return NO_ERROR;
+    }
+
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr.look_back = 1;
+    attr.post_frame_skip = 1;
+    attr.water_mark = 1;
+    attr.max_unmatched_frames = 1;
+
+    rc = init(&attr, NULL);
+    if (rc < 0) {
+        ALOGE("%s: init failed", __func__);
+        return rc;
+    }
+
+    streamType = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
+    streamFormat = CAM_FORMAT_YUV_420_NV21;
+    streamDim.width = mYuvWidth;
+    streamDim.height = mYuvHeight;
+
+    int num_buffers = 1;
+    mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM;
+    rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
+            num_buffers);
+
+    return rc;
+}
+
+int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
+        uint32_t frameNumber,
+        mm_camera_buf_def_t *pInputBuffer,
+        metadata_buffer_t *metadata)
+{
+    //FIX ME: Return buffer back in case of failures below.
+
+    int32_t rc = NO_ERROR;
+    int index;
+    // Picture stream has already been started before any request comes in
+    if (!m_bIsActive) {
+        ALOGE("%s: Channel not started!!", __func__);
+        return NO_INIT;
+    }
+
+    index = mMemory.getMatchBufIndex((void*)buffer);
+    if(index < 0) {
+        rc = registerBuffer(buffer);
+        if (NO_ERROR != rc) {
+            ALOGE("%s: On-the-fly buffer registration failed %d",
+                    __func__, rc);
+            return rc;
+        }
+
+        index = mMemory.getMatchBufIndex((void*)buffer);
+        if (index < 0) {
+            ALOGE("%s: Could not find object among registered buffers",__func__);
+            return DEAD_OBJECT;
+        }
+    }
+    rc = mMemory.markFrameNumber(index, frameNumber);
+
+    //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
+    mCurrentBufIndex = index;
+
+    // Start postprocessor
+    m_postprocessor.start(this, metadata);
+
+    // Queue jpeg settings
+    rc = queueJpegSetting(index, metadata);
+
+    if (pInputBuffer == NULL)
+        mStreams[0]->bufDone(0);
+    else {
+        mm_camera_super_buf_t *src_frame = NULL;
+        src_frame = (mm_camera_super_buf_t *)malloc(
+                sizeof(mm_camera_super_buf_t));
+        if (src_frame == NULL) {
+            ALOGE("%s: No memory for src frame", __func__);
+            return NO_MEMORY;
+        }
+        memset(src_frame, 0, sizeof(mm_camera_super_buf_t));
+        src_frame->num_bufs = 1;
+        src_frame->bufs[0] = pInputBuffer;
+
+        ALOGD("%s: Post-process started", __func__);
+        ALOGD("%s: Issue call to reprocess", __func__);
+
+        m_postprocessor.processPPMetadata(metadata);
+        m_postprocessor.processData(src_frame);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: Channel Level callback used for super buffer data notify.
+ *              This function is registered with mm-camera-interface to handle
+ *              data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   @userdata      : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    ALOGV("%s: E\n", __func__);
+    QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata;
+
+    if (channel == NULL) {
+        ALOGE("%s: invalid channel pointer", __func__);
+        return;
+    }
+
+    if(channel->m_numStreams != 1) {
+        ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__);
+        return;
+    }
+
+
+    if(channel->mStreams[0] == NULL) {
+        ALOGE("%s: Error: Invalid Stream object",__func__);
+        return;
+    }
+
+    channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]);
+
+    ALOGV("%s: X\n", __func__);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: register streaming buffer to the channel object
+ *
+ * PARAMETERS :
+ *   @buffer     : buffer to be registered
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer)
+{
+    int rc = 0;
+
+    if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) {
+        ALOGE("%s: Trying to register more buffers than initially requested",
+                __func__);
+        return BAD_VALUE;
+    }
+
+    if (0 == m_numStreams) {
+        rc = initialize();
+        if (rc != NO_ERROR) {
+            ALOGE("%s: Couldn't initialize camera stream %d",
+                    __func__, rc);
+            return rc;
+        }
+    }
+    rc = mMemory.registerBuffer(buffer);
+    if (ALREADY_EXISTS == rc) {
+        return NO_ERROR;
+    } else if (NO_ERROR != rc) {
+        ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc);
+        return rc;
+    }
+
+    return rc;
+}
+
+void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream)
+{
+    //TODO
+    //Used only for getting YUV. Jpeg callback will be sent back from channel
+    //directly to HWI. Refer to func jpegEvtHandle
+
+    //Got the yuv callback. Calling yuv callback handler in PostProc
+    uint8_t frameIndex;
+    mm_camera_super_buf_t* frame = NULL;
+    if(!super_frame) {
+         ALOGE("%s: Invalid Super buffer",__func__);
+         return;
+    }
+
+    if(super_frame->num_bufs != 1) {
+         ALOGE("%s: Multiple streams are not supported",__func__);
+         return;
+    }
+    if(super_frame->bufs[0] == NULL ) {
+         ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
+                  __func__);
+         return;
+    }
+
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+    if(frameIndex >= mNumBufs) {
+         ALOGE("%s: Error, Invalid index for buffer",__func__);
+         if(stream) {
+             stream->bufDone(frameIndex);
+         }
+         return;
+    }
+
+    frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+       ALOGE("%s: Error allocating memory to save received_frame structure.",
+                                                                    __func__);
+       if(stream) {
+           stream->bufDone(frameIndex);
+       }
+       return;
+    }
+    *frame = *super_frame;
+    m_postprocessor.processData(frame);
+    free(super_frame);
+    return;
+}
+
+QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len)
+{
+    int rc = 0;
+
+    mYuvMemory = new QCamera3HeapMemory();
+    if (!mYuvMemory) {
+        ALOGE("%s: unable to create metadata memory", __func__);
+        return NULL;
+    }
+
+    // Queue YUV buffers in the beginning
+    rc = mYuvMemory->allocate(1, len, false);
+    if (rc < 0) {
+        ALOGE("%s: unable to allocate metadata memory", __func__);
+        delete mYuvMemory;
+        mYuvMemory = NULL;
+        return NULL;
+    }
+    return mYuvMemory;
+}
+
+void QCamera3PicChannel::putStreamBufs()
+{
+    mMemory.unregisterBuffers();
+
+    mYuvMemory->deallocate();
+    delete mYuvMemory;
+    mYuvMemory = NULL;
+}
+
+int32_t QCamera3PicChannel::queueReprocMetadata(metadata_buffer_t *metadata)
+{
+    return m_postprocessor.processPPMetadata(metadata);
+}
+
+int32_t QCamera3PicChannel::queueJpegSetting(int32_t index, metadata_buffer_t *metadata)
+{
+    jpeg_settings_t *settings =
+            (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t));
+
+    if (!settings) {
+        ALOGE("%s: out of memory allocating jpeg_settings", __func__);
+        return -ENOMEM;
+    }
+
+    memset(settings, 0, sizeof(jpeg_settings_t));
+
+    settings->out_buf_index = index;
+
+    settings->jpeg_orientation = 0;
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
+        int32_t *orientation = (int32_t *)POINTER_OF(
+                CAM_INTF_META_JPEG_ORIENTATION, metadata);
+        settings->jpeg_orientation = *orientation;
+    }
+
+    settings->jpeg_quality = 85;
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_QUALITY, metadata)) {
+        uint8_t *quality = (uint8_t *)POINTER_OF(
+                CAM_INTF_META_JPEG_QUALITY, metadata);
+        settings->jpeg_quality = *quality;
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_QUALITY, metadata)) {
+        uint8_t *quality = (uint8_t *)POINTER_OF(
+                CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
+        settings->jpeg_thumb_quality = *quality;
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_THUMB_SIZE, metadata)) {
+        cam_dimension_t *dimension = (cam_dimension_t *)POINTER_OF(
+                CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
+        settings->thumbnail_size = *dimension;
+    }
+
+    settings->gps_timestamp_valid = 0;
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata)) {
+        int64_t *timestamp = (int64_t *)POINTER_OF(
+                CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
+        settings->gps_timestamp = *timestamp;
+        settings->gps_timestamp_valid = 1;
+    }
+
+    settings->gps_coordinates_valid = 0;
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_COORDINATES, metadata)) {
+        double *coordinates = (double *)POINTER_OF(
+                CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
+        memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double));
+        settings->gps_coordinates_valid = 1;
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata)) {
+        char *proc_methods = (char *)POINTER_OF(
+                CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
+        memset(settings->gps_processing_method, 0,
+                sizeof(settings->gps_processing_method));
+        strncpy(settings->gps_processing_method, proc_methods,
+                sizeof(settings->gps_processing_method));
+    }
+
+    return m_postprocessor.processJpegSettingData(settings);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRational
+ *
+ * DESCRIPTION: compose rational struct
+ *
+ * PARAMETERS :
+ *   @rat     : ptr to struct to store rational info
+ *   @num     : numerator of the rational
+ *   @denom   : denominator of the rational
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getRational(rat_t *rat, int num, int denom)
+{
+    if (NULL == rat) {
+        ALOGE("%s: NULL rat input", __func__);
+        return BAD_VALUE;
+    }
+    rat->num = num;
+    rat->denom = denom;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : parseGPSCoordinate
+ *
+ * DESCRIPTION: parse GPS coordinate string
+ *
+ * PARAMETERS :
+ *   @coord_str : [input] coordinate string
+ *   @coord     : [output]  ptr to struct to store coordinate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int parseGPSCoordinate(const char *coord_str, rat_t* coord)
+{
+    if(coord == NULL) {
+        ALOGE("%s: error, invalid argument coord == NULL", __func__);
+        return BAD_VALUE;
+    }
+    float degF = atof(coord_str);
+    if (degF < 0) {
+        degF = -degF;
+    }
+    float minF = (degF - (int) degF) * 60;
+    float secF = (minF - (int) minF) * 60;
+
+    getRational(&coord[0], (int)degF, 1);
+    getRational(&coord[1], (int)minF, 1);
+    getRational(&coord[2], (int)(secF * 10000), 10000);
+    return NO_ERROR;
+}
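+
+/* Editor's worked example for parseGPSCoordinate() above (illustration only):
+ * a coordinate string of "37.422000" gives degF = 37.422, so
+ *
+ *     coord[0] = 37/1           (degrees)
+ *     coord[1] = 25/1           (minutes: 0.422 * 60 = 25.32)
+ *     coord[2] = ~192000/10000  (seconds: 0.32 * 60 = 19.2, scaled by 10000)
+ *
+ * i.e. roughly 37 deg 25' 19.2"; the exact seconds integer may differ by a
+ * unit or two due to float rounding.
+ */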
+
+/*===========================================================================
+ * FUNCTION   : getExifDateTime
+ *
+ * DESCRIPTION: query exif date time
+ *
+ * PARAMETERS :
+ *   @dateTime   : string to store exif date time
+ *   @subsecTime : string to store exif subsec time
+ *   @count      : length of the dateTime string
+ *   @subsecCount: length of the subsecTime string
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifDateTime(char *dateTime, char *subsecTime,
+        uint32_t &count, uint32_t &subsecCount)
+{
+    //get time and date from system
+    struct timeval tv;
+    struct tm *timeinfo;
+
+    gettimeofday(&tv, NULL);
+    timeinfo = localtime(&tv.tv_sec);
+    //Write datetime according to EXIF Spec
+    //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
+    snprintf(dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
+             timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+             timeinfo->tm_mday, timeinfo->tm_hour,
+             timeinfo->tm_min, timeinfo->tm_sec);
+    count = 20;
+
+    //Write subsec according to EXIF Spec
+    snprintf(subsecTime, 7, "%06ld", tv.tv_usec);
+    subsecCount = 7;
+    return NO_ERROR;
+}
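+
+/* Editor's example for getExifDateTime() above (illustration only): a capture
+ * at 2014-03-05 14:07:09.123456 local time would produce
+ *
+ *     dateTime   = "2014:03:05 14:07:09"   (count = 20, including '\0')
+ *     subsecTime = "123456"                (subsecCount = 7, including '\0')
+ *
+ * matching the "YYYY:MM:DD HH:MM:SS" layout required by the EXIF spec.
+ */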
+
+/*===========================================================================
+ * FUNCTION   : getExifFocalLength
+ *
+ * DESCRIPTION: get exif focal length
+ *
+ * PARAMETERS :
+ *   @focalLength : ptr to rational struct to store focal length
+ *   @value       : focal length value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifFocalLength(rat_t *focalLength, float value)
+{
+    int focalLengthValue =
+        (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION);
+    return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifExpTimeInfo
+ *
+ * DESCRIPTION: get exif exposure time information
+ *
+ * PARAMETERS :
+ *   @expoTimeInfo : ptr to rational struct to store exposure time
+ *   @value        : exposure time value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value)
+{
+
+    int cal_exposureTime;
+    if (value != 0)
+        cal_exposureTime = value;
+    else
+        cal_exposureTime = 60;
+
+    return getRational(expoTimeInfo, 1, cal_exposureTime);
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsProcessingMethod
+ *
+ * DESCRIPTION: get GPS processing method
+ *
+ * PARAMETERS :
+ *   @gpsProcessingMethod : string to store GPS process method
+ *   @count               : length of the string
+ *   @value               : GPS processing method string to encode
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod,
+                                   uint32_t &count, char* value)
+{
+    if(value != NULL) {
+        memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        count = EXIF_ASCII_PREFIX_SIZE;
+        strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, value, strlen(value));
+        count += strlen(value);
+        gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLatitude
+ *
+ * DESCRIPTION: get exif latitude
+ *
+ * PARAMETERS :
+ *   @latitude : ptr to rational struct to store latitude info
+ *   @latRef   : character to indicate latitude reference
+ *   @value    : latitude value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifLatitude(rat_t *latitude,
+                                           char *latRef, double value)
+{
+    char str[30];
+    snprintf(str, sizeof(str), "%f", value);
+    if(str != NULL) {
+        parseGPSCoordinate(str, latitude);
+
+        //set Latitude Ref
+        float latitudeValue = strtof(str, 0);
+        if(latitudeValue < 0.0f) {
+            latRef[0] = 'S';
+        } else {
+            latRef[0] = 'N';
+        }
+        latRef[1] = '\0';
+        return NO_ERROR;
+    }else{
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifLongitude
+ *
+ * DESCRIPTION: get exif longitude
+ *
+ * PARAMETERS :
+ *   @longitude : ptr to rational struct to store longitude info
+ *   @lonRef    : character to indicate longitude reference
+ *   @value     : longitude value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifLongitude(rat_t *longitude,
+                                            char *lonRef, double value)
+{
+    char str[30];
+    snprintf(str, sizeof(str), "%f", value);
+    if(str != NULL) {
+        parseGPSCoordinate(str, longitude);
+
+        //set Longitude Ref
+        float longitudeValue = strtof(str, 0);
+        if(longitudeValue < 0.0f) {
+            lonRef[0] = 'W';
+        } else {
+            lonRef[0] = 'E';
+        }
+        lonRef[1] = '\0';
+        return NO_ERROR;
+    }else{
+        return BAD_VALUE;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getExifAltitude
+ *
+ * DESCRIPTION: get exif altitude
+ *
+ * PARAMETERS :
+ *   @altitude : ptr to rational struct to store altitude info
+ *   @altRef   : character to indicate altitude reference
+ *   @value    : altitude value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifAltitude(rat_t *altitude,
+                                           char *altRef, double value)
+{
+    char str[30];
+    snprintf(str, sizeof(str), "%f", value);
+    if(str != NULL) {
+        double value = atof(str);
+        *altRef = 0;
+        if(value < 0){
+            *altRef = 1;
+            value = -value;
+        }
+        return getRational(altitude, value*1000, 1000);
+    }else{
+        return BAD_VALUE;
+    }
+}
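+
+/* Editor's example for getExifAltitude() above (illustration only): an
+ * altitude value of -10.5 sets *altRef = 1 (below sea level) and stores the
+ * magnitude as the rational 10500/1000, i.e. 10.5 m.
+ */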
+
+/*===========================================================================
+ * FUNCTION   : getExifGpsDateTimeStamp
+ *
+ * DESCRIPTION: get exif GPS date time stamp
+ *
+ * PARAMETERS :
+ *   @gpsDateStamp : GPS date time stamp string
+ *   @bufLen       : length of the string
+ *   @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t getExifGpsDateTimeStamp(char *gpsDateStamp,
+                                           uint32_t bufLen,
+                                           rat_t *gpsTimeStamp, int64_t value)
+{
+    char str[30];
+    snprintf(str, sizeof(str), "%lld", value);
+    if(str != NULL) {
+        time_t unixTime = (time_t)atol(str);
+        struct tm *UTCTimestamp = gmtime(&unixTime);
+
+        strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp);
+
+        getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1);
+        getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1);
+        getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1);
+
+        return NO_ERROR;
+    } else {
+        return BAD_VALUE;
+    }
+}
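+
+/* Editor's example for getExifGpsDateTimeStamp() above (illustration only):
+ * a GPS timestamp value of 0 (the Unix epoch) produces
+ *
+ *     gpsDateStamp = "1970:01:01"
+ *     gpsTimeStamp = { 0/1, 0/1, 0/1 }   // hour, minute, second in UTC
+ */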
+
+int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp,
+                             cam_rational_type_t step)
+{
+    exposure_val->num = exposure_comp * step.numerator;
+    exposure_val->denom = step.denominator;
+    return 0;
+}
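+
+/* Editor's example for getExifExposureValue() above (illustration only):
+ * with an exposure compensation step of 1/3 EV (step.numerator = 1,
+ * step.denominator = 3) and exposure_comp = -2, the resulting EXIF exposure
+ * bias is -2/3 EV (exposure_val->num = -2, exposure_val->denom = 3).
+ */
+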
+/*===========================================================================
+ * FUNCTION   : getExifData
+ *
+ * DESCRIPTION: get exif data to be passed into jpeg encoding
+ *
+ * PARAMETERS :
+ *   @metadata      : metadata of the capture request
+ *   @jpeg_settings : jpeg settings for this capture
+ *
+ * RETURN     : exif data from user setting and GPS
+ *==========================================================================*/
+QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
+        jpeg_settings_t *jpeg_settings)
+{
+    QCamera3Exif *exif = new QCamera3Exif();
+    if (exif == NULL) {
+        ALOGE("%s: No memory for QCamera3Exif", __func__);
+        return NULL;
+    }
+
+    int32_t rc = NO_ERROR;
+    uint32_t count = 0;
+
+    // add exif entries
+    {
+        char dateTime[20];
+        char subsecTime[7];
+        uint32_t subsecCount;
+        memset(dateTime, 0, sizeof(dateTime));
+        memset(subsecTime, 0, sizeof(subsecTime));
+        count = 20;
+        subsecCount = 7;
+        rc = getExifDateTime(dateTime, subsecTime, count, subsecCount);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_DATE_TIME,
+                    EXIF_ASCII,
+                    count,
+                    (void *)dateTime);
+            exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL,
+                    EXIF_ASCII,
+                    count,
+                    (void *)dateTime);
+            exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED,
+                    EXIF_ASCII,
+                    count,
+                    (void *)dateTime);
+            exif->addEntry(EXIFTAGID_SUBSEC_TIME,
+                    EXIF_ASCII,
+                    subsecCount,
+                    (void *)subsecTime);
+            exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL,
+                    EXIF_ASCII,
+                    subsecCount,
+                    (void *)subsecTime);
+            exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED,
+                    EXIF_ASCII,
+                    subsecCount,
+                    (void *)subsecTime);
+        } else {
+            ALOGE("%s: getExifDateTime failed", __func__);
+        }
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)) {
+        float focal_length = *(float *)POINTER_OF(
+                CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
+        rat_t focalLength;
+        rc = getExifFocalLength(&focalLength, focal_length);
+        if (rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
+                    EXIF_RATIONAL,
+                    1,
+                    (void *)&(focalLength));
+        } else {
+            ALOGE("%s: getExifFocalLength failed", __func__);
+        }
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)) {
+        int16_t isoSpeed = *(int32_t *)POINTER_OF(
+                CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
+        exif->addEntry(EXIFTAGID_ISO_SPEED_RATING,
+                   EXIF_SHORT,
+                   1,
+                   (void *)&(isoSpeed));
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)) {
+        int64_t sensor_exposure_time = *(int64_t *)POINTER_OF(
+                CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
+        rat_t sensorExpTime;
+        rc = getExifExpTimeInfo(&sensorExpTime, sensor_exposure_time);
+        if (rc == NO_ERROR){
+            exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
+                    EXIF_RATIONAL,
+                    1,
+                    (void *)&(sensorExpTime));
+        } else {
+            ALOGE("%s: getExifExpTimeInfo failed", __func__);
+        }
+    }
+
+    if (strlen(jpeg_settings->gps_processing_method) > 0) {
+        char gpsProcessingMethod[
+                    EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+        count = 0;
+        rc = getExifGpsProcessingMethod(gpsProcessingMethod,
+                count, jpeg_settings->gps_processing_method);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
+                    EXIF_ASCII,
+                    count,
+                    (void *)gpsProcessingMethod);
+        } else {
+            ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
+        }
+    }
+
+    if (jpeg_settings->gps_coordinates_valid) {
+
+        //latitude
+        rat_t latitude[3];
+        char latRef[2];
+        rc = getExifLatitude(latitude, latRef,
+                jpeg_settings->gps_coordinates[0]);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_GPS_LATITUDE,
+                           EXIF_RATIONAL,
+                           3,
+                           (void *)latitude);
+            exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
+                           EXIF_ASCII,
+                           2,
+                           (void *)latRef);
+        } else {
+            ALOGE("%s: getExifLatitude failed", __func__);
+        }
+
+        //longitude
+        rat_t longitude[3];
+        char lonRef[2];
+        rc = getExifLongitude(longitude, lonRef,
+                jpeg_settings->gps_coordinates[1]);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
+                           EXIF_RATIONAL,
+                           3,
+                           (void *)longitude);
+
+            exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
+                           EXIF_ASCII,
+                           2,
+                           (void *)lonRef);
+        } else {
+            ALOGE("%s: getExifLongitude failed", __func__);
+        }
+
+        //altitude
+        rat_t altitude;
+        char altRef;
+        rc = getExifAltitude(&altitude, &altRef,
+                jpeg_settings->gps_coordinates[2]);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
+                           EXIF_RATIONAL,
+                           1,
+                           (void *)&(altitude));
+
+            exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
+                           EXIF_BYTE,
+                           1,
+                           (void *)&altRef);
+        } else {
+            ALOGE("%s: getExifAltitude failed", __func__);
+        }
+    }
+
+    if (jpeg_settings->gps_timestamp_valid) {
+
+        char gpsDateStamp[20];
+        rat_t gpsTimeStamp[3];
+        rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
+                jpeg_settings->gps_timestamp);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_GPS_DATESTAMP,
+                           EXIF_ASCII,
+                           strlen(gpsDateStamp) + 1,
+                           (void *)gpsDateStamp);
+
+            exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
+                           EXIF_RATIONAL,
+                           3,
+                           (void *)gpsTimeStamp);
+        } else {
+            ALOGE("%s: getExifGpsDateTimeStamp failed", __func__);
+        }
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_PARM_EV, metadata) &&
+            IS_PARM_VALID(CAM_INTF_PARM_EV_STEP, metadata)) {
+        int32_t exposure_comp = *(int32_t *)POINTER_OF(
+                CAM_INTF_PARM_EV, metadata);
+        cam_rational_type_t comp_step = *(cam_rational_type_t *)POINTER_OF(
+                CAM_INTF_PARM_EV_STEP, metadata);
+        srat_t exposure_val;
+        rc = getExifExposureValue(&exposure_val, exposure_comp, comp_step);
+        if(rc == NO_ERROR) {
+            exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
+                       EXIF_SRATIONAL,
+                       1,
+                       (void *)(&exposure_val));
+        } else {
+            ALOGE("%s: getExifExposureValue failed ", __func__);
+        }
+    }
+
+    char value[PROPERTY_VALUE_MAX];
+    if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MAKE,
+                       EXIF_ASCII,
+                       strlen(value) + 1,
+                       (void *)value);
+    } else {
+        ALOGE("%s: getExifMaker failed", __func__);
+    }
+
+    if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
+        exif->addEntry(EXIFTAGID_MODEL,
+                       EXIF_ASCII,
+                       strlen(value) + 1,
+                       (void *)value);
+    } else {
+        ALOGE("%s: getExifModel failed", __func__);
+    }
+
+    return exif;
+}
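+
+/*===========================================================================
+ * Editor's note (not part of the original change): getExifData() returns a
+ * heap-allocated QCamera3Exif, so the caller takes ownership and is
+ * responsible for deleting it once the EXIF entries are no longer needed.
+ *==========================================================================*/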
+
+void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
+{
+   mYuvWidth = width;
+   mYuvHeight = height;
+}
+
+int QCamera3PicChannel::kMaxBuffers = 1;
+
+/*===========================================================================
+ * FUNCTION   : QCamera3ReprocessChannel
+ *
+ * DESCRIPTION: constructor of QCamera3ReprocessChannel
+ *
+ * PARAMETERS :
+ *   @cam_handle  : camera handle
+ *   @cam_ops     : ptr to camera ops table
+ *   @cb_routine  : callback routine for buffers delivered by the channel
+ *   @paddingInfo : padding information for the streams
+ *   @userData    : user data ptr
+ *   @ch_hdl      : handle to the owning picture channel
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
+                                                 mm_camera_ops_t *cam_ops,
+                                                 channel_cb_routine cb_routine,
+                                                 cam_padding_info_t *paddingInfo,
+                                                 void *userData, void *ch_hdl) :
+    QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, userData),
+    picChHandle(ch_hdl),
+    m_pSrcChannel(NULL),
+    m_pMetaChannel(NULL),
+    mMemory(NULL)
+{
+    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
+}
+
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: initialize the reprocess channel with default channel attributes
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::initialize()
+{
+    int32_t rc = NO_ERROR;
+    mm_camera_channel_attr_t attr;
+
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = 1;
+
+    rc = init(&attr, NULL);
+    if (rc < 0) {
+        ALOGE("%s: init failed", __func__);
+    }
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : streamCbRoutine
+ *
+ * DESCRIPTION: callback routine for a reprocessed frame; hands the frame to
+ *              the post-processor of the owning picture channel for JPEG
+ *              encoding
+ *
+ * PARAMETERS :
+ *   @super_frame : super buffer containing the reprocessed frame
+ *   @stream      : stream object the frame was received on
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                                  QCamera3Stream *stream)
+{
+    //Got the post-processing data callback. Now send it for JPEG encoding
+    uint8_t frameIndex;
+    mm_camera_super_buf_t* frame = NULL;
+    QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;
+
+    if(!super_frame) {
+         ALOGE("%s: Invalid Super buffer",__func__);
+         return;
+    }
+
+    if(super_frame->num_bufs != 1) {
+         ALOGE("%s: Multiple streams are not supported",__func__);
+         return;
+    }
+    if(super_frame->bufs[0] == NULL ) {
+         ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
+                  __func__);
+         return;
+    }
+
+    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
+    frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+       ALOGE("%s: Error allocating memory to save received_frame structure.",
+                                                                    __func__);
+       if(stream) {
+           stream->bufDone(frameIndex);
+       }
+       return;
+    }
+    *frame = *super_frame;
+    obj->m_postprocessor.processPPData(frame);
+    free(super_frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3ReprocessChannel
+ *
+ * DESCRIPTION: default constructor of QCamera3ReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
+    m_pSrcChannel(NULL),
+    m_pMetaChannel(NULL)
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBufs
+ *
+ * DESCRIPTION: allocate and return the buffers of the reprocess channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : QCamera3Memory *
+ *==========================================================================*/
+QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len)
+{
+   int rc = 0;
+
+    mMemory = new QCamera3HeapMemory();
+    if (!mMemory) {
+        ALOGE("%s: unable to create reproc memory", __func__);
+        return NULL;
+    }
+
+    //Queue YUV buffers in the beginning mQueueAll = true
+    rc = mMemory->allocate(2, len, true);
+    if (rc < 0) {
+        ALOGE("%s: unable to allocate reproc memory", __func__);
+        delete mMemory;
+        mMemory = NULL;
+        return NULL;
+    }
+    return mMemory;
+}
+
+/*===========================================================================
+ * FUNCTION   : putStreamBufs
+ *
+ * DESCRIPTION: release the buffers of the reprocess channel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3ReprocessChannel::putStreamBufs()
+{
+    mMemory->deallocate();
+    delete mMemory;
+    mMemory = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3ReprocessChannel
+ *
+ * DESCRIPTION: destructor of QCamera3ReprocessChannel
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3ReprocessChannel::~QCamera3ReprocessChannel()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBySrcHandle
+ *
+ * DESCRIPTION: find reprocess stream by its source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to reprocess stream if found. NULL if not found
+ *==========================================================================*/
+QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle)
+{
+    QCamera3Stream *pStream = NULL;
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mSrcStreamHandles[i] == srcHandle) {
+            pStream = mStreams[i];
+            break;
+        }
+    }
+    return pStream;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSrcStreamBySrcHandle
+ *
+ * DESCRIPTION: find source stream by source stream handle
+ *
+ * PARAMETERS :
+ *   @srcHandle : source stream handle
+ *
+ * RETURN     : ptr to source stream if found. NULL if not found
+ *==========================================================================*/
+QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle)
+{
+    QCamera3Stream *pStream = NULL;
+
+    for (int i = 0; i < m_numStreams; i++) {
+        if (mSrcStreamHandles[i] == srcHandle) {
+            pStream = m_pSrcChannel->getStreamByIndex(i);
+            break;
+        }
+    }
+    return pStream;
+}
+
+/*===========================================================================
+ * FUNCTION   : metadataBufDone
+ *
+ * DESCRIPTION: buf done method for a metadata buffer
+ *
+ * PARAMETERS :
+ *   @recvd_frame : received metadata frame
+ *
+ * RETURN     :
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
+{
+   int32_t rc;
+   rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);
+   free(recvd_frame);
+   recvd_frame = NULL;
+   return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @frame   : frame to be performed a reprocess
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::doReprocess(mm_camera_super_buf_t *frame,
+                                              mm_camera_super_buf_t *meta_frame)
+{
+    int32_t rc = 0;
+    if (m_numStreams < 1) {
+        ALOGE("%s: No reprocess stream is created", __func__);
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        ALOGE("%s: No source channel for reprocess", __func__);
+        return -1;
+    }
+    for (int i = 0; i < frame->num_bufs; i++) {
+        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL) {
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = frame->bufs[i]->buf_idx;
+            if (meta_frame != NULL) {
+               param.reprocess.meta_present = 1;
+               param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
+               param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
+            }
+            rc = pStream->setParameter(param);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: stream setParameter for reprocess failed", __func__);
+                break;
+            }
+        }
+    }
+    return rc;
+}
+
+int32_t QCamera3ReprocessChannel::doReprocessOffline(mm_camera_super_buf_t *frame,
+        metadata_buffer_t *metadata)
+{
+    int32_t rc = 0;
+    if (m_numStreams < 1) {
+        ALOGE("%s: No reprocess stream is created", __func__);
+        return -1;
+    }
+    if (m_pSrcChannel == NULL) {
+        ALOGE("%s: No source channel for reprocess", __func__);
+        return -1;
+    }
+
+    uint32_t buf_idx = 0;
+    for (int i = 0; i < frame->num_bufs; i++) {
+        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
+        QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);
+        if (pStream != NULL && pSrcStream != NULL) {
+
+            rc = mStreams[i]->mapBuf(
+                    CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                    buf_idx, -1,
+                    frame->bufs[i]->fd, frame->bufs[i]->frame_len);
+
+            if (rc == NO_ERROR) {
+                cam_stream_parm_buffer_t param;
+                memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+                param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+                param.reprocess.buf_index = frame->bufs[i]->buf_idx;
+
+                param.reprocess.meta_present = 1;
+                char* private_data = (char *)POINTER_OF(
+                        CAM_INTF_META_PRIVATE_DATA, metadata);
+                memcpy(param.reprocess.private_data, private_data,
+                        MAX_METADATA_PAYLOAD_SIZE);
+
+                // Find crop info for reprocess stream
+                cam_crop_data_t *crop_data = (cam_crop_data_t *)
+                        POINTER_OF(CAM_INTF_META_CROP_DATA, metadata);
+                for (int j = 0; j < crop_data->num_of_streams; j++) {
+                    if (crop_data->crop_info[j].stream_id ==
+                           pSrcStream->getMyServerID()) {
+                        param.reprocess.crop_rect  =
+                                crop_data->crop_info[j].crop;
+                        break;
+                    }
+                }
+                rc = pStream->setParameter(param);
+                if (rc != NO_ERROR) {
+                    ALOGE("%s: stream setParameter for reprocess failed", __func__);
+                    break;
+                }
+            }
+        }
+    }
+    return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION   : doReprocess
+ *
+ * DESCRIPTION: request to do a reprocess on the frame
+ *
+ * PARAMETERS :
+ *   @buf_fd     : fd to the input buffer that needs reprocess
+ *   @buf_length : length of the input buffer
+ *   @ret_val    : result of reprocess.
+ *                 Example: Could be faceID in case of register face image.
+ *   @meta_frame : metadata super buffer associated with the input buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd,
+                                              uint32_t buf_length,
+                                              int32_t &ret_val,
+                                              mm_camera_super_buf_t *meta_frame)
+{
+    int32_t rc = 0;
+    if (m_numStreams < 1) {
+        ALOGE("%s: No reprocess stream is created", __func__);
+        return -1;
+    }
+    if (meta_frame == NULL) {
+        ALOGE("%s: Did not get corresponding metadata in time", __func__);
+        return -1;
+    }
+
+    uint32_t buf_idx = 0;
+    for (int i = 0; i < m_numStreams; i++) {
+        rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                 buf_idx, -1,
+                                 buf_fd, buf_length);
+
+        if (rc == NO_ERROR) {
+            cam_stream_parm_buffer_t param;
+            memset(&param, 0, sizeof(cam_stream_parm_buffer_t));
+            param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS;
+            param.reprocess.buf_index = buf_idx;
+            param.reprocess.meta_present = 1;
+            param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID();
+            param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx;
+            rc = mStreams[i]->setParameter(param);
+            if (rc == NO_ERROR) {
+                ret_val = param.reprocess.ret_val;
+            }
+            mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF,
+                                  buf_idx, -1);
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : addReprocStreamsFromSource
+ *
+ * DESCRIPTION: add reprocess streams from input source channel
+ *
+ * PARAMETERS :
+ *   @pp_config      : pp feature configuration
+ *   @pSrcChannel    : ptr to input source channel that needs reprocess
+ *   @pMetaChannel   : ptr to metadata channel to get corresp. metadata
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
+                                                             QCamera3Channel *pSrcChannel,
+                                                             QCamera3Channel *pMetaChannel)
+{
+    int32_t rc = 0;
+    QCamera3Stream *pSrcStream = pSrcChannel->getStreamByIndex(0);
+    if (pSrcStream == NULL) {
+       ALOGE("%s: source channel doesn't have a stream", __func__);
+       return BAD_VALUE;
+    }
+    cam_stream_reproc_config_t reprocess_config;
+    cam_dimension_t streamDim;
+    cam_stream_type_t streamType;
+    cam_format_t streamFormat;
+    cam_frame_len_offset_t frameOffset;
+    int num_buffers = 2;
+
+    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
+    pSrcStream->getFormat(streamFormat);
+    pSrcStream->getFrameDimension(streamDim);
+    pSrcStream->getFrameOffset(frameOffset);
+    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;
+
+    reprocess_config.offline.input_fmt = streamFormat;
+    reprocess_config.offline.input_dim = streamDim;
+    reprocess_config.offline.input_buf_planes.plane_info = frameOffset;
+    reprocess_config.offline.num_of_bufs = num_buffers;
+    reprocess_config.offline.input_stream_type = pSrcStream->getMyType();
+
+
+    reprocess_config.pp_feature_config = pp_config;
+    mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
+
+    // pp feature config
+    if (pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
+        if (pp_config.rotation == ROTATE_90 ||
+            pp_config.rotation == ROTATE_270) {
+            // rotated by 90 or 270, need to switch width and height
+            int32_t temp = streamDim.height;
+            streamDim.height = streamDim.width;
+            streamDim.width = temp;
+        }
+    }
+
+    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
+                                               m_handle,
+                                               m_camOps,
+                                               mPaddingInfo,
+                                               (QCamera3Channel*)this);
+    if (pStream == NULL) {
+        ALOGE("%s: No mem for Stream", __func__);
+        return NO_MEMORY;
+    }
+
+    rc = pStream->init(streamType, streamFormat, streamDim, &reprocess_config,
+                       num_buffers,QCamera3Channel::streamCbRoutine, this);
+
+
+    if (rc == 0) {
+        mStreams[m_numStreams] = pStream;
+        m_numStreams++;
+    } else {
+        ALOGE("%s: failed to create reprocess stream", __func__);
+        delete pStream;
+    }
+
+    if (rc == NO_ERROR) {
+        m_pSrcChannel = pSrcChannel;
+        m_pMetaChannel = pMetaChannel;
+    }
+    if(m_camOps->request_super_buf(m_camHandle,m_handle,1) < 0) {
+        ALOGE("%s: Request for super buffer failed",__func__);
+    }
+    return rc;
+}
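+
+/*===========================================================================
+ * Editor's note: illustrative usage sketch (not part of the original change),
+ * inferred only from the methods defined in this file. The owning picture
+ * channel would set up and drive a reprocess channel roughly as follows; all
+ * variable names are placeholders and the real call sites live elsewhere in
+ * this HAL:
+ *
+ *   QCamera3ReprocessChannel *ch = new QCamera3ReprocessChannel(
+ *           camHandle, camOps, cbRoutine, paddingInfo, userData, picChannel);
+ *   ch->initialize();                          // set channel attributes
+ *   ch->addReprocStreamsFromSource(ppConfig,   // add the offline proc stream
+ *           srcChannel, metaChannel);
+ *   ch->doReprocess(frame, metaFrame);         // per frame to be reprocessed
+ *==========================================================================*/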
+
+cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};
+
+QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData) :
+                        QCamera3Channel(cam_handle, cam_ops,
+                                NULL, paddingInfo, userData),
+                        mMemory(NULL)
+{
+}
+
+QCamera3SupportChannel::~QCamera3SupportChannel()
+{
+    if (m_bIsActive)
+        stop();
+
+    if (mMemory) {
+        mMemory->deallocate();
+        delete mMemory;
+        mMemory = NULL;
+    }
+}
+
+int32_t QCamera3SupportChannel::initialize()
+{
+    int32_t rc;
+
+    if (mMemory || m_numStreams > 0) {
+        ALOGE("%s: metadata channel already initialized", __func__);
+        return -EINVAL;
+    }
+
+    rc = init(NULL, NULL);
+    if (rc < 0) {
+        ALOGE("%s: init failed", __func__);
+        return rc;
+    }
+
+    // Hardcode to VGA size for now
+    rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_CALLBACK,
+        CAM_FORMAT_YUV_420_NV21, kDim, MIN_STREAMING_BUFFER_NUM);
+    if (rc < 0) {
+        ALOGE("%s: addStream failed", __func__);
+    }
+    return rc;
+}
+
+int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
+                                                uint32_t /*frameNumber*/)
+{
+    return NO_ERROR;
+}
+
+void QCamera3SupportChannel::streamCbRoutine(
+                        mm_camera_super_buf_t *super_frame,
+                        QCamera3Stream * /*stream*/)
+{
+    if (super_frame == NULL || super_frame->num_bufs != 1) {
+        ALOGE("%s: super_frame is not valid", __func__);
+        return;
+    }
+    bufDone(super_frame);
+    free(super_frame);
+}
+
+QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len)
+{
+    int rc;
+
+    mMemory = new QCamera3HeapMemory();
+    if (!mMemory) {
+        ALOGE("%s: unable to create heap memory", __func__);
+        return NULL;
+    }
+    rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true);
+    if (rc < 0) {
+        ALOGE("%s: unable to allocate heap memory", __func__);
+        delete mMemory;
+        mMemory = NULL;
+        return NULL;
+    }
+    return mMemory;
+}
+
+void QCamera3SupportChannel::putStreamBufs()
+{
+    mMemory->deallocate();
+    delete mMemory;
+    mMemory = NULL;
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/HAL3/QCamera3Channel.h b/camera/QCamera2/HAL3/QCamera3Channel.h
new file mode 100755
index 0000000..6832da1
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Channel.h
@@ -0,0 +1,375 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3_CHANNEL_H__
+#define __QCAMERA3_CHANNEL_H__
+
+#include <hardware/camera3.h>
+#include "QCamera3Stream.h"
+#include "QCamera3Mem.h"
+#include "QCamera3PostProc.h"
+#include "QCamera3HALHeader.h"
+#include "utils/Vector.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+typedef void (*channel_cb_routine)(mm_camera_super_buf_t *metadata,
+                                camera3_stream_buffer_t *buffer,
+                                uint32_t frame_number, void *userdata);
+
+class QCamera3Channel
+{
+public:
+    QCamera3Channel(uint32_t cam_handle,
+                   mm_camera_ops_t *cam_ops,
+                   channel_cb_routine cb_routine,
+                   cam_padding_info_t *paddingInfo,
+                   void *userData);
+    QCamera3Channel();
+    virtual ~QCamera3Channel();
+
+    int32_t addStream(cam_stream_type_t streamType,
+                              cam_format_t streamFormat,
+                              cam_dimension_t streamDim,
+                              uint8_t minStreamBufnum);
+    virtual int32_t start();
+    int32_t stop();
+    int32_t bufDone(mm_camera_super_buf_t *recvd_frame);
+
+    uint32_t getStreamTypeMask();
+    uint32_t getStreamID(uint32_t streamMask);
+    virtual int32_t initialize() = 0;
+    virtual int32_t request(buffer_handle_t * /*buffer*/,
+                uint32_t /*frameNumber*/){ return 0;};
+    virtual int32_t request(buffer_handle_t * /*buffer*/,
+                uint32_t /*frameNumber*/,
+                mm_camera_buf_def_t* /*pInputBuffer*/,
+                metadata_buffer_t* /*metadata*/){ return 0;};
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream) = 0;
+
+    virtual int32_t registerBuffer(buffer_handle_t *buffer) = 0;
+    virtual QCamera3Memory *getStreamBufs(uint32_t len) = 0;
+    virtual void putStreamBufs() = 0;
+
+    QCamera3Stream *getStreamByHandle(uint32_t streamHandle);
+    uint32_t getMyHandle() const {return m_handle;};
+    uint8_t getNumOfStreams() const {return m_numStreams;};
+    QCamera3Stream *getStreamByIndex(uint8_t index);
+
+    static void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                QCamera3Stream *stream, void *userdata);
+    void *mUserData;
+    cam_padding_info_t *mPaddingInfo;
+    QCamera3Stream *mStreams[MAX_STREAM_NUM_IN_BUNDLE];
+    uint8_t m_numStreams;
+protected:
+
+   virtual int32_t init(mm_camera_channel_attr_t *attr,
+                         mm_camera_buf_notify_t dataCB);
+    int32_t allocateStreamInfoBuf(camera3_stream_t *stream);
+
+    uint32_t m_camHandle;
+    mm_camera_ops_t *m_camOps;
+    bool m_bIsActive;
+
+    uint32_t m_handle;
+
+
+    mm_camera_buf_notify_t mDataCB;
+
+
+    QCamera3HeapMemory *mStreamInfoBuf;
+    channel_cb_routine mChannelCB;
+    //cam_padding_info_t *mPaddingInfo;
+};
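+
+/* Editor's note (not part of the original change): every concrete channel
+ * declared below must implement the pure virtuals above -- initialize(),
+ * streamCbRoutine(), registerBuffer(), getStreamBufs() and putStreamBufs().
+ * getStreamBufs()/putStreamBufs() are the hooks through which the channel's
+ * stream buffers are allocated and released, and streamCbRoutine() receives
+ * filled buffers back from the stream. */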
+
+/* QCamera3RegularChannel is used to handle all streams that are directly
+ * generated by hardware and given to frameworks without any postprocessing at HAL.
+ * Examples are: all IMPLEMENTATION_DEFINED streams, CPU_READ streams. */
+class QCamera3RegularChannel : public QCamera3Channel
+{
+public:
+    QCamera3RegularChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    cam_stream_type_t stream_type);
+    virtual ~QCamera3RegularChannel();
+
+    virtual int32_t start();
+    virtual int32_t initialize();
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                                            QCamera3Stream *stream);
+
+    virtual QCamera3Memory *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    mm_camera_buf_def_t* getInternalFormatBuffer(buffer_handle_t* buffer);
+    virtual int32_t registerBuffer(buffer_handle_t *buffer);
+
+public:
+    static int kMaxBuffers;
+protected:
+    QCamera3GrallocMemory mMemory;
+private:
+    camera3_stream_t *mCamera3Stream;
+    uint32_t mNumBufs;
+
+    cam_stream_type_t mStreamType; // Stream type
+};
+
+/* QCamera3MetadataChannel is for the metadata stream generated by the camera daemon. */
+class QCamera3MetadataChannel : public QCamera3Channel
+{
+public:
+    QCamera3MetadataChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData);
+    virtual ~QCamera3MetadataChannel();
+
+    virtual int32_t initialize();
+
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+    virtual QCamera3Memory *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/)
+            { return NO_ERROR; };
+
+private:
+    QCamera3HeapMemory *mMemory;
+};
+
+/* QCameraRawChannel is for dumping the raw stream generated by the camera daemon. */
+class QCameraRawChannel : public QCamera3Channel
+{
+public:
+    QCameraRawChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    cam_dimension_t *raw_dim);
+    virtual ~QCameraRawChannel();
+
+    virtual int32_t initialize();
+
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual int32_t registerBuffers(uint32_t num_buffers,
+                buffer_handle_t **buffers);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+    virtual QCamera3Memory *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    void dumpRawSnapshot(mm_camera_buf_def_t *frame);
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/)
+            { return NO_ERROR; };
+
+private:
+    QCamera3HeapMemory *mMemory;
+    uint32_t mWidth, mHeight;
+    uint32_t mMaxBuffers;
+};
+
+/* QCamera3RawChannel is for opaque/cross-platform raw stream containing
+ * vendor specific bayer data or 16-bit unpacked bayer data */
+class QCamera3RawChannel : public QCamera3RegularChannel
+{
+public:
+    QCamera3RawChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    channel_cb_routine cb_routine,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData,
+                    camera3_stream_t *stream,
+                    bool raw_16 = false);
+    virtual ~QCamera3RawChannel();
+
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+
+public:
+    static int kMaxBuffers;
+
+private:
+    bool mRawDump;
+    bool mIsRaw16;
+
+    void dumpRawSnapshot(mm_camera_buf_def_t *frame);
+    void convertToRaw16(mm_camera_buf_def_t *frame);
+};
+
+/* QCamera3PicChannel is for the JPEG stream: a YUV stream generated by the
+ * hardware is encoded into a JPEG stream */
+class QCamera3PicChannel : public QCamera3Channel
+{
+public:
+    QCamera3PicChannel(uint32_t cam_handle,
+            mm_camera_ops_t *cam_ops,
+            channel_cb_routine cb_routine,
+            cam_padding_info_t *paddingInfo,
+            void *userData,
+            camera3_stream_t *stream);
+    ~QCamera3PicChannel();
+
+    virtual int32_t initialize();
+
+    virtual int32_t request(buffer_handle_t *buffer,
+            uint32_t frameNumber,
+            mm_camera_buf_def_t* pInputBuffer,
+            metadata_buffer_t* metadata);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+            QCamera3Stream *stream);
+
+    virtual QCamera3Memory *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+
+    bool isWNREnabled() {return m_bWNROn;};
+    bool needOnlineRotation();
+    QCamera3Exif *getExifData(metadata_buffer_t *metadata,
+            jpeg_settings_t *jpeg_settings);
+    void overrideYuvSize(uint32_t width, uint32_t height);
+    static void jpegEvtHandle(jpeg_job_status_t status,
+            uint32_t /*client_hdl*/,
+            uint32_t jobId,
+            mm_jpeg_output_t *p_output,
+            void *userdata);
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+            void *userdata);
+    int32_t queueReprocMetadata(metadata_buffer_t *metadata);
+    virtual int32_t registerBuffer(buffer_handle_t *buffer);
+
+private:
+    int32_t queueJpegSetting(int32_t out_buf_index, metadata_buffer_t *metadata);
+
+public:
+    static int kMaxBuffers;
+    QCamera3PostProcessor m_postprocessor; // post processor
+private:
+    camera3_stream_t *mCamera3Stream;
+    uint32_t mNumBufs;
+    int32_t mCurrentBufIndex;
+    bool m_bWNROn;
+    uint32_t mYuvWidth, mYuvHeight;
+
+    QCamera3GrallocMemory mMemory;
+    QCamera3HeapMemory *mYuvMemory;
+    QCamera3Channel *m_pMetaChannel;
+    mm_camera_super_buf_t *mMetaFrame;
+
+};
+
+// reprocess channel class
+class QCamera3ReprocessChannel : public QCamera3Channel
+{
+public:
+    QCamera3ReprocessChannel(uint32_t cam_handle,
+                            mm_camera_ops_t *cam_ops,
+                            channel_cb_routine cb_routine,
+                            cam_padding_info_t *paddingInfo,
+                            void *userData, void *ch_hdl);
+    QCamera3ReprocessChannel();
+    virtual ~QCamera3ReprocessChannel();
+    // online reprocess
+    int32_t doReprocess(mm_camera_super_buf_t *frame,
+                        mm_camera_super_buf_t *meta_frame);
+    int32_t doReprocessOffline(mm_camera_super_buf_t *frame,
+                        metadata_buffer_t *metadata);
+    // offline reprocess
+    int32_t doReprocess(int buf_fd, uint32_t buf_length, int32_t &ret_val,
+                        mm_camera_super_buf_t *meta_buf);
+    virtual QCamera3Memory *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    virtual int32_t initialize();
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                       void* userdata);
+    int32_t addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
+                                       QCamera3Channel *pSrcChannel,
+                                       QCamera3Channel *pMetaChannel);
+    QCamera3Stream *getStreamBySrcHandle(uint32_t srcHandle);
+    QCamera3Stream *getSrcStreamBySrcHandle(uint32_t srcHandle);
+    int32_t metadataBufDone(mm_camera_super_buf_t *recvd_frame);
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/)
+            { return NO_ERROR; };
+
+public:
+    void *picChHandle;
+private:
+    uint32_t mSrcStreamHandles[MAX_STREAM_NUM_IN_BUNDLE];
+    QCamera3Channel *m_pSrcChannel; // ptr to source channel for reprocess
+    QCamera3Channel *m_pMetaChannel;
+    QCamera3HeapMemory *mMemory;
+};
+
+/* QCamera3SupportChannel is for HAL internal consumption only */
+class QCamera3SupportChannel : public QCamera3Channel
+{
+public:
+    QCamera3SupportChannel(uint32_t cam_handle,
+                    mm_camera_ops_t *cam_ops,
+                    cam_padding_info_t *paddingInfo,
+                    void *userData);
+    virtual ~QCamera3SupportChannel();
+
+    virtual int32_t initialize();
+
+    virtual int32_t request(buffer_handle_t *buffer, uint32_t frameNumber);
+    virtual void streamCbRoutine(mm_camera_super_buf_t *super_frame,
+                            QCamera3Stream *stream);
+
+    virtual QCamera3Memory *getStreamBufs(uint32_t len);
+    virtual void putStreamBufs();
+    virtual int32_t registerBuffer(buffer_handle_t * /*buffer*/)
+            { return NO_ERROR; };
+
+    static cam_dimension_t kDim;
+private:
+    QCamera3HeapMemory *mMemory;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3_CHANNEL_H__ */
diff --git a/camera/QCamera2/HAL3/QCamera3Factory.cpp b/camera/QCamera2/HAL3/QCamera3Factory.cpp
new file mode 100644
index 0000000..65fe67b
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Factory.cpp
@@ -0,0 +1,288 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3Factory"
+//#define LOG_NDEBUG 0
+
+#include <stdlib.h>
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <hardware/camera3.h>
+
+#include "QCamera3Factory.h"
+
+using namespace android;
+
+namespace qcamera {
+
+QCamera3Factory *gQCamera3Factory = NULL;
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Factory
+ *
+ * DESCRIPTION: default constructor of QCamera3Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Factory::QCamera3Factory()
+{
+    camera_info info;
+
+    mCallbacks = NULL;
+    mNumOfCameras = get_num_of_cameras();
+
+    //Query camera at this point in order to avoid any delays during
+    //subsequent calls to 'getCameraInfo()'
+    for (int i = 0 ; i < mNumOfCameras ; i++) {
+        getCameraInfo(i, &info);
+    }
+
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Factory
+ *
+ * DESCRIPTION: destructor of QCamera3Factory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Factory::~QCamera3Factory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : get_number_of_cameras
+ *
+ * DESCRIPTION: static function to query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera3Factory::get_number_of_cameras()
+{
+    if (!gQCamera3Factory) {
+        gQCamera3Factory = new QCamera3Factory();
+        if (!gQCamera3Factory) {
+            ALOGE("%s: Failed to allocate Camera3Factory object", __func__);
+            return 0;
+        }
+    }
+    return gQCamera3Factory->getNumberOfCameras();
+}
+
+/*===========================================================================
+ * FUNCTION   : get_camera_info
+ *
+ * DESCRIPTION: static function to query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::get_camera_info(int camera_id, struct camera_info *info)
+{
+    return gQCamera3Factory->getCameraInfo(camera_id, info);
+}
+
+/*===========================================================================
+ * FUNCTION   : set_callbacks
+ *
+ * DESCRIPTION: static function to set callbacks function to camera module
+ *
+ * PARAMETERS :
+ *   @callbacks : ptr to callback functions
+ *
+ * RETURN     : NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::set_callbacks(const camera_module_callbacks_t *callbacks)
+{
+    return gQCamera3Factory->setCallbacks(callbacks);
+}
+
+/*===========================================================================
+ * FUNCTION   : open_legacy
+ *
+ * DESCRIPTION: function to open an older HAL version implementation
+ *              (not supported by this HAL; always returns -ENOSYS)
+ *
+ * PARAMETERS :
+ *   @module    : ptr to the HAL module info struct
+ *   @id        : camera ID string
+ *   @halVersion: Based on camera_module_t.common.module_api_version
+ *   @device    : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : 0  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::open_legacy(const struct hw_module_t* module,
+            const char* id, uint32_t halVersion, struct hw_device_t** device)
+{
+    return -ENOSYS;
+}
+
+/*===========================================================================
+ * FUNCTION   : getNumberOfCameras
+ *
+ * DESCRIPTION: query number of cameras detected
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of cameras detected
+ *==========================================================================*/
+int QCamera3Factory::getNumberOfCameras()
+{
+    return mNumOfCameras;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCameraInfo
+ *
+ * DESCRIPTION: query camera information with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @info      : ptr to camera info struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::getCameraInfo(int camera_id, struct camera_info *info)
+{
+    int rc;
+    ALOGV("%s: E, camera_id = %d", __func__, camera_id);
+
+    if (!mNumOfCameras || camera_id >= mNumOfCameras || !info ||
+        (camera_id < 0)) {
+        return -ENODEV;
+    }
+
+    rc = QCamera3HardwareInterface::getCamInfo(camera_id, info);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setCallbacks
+ *
+ * DESCRIPTION: set callback functions to send asynchronous notifications to
+ *              frameworks.
+ *
+ * PARAMETERS :
+ *   @callbacks : callback function pointer
+ *
+ * RETURN     :
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::setCallbacks(const camera_module_callbacks_t *callbacks)
+{
+    int rc = NO_ERROR;
+    mCallbacks = callbacks;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : cameraDeviceOpen
+ *
+ * DESCRIPTION: open a camera device with its ID
+ *
+ * PARAMETERS :
+ *   @camera_id : camera ID
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::cameraDeviceOpen(int camera_id,
+                    struct hw_device_t **hw_device)
+{
+    int rc = NO_ERROR;
+    if (camera_id < 0 || camera_id >= mNumOfCameras)
+        return -ENODEV;
+
+    QCamera3HardwareInterface *hw = new QCamera3HardwareInterface(
+            camera_id, mCallbacks);
+    if (!hw) {
+        ALOGE("Allocation of hardware interface failed");
+        return NO_MEMORY;
+    }
+    rc = hw->openCamera(hw_device);
+    if (rc != 0) {
+        delete hw;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : camera_device_open
+ *
+ * DESCRIPTION: static function to open a camera device by its ID
+ *
+ * PARAMETERS :
+ *   @module    : ptr to the HAL module info struct
+ *   @id        : camera ID string
+ *   @hw_device : ptr to struct storing camera hardware device info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Factory::camera_device_open(
+    const struct hw_module_t *module, const char *id,
+    struct hw_device_t **hw_device)
+{
+    if (module != &HAL_MODULE_INFO_SYM.common) {
+        ALOGE("Invalid module. Trying to open %p, expect %p",
+            module, &HAL_MODULE_INFO_SYM.common);
+        return INVALID_OPERATION;
+    }
+    if (!id) {
+        ALOGE("Invalid camera id");
+        return BAD_VALUE;
+    }
+    return gQCamera3Factory->cameraDeviceOpen(atoi(id), hw_device);
+}
+
+struct hw_module_methods_t QCamera3Factory::mModuleMethods = {
+    open: QCamera3Factory::camera_device_open,
+};
+
+}; // namespace qcamera
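+
+/* Editor's note: illustrative sketch (not part of the original change). The
+ * static entry points and mModuleMethods above are what the camera service
+ * reaches through the HAL module descriptor HAL_MODULE_INFO_SYM, which is
+ * declared extern in QCamera3Factory.h and defined elsewhere in this HAL.
+ * Its shape is roughly the following, with field names as in
+ * hardware/camera_common.h and the string values as placeholders:
+ *
+ *   camera_module_t HAL_MODULE_INFO_SYM = {
+ *       .common = {
+ *           .tag     = HARDWARE_MODULE_TAG,
+ *           .id      = CAMERA_HARDWARE_MODULE_ID,
+ *           .name    = "QCamera Module",
+ *           .methods = &qcamera::QCamera3Factory::mModuleMethods,
+ *       },
+ *       .get_number_of_cameras = qcamera::QCamera3Factory::get_number_of_cameras,
+ *       .get_camera_info       = qcamera::QCamera3Factory::get_camera_info,
+ *       .set_callbacks         = qcamera::QCamera3Factory::set_callbacks,
+ *   };
+ */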
+
diff --git a/camera/QCamera2/HAL3/QCamera3Factory.h b/camera/QCamera2/HAL3/QCamera3Factory.h
new file mode 100644
index 0000000..e751a2b
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Factory.h
@@ -0,0 +1,71 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3FACTORY_H__
+#define __QCAMERA3FACTORY_H__
+
+#include <hardware/camera3.h>
+
+#include "QCamera3HWI.h"
+
+namespace qcamera {
+
+class QCamera3Factory
+{
+public:
+    QCamera3Factory();
+    virtual ~QCamera3Factory();
+
+    static int get_number_of_cameras();
+    static int get_camera_info(int camera_id, struct camera_info *info);
+    static int set_callbacks(const camera_module_callbacks_t *callbacks);
+    static int open_legacy(const struct hw_module_t* module,
+            const char* id, uint32_t halVersion, struct hw_device_t** device);
+
+private:
+    int getNumberOfCameras();
+    int getCameraInfo(int camera_id, struct camera_info *info);
+    int setCallbacks(const camera_module_callbacks_t *callbacks);
+    int cameraDeviceOpen(int camera_id, struct hw_device_t **hw_device);
+    static int camera_device_open(const struct hw_module_t *module, const char *id,
+                struct hw_device_t **hw_device);
+
+public:
+    static struct hw_module_methods_t mModuleMethods;
+
+private:
+    int mNumOfCameras;
+    const camera_module_callbacks_t *mCallbacks;
+};
+
+}; /*namespace qcamera*/
+
+extern camera_module_t HAL_MODULE_INFO_SYM;
+
+#endif /* __QCAMERA3FACTORY_H__ */
diff --git a/camera/QCamera2/HAL3/QCamera3HALHeader.h b/camera/QCamera2/HAL3/QCamera3HALHeader.h
new file mode 100644
index 0000000..a105d0e
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3HALHeader.h
@@ -0,0 +1,64 @@
+/* Copyright (c) 2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*	notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*	copyright notice, this list of conditions and the following
+*	disclaimer in the documentation and/or other materials provided
+*	with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*	contributors may be used to endorse or promote products derived
+*	from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+#ifndef __QCAMERA_HALHEADER_H__
+#define __QCAMERA_HALHEADER_H__
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+class QCamera3Channel;
+
+    typedef enum {
+        INVALID,
+        VALID,
+    } stream_status_t;
+
+    typedef struct {
+        int32_t out_buf_index;
+        int32_t jpeg_orientation;
+        uint8_t jpeg_quality;
+        uint8_t jpeg_thumb_quality;
+        cam_dimension_t thumbnail_size;
+        uint8_t gps_timestamp_valid;
+        int64_t gps_timestamp;
+        uint8_t gps_coordinates_valid;
+        double gps_coordinates[3];
+        char gps_processing_method[GPS_PROCESSING_METHOD_SIZE];
+    } jpeg_settings_t;
+
+}; /*namespace qcamera*/
+
+
+#endif /* __QCAMERA_HALHEADER_H__ */
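
QCamera3HALHeader.h mainly defines the jpeg_settings_t bundle that accompanies a blob (JPEG) request into the post-processing path. As a hedged illustration of how those fields line up with the standard framework JPEG tags, a small sketch follows; fillJpegSettings and its defaults are hypothetical and not part of this patch.

// Sketch only: maps framework JPEG metadata tags onto the jpeg_settings_t
// fields declared above. Defaults and the helper name are assumptions.
#include <string.h>
#include <camera/CameraMetadata.h>
#include "QCamera3HALHeader.h"

static void fillJpegSettings(const android::CameraMetadata &frameSettings,
                             int32_t outBufIndex,
                             qcamera::jpeg_settings_t &jpeg)
{
    memset(&jpeg, 0, sizeof(jpeg));
    jpeg.out_buf_index = outBufIndex;

    if (frameSettings.exists(ANDROID_JPEG_ORIENTATION))
        jpeg.jpeg_orientation =
                frameSettings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
    if (frameSettings.exists(ANDROID_JPEG_QUALITY))
        jpeg.jpeg_quality =
                frameSettings.find(ANDROID_JPEG_QUALITY).data.u8[0];
    if (frameSettings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
        jpeg.jpeg_thumb_quality =
                frameSettings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
    if (frameSettings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        jpeg.thumbnail_size.width =
                frameSettings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
        jpeg.thumbnail_size.height =
                frameSettings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
    }
    if (frameSettings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
        jpeg.gps_timestamp =
                frameSettings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
        jpeg.gps_timestamp_valid = 1;
    }
    if (frameSettings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
        for (int i = 0; i < 3; i++)
            jpeg.gps_coordinates[i] =
                    frameSettings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i];
        jpeg.gps_coordinates_valid = 1;
    }
}
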
diff --git a/camera/QCamera2/HAL3/QCamera3HWI.cpp b/camera/QCamera2/HAL3/QCamera3HWI.cpp
new file mode 100644
index 0000000..0102905
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3HWI.cpp
@@ -0,0 +1,5775 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3HWI"
+//#define LOG_NDEBUG 0
+
+#define __STDC_LIMIT_MACROS
+#include <cutils/properties.h>
+#include <hardware/camera3.h>
+#include <camera/CameraMetadata.h>
+#include <stdlib.h>
+#include <fcntl.h>
+#include <stdint.h>
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <ui/Fence.h>
+#include <gralloc_priv.h>
+#include "QCamera3HWI.h"
+#include "QCamera3Mem.h"
+#include "QCamera3Channel.h"
+#include "QCamera3PostProc.h"
+#include "QCamera3VendorTags.h"
+
+using namespace android;
+
+namespace qcamera {
+
+#define MAX(a, b) ((a) > (b) ? (a) : (b))
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+
+#define EMPTY_PIPELINE_DELAY 2
+
+cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
+const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
+
+pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock =
+    PTHREAD_MUTEX_INITIALIZER;
+unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0;
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
+    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
+    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
+    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
+    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
+    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
+    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
+    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
+    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
+    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
+    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
+    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
+    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
+    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
+    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
+    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
+    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
+    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
+    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
+    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_OFF },
+    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
+    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
+    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
+    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
+    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
+    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
+    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
+    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
+    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
+    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
+    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
+    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
+    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
+    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
+    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
+    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
+    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
+    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
+    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
+    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
+    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
+    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
+    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
+    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
+    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
+    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
+    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
+    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
+    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
+    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
+    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
+    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF     },
+    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL    }
+};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
+    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
+      CAM_FOCUS_UNCALIBRATED },
+    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
+      CAM_FOCUS_APPROXIMATE },
+    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
+      CAM_FOCUS_CALIBRATED }
+};
+
+const int32_t available_thumbnail_sizes[] = {0, 0,
+                                             176, 144,
+                                             320, 240,
+                                             432, 288,
+                                             480, 288,
+                                             512, 288,
+                                             512, 384};
+
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF   },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
+    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
+};
+
+/* Since there is no mapping for all the options, some Android enums are not listed.
+ * The order in this list is also important: when mapping from HAL to Android the lookup
+ * traverses from lower to higher index, so for HAL values that map to more than one
+ * Android value the first match found is the one selected (see the lookup sketch after this table).
+ */
+const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
+    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
+};
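// Sketch (not part of this patch): illustrates the lookup behaviour described in the
// comment above REFERENCE_ILLUMINANT_MAP. The table is scanned from index 0 upward, so
// when several framework values map to the same HAL value the reverse lookup returns the
// first entry encountered. The struct and helper names below are illustrative assumptions;
// the HAL's own QCameraMap type and lookup helpers may differ.
#include <stddef.h>

struct SketchMap {
    int fwk_val;   // Android (framework) enum value
    int hal_val;   // HAL/vendor enum value
};

// Reverse lookup: HAL value -> framework value; the earliest matching entry wins.
static int sketchLookupFwkName(const SketchMap *table, size_t len, int hal_val)
{
    for (size_t i = 0; i < len; i++) {
        if (table[i].hal_val == hal_val)
            return table[i].fwk_val;
    }
    return -1;  // no mapping found
}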
+
+/* Custom tag definitions */
+
+camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
+    initialize:                         QCamera3HardwareInterface::initialize,
+    configure_streams:                  QCamera3HardwareInterface::configure_streams,
+    register_stream_buffers:            NULL,
+    construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
+    process_capture_request:            QCamera3HardwareInterface::process_capture_request,
+    get_metadata_vendor_tag_ops:        NULL,
+    dump:                               QCamera3HardwareInterface::dump,
+    flush:                              QCamera3HardwareInterface::flush,
+    reserved:                           {0},
+};
+
+int QCamera3HardwareInterface::kMaxInFlight = 5;
+
+/*===========================================================================
+ * FUNCTION   : QCamera3HardwareInterface
+ *
+ * DESCRIPTION: constructor of QCamera3HardwareInterface
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera ID
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
+                        const camera_module_callbacks_t *callbacks)
+    : mCameraId(cameraId),
+      mCameraHandle(NULL),
+      mCameraOpened(false),
+      mCameraInitialized(false),
+      mCallbackOps(NULL),
+      mInputStream(NULL),
+      mMetadataChannel(NULL),
+      mPictureChannel(NULL),
+      mRawChannel(NULL),
+      mSupportChannel(NULL),
+      mFirstRequest(false),
+      mRepeatingRequest(false),
+      mParamHeap(NULL),
+      mParameters(NULL),
+      mPrevParameters(NULL),
+      mLoopBackResult(NULL),
+      mMinProcessedFrameDuration(0),
+      mMinJpegFrameDuration(0),
+      mMinRawFrameDuration(0),
+      m_pPowerModule(NULL),
+      mHdrHint(false),
+      mMetaFrameCount(0),
+      mCallbacks(callbacks)
+{
+    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
+    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
+    mCameraDevice.common.close = close_camera_device;
+    mCameraDevice.ops = &mCameraOps;
+    mCameraDevice.priv = this;
+    gCamCapability[cameraId]->version = CAM_HAL_V3;
+    // TODO: hardcode for now until mctl adds support for min_num_pp_bufs
+    // TBD: verify whether this hardcoding is needed by checking if mctl already fills this to 3
+    gCamCapability[cameraId]->min_num_pp_bufs = 3;
+
+    pthread_cond_init(&mRequestCond, NULL);
+    mPendingRequest = 0;
+    mCurrentRequestId = -1;
+    pthread_mutex_init(&mMutex, NULL);
+
+    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
+        mDefaultMetadata[i] = NULL;
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
+        ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
+    }
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3HardwareInterface
+ *
+ * DESCRIPTION: destructor of QCamera3HardwareInterface
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HardwareInterface::~QCamera3HardwareInterface()
+{
+    ALOGV("%s: E", __func__);
+    /* We need to stop all streams before deleting any stream */
+
+    // NOTE: 'camera3_stream_t *' objects are already freed at
+    //        this stage by the framework
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (*it)->channel;
+        if (channel) {
+            channel->stop();
+        }
+    }
+    if (mSupportChannel)
+        mSupportChannel->stop();
+
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (*it)->channel;
+        if (channel)
+            delete channel;
+        free (*it);
+    }
+    if (mSupportChannel) {
+        delete mSupportChannel;
+        mSupportChannel = NULL;
+    }
+
+    mPictureChannel = NULL;
+
+    /* Clean up all channels */
+    if (mCameraInitialized) {
+        if (mMetadataChannel) {
+            mMetadataChannel->stop();
+            delete mMetadataChannel;
+            mMetadataChannel = NULL;
+        }
+        deinitParameters();
+    }
+
+    if (mCameraOpened)
+        closeCamera();
+
+    mPendingBuffersMap.mPendingBufferList.clear();
+    mPendingRequestsList.clear();
+
+    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
+        if (mDefaultMetadata[i])
+            free_camera_metadata(mDefaultMetadata[i]);
+
+    pthread_cond_destroy(&mRequestCond);
+
+    pthread_mutex_destroy(&mMutex);
+    ALOGV("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS :
+ *   @hw_device  : double ptr for camera device struct
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
+{
+    int rc = 0;
+    pthread_mutex_lock(&mCameraSessionLock);
+    if (mCameraSessionActive) {
+        ALOGE("%s: multiple simultaneous camera instance not supported", __func__);
+        pthread_mutex_unlock(&mCameraSessionLock);
+        return -EUSERS;
+    }
+
+    if (mCameraOpened) {
+        *hw_device = NULL;
+        // Release the session lock before returning to avoid leaving it held.
+        pthread_mutex_unlock(&mCameraSessionLock);
+        return PERMISSION_DENIED;
+    }
+
+    rc = openCamera();
+    if (rc == 0) {
+        *hw_device = &mCameraDevice.common;
+        mCameraSessionActive = 1;
+    } else
+        *hw_device = NULL;
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (rc == 0) {
+        if (m_pPowerModule) {
+            if (m_pPowerModule->powerHint) {
+                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
+                        (void *)"state=1");
+            }
+        }
+    }
+#endif
+    pthread_mutex_unlock(&mCameraSessionLock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : openCamera
+ *
+ * DESCRIPTION: open camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::openCamera()
+{
+    if (mCameraHandle) {
+        ALOGE("Failure: Camera already opened");
+        return ALREADY_EXISTS;
+    }
+    mCameraHandle = camera_open(mCameraId);
+    if (!mCameraHandle) {
+        ALOGE("camera_open failed.");
+        return UNKNOWN_ERROR;
+    }
+
+    mCameraOpened = true;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : closeCamera
+ *
+ * DESCRIPTION: close camera
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::closeCamera()
+{
+    int rc = NO_ERROR;
+
+    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
+    mCameraHandle = NULL;
+    mCameraOpened = false;
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if (rc == NO_ERROR) {
+        if (m_pPowerModule) {
+            if (m_pPowerModule->powerHint) {
+                if(mHdrHint == true) {
+                    m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
+                            (void *)"state=3");
+                    mHdrHint = false;
+                }
+                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
+                        (void *)"state=0");
+            }
+        }
+    }
+#endif
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Initialize frameworks callback functions
+ *
+ * PARAMETERS :
+ *   @callback_ops : callback function to frameworks
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::initialize(
+        const struct camera3_callback_ops *callback_ops)
+{
+    int rc;
+
+    pthread_mutex_lock(&mMutex);
+
+    rc = initParameters();
+    if (rc < 0) {
+        ALOGE("%s: initParamters failed %d", __func__, rc);
+       goto err1;
+    }
+    mCallbackOps = callback_ops;
+
+    pthread_mutex_unlock(&mMutex);
+    mCameraInitialized = true;
+    return 0;
+
+err1:
+    pthread_mutex_unlock(&mMutex);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configureStreams
+ *
+ * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
+ *              and output streams.
+ *
+ * PARAMETERS :
+ *   @stream_list : streams to be configured
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::configureStreams(
+        camera3_stream_configuration_t *streamList)
+{
+    int rc = 0;
+
+    // Sanity check stream_list
+    if (streamList == NULL) {
+        ALOGE("%s: NULL stream configuration", __func__);
+        return BAD_VALUE;
+    }
+    if (streamList->streams == NULL) {
+        ALOGE("%s: NULL stream list", __func__);
+        return BAD_VALUE;
+    }
+
+    if (streamList->num_streams < 1) {
+        ALOGE("%s: Bad number of streams requested: %d", __func__,
+                streamList->num_streams);
+        return BAD_VALUE;
+    }
+
+    /* first invalidate all the streams in mStreamInfo;
+     * if they appear again, they will be re-validated */
+    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
+        channel->stop();
+        (*it)->status = INVALID;
+    }
+    if (mMetadataChannel) {
+        /* If mStreamInfo is not empty, the metadata stream has already been created */
+        mMetadataChannel->stop();
+    }
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    if(mHdrHint == true) {
+        if (m_pPowerModule) {
+            if (m_pPowerModule->powerHint) {
+                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
+                        (void *)"state=3");
+                mHdrHint = false;
+            }
+        }
+    }
+#endif
+
+    pthread_mutex_lock(&mMutex);
+
+    bool isZsl = false;
+    camera3_stream_t *inputStream = NULL;
+    camera3_stream_t *jpegStream = NULL;
+    cam_stream_size_info_t stream_config_info;
+
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+        ALOGD("%s: newStream type = %d, stream format = %d stream size : %d x %d",
+                __func__, newStream->stream_type, newStream->format,
+                 newStream->width, newStream->height);
+        //if the stream is in the mStreamList validate it
+        bool stream_exists = false;
+        for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
+                it != mStreamInfo.end(); it++) {
+            if ((*it)->stream == newStream) {
+                QCamera3Channel *channel =
+                    (QCamera3Channel*)(*it)->stream->priv;
+                stream_exists = true;
+                delete channel;
+                (*it)->status = VALID;
+                (*it)->stream->priv = NULL;
+                (*it)->channel = NULL;
+            }
+        }
+        if (!stream_exists) {
+            //new stream
+            stream_info_t* stream_info;
+            stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
+            stream_info->stream = newStream;
+            stream_info->status = VALID;
+            stream_info->channel = NULL;
+            mStreamInfo.push_back(stream_info);
+        }
+        if (newStream->stream_type == CAMERA3_STREAM_INPUT
+                || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
+            if (inputStream != NULL) {
+                ALOGE("%s: Multiple input streams requested!", __func__);
+                pthread_mutex_unlock(&mMutex);
+                return BAD_VALUE;
+            }
+            inputStream = newStream;
+        }
+        if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
+            jpegStream = newStream;
+        }
+    }
+    mInputStream = inputStream;
+
+    cleanAndSortStreamInfo();
+    if (mMetadataChannel) {
+        delete mMetadataChannel;
+        mMetadataChannel = NULL;
+    }
+
+    //Create metadata channel and initialize it
+    mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
+                    mCameraHandle->ops, captureResultCb,
+                    &gCamCapability[mCameraId]->padding_info, this);
+    if (mMetadataChannel == NULL) {
+        ALOGE("%s: failed to allocate metadata channel", __func__);
+        rc = -ENOMEM;
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+    rc = mMetadataChannel->initialize();
+    if (rc < 0) {
+        ALOGE("%s: metadata channel initialization failed", __func__);
+        delete mMetadataChannel;
+        mMetadataChannel = NULL;
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    /* Create dummy stream if there is one single raw stream */
+    if (streamList->num_streams == 1 &&
+            (streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
+            streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16)) {
+        mSupportChannel = new QCamera3SupportChannel(
+                mCameraHandle->camera_handle,
+                mCameraHandle->ops,
+                &gCamCapability[mCameraId]->padding_info,
+                this);
+        if (!mSupportChannel) {
+            ALOGE("%s: dummy channel cannot be created", __func__);
+            pthread_mutex_unlock(&mMutex);
+            return -ENOMEM;
+        }
+
+        rc = mSupportChannel->initialize();
+        if (rc < 0) {
+            ALOGE("%s: dummy channel initialization failed", __func__);
+            delete mSupportChannel;
+            mSupportChannel = NULL;
+            delete mMetadataChannel;
+            mMetadataChannel = NULL;
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+    }
+
+    /* Allocate channel objects for the requested streams */
+    for (size_t i = 0; i < streamList->num_streams; i++) {
+        camera3_stream_t *newStream = streamList->streams[i];
+        uint32_t stream_usage = newStream->usage;
+        stream_config_info.stream_sizes[i].width = newStream->width;
+        stream_config_info.stream_sizes[i].height = newStream->height;
+        if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
+            newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
+            //for zsl stream the size is active array size
+            isZsl = true;
+            stream_config_info.stream_sizes[i].width =
+                    gCamCapability[mCameraId]->active_array_size.width;
+            stream_config_info.stream_sizes[i].height =
+                    gCamCapability[mCameraId]->active_array_size.height;
+            stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
+        } else {
+           //for non zsl streams find out the format
+           switch (newStream->format) {
+           case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
+              {
+                 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
+                    stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
+                 } else {
+                    stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
+                 }
+              }
+              break;
+           case HAL_PIXEL_FORMAT_YCbCr_420_888:
+              stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
+#ifdef HAS_MULTIMEDIA_HINTS
+              if (m_pPowerModule) {
+                  if (m_pPowerModule->powerHint) {
+                      m_pPowerModule->powerHint(m_pPowerModule,
+                          POWER_HINT_VIDEO_ENCODE, (void *)"state=2");
+                      mHdrHint = true;
+                  }
+              }
+#endif
+              break;
+           case HAL_PIXEL_FORMAT_BLOB:
+              stream_config_info.type[i] = CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT;
+              break;
+           case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+           case HAL_PIXEL_FORMAT_RAW16:
+              stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
+              break;
+           default:
+              stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
+              break;
+           }
+        }
+        if (newStream->priv == NULL) {
+            //New stream, construct channel
+            switch (newStream->stream_type) {
+            case CAMERA3_STREAM_INPUT:
+                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
+                break;
+            case CAMERA3_STREAM_BIDIRECTIONAL:
+                newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
+                    GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+            case CAMERA3_STREAM_OUTPUT:
+                /* For video encoding streams, set the read/write-rarely
+                 * flags so that the buffers may be allocated un-cached */
+                if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
+                    newStream->usage =
+                         (GRALLOC_USAGE_SW_READ_RARELY |
+                         GRALLOC_USAGE_SW_WRITE_RARELY |
+                         GRALLOC_USAGE_HW_CAMERA_WRITE);
+                else
+                    newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
+                break;
+            default:
+                ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
+                break;
+            }
+
+            if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
+                    newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
+                QCamera3Channel *channel = NULL;
+                switch (newStream->format) {
+                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                    newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
+                    channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
+                            mCameraHandle->ops, captureResultCb,
+                            &gCamCapability[mCameraId]->padding_info,
+                            this,
+                            newStream,
+                            (cam_stream_type_t) stream_config_info.type[i]);
+                    if (channel == NULL) {
+                        ALOGE("%s: allocation of channel failed", __func__);
+                        pthread_mutex_unlock(&mMutex);
+                        return -ENOMEM;
+                    }
+
+                    newStream->priv = channel;
+                    break;
+                case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+                case HAL_PIXEL_FORMAT_RAW16:
+                    newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
+                    mRawChannel = new QCamera3RawChannel(
+                            mCameraHandle->camera_handle,
+                            mCameraHandle->ops, captureResultCb,
+                            &gCamCapability[mCameraId]->padding_info,
+                            this, newStream, (newStream->format == HAL_PIXEL_FORMAT_RAW16));
+                    if (mRawChannel == NULL) {
+                        ALOGE("%s: allocation of raw channel failed", __func__);
+                        pthread_mutex_unlock(&mMutex);
+                        return -ENOMEM;
+                    }
+
+                    newStream->priv = (QCamera3Channel*)mRawChannel;
+                    break;
+                case HAL_PIXEL_FORMAT_BLOB:
+                    newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
+                    mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
+                            mCameraHandle->ops, captureResultCb,
+                            &gCamCapability[mCameraId]->padding_info, this, newStream);
+                    if (mPictureChannel == NULL) {
+                        ALOGE("%s: allocation of channel failed", __func__);
+                        pthread_mutex_unlock(&mMutex);
+                        return -ENOMEM;
+                    }
+                    newStream->priv = (QCamera3Channel*)mPictureChannel;
+                    break;
+
+                default:
+                    ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
+                    break;
+                }
+            }
+
+            for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
+                    it != mStreamInfo.end(); it++) {
+                if ((*it)->stream == newStream) {
+                    (*it)->channel = (QCamera3Channel*) newStream->priv;
+                    break;
+                }
+            }
+        } else {
+            // Channel already exists for this stream
+            // Do nothing for now
+        }
+    }
+
+    if (isZsl)
+        mPictureChannel->overrideYuvSize(
+                gCamCapability[mCameraId]->active_array_size.width,
+                gCamCapability[mCameraId]->active_array_size.height);
+
+    int32_t hal_version = CAM_HAL_V3;
+    stream_config_info.num_streams = streamList->num_streams;
+    if (mSupportChannel) {
+        stream_config_info.stream_sizes[stream_config_info.num_streams] =
+                QCamera3SupportChannel::kDim;
+        stream_config_info.type[stream_config_info.num_streams] =
+                CAM_STREAM_TYPE_CALLBACK;
+        stream_config_info.num_streams++;
+    }
+
+    // settings/parameters don't carry over for new configureStreams
+    memset(mParameters, 0, sizeof(metadata_buffer_t));
+
+    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
+    AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
+                sizeof(hal_version), &hal_version);
+
+    AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
+                sizeof(stream_config_info), &stream_config_info);
+
+    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
+
+    /* Initialize mPendingRequestsList and mPendingBuffersMap */
+    mPendingRequestsList.clear();
+    mPendingFrameDropList.clear();
+    // Initialize/Reset the pending buffers list
+    mPendingBuffersMap.num_buffers = 0;
+    mPendingBuffersMap.mPendingBufferList.clear();
+
+    mFirstRequest = true;
+
+    //Get min frame duration for this streams configuration
+    deriveMinFrameDuration();
+
+    pthread_mutex_unlock(&mMutex);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : validateCaptureRequest
+ *
+ * DESCRIPTION: validate a capture request from camera service
+ *
+ * PARAMETERS :
+ *   @request : request from framework to process
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::validateCaptureRequest(
+                    camera3_capture_request_t *request)
+{
+    ssize_t idx = 0;
+    const camera3_stream_buffer_t *b;
+    CameraMetadata meta;
+
+    /* Sanity check the request */
+    if (request == NULL) {
+        ALOGE("%s: NULL capture request", __func__);
+        return BAD_VALUE;
+    }
+
+    if (request->settings == NULL && mFirstRequest) {
+        /*settings cannot be null for the first request*/
+        return BAD_VALUE;
+    }
+
+    uint32_t frameNumber = request->frame_number;
+    if (request->input_buffer != NULL &&
+            request->input_buffer->stream != mInputStream) {
+        ALOGE("%s: Request %d: Input buffer not from input stream!",
+                __FUNCTION__, frameNumber);
+        return BAD_VALUE;
+    }
+    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
+        ALOGE("%s: Request %d: No output buffers provided!",
+                __FUNCTION__, frameNumber);
+        return BAD_VALUE;
+    }
+    if (request->input_buffer != NULL) {
+        b = request->input_buffer;
+        QCamera3Channel *channel =
+            static_cast<QCamera3Channel*>(b->stream->priv);
+        if (channel == NULL) {
+            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
+            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->release_fence != -1) {
+            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->buffer == NULL) {
+            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+    }
+
+    // Validate all buffers
+    b = request->output_buffers;
+    do {
+        QCamera3Channel *channel =
+                static_cast<QCamera3Channel*>(b->stream->priv);
+        if (channel == NULL) {
+            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
+            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->release_fence != -1) {
+            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        if (b->buffer == NULL) {
+            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
+                    __func__, frameNumber, idx);
+            return BAD_VALUE;
+        }
+        idx++;
+        b = request->output_buffers + idx;
+    } while (idx < (ssize_t)request->num_output_buffers);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deriveMinFrameDuration
+ *
+ * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
+ *              on the currently configured streams.
+ *
+ * PARAMETERS : NONE
+ *
+ * RETURN     : NONE
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::deriveMinFrameDuration()
+{
+    int32_t maxJpegDim, maxProcessedDim, maxRawDim;
+
+    maxJpegDim = 0;
+    maxProcessedDim = 0;
+    maxRawDim = 0;
+
+    // Figure out maximum jpeg, processed, and raw dimensions
+    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+
+        // Input stream doesn't have valid stream_type
+        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
+            continue;
+
+        int32_t dimension = (*it)->stream->width * (*it)->stream->height;
+        if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            if (dimension > maxJpegDim)
+                maxJpegDim = dimension;
+        } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
+                (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
+            if (dimension > maxRawDim)
+                maxRawDim = dimension;
+        } else {
+            if (dimension > maxProcessedDim)
+                maxProcessedDim = dimension;
+        }
+    }
+
+    //Assume all jpeg dimensions are in processed dimensions.
+    if (maxJpegDim > maxProcessedDim)
+        maxProcessedDim = maxJpegDim;
+    //Find the smallest raw dimension that is greater than or equal to the max processed dimension
+    if (maxProcessedDim > maxRawDim) {
+        maxRawDim = INT32_MAX;
+        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
+            i++) {
+
+            int32_t dimension =
+                gCamCapability[mCameraId]->raw_dim[i].width *
+                gCamCapability[mCameraId]->raw_dim[i].height;
+
+            if (dimension >= maxProcessedDim && dimension < maxRawDim)
+                maxRawDim = dimension;
+        }
+    }
+
+    //Find minimum durations for processed, jpeg, and raw
+    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
+            i++) {
+        if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
+                gCamCapability[mCameraId]->raw_dim[i].height) {
+            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
+            break;
+        }
+    }
+    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
+        if (maxProcessedDim ==
+            gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
+            gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
+            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
+            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMinFrameDuration
+ *
+ * DESCRIPTION: get the minimum frame duration based on the stream-configuration
+ *              minimum frame durations and the current request configuration.
+ *
+ * PARAMETERS : @request: request sent by the frameworks
+ *
+ * RETURN     : min frame duration for a particular request
+ *
+ *==========================================================================*/
+int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
+{
+    bool hasJpegStream = false;
+    bool hasRawStream = false;
+    for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
+        const camera3_stream_t *stream = request->output_buffers[i].stream;
+        if (stream->format == HAL_PIXEL_FORMAT_BLOB)
+            hasJpegStream = true;
+        else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
+                stream->format == HAL_PIXEL_FORMAT_RAW16)
+            hasRawStream = true;
+    }
+
+    if (!hasJpegStream)
+        return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
+    else
+        return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
+}
+
+/*===========================================================================
+ * FUNCTION   : handleMetadataWithLock
+ *
+ * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
+ *
+ * PARAMETERS : @metadata_buf: metadata buffer
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleMetadataWithLock(
+    mm_camera_super_buf_t *metadata_buf)
+{
+    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
+    int32_t frame_number_valid = *(int32_t *)
+        POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
+    uint32_t pending_requests = *(uint32_t *)POINTER_OF(
+        CAM_INTF_META_PENDING_REQUESTS, metadata);
+    uint32_t frame_number = *(uint32_t *)
+        POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
+    const struct timeval *tv = (const struct timeval *)
+        POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
+    nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
+        tv->tv_usec * NSEC_PER_USEC;
+    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
+        POINTER_OF(CAM_INTF_META_FRAME_DROPPED, metadata);
+
+    int32_t urgent_frame_number_valid = *(int32_t *)
+        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
+    uint32_t urgent_frame_number = *(uint32_t *)
+        POINTER_OF(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
+
+    if (urgent_frame_number_valid) {
+        ALOGV("%s: valid urgent frame_number = %d, capture_time = %lld",
+          __func__, urgent_frame_number, capture_time);
+
+        //Received an urgent frame number, handle it
+        //using HAL3.1 quirk for partial results
+        for (List<PendingRequestInfo>::iterator i =
+            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
+            camera3_notify_msg_t notify_msg;
+            ALOGV("%s: Iterator Frame = %d urgent frame = %d",
+                __func__, i->frame_number, urgent_frame_number);
+
+            if (i->frame_number < urgent_frame_number &&
+                i->bNotified == 0) {
+                notify_msg.type = CAMERA3_MSG_SHUTTER;
+                notify_msg.message.shutter.frame_number = i->frame_number;
+                notify_msg.message.shutter.timestamp = capture_time -
+                    (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+                i->timestamp = notify_msg.message.shutter.timestamp;
+                i->bNotified = 1;
+                ALOGV("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
+                    __func__, i->frame_number, notify_msg.message.shutter.timestamp);
+            }
+
+            if (i->frame_number == urgent_frame_number) {
+
+                camera3_capture_result_t result;
+                memset(&result, 0, sizeof(camera3_capture_result_t));
+
+                // Send shutter notify to frameworks
+                notify_msg.type = CAMERA3_MSG_SHUTTER;
+                notify_msg.message.shutter.frame_number = i->frame_number;
+                notify_msg.message.shutter.timestamp = capture_time;
+                mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+                i->timestamp = capture_time;
+                i->bNotified = 1;
+
+                // Extract 3A metadata
+                result.result =
+                    translateCbUrgentMetadataToResultMetadata(metadata);
+                // Populate metadata result
+                result.frame_number = urgent_frame_number;
+                result.num_output_buffers = 0;
+                result.output_buffers = NULL;
+                mCallbackOps->process_capture_result(mCallbackOps, &result);
+                ALOGV("%s: urgent frame_number = %d, capture_time = %lld",
+                     __func__, result.frame_number, capture_time);
+                free_camera_metadata((camera_metadata_t *)result.result);
+                break;
+            }
+        }
+    }
+
+    if (!frame_number_valid) {
+        ALOGV("%s: Not a valid normal frame number, used as SOF only", __func__);
+        mMetadataChannel->bufDone(metadata_buf);
+        free(metadata_buf);
+        goto done_metadata;
+    }
+    ALOGV("%s: valid normal frame_number = %d, capture_time = %lld", __func__,
+            frame_number, capture_time);
+
+    // Go through the pending requests info and send shutter/results to frameworks
+    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
+        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
+        camera3_capture_result_t result;
+        memset(&result, 0, sizeof(camera3_capture_result_t));
+        ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);
+
+        // Flush out all entries with frame numbers less than or equal to this one.
+        mPendingRequest--;
+
+        // Check whether any stream buffer corresponding to this is dropped or not
+        // If dropped, then notify ERROR_BUFFER for the corresponding stream and
+        // buffer with CAMERA3_BUFFER_STATUS_ERROR
+        if (cam_frame_drop.frame_dropped) {
+            camera3_notify_msg_t notify_msg;
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
+                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+                for (uint32_t k=0; k<cam_frame_drop.cam_stream_ID.num_streams; k++) {
+                  if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
+                      // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
+                      ALOGV("%s: Start of reporting error frame#=%d, streamID=%d",
+                             __func__, i->frame_number, streamID);
+                      notify_msg.type = CAMERA3_MSG_ERROR;
+                      notify_msg.message.error.frame_number = i->frame_number;
+                      notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
+                      notify_msg.message.error.error_stream = j->stream;
+                      mCallbackOps->notify(mCallbackOps, &notify_msg);
+                      ALOGV("%s: End of reporting error frame#=%d, streamID=%d",
+                             __func__, i->frame_number, streamID);
+                      PendingFrameDropInfo PendingFrameDrop;
+                      PendingFrameDrop.frame_number=i->frame_number;
+                      PendingFrameDrop.stream_ID = streamID;
+                      // Add the Frame drop info to mPendingFrameDropList
+                      mPendingFrameDropList.push_back(PendingFrameDrop);
+                  }
+                }
+            }
+        }
+
+        // For dropped metadata, send empty metadata along with the already filled buffers;
+        // for the current metadata, send the translated result with the already filled buffers
+        if (i->frame_number < frame_number) {
+            CameraMetadata dummyMetadata;
+            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
+                    &i->timestamp, 1);
+            dummyMetadata.update(ANDROID_REQUEST_ID,
+                    &(i->request_id), 1);
+            result.result = dummyMetadata.release();
+        } else {
+            result.result = translateFromHalMetadata(metadata,
+                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth);
+
+            if (i->blob_request) {
+                {
+                    //Dump tuning metadata if enabled and available
+                    char prop[PROPERTY_VALUE_MAX];
+                    memset(prop, 0, sizeof(prop));
+                    property_get("persist.camera.dumpmetadata", prop, "0");
+                    int32_t enabled = atoi(prop);
+                    if (enabled && metadata->is_tuning_params_valid) {
+                        dumpMetadataToFile(metadata->tuning_params,
+                               mMetaFrameCount,
+                               enabled,
+                               "Snapshot",
+                               frame_number);
+                    }
+                }
+
+                //If it is a blob request then send the metadata to the picture channel
+                metadata_buffer_t *reproc_meta =
+                        (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
+                if (reproc_meta == NULL) {
+                    ALOGE("%s: Failed to allocate memory for reproc data.", __func__);
+                    goto done_metadata;
+                }
+                *reproc_meta = *metadata;
+                mPictureChannel->queueReprocMetadata(reproc_meta);
+            }
+            // Return metadata buffer
+            mMetadataChannel->bufDone(metadata_buf);
+            free(metadata_buf);
+        }
+        if (!result.result) {
+            ALOGE("%s: metadata is NULL", __func__);
+        }
+        result.frame_number = i->frame_number;
+        result.num_output_buffers = 0;
+        result.output_buffers = NULL;
+        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+            if (j->buffer) {
+                result.num_output_buffers++;
+            }
+        }
+
+        if (result.num_output_buffers > 0) {
+            camera3_stream_buffer_t *result_buffers =
+                new camera3_stream_buffer_t[result.num_output_buffers];
+            if (!result_buffers) {
+                ALOGE("%s: Fatal error: out of memory", __func__);
+            }
+            size_t result_buffers_idx = 0;
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                    j != i->buffers.end(); j++) {
+                if (j->buffer) {
+                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                            m != mPendingFrameDropList.end(); m++) {
+                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
+                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+                        if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
+                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                            ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
+                                  __func__, frame_number, streamID);
+                            m = mPendingFrameDropList.erase(m);
+                            break;
+                        }
+                    }
+
+                    for (List<PendingBufferInfo>::iterator k =
+                      mPendingBuffersMap.mPendingBufferList.begin();
+                      k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
+                      if (k->buffer == j->buffer->buffer) {
+                        ALOGV("%s: Found buffer %p in pending buffer List "
+                              "for frame %d, Take it out!!", __func__,
+                               k->buffer, k->frame_number);
+                        mPendingBuffersMap.num_buffers--;
+                        k = mPendingBuffersMap.mPendingBufferList.erase(k);
+                        break;
+                      }
+                    }
+
+                    result_buffers[result_buffers_idx++] = *(j->buffer);
+                    free(j->buffer);
+                    j->buffer = NULL;
+                }
+            }
+            result.output_buffers = result_buffers;
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
+                    __func__, result.frame_number, i->timestamp);
+            free_camera_metadata((camera_metadata_t *)result.result);
+            delete[] result_buffers;
+        } else {
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            ALOGV("%s: meta frame_number = %d, capture_time = %lld",
+                        __func__, result.frame_number, i->timestamp);
+            free_camera_metadata((camera_metadata_t *)result.result);
+        }
+        // erase the element from the list
+        i = mPendingRequestsList.erase(i);
+    }
+
+done_metadata:
+    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
+        i != mPendingRequestsList.end() ;i++) {
+        i->pipeline_depth++;
+    }
+    if (!pending_requests)
+        unblockRequestIfNecessary();
+
+}
+
+/*===========================================================================
+ * FUNCTION   : handleBufferWithLock
+ *
+ * DESCRIPTION: Handles image buffer callback with mMutex lock held.
+ *
+ * PARAMETERS : @buffer: image buffer for the callback
+ *              @frame_number: frame number of the image buffer
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::handleBufferWithLock(
+    camera3_stream_buffer_t *buffer, uint32_t frame_number)
+{
+    // If the frame number doesn't exist in the pending request list,
+    // directly send the buffer to the frameworks, and update pending buffers map
+    // Otherwise, book-keep the buffer.
+    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
+    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
+        i++;
+    }
+    if (i == mPendingRequestsList.end()) {
+        // Verify all pending requests frame_numbers are greater
+        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
+                j != mPendingRequestsList.end(); j++) {
+            if (j->frame_number < frame_number) {
+                ALOGE("%s: Error: pending frame number %d is smaller than %d",
+                        __func__, j->frame_number, frame_number);
+            }
+        }
+        camera3_capture_result_t result;
+        memset(&result, 0, sizeof(camera3_capture_result_t));
+        result.result = NULL;
+        result.frame_number = frame_number;
+        result.num_output_buffers = 1;
+        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
+                m != mPendingFrameDropList.end(); m++) {
+            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
+            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
+            if((m->stream_ID==streamID) && (m->frame_number==frame_number)) {
+                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
+                ALOGV("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
+                        __func__, frame_number, streamID);
+                m = mPendingFrameDropList.erase(m);
+                break;
+            }
+        }
+        result.output_buffers = buffer;
+        ALOGV("%s: result frame_number = %d, buffer = %p",
+                __func__, frame_number, buffer->buffer);
+
+        for (List<PendingBufferInfo>::iterator k =
+                mPendingBuffersMap.mPendingBufferList.begin();
+                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
+            if (k->buffer == buffer->buffer) {
+                ALOGV("%s: Found Frame buffer, take it out from list",
+                        __func__);
+
+                mPendingBuffersMap.num_buffers--;
+                k = mPendingBuffersMap.mPendingBufferList.erase(k);
+                break;
+            }
+        }
+        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
+            __func__, mPendingBuffersMap.num_buffers);
+
+        mCallbackOps->process_capture_result(mCallbackOps, &result);
+    } else {
+        if (i->input_buffer_present) {
+            camera3_capture_result result;
+            memset(&result, 0, sizeof(camera3_capture_result_t));
+            result.result = NULL;
+            result.frame_number = frame_number;
+            result.num_output_buffers = 1;
+            result.output_buffers = buffer;
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            i = mPendingRequestsList.erase(i);
+            mPendingRequest--;
+        } else {
+            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
+                j != i->buffers.end(); j++) {
+                if (j->stream == buffer->stream) {
+                    if (j->buffer != NULL) {
+                        ALOGE("%s: Error: buffer is already set", __func__);
+                    } else {
+                        j->buffer = (camera3_stream_buffer_t *)malloc(
+                            sizeof(camera3_stream_buffer_t));
+                        *(j->buffer) = *buffer;
+                        ALOGV("%s: cache buffer %p at result frame_number %d",
+                            __func__, buffer, frame_number);
+                    }
+                }
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : unblockRequestIfNecessary
+ *
+ * DESCRIPTION: Signal the request condition variable so that a blocked
+ *              process_capture_request can re-check whether it may proceed.
+ *              Note that mMutex is held when this function is called.
+ *
+ * PARAMETERS :
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::unblockRequestIfNecessary()
+{
+   // Unblock process_capture_request
+   pthread_cond_signal(&mRequestCond);
+}
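+
+// Editor's note: unblockRequestIfNecessary() above is the signalling half of a
+// simple in-flight throttle; processCaptureRequest() below is the waiting
+// half. The self-contained sketch that follows shows the pattern in
+// isolation; every name is hypothetical and it is not part of this HAL.
+#include <pthread.h>
+
+static pthread_mutex_t exampleMutex = PTHREAD_MUTEX_INITIALIZER;
+static pthread_cond_t  exampleCond  = PTHREAD_COND_INITIALIZER;
+static int examplePendingRequests = 0;
+static const int kExampleMaxInFlight = 4;   // illustrative limit only
+
+// Submitting side: count the new request, then wait until there is room.
+static void exampleSubmitRequest()
+{
+    pthread_mutex_lock(&exampleMutex);
+    examplePendingRequests++;
+    while (examplePendingRequests >= kExampleMaxInFlight) {
+        pthread_cond_wait(&exampleCond, &exampleMutex);
+    }
+    pthread_mutex_unlock(&exampleMutex);
+}
+
+// Completion side: a finished result frees a slot and wakes the submitter.
+static void exampleCompleteRequest()
+{
+    pthread_mutex_lock(&exampleMutex);
+    examplePendingRequests--;
+    pthread_cond_signal(&exampleCond);
+    pthread_mutex_unlock(&exampleMutex);
+}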
+
+/*===========================================================================
+ * FUNCTION   : registerStreamBuffers
+ *
+ * DESCRIPTION: Register buffers for a given stream with the HAL device.
+ *
+ * PARAMETERS :
+ *   @buffer_set : buffer set from the framework (unused; this entry point
+ *                 is deprecated)
+ *
+ * RETURN     : NO_ERROR
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::registerStreamBuffers(
+        const camera3_stream_buffer_set_t * /*buffer_set*/)
+{
+    //Deprecated
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processCaptureRequest
+ *
+ * DESCRIPTION: process a capture request from camera service
+ *
+ * PARAMETERS :
+ *   @request : request from framework to process
+ *
+ * RETURN     :
+ *
+ *==========================================================================*/
+int QCamera3HardwareInterface::processCaptureRequest(
+                    camera3_capture_request_t *request)
+{
+    int rc = NO_ERROR;
+    int32_t request_id;
+    CameraMetadata meta;
+
+    pthread_mutex_lock(&mMutex);
+
+    rc = validateCaptureRequest(request);
+    if (rc != NO_ERROR) {
+        ALOGE("%s: incoming request is not valid", __func__);
+        pthread_mutex_unlock(&mMutex);
+        return rc;
+    }
+
+    meta = request->settings;
+
+    // For first capture request, send capture intent, and
+    // stream on all streams
+    if (mFirstRequest) {
+
+        for (size_t i = 0; i < request->num_output_buffers; i++) {
+            const camera3_stream_buffer_t& output = request->output_buffers[i];
+            QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
+            rc = channel->registerBuffer(output.buffer);
+            if (rc < 0) {
+                ALOGE("%s: registerBuffer failed",
+                        __func__);
+                pthread_mutex_unlock(&mMutex);
+                return -ENODEV;
+            }
+        }
+
+        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
+            int32_t hal_version = CAM_HAL_V3;
+            uint8_t captureIntent =
+                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
+
+            memset(mParameters, 0, sizeof(metadata_buffer_t));
+            mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
+            AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
+                sizeof(hal_version), &hal_version);
+            AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
+                sizeof(captureIntent), &captureIntent);
+            mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+                mParameters);
+        }
+
+        ALOGD("%s: Start META Channel", __func__);
+        mMetadataChannel->start();
+
+        if (mSupportChannel)
+            mSupportChannel->start();
+
+        //First initialize all streams
+        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+            rc = channel->initialize();
+            if (NO_ERROR != rc) {
+                ALOGE("%s : Channel initialization failed %d", __func__, rc);
+                if (mSupportChannel)
+                    mSupportChannel->stop();
+                mMetadataChannel->stop();
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+        }
+        //Then start them.
+        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end(); it++) {
+            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+            ALOGD("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
+            channel->start();
+        }
+    }
+
+    uint32_t frameNumber = request->frame_number;
+    cam_stream_ID_t streamID;
+
+    if (meta.exists(ANDROID_REQUEST_ID)) {
+        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
+        mCurrentRequestId = request_id;
+        ALOGV("%s: Received request with id: %d",__func__, request_id);
+    } else if (mFirstRequest || mCurrentRequestId == -1) {
+        ALOGE("%s: Unable to find request id field, "
+                "and no previous id available", __func__);
+        pthread_mutex_unlock(&mMutex);
+        return NAME_NOT_FOUND;
+    } else {
+        ALOGV("%s: Re-using old request id", __func__);
+        request_id = mCurrentRequestId;
+    }
+
+    ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
+                                    __func__, __LINE__,
+                                    request->num_output_buffers,
+                                    request->input_buffer,
+                                    frameNumber);
+    // Acquire all request buffers first
+    streamID.num_streams = 0;
+    int blob_request = 0;
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        const camera3_stream_buffer_t& output = request->output_buffers[i];
+        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
+        sp<Fence> acquireFence = new Fence(output.acquire_fence);
+
+        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            // Mark that this request contains a JPEG (BLOB) output
+            blob_request = 1;
+        }
+
+        rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
+        if (rc != OK) {
+            ALOGE("%s: fence wait failed %d", __func__, rc);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+
+        streamID.streamID[streamID.num_streams] =
+            channel->getStreamID(channel->getStreamTypeMask());
+        streamID.num_streams++;
+    }
+
+    if (request->input_buffer == NULL) {
+        rc = setFrameParameters(request, streamID);
+        if (rc < 0) {
+            ALOGE("%s: fail to set frame parameters", __func__);
+            pthread_mutex_unlock(&mMutex);
+            return rc;
+        }
+    }
+
+    /* Update pending request list and pending buffers map */
+    PendingRequestInfo pendingRequest;
+    pendingRequest.frame_number = frameNumber;
+    pendingRequest.num_buffers = request->num_output_buffers;
+    pendingRequest.request_id = request_id;
+    pendingRequest.blob_request = blob_request;
+    pendingRequest.bNotified = 0;
+    pendingRequest.input_buffer_present = (request->input_buffer != NULL)? 1 : 0;
+    pendingRequest.pipeline_depth = 0;
+    extractJpegMetadata(pendingRequest.jpegMetadata, request);
+
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        RequestedBufferInfo requestedBuf;
+        requestedBuf.stream = request->output_buffers[i].stream;
+        requestedBuf.buffer = NULL;
+        pendingRequest.buffers.push_back(requestedBuf);
+
+        // Add the buffer handle to the pending buffers list
+        PendingBufferInfo bufferInfo;
+        bufferInfo.frame_number = frameNumber;
+        bufferInfo.buffer = request->output_buffers[i].buffer;
+        bufferInfo.stream = request->output_buffers[i].stream;
+        mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
+        mPendingBuffersMap.num_buffers++;
+        ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
+          __func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
+          bufferInfo.stream->format);
+    }
+    ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
+          __func__, mPendingBuffersMap.num_buffers);
+
+    mPendingRequestsList.push_back(pendingRequest);
+
+    // Notify the metadata channel that we received a request
+    mMetadataChannel->request(NULL, frameNumber);
+
+    // Call request on other streams
+    for (size_t i = 0; i < request->num_output_buffers; i++) {
+        const camera3_stream_buffer_t& output = request->output_buffers[i];
+        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
+        mm_camera_buf_def_t *pInputBuffer = NULL;
+
+        if (channel == NULL) {
+            ALOGE("%s: invalid channel pointer for stream", __func__);
+            continue;
+        }
+
+        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            QCamera3RegularChannel* inputChannel = NULL;
+            if(request->input_buffer != NULL){
+
+                //Try to get the internal format
+                inputChannel = (QCamera3RegularChannel*)
+                    request->input_buffer->stream->priv;
+                if(inputChannel == NULL ){
+                    ALOGE("%s: failed to get input channel handle", __func__);
+                } else {
+                    pInputBuffer =
+                        inputChannel->getInternalFormatBuffer(
+                                request->input_buffer->buffer);
+                    ALOGD("%s: Input buffer dump",__func__);
+                    ALOGD("Stream id: %d", pInputBuffer->stream_id);
+                    ALOGD("streamtype:%d", pInputBuffer->stream_type);
+                    ALOGD("frame len:%d", pInputBuffer->frame_len);
+                    ALOGD("Handle:%p", request->input_buffer->buffer);
+                }
+                rc = channel->request(output.buffer, frameNumber,
+                            pInputBuffer, mParameters);
+                if (rc < 0) {
+                    ALOGE("%s: Fail to request on picture channel", __func__);
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
+
+                rc = setReprocParameters(request);
+                if (rc < 0) {
+                    ALOGE("%s: fail to set reproc parameters", __func__);
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
+            } else{
+                 ALOGV("%s: %d, snapshot request with buffer %p, frame_number %d", __func__,
+                       __LINE__, output.buffer, frameNumber);
+                 if (mRepeatingRequest) {
+                   rc = channel->request(output.buffer, frameNumber,
+                               NULL, mPrevParameters);
+                 } else {
+                    rc = channel->request(output.buffer, frameNumber,
+                               NULL, mParameters);
+                 }
+            }
+        } else {
+            ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__,
+                __LINE__, output.buffer, frameNumber);
+           rc = channel->request(output.buffer, frameNumber);
+        }
+        if (rc < 0)
+            ALOGE("%s: request failed", __func__);
+    }
+
+    /*set the parameters to backend*/
+    mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
+
+    mFirstRequest = false;
+    // Use a timed condition wait so that a stalled pipeline cannot block forever
+    struct timespec ts;
+    uint8_t isValidTimeout = 1;
+    rc = clock_gettime(CLOCK_REALTIME, &ts);
+    if (rc < 0) {
+        isValidTimeout = 0;
+        ALOGE("%s: Error reading the real time clock!", __func__);
+    } else {
+        // Allow up to 5 seconds for the request to be honored
+        ts.tv_sec += 5;
+    }
+    // Block on the condition variable while too many requests are in flight
+
+    mPendingRequest++;
+    while (mPendingRequest >= kMaxInFlight) {
+        if (!isValidTimeout) {
+            ALOGV("%s: Blocking on conditional wait", __func__);
+            pthread_cond_wait(&mRequestCond, &mMutex);
+        }
+        else {
+            ALOGV("%s: Blocking on timed conditional wait", __func__);
+            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
+            if (rc == ETIMEDOUT) {
+                rc = -ENODEV;
+                ALOGE("%s: Unblocked on timeout!!!!", __func__);
+                break;
+            }
+        }
+        ALOGV("%s: Unblocked", __func__);
+    }
+    pthread_mutex_unlock(&mMutex);
+
+    return rc;
+}
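+
+// Editor's note: a minimal, self-contained sketch of the timed wait used in
+// processCaptureRequest() above. pthread_cond_timedwait() takes an absolute
+// CLOCK_REALTIME deadline and returns ETIMEDOUT once it passes. The helper
+// name and signature are hypothetical; this is not part of the HAL.
+#include <errno.h>
+#include <pthread.h>
+#include <time.h>
+
+// Returns 0 once *predicateSatisfied becomes nonzero, or -ETIMEDOUT after
+// timeoutSec seconds. The caller must already hold 'mutex'.
+static int exampleTimedWait(pthread_cond_t *cond, pthread_mutex_t *mutex,
+                            int *predicateSatisfied, int timeoutSec)
+{
+    struct timespec deadline;
+    if (clock_gettime(CLOCK_REALTIME, &deadline) < 0)
+        return -errno;
+    deadline.tv_sec += timeoutSec;      // absolute deadline, not a delta
+
+    while (!*predicateSatisfied) {
+        int err = pthread_cond_timedwait(cond, mutex, &deadline);
+        if (err == ETIMEDOUT)
+            return -ETIMEDOUT;
+    }
+    return 0;
+}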
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Dump HAL state for debugging (currently a stub).
+ *
+ * PARAMETERS :
+ *   @fd : file descriptor to write the dump to (unused)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3HardwareInterface::dump(int /*fd*/)
+{
+    /*Enable lock when we implement this function*/
+    /*
+    pthread_mutex_lock(&mMutex);
+
+    pthread_mutex_unlock(&mMutex);
+    */
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: Stop all streams, return every pending buffer with error status,
+ *              and send an error notification for every pending request so the
+ *              framework can reclaim its buffers.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : 0 on success
+ *==========================================================================*/
+int QCamera3HardwareInterface::flush()
+{
+
+    unsigned int frameNum = 0;
+    camera3_notify_msg_t notify_msg;
+    camera3_capture_result_t result;
+    camera3_stream_buffer_t pStream_Buf;
+
+    ALOGV("%s: Unblocking Process Capture Request", __func__);
+
+    // Stop the Streams/Channels
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+        channel->stop();
+        (*it)->status = INVALID;
+    }
+
+    if (mSupportChannel) {
+        mSupportChannel->stop();
+    }
+    if (mMetadataChannel) {
+        /* A metadata channel exists whenever streams have been configured; stop it too */
+        mMetadataChannel->stop();
+    }
+
+    // Mutex Lock
+    pthread_mutex_lock(&mMutex);
+
+    // Unblock process_capture_request
+    mPendingRequest = 0;
+    pthread_cond_signal(&mRequestCond);
+
+    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
+    if (i != mPendingRequestsList.end()) {
+        frameNum = i->frame_number;
+    }
+    ALOGV("%s: Oldest pending frame number on mPendingRequestsList = %d",
+      __func__, frameNum);
+
+    // Go through the pending buffers and send buffer errors
+    for (List<PendingBufferInfo>::iterator k =
+         mPendingBuffersMap.mPendingBufferList.begin();
+         k != mPendingBuffersMap.mPendingBufferList.end();  ) {
+         ALOGV("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
+          __func__, k->frame_number, k->buffer, k->stream,
+          k->stream->format);
+
+        if (k->frame_number < frameNum) {
+            // Send an error notify to the framework for each buffer whose
+            // metadata buffer has already been sent
+            ALOGV("%s: Sending ERROR BUFFER for frame %d, buffer %p",
+              __func__, k->frame_number, k->buffer);
+
+            notify_msg.type = CAMERA3_MSG_ERROR;
+            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+            notify_msg.message.error.error_stream = k->stream;
+            notify_msg.message.error.frame_number = k->frame_number;
+            mCallbackOps->notify(mCallbackOps, &notify_msg);
+            ALOGV("%s: notify frame_number = %d", __func__,
+                    k->frame_number);
+
+            pStream_Buf.acquire_fence = -1;
+            pStream_Buf.release_fence = -1;
+            pStream_Buf.buffer = k->buffer;
+            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
+            pStream_Buf.stream = k->stream;
+
+            memset(&result, 0, sizeof(camera3_capture_result_t));
+            result.result = NULL;
+            result.frame_number = k->frame_number;
+            result.num_output_buffers = 1;
+            result.output_buffers = &pStream_Buf ;
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+
+            mPendingBuffersMap.num_buffers--;
+            k = mPendingBuffersMap.mPendingBufferList.erase(k);
+        }
+        else {
+          k++;
+        }
+    }
+
+    ALOGV("%s:Sending ERROR REQUEST for all pending requests", __func__);
+
+    // Go through the pending requests info and send error request to framework
+    for (i = mPendingRequestsList.begin(); i != mPendingRequestsList.end(); ) {
+        int numBuffers = 0;
+        ALOGV("%s:Sending ERROR REQUEST for frame %d",
+              __func__, i->frame_number);
+
+        // Send an error notify to the framework for this request
+        notify_msg.type = CAMERA3_MSG_ERROR;
+        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+        notify_msg.message.error.error_stream = NULL;
+        notify_msg.message.error.frame_number = i->frame_number;
+        mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+        memset(&result, 0, sizeof(camera3_capture_result_t));
+        result.frame_number = i->frame_number;
+        result.num_output_buffers = 0;
+        result.output_buffers = NULL;
+        numBuffers = 0;
+
+        for (List<PendingBufferInfo>::iterator k =
+             mPendingBuffersMap.mPendingBufferList.begin();
+             k != mPendingBuffersMap.mPendingBufferList.end(); ) {
+          if (k->frame_number == i->frame_number) {
+            ALOGV("%s: Sending Error for frame = %d, buffer = %p,"
+                   " stream = %p, stream format = %d",__func__,
+                   k->frame_number, k->buffer, k->stream, k->stream->format);
+
+            pStream_Buf.acquire_fence = -1;
+            pStream_Buf.release_fence = -1;
+            pStream_Buf.buffer = k->buffer;
+            pStream_Buf.status = CAMERA3_BUFFER_STATUS_ERROR;
+            pStream_Buf.stream = k->stream;
+
+            result.num_output_buffers = 1;
+            result.output_buffers = &pStream_Buf;
+            result.result = NULL;
+            result.frame_number = i->frame_number;
+
+            mCallbackOps->process_capture_result(mCallbackOps, &result);
+            mPendingBuffersMap.num_buffers--;
+            k = mPendingBuffersMap.mPendingBufferList.erase(k);
+            numBuffers++;
+          }
+          else {
+            k++;
+          }
+        }
+        ALOGV("%s: mPendingBuffersMap.num_buffers = %d",
+              __func__, mPendingBuffersMap.num_buffers);
+
+        i = mPendingRequestsList.erase(i);
+    }
+
+    /* Reset pending buffer list and requests list */
+    mPendingRequestsList.clear();
+    /* Reset pending frame Drop list and requests list */
+    mPendingFrameDropList.clear();
+
+    mPendingBuffersMap.num_buffers = 0;
+    mPendingBuffersMap.mPendingBufferList.clear();
+    ALOGV("%s: Cleared all the pending buffers ", __func__);
+
+    mFirstRequest = true;
+    pthread_mutex_unlock(&mMutex);
+    return 0;
+}
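+
+// Editor's note: flush() above returns each orphaned buffer by sending an
+// ERROR notify followed by a capture result whose buffer carries
+// CAMERA3_BUFFER_STATUS_ERROR. The helper below restates that two-step
+// contract in isolation; it is an illustrative sketch with a hypothetical
+// name, not called anywhere in this HAL, and uses only the camera3 types
+// already included by this file.
+static void exampleReturnBufferAsError(const camera3_callback_ops_t *ops,
+                                       uint32_t frame_number,
+                                       camera3_stream_t *stream,
+                                       buffer_handle_t *buffer)
+{
+    camera3_notify_msg_t msg;
+    memset(&msg, 0, sizeof(msg));
+    msg.type = CAMERA3_MSG_ERROR;
+    msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+    msg.message.error.error_stream = stream;
+    msg.message.error.frame_number = frame_number;
+    ops->notify(ops, &msg);
+
+    camera3_stream_buffer_t errBuf;
+    errBuf.stream = stream;
+    errBuf.buffer = buffer;
+    errBuf.status = CAMERA3_BUFFER_STATUS_ERROR;
+    errBuf.acquire_fence = -1;
+    errBuf.release_fence = -1;
+
+    camera3_capture_result_t result;
+    memset(&result, 0, sizeof(result));
+    result.frame_number = frame_number;
+    result.num_output_buffers = 1;
+    result.output_buffers = &errBuf;
+    ops->process_capture_result(ops, &result);
+}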
+
+/*===========================================================================
+ * FUNCTION   : captureResultCb
+ *
+ * DESCRIPTION: Callback handler for all capture result
+ *              (streams, as well as metadata)
+ *
+ * PARAMETERS :
+ *   @metadata : metadata information
+ *   @buffer   : actual gralloc buffer to be returned to frameworks.
+ *               NULL if metadata.
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
+                camera3_stream_buffer_t *buffer, uint32_t frame_number)
+{
+    pthread_mutex_lock(&mMutex);
+
+    /* Assume flush() is called before any reprocessing. Send
+     * notify and result immediately upon receipt of any callback*/
+    if (mLoopBackResult) {
+        /* Send notify */
+        camera3_notify_msg_t notify_msg;
+        notify_msg.type = CAMERA3_MSG_SHUTTER;
+        notify_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
+        notify_msg.message.shutter.timestamp = mLoopBackTimestamp;
+        mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+        /* Send capture result */
+        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
+        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
+        free(mLoopBackResult);
+        mLoopBackResult = NULL;
+    }
+
+    if (metadata_buf)
+        handleMetadataWithLock(metadata_buf);
+    else
+        handleBufferWithLock(buffer, frame_number);
+
+    pthread_mutex_unlock(&mMutex);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : translateFromHalMetadata
+ *
+ * DESCRIPTION: Translate metadata reported by the HAL backend into the
+ *              framework-defined result metadata format.
+ *
+ * PARAMETERS :
+ *   @metadata       : metadata information from callback
+ *   @timestamp      : sensor timestamp for this result
+ *   @request_id     : request id of the corresponding capture request
+ *   @jpegMetadata   : JPEG settings cached from the original request
+ *   @pipeline_depth : pipeline depth to report for this result
+ *
+ * RETURN     : camera_metadata_t*
+ *              metadata in a format specified by fwk
+ *==========================================================================*/
+camera_metadata_t*
+QCamera3HardwareInterface::translateFromHalMetadata(
+                                 metadata_buffer_t *metadata,
+                                 nsecs_t timestamp,
+                                 int32_t request_id,
+                                 const CameraMetadata& jpegMetadata,
+                                 uint8_t pipeline_depth)
+{
+    CameraMetadata camMetadata;
+    camera_metadata_t* resultMetadata;
+
+    if (jpegMetadata.entryCount())
+        camMetadata.append(jpegMetadata);
+
+    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
+    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
+    camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
+
+    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
+    uint8_t next_entry;
+    while (curr_entry != CAM_INTF_PARM_MAX) {
+       switch (curr_entry) {
+         case CAM_INTF_META_FRAME_NUMBER:{
+             int64_t frame_number = *(uint32_t *) POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
+             camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
+             break;
+         }
+         case CAM_INTF_META_FACE_DETECTION:{
+             cam_face_detection_data_t *faceDetectionInfo =
+                (cam_face_detection_data_t *)POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
+             uint8_t numFaces = faceDetectionInfo->num_faces_detected;
+             int32_t faceIds[MAX_ROI];
+             uint8_t faceScores[MAX_ROI];
+             int32_t faceRectangles[MAX_ROI * 4];
+             int32_t faceLandmarks[MAX_ROI * 6];
+             int j = 0, k = 0;
+             for (int i = 0; i < numFaces; i++) {
+                 faceIds[i] = faceDetectionInfo->faces[i].face_id;
+                 faceScores[i] = faceDetectionInfo->faces[i].score;
+                 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
+                         faceRectangles+j, -1);
+                 convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
+                 j+= 4;
+                 k+= 6;
+             }
+
+             if (numFaces <= 0) {
+                memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
+                memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
+                memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
+                memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
+             }
+
+             camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
+             camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
+             camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
+               faceRectangles, numFaces*4);
+             camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
+               faceLandmarks, numFaces*6);
+
+            break;
+            }
+         case CAM_INTF_META_COLOR_CORRECT_MODE:{
+             uint8_t  *color_correct_mode =
+                           (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
+             camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
+             break;
+          }
+
+         // 3A state is sent in urgent partial result (uses quirk)
+         case CAM_INTF_META_AEC_PRECAPTURE_ID:
+         case CAM_INTF_META_AEC_STATE:
+         case CAM_INTF_PARM_AEC_LOCK:
+         case CAM_INTF_PARM_EV:
+         case CAM_INTF_PARM_FOCUS_MODE:
+         case CAM_INTF_META_AF_STATE:
+         case CAM_INTF_META_AF_TRIGGER_ID:
+         case CAM_INTF_PARM_WHITE_BALANCE:
+         case CAM_INTF_META_AWB_REGIONS:
+         case CAM_INTF_META_AWB_STATE:
+         case CAM_INTF_PARM_AWB_LOCK:
+         case CAM_INTF_META_PRECAPTURE_TRIGGER:
+         case CAM_INTF_META_AEC_MODE:
+         case CAM_INTF_PARM_LED_MODE:
+         case CAM_INTF_PARM_REDEYE_REDUCTION:
+         case CAM_INTF_META_AF_TRIGGER_NOTICE: {
+           ALOGV("%s: 3A metadata: %d, do not process", __func__, curr_entry);
+           break;
+         }
+
+          case CAM_INTF_META_MODE: {
+             uint8_t *mode =(uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata);
+             camMetadata.update(ANDROID_CONTROL_MODE, mode, 1);
+             break;
+          }
+
+          case CAM_INTF_META_EDGE_MODE: {
+             cam_edge_application_t  *edgeApplication =
+                (cam_edge_application_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
+             uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
+             camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
+             camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
+             break;
+          }
+          case CAM_INTF_META_FLASH_POWER: {
+             uint8_t  *flashPower =
+                  (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata);
+             camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
+             break;
+          }
+          case CAM_INTF_META_FLASH_FIRING_TIME: {
+             int64_t  *flashFiringTime =
+                  (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
+             camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
+             break;
+          }
+          case CAM_INTF_META_FLASH_STATE: {
+             uint8_t  flashState =
+                *((uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata));
+             if (!gCamCapability[mCameraId]->flash_available) {
+                 flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
+             }
+             camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
+             break;
+          }
+          case CAM_INTF_META_FLASH_MODE:{
+             uint8_t flashMode = *((uint8_t*)
+                 POINTER_OF(CAM_INTF_META_FLASH_MODE, metadata));
+             uint8_t fwk_flashMode = (uint8_t)lookupFwkName(FLASH_MODES_MAP,
+                                          sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
+                                          flashMode);
+             camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
+             break;
+          }
+          case CAM_INTF_META_HOTPIXEL_MODE: {
+              uint8_t  *hotPixelMode =
+                 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata);
+              camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
+              break;
+          }
+          case CAM_INTF_META_LENS_APERTURE:{
+             float  *lensAperture =
+                (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata);
+             camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
+             break;
+          }
+          case CAM_INTF_META_LENS_FILTERDENSITY: {
+             float  *filterDensity =
+                (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
+             camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
+             break;
+          }
+          case CAM_INTF_META_LENS_FOCAL_LENGTH:{
+             float  *focalLength =
+                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
+             camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
+             break;
+          }
+          case CAM_INTF_META_LENS_FOCUS_DISTANCE: {
+             float  *focusDistance =
+                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata);
+             camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
+             break;
+          }
+          case CAM_INTF_META_LENS_FOCUS_RANGE: {
+             float  *focusRange =
+                (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata);
+             camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
+             break;
+          }
+          case CAM_INTF_META_LENS_STATE: {
+             uint8_t *lensState = (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_STATE, metadata);
+             camMetadata.update(ANDROID_LENS_STATE , lensState, 1);
+             break;
+          }
+          case CAM_INTF_META_LENS_OPT_STAB_MODE: {
+             uint8_t  *opticalStab =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
+             camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
+             break;
+          }
+          case CAM_INTF_META_NOISE_REDUCTION_MODE: {
+             uint8_t  *noiseRedMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
+             camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
+             break;
+          }
+          case CAM_INTF_META_NOISE_REDUCTION_STRENGTH: {
+             uint8_t  *noiseRedStrength =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
+             camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
+             break;
+          }
+          case CAM_INTF_META_SCALER_CROP_REGION: {
+             cam_crop_region_t  *hScalerCropRegion =(cam_crop_region_t *)
+             POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata);
+             int32_t scalerCropRegion[4];
+             scalerCropRegion[0] = hScalerCropRegion->left;
+             scalerCropRegion[1] = hScalerCropRegion->top;
+             scalerCropRegion[2] = hScalerCropRegion->width;
+             scalerCropRegion[3] = hScalerCropRegion->height;
+             camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
+             break;
+          }
+          case CAM_INTF_META_AEC_ROI: {
+            cam_area_t  *hAeRegions =
+                (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata);
+            int32_t aeRegions[5];
+            convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
+            camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5);
+            ALOGV("%s: Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
+                __func__, aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
+                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width, hAeRegions->rect.height);
+            break;
+          }
+          case CAM_INTF_META_AF_ROI:{
+            /*af regions*/
+            cam_area_t  *hAfRegions =
+                (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata);
+            int32_t afRegions[5];
+            convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
+            camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5);
+            ALOGV("%s: Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d, %d, %d, %d] HAL: [%d, %d, %d, %d]",
+                __func__, afRegions[0], afRegions[1], afRegions[2], afRegions[3],
+                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width, hAfRegions->rect.height);
+            break;
+          }
+          case CAM_INTF_META_SENSOR_EXPOSURE_TIME:{
+             int64_t  *sensorExpTime =
+                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
+             ALOGV("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
+             camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
+             break;
+          }
+          case CAM_INTF_META_SENSOR_FRAME_DURATION:{
+             int64_t  *sensorFrameDuration =
+                (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
+             ALOGV("%s: sensorFrameDuration = %lld", __func__, *sensorFrameDuration);
+             camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
+             break;
+          }
+          case CAM_INTF_META_SENSOR_SENSITIVITY:{
+             int32_t  *sensorSensitivity =
+                (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, metadata);
+             ALOGV("%s: sensorSensitivity = %d", __func__, *sensorSensitivity);
+             camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
+             break;
+          }
+          case CAM_INTF_PARM_BESTSHOT_MODE: {
+              uint8_t *sceneMode =
+                  (uint8_t *)POINTER_OF(CAM_INTF_PARM_BESTSHOT_MODE, metadata);
+              uint8_t fwkSceneMode =
+                  (uint8_t)lookupFwkName(SCENE_MODES_MAP,
+                  sizeof(SCENE_MODES_MAP)/
+                  sizeof(SCENE_MODES_MAP[0]), *sceneMode);
+              camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
+                   &fwkSceneMode, 1);
+              ALOGV("%s: Metadata : ANDROID_CONTROL_SCENE_MODE: %d", __func__, fwkSceneMode);
+              break;
+          }
+
+          case CAM_INTF_META_SHADING_MODE: {
+             uint8_t  *shadingMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata);
+             camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
+             break;
+          }
+
+          case CAM_INTF_META_LENS_SHADING_MAP_MODE: {
+             uint8_t  *shadingMapMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata);
+             camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, shadingMapMode, 1);
+             break;
+          }
+
+          case CAM_INTF_META_STATS_FACEDETECT_MODE: {
+             uint8_t  *faceDetectMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
+             uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
+                                                        sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]),
+                                                        *faceDetectMode);
+             camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
+             break;
+          }
+          case CAM_INTF_META_STATS_HISTOGRAM_MODE: {
+             uint8_t  *histogramMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
+             camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
+             break;
+          }
+          case CAM_INTF_META_STATS_SHARPNESS_MAP_MODE:{
+               uint8_t  *sharpnessMapMode =
+                  (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
+               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
+                                  sharpnessMapMode, 1);
+               break;
+           }
+          case CAM_INTF_META_STATS_SHARPNESS_MAP:{
+               cam_sharpness_map_t  *sharpnessMap = (cam_sharpness_map_t *)
+               POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
+               camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
+                                  (int32_t*)sharpnessMap->sharpness,
+                                  CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
+               break;
+          }
+          case CAM_INTF_META_LENS_SHADING_MAP: {
+               cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
+               POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata);
+               int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
+               int map_width  = gCamCapability[mCameraId]->lens_shading_map_size.width;
+               camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
+                                  (float*)lensShadingMap->lens_shading,
+                                  4*map_width*map_height);
+               break;
+          }
+
+          case CAM_INTF_META_TONEMAP_MODE: {
+             uint8_t  *toneMapMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_TONEMAP_MODE, metadata);
+             camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
+             break;
+          }
+
+          case CAM_INTF_META_TONEMAP_CURVES:{
+             //Populate CAM_INTF_META_TONEMAP_CURVES
+             /* ch0 = G, ch 1 = B, ch 2 = R*/
+             cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
+             POINTER_OF(CAM_INTF_META_TONEMAP_CURVES, metadata);
+             camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
+                                (float*)tonemap->curves[0].tonemap_points,
+                                tonemap->tonemap_points_cnt * 2);
+
+             camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
+                                (float*)tonemap->curves[1].tonemap_points,
+                                tonemap->tonemap_points_cnt * 2);
+
+             camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
+                                (float*)tonemap->curves[2].tonemap_points,
+                                tonemap->tonemap_points_cnt * 2);
+             break;
+          }
+
+          case CAM_INTF_META_COLOR_CORRECT_GAINS:{
+             cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
+             POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
+             camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
+             break;
+          }
+          case CAM_INTF_META_COLOR_CORRECT_TRANSFORM:{
+              cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
+              POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
+              camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
+                       (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3);
+              break;
+          }
+
+          /* DNG file related metadata */
+          case CAM_INTF_META_PROFILE_TONE_CURVE: {
+             cam_profile_tone_curve *toneCurve = (cam_profile_tone_curve *)
+             POINTER_OF(CAM_INTF_META_PROFILE_TONE_CURVE, metadata);
+             camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
+                                (float*)toneCurve->curve.tonemap_points,
+                                toneCurve->tonemap_points_cnt * 2);
+             break;
+          }
+
+          case CAM_INTF_META_PRED_COLOR_CORRECT_GAINS:{
+             cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*)
+             POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata);
+             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
+                       predColorCorrectionGains->gains, 4);
+             break;
+          }
+          case CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM:{
+             cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*)
+                   POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata);
+             camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
+                                  (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3);
+             break;
+
+          }
+
+          case CAM_INTF_META_OTP_WB_GRGB:{
+             float *otpWbGrGb = (float*) POINTER_OF(CAM_INTF_META_OTP_WB_GRGB, metadata);
+             camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
+             break;
+          }
+
+          case CAM_INTF_META_BLACK_LEVEL_LOCK:{
+             uint8_t *blackLevelLock = (uint8_t*)
+               POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata);
+             camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1);
+             break;
+          }
+          case CAM_INTF_PARM_ANTIBANDING: {
+            uint8_t *hal_ab_mode =
+              (uint8_t *)POINTER_OF(CAM_INTF_PARM_ANTIBANDING, metadata);
+            uint8_t fwk_ab_mode = (uint8_t)lookupFwkName(ANTIBANDING_MODES_MAP,
+                     sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
+                     *hal_ab_mode);
+            camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
+                &fwk_ab_mode, 1);
+            break;
+          }
+
+          case CAM_INTF_META_CAPTURE_INTENT:{
+             uint8_t *captureIntent = (uint8_t*)
+               POINTER_OF(CAM_INTF_META_CAPTURE_INTENT, metadata);
+             camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, captureIntent, 1);
+             break;
+          }
+
+          case CAM_INTF_META_SCENE_FLICKER:{
+             uint8_t *sceneFlicker = (uint8_t*)
+             POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata);
+             camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1);
+             break;
+          }
+          case CAM_INTF_PARM_EFFECT: {
+             uint8_t *effectMode = (uint8_t*)
+                  POINTER_OF(CAM_INTF_PARM_EFFECT, metadata);
+             uint8_t fwk_effectMode = (uint8_t)lookupFwkName(EFFECT_MODES_MAP,
+                                                    sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
+                                                    *effectMode);
+             camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
+             break;
+          }
+          case CAM_INTF_META_TEST_PATTERN_DATA: {
+             cam_test_pattern_data_t *testPatternData = (cam_test_pattern_data_t *)
+                 POINTER_OF(CAM_INTF_META_TEST_PATTERN_DATA, metadata);
+             int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
+                     sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
+                     testPatternData->mode);
+             camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE,
+                     &fwk_testPatternMode, 1);
+            int32_t fwk_testPatternData[4];
+            fwk_testPatternData[0] = testPatternData->r;
+            fwk_testPatternData[3] = testPatternData->b;
+            switch (gCamCapability[mCameraId]->color_arrangement) {
+            case CAM_FILTER_ARRANGEMENT_RGGB:
+            case CAM_FILTER_ARRANGEMENT_GRBG:
+                fwk_testPatternData[1] = testPatternData->gr;
+                fwk_testPatternData[2] = testPatternData->gb;
+                break;
+            case CAM_FILTER_ARRANGEMENT_GBRG:
+            case CAM_FILTER_ARRANGEMENT_BGGR:
+                fwk_testPatternData[2] = testPatternData->gr;
+                fwk_testPatternData[1] = testPatternData->gb;
+                break;
+            default:
+                ALOGE("%s: color arrangement %d is not supported", __func__,
+                    gCamCapability[mCameraId]->color_arrangement);
+                break;
+            }
+            camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
+            break;
+
+          }
+          case CAM_INTF_META_JPEG_GPS_COORDINATES: {
+              double *gps_coords = (double *)POINTER_OF(
+                      CAM_INTF_META_JPEG_GPS_COORDINATES, metadata);
+              camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
+              break;
+          }
+          case CAM_INTF_META_JPEG_GPS_PROC_METHODS: {
+              char *gps_methods = (char *)POINTER_OF(
+                      CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata);
+              String8 str(gps_methods);
+              camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
+              break;
+          }
+          case CAM_INTF_META_JPEG_GPS_TIMESTAMP: {
+              int64_t *gps_timestamp = (int64_t *)POINTER_OF(
+                      CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata);
+              camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
+              break;
+          }
+          case CAM_INTF_META_JPEG_ORIENTATION: {
+              int32_t *jpeg_orientation = (int32_t *)POINTER_OF(
+                      CAM_INTF_META_JPEG_ORIENTATION, metadata);
+              camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
+              break;
+          }
+          case CAM_INTF_META_JPEG_QUALITY: {
+              uint8_t *jpeg_quality = (uint8_t *)POINTER_OF(
+                      CAM_INTF_META_JPEG_QUALITY, metadata);
+              camMetadata.update(ANDROID_JPEG_QUALITY, jpeg_quality, 1);
+              break;
+          }
+          case CAM_INTF_META_JPEG_THUMB_QUALITY: {
+              uint8_t *thumb_quality = (uint8_t *)POINTER_OF(
+                      CAM_INTF_META_JPEG_THUMB_QUALITY, metadata);
+              camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, thumb_quality, 1);
+              break;
+          }
+
+          case CAM_INTF_META_JPEG_THUMB_SIZE: {
+              cam_dimension_t *thumb_size = (cam_dimension_t *)POINTER_OF(
+                      CAM_INTF_META_JPEG_THUMB_SIZE, metadata);
+              camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, (int32_t *)thumb_size, 2);
+              break;
+          }
+
+          case CAM_INTF_META_PRIVATE_DATA: {
+             uint8_t *privateData = (uint8_t *)
+                 POINTER_OF(CAM_INTF_META_PRIVATE_DATA, metadata);
+             camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
+                 privateData, MAX_METADATA_PAYLOAD_SIZE);
+             break;
+          }
+
+          case CAM_INTF_META_NEUTRAL_COL_POINT:{
+             cam_neutral_col_point_t *neuColPoint = (cam_neutral_col_point_t*)
+                 POINTER_OF(CAM_INTF_META_NEUTRAL_COL_POINT, metadata);
+             camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+                     (camera_metadata_rational_t*)neuColPoint->neutral_col_point, 3);
+             break;
+          }
+
+          default:
+             ALOGV("%s: This is not a valid metadata type to report to fwk, %d",
+                   __func__, curr_entry);
+             break;
+       }
+       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
+       curr_entry = next_entry;
+    }
+
+    /* Constant metadata values to be updated */
+    uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+    camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
+
+    uint8_t hotPixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
+    camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelMode, 1);
+
+    uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
+
+    int32_t hotPixelMap[2];
+    camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
+
+    resultMetadata = camMetadata.release();
+    return resultMetadata;
+}
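+
+// Editor's note: a minimal sketch of the android::CameraMetadata pattern used
+// in translateFromHalMetadata() above: build the result with update(), then
+// release() ownership of the raw camera_metadata_t so it can be handed to the
+// framework (which later frees it with free_camera_metadata()). The helper
+// name is hypothetical; this is illustrative only.
+static camera_metadata_t *exampleBuildResultMetadata(nsecs_t timestamp,
+                                                     int32_t request_id)
+{
+    CameraMetadata md;
+    md.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);   // one int64 entry
+    md.update(ANDROID_REQUEST_ID, &request_id, 1);        // one int32 entry
+    // release() transfers ownership; 'md' will no longer free the buffer.
+    return md.release();
+}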
+
+/*===========================================================================
+ * FUNCTION   : translateCbUrgentMetadataToResultMetadata
+ *
+ * DESCRIPTION: Translate the 3A (AE/AF/AWB) related metadata into framework
+ *              format, to be delivered as an urgent partial result.
+ *
+ * PARAMETERS :
+ *   @metadata : metadata information from callback
+ *
+ * RETURN     : camera_metadata_t*
+ *              metadata in a format specified by fwk
+ *==========================================================================*/
+camera_metadata_t*
+QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
+                                (metadata_buffer_t *metadata)
+{
+    CameraMetadata camMetadata;
+    camera_metadata_t* resultMetadata;
+    uint8_t *aeMode = NULL;
+    int32_t *flashMode = NULL;
+    int32_t *redeye = NULL;
+
+    uint8_t partial_result_tag = ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL;
+    camMetadata.update(ANDROID_QUIRKS_PARTIAL_RESULT, &partial_result_tag, 1);
+
+    uint8_t curr_entry = GET_FIRST_PARAM_ID(metadata);
+    uint8_t next_entry;
+    while (curr_entry != CAM_INTF_PARM_MAX) {
+      switch (curr_entry) {
+        case CAM_INTF_META_AEC_PRECAPTURE_ID: {
+            int32_t  *ae_precapture_id =
+              (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
+                                          ae_precapture_id, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID", __func__);
+          break;
+        }
+        case CAM_INTF_META_AEC_STATE:{
+            uint8_t *ae_state =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_STATE", __func__);
+            break;
+        }
+        case CAM_INTF_PARM_AEC_LOCK: {
+            uint8_t  *ae_lock =
+              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AEC_LOCK, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_LOCK,
+                                          ae_lock, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_LOCK", __func__);
+            break;
+        }
+        case CAM_INTF_PARM_FPS_RANGE: {
+            int32_t fps_range[2];
+            cam_fps_range_t * float_range =
+              (cam_fps_range_t *)POINTER_OF(CAM_INTF_PARM_FPS_RANGE, metadata);
+            fps_range[0] = (int32_t)float_range->min_fps;
+            fps_range[1] = (int32_t)float_range->max_fps;
+            camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+                                          fps_range, 2);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
+                __func__, fps_range[0], fps_range[1]);
+            break;
+        }
+        case CAM_INTF_PARM_EV: {
+            int32_t  *expCompensation =
+              (int32_t *)POINTER_OF(CAM_INTF_PARM_EV, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+                                          expCompensation, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION",
+                __func__);
+            break;
+        }
+        case CAM_INTF_PARM_FOCUS_MODE:{
+            uint8_t  *focusMode =
+                (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata);
+            uint8_t fwkAfMode = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
+               sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), *focusMode);
+            camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_MODE", __func__);
+            break;
+        }
+        case CAM_INTF_META_AF_STATE: {
+            uint8_t  *afState =
+               (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata);
+            camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_STATE", __func__);
+            break;
+        }
+        case CAM_INTF_META_AF_TRIGGER_ID: {
+            int32_t  *afTriggerId =
+                 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata);
+            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID", __func__);
+            break;
+        }
+        case CAM_INTF_PARM_WHITE_BALANCE: {
+           uint8_t  *whiteBalance =
+                (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata);
+             uint8_t fwkWhiteBalanceMode =
+                    (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                    sizeof(WHITE_BALANCE_MODES_MAP)/
+                    sizeof(WHITE_BALANCE_MODES_MAP[0]), *whiteBalance);
+             camMetadata.update(ANDROID_CONTROL_AWB_MODE,
+                 &fwkWhiteBalanceMode, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_MODE", __func__);
+             break;
+        }
+        case CAM_INTF_META_AWB_REGIONS: {
+           /*awb regions*/
+           cam_area_t  *hAwbRegions =
+               (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata);
+           int32_t awbRegions[5];
+           convertToRegions(hAwbRegions->rect, awbRegions,hAwbRegions->weight);
+           camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5);
+           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_REGIONS", __func__);
+           break;
+        }
+
+
+        case CAM_INTF_META_AWB_STATE: {
+           uint8_t  *whiteBalanceState =
+              (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata);
+           camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1);
+           ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_STATE", __func__);
+           break;
+        }
+
+
+        case CAM_INTF_PARM_AWB_LOCK: {
+            uint8_t  *awb_lock =
+              (uint8_t *)POINTER_OF(CAM_INTF_PARM_AWB_LOCK, metadata);
+            camMetadata.update(ANDROID_CONTROL_AWB_LOCK, awb_lock, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AWB_LOCK", __func__);
+            break;
+        }
+        case CAM_INTF_META_PRECAPTURE_TRIGGER: {
+            uint8_t *precaptureTrigger =
+                (uint8_t *)POINTER_OF(CAM_INTF_META_PRECAPTURE_TRIGGER, metadata);
+            camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
+                 precaptureTrigger, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER",
+                __func__);
+            break;
+        }
+        case CAM_INTF_META_AF_TRIGGER_NOTICE: {
+            uint8_t *af_trigger =
+              (uint8_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_NOTICE, metadata);
+            camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
+                af_trigger, 1);
+            ALOGV("%s: urgent Metadata : ANDROID_CONTROL_AF_TRIGGER = %d",
+                __func__, *af_trigger);
+            break;
+        }
+        case CAM_INTF_META_AEC_MODE:{
+            aeMode = (uint8_t*)
+            POINTER_OF(CAM_INTF_META_AEC_MODE, metadata);
+            break;
+        }
+        case CAM_INTF_PARM_LED_MODE:{
+            flashMode = (int32_t*)
+            POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata);
+            break;
+        }
+        case CAM_INTF_PARM_REDEYE_REDUCTION:{
+            redeye = (int32_t*)
+            POINTER_OF(CAM_INTF_PARM_REDEYE_REDUCTION, metadata);
+            break;
+        }
+        default:
+            ALOGV("%s: Normal Metadata %d, do not process",
+              __func__, curr_entry);
+            break;
+       }
+       next_entry = GET_NEXT_PARAM_ID(curr_entry, metadata);
+       curr_entry = next_entry;
+    }
+
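+    // Deduce ANDROID_CONTROL_AE_MODE from the pointers collected above, in
+    // priority order: red-eye reduction, then flash auto/on, then plain AE on/off.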
+    uint8_t fwk_aeMode;
+    if (redeye != NULL && *redeye == 1) {
+        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else if (flashMode != NULL &&
+            ((*flashMode == CAM_FLASH_MODE_AUTO)||
+             (*flashMode == CAM_FLASH_MODE_ON))) {
+        fwk_aeMode = (uint8_t)lookupFwkName(AE_FLASH_MODE_MAP,
+                sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),*flashMode);
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_ON) {
+        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else if (aeMode != NULL && *aeMode == CAM_AE_MODE_OFF) {
+        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
+        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
+    } else {
+        ALOGE("%s: Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%p, flashMode:%p, aeMode:%p!!!",__func__, redeye, flashMode, aeMode);
+    }
+
+    resultMetadata = camMetadata.release();
+    return resultMetadata;
+}
+
+/*===========================================================================
+ * FUNCTION   : dumpMetadataToFile
+ *
+ * DESCRIPTION: Dumps tuning metadata to file system
+ *
+ * PARAMETERS :
+ *   @meta           : tuning metadata
+ *   @dumpFrameCount : current dump frame count
+ *   @enabled        : Enable mask
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
+                                                   uint32_t &dumpFrameCount,
+                                                   int32_t enabled,
+                                                   const char *type,
+                                                   uint32_t frameNumber)
+{
+    uint32_t frm_num = 0;
+
+    //Some sanity checks
+    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
+        ALOGE("%s : Tuning sensor data size bigger than expected %d: %d",
+              __func__,
+              meta.tuning_sensor_data_size,
+              TUNING_SENSOR_DATA_MAX);
+        return;
+    }
+
+    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
+        ALOGE("%s : Tuning VFE data size bigger than expected %d: %d",
+              __func__,
+              meta.tuning_vfe_data_size,
+              TUNING_VFE_DATA_MAX);
+        return;
+    }
+
+    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
+        ALOGE("%s : Tuning CPP data size bigger than expected %d: %d",
+              __func__,
+              meta.tuning_cpp_data_size,
+              TUNING_CPP_DATA_MAX);
+        return;
+    }
+
+    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
+        ALOGE("%s : Tuning CAC data size bigger than expected %d: %d",
+              __func__,
+              meta.tuning_cac_data_size,
+              TUNING_CAC_DATA_MAX);
+        return;
+    }
+    //
+
+    if(enabled){
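+        // The upper 16 bits of the enable mask select how many frames to dump;
+        // 0 defaults to 10 frames and values above 256 are clamped to 256.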
+        frm_num = ((enabled & 0xffff0000) >> 16);
+        if(frm_num == 0) {
+            frm_num = 10; //default 10 frames
+        }
+        if(frm_num > 256) {
+            frm_num = 256; //256 buffers cycle around
+        }
+        if((frm_num == 256) && (dumpFrameCount >= frm_num)) {
+            // reset frame count if cycling
+            dumpFrameCount = 0;
+        }
+        ALOGV("DumpFrmCnt = %d, frm_num = %d",dumpFrameCount, frm_num);
+        if (dumpFrameCount < frm_num) {
+            char timeBuf[FILENAME_MAX];
+            char buf[FILENAME_MAX];
+            memset(buf, 0, sizeof(buf));
+            memset(timeBuf, 0, sizeof(timeBuf));
+            time_t current_time;
+            struct tm * timeinfo;
+            time (&current_time);
+            timeinfo = localtime (&current_time);
+            strftime (timeBuf, sizeof(timeBuf),"/data/%Y%m%d%H%M%S", timeinfo);
+            String8 filePath(timeBuf);
+            snprintf(buf,
+                     sizeof(buf),
+                     "%d_HAL_META_%s_%d.bin",
+                     dumpFrameCount,
+                     type,
+                     frameNumber);
+            filePath.append(buf);
+            int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
+            if (file_fd > 0) {
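+                // Dump file layout: tuning data version, the four section sizes
+                // (sensor/VFE/CPP/CAC), then each data section copied from its
+                // fixed offset inside meta.data.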
+                int written_len = 0;
+                meta.tuning_data_version = TUNING_DATA_VERSION;
+                void *data = (void *)((uint8_t *)&meta.tuning_data_version);
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
+                ALOGV("tuning_sensor_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
+                ALOGV("tuning_vfe_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
+                ALOGV("tuning_cpp_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
+                ALOGV("tuning_cac_data_size %d",(int)(*(int *)data));
+                written_len += write(file_fd, data, sizeof(uint32_t));
+                int total_size = meta.tuning_sensor_data_size;
+                data = (void *)((uint8_t *)&meta.data);
+                written_len += write(file_fd, data, total_size);
+                total_size = meta.tuning_vfe_data_size;
+                data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                total_size = meta.tuning_cpp_data_size;
+                data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                total_size = meta.tuning_cac_data_size;
+                data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
+                written_len += write(file_fd, data, total_size);
+                close(file_fd);
+            } else {
+                ALOGE("%s: failed to open file for metadata dumping", __func__);
+            }
+            dumpFrameCount++;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanAndSortStreamInfo
+ *
+ * DESCRIPTION: helper method to clean up invalid streams in stream_info,
+ *              and sort them so that the raw stream is at the end of the list.
+ *              This is a workaround for a camera daemon constraint.
+ *
+ * PARAMETERS : None
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::cleanAndSortStreamInfo()
+{
+    List<stream_info_t *> newStreamInfo;
+
+    /*clean up invalid streams*/
+    for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
+            it != mStreamInfo.end();) {
+        if(((*it)->status) == INVALID){
+            QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
+            delete channel;
+            free(*it);
+            it = mStreamInfo.erase(it);
+        } else {
+            it++;
+        }
+    }
+
+    // Move preview/video/callback/snapshot streams into newList
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end();) {
+        if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
+                (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
+            newStreamInfo.push_back(*it);
+            it = mStreamInfo.erase(it);
+        } else
+            it++;
+    }
+    // Move raw streams into newList
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+            it != mStreamInfo.end();) {
+        newStreamInfo.push_back(*it);
+        it = mStreamInfo.erase(it);
+    }
+
+    mStreamInfo = newStreamInfo;
+}
+
+/*===========================================================================
+ * FUNCTION   : extractJpegMetadata
+ *
+ * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
+ *              JPEG metadata is cached in the HAL and returned as part of the
+ *              capture result when metadata is returned from the camera daemon.
+ *
+ * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
+ *              @request:      capture request
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::extractJpegMetadata(
+        CameraMetadata& jpegMetadata,
+        const camera3_capture_request_t *request)
+{
+    CameraMetadata frame_settings;
+    frame_settings = request->settings;
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
+        jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
+                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
+                frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
+        jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
+                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
+                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
+        jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
+                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
+                frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
+
+    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
+        jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
+                frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
+                frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
+
+    if (frame_settings.exists(ANDROID_JPEG_QUALITY))
+        jpegMetadata.update(ANDROID_JPEG_QUALITY,
+                frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
+                frame_settings.find(ANDROID_JPEG_QUALITY).count);
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
+        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE))
+        jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32,
+                frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
+}
+
+/*===========================================================================
+ * FUNCTION   : convertToRegions
+ *
+ * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
+ *
+ * PARAMETERS :
+ *   @rect   : cam_rect_t struct to convert
+ *   @region : int32_t destination array
+ *   @weight : if we are converting from cam_area_t, weight is valid
+ *             else weight = -1
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){
+    region[0] = rect.left;
+    region[1] = rect.top;
+    region[2] = rect.left + rect.width;
+    region[3] = rect.top + rect.height;
+    if (weight > -1) {
+        region[4] = weight;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : convertFromRegions
+ *
+ * DESCRIPTION: helper method to convert a framework region array from the
+ *              capture request settings into cam_area_t
+ *
+ * PARAMETERS :
+ *   @roi      : cam_area_t destination struct
+ *   @settings : capture request settings containing the region array
+ *   @tag      : metadata tag identifying the region to convert
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi,
+                                                   const camera_metadata_t *settings,
+                                                   uint32_t tag){
+    CameraMetadata frame_settings;
+    frame_settings = settings;
+    int32_t x_min = frame_settings.find(tag).data.i32[0];
+    int32_t y_min = frame_settings.find(tag).data.i32[1];
+    int32_t x_max = frame_settings.find(tag).data.i32[2];
+    int32_t y_max = frame_settings.find(tag).data.i32[3];
+    roi->weight = frame_settings.find(tag).data.i32[4];
+    roi->rect.left = x_min;
+    roi->rect.top = y_min;
+    roi->rect.width = x_max - x_min;
+    roi->rect.height = y_max - y_min;
+}
+
+/*===========================================================================
+ * FUNCTION   : resetIfNeededROI
+ *
+ * DESCRIPTION: helper method to reset the roi if it is greater than scaler
+ *              crop region
+ *
+ * PARAMETERS :
+ *   @roi       : cam_area_t struct to resize
+ *   @scalerCropRegion : cam_crop_region_t region to compare against
+ *
+ *
+ *==========================================================================*/
+bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
+                                                 const cam_crop_region_t* scalerCropRegion)
+{
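+    /* Reject the ROI when it does not overlap the scaler crop region at all;
+     * otherwise clamp its edges so it lies fully inside the crop region. */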
+    int32_t roi_x_max = roi->rect.width + roi->rect.left;
+    int32_t roi_y_max = roi->rect.height + roi->rect.top;
+    int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
+    int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
+    if ((roi_x_max < scalerCropRegion->left) ||
+        (roi_y_max < scalerCropRegion->top)  ||
+        (roi->rect.left > crop_x_max) ||
+        (roi->rect.top > crop_y_max)){
+        return false;
+    }
+    if (roi->rect.left < scalerCropRegion->left) {
+        roi->rect.left = scalerCropRegion->left;
+    }
+    if (roi->rect.top < scalerCropRegion->top) {
+        roi->rect.top = scalerCropRegion->top;
+    }
+    if (roi_x_max > crop_x_max) {
+        roi_x_max = crop_x_max;
+    }
+    if (roi_y_max > crop_y_max) {
+        roi_y_max = crop_y_max;
+    }
+    roi->rect.width = roi_x_max - roi->rect.left;
+    roi->rect.height = roi_y_max - roi->rect.top;
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : convertLandmarks
+ *
+ * DESCRIPTION: helper method to extract the landmarks from face detection info
+ *
+ * PARAMETERS :
+ *   @face   : cam_face_detection_info_t struct to convert
+ *   @landmarks : int32_t destination array
+ *
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks)
+{
+    landmarks[0] = face.left_eye_center.x;
+    landmarks[1] = face.left_eye_center.y;
+    landmarks[2] = face.right_eye_center.x;
+    landmarks[3] = face.right_eye_center.y;
+    landmarks[4] = face.mouth_center.x;
+    landmarks[5] = face.mouth_center.y;
+}
+
+#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
+/*===========================================================================
+ * FUNCTION   : initCapabilities
+ *
+ * DESCRIPTION: initialize camera capabilities in static data struct
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::initCapabilities(int cameraId)
+{
+    int rc = 0;
+    mm_camera_vtbl_t *cameraHandle = NULL;
+    QCamera3HeapMemory *capabilityHeap = NULL;
+
+    cameraHandle = camera_open(cameraId);
+    if (!cameraHandle) {
+        ALOGE("%s: camera_open failed", __func__);
+        rc = -1;
+        goto open_failed;
+    }
+
+    capabilityHeap = new QCamera3HeapMemory();
+    if (capabilityHeap == NULL) {
+        ALOGE("%s: creation of capabilityHeap failed", __func__);
+        goto heap_creation_failed;
+    }
+    /* Allocate memory for capability buffer */
+    rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false);
+    if(rc != OK) {
+        ALOGE("%s: No memory for cappability", __func__);
+        goto allocate_failed;
+    }
+
+    /* Map memory for capability buffer */
+    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
+    rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
+                                CAM_MAPPING_BUF_TYPE_CAPABILITY,
+                                capabilityHeap->getFd(0),
+                                sizeof(cam_capability_t));
+    if(rc < 0) {
+        ALOGE("%s: failed to map capability buffer", __func__);
+        goto map_failed;
+    }
+
+    /* Query Capability */
+    rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
+    if(rc < 0) {
+        ALOGE("%s: failed to query capability",__func__);
+        goto query_failed;
+    }
+    gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
+    if (!gCamCapability[cameraId]) {
+        ALOGE("%s: out of memory", __func__);
+        goto query_failed;
+    }
+    memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
+                                        sizeof(cam_capability_t));
+    rc = 0;
+
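+    /* Error labels fall through so each acquired resource is released in
+     * reverse order of acquisition; the success path reuses the same cleanup
+     * to unmap and free the capability heap and close the camera. */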
+query_failed:
+    cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
+                            CAM_MAPPING_BUF_TYPE_CAPABILITY);
+map_failed:
+    capabilityHeap->deallocate();
+allocate_failed:
+    delete capabilityHeap;
+heap_creation_failed:
+    cameraHandle->ops->close_camera(cameraHandle->camera_handle);
+    cameraHandle = NULL;
+open_failed:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : initParameters
+ *
+ * DESCRIPTION: initialize camera parameters
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::initParameters()
+{
+    int rc = 0;
+
+    //Allocate Set Param Buffer
+    mParamHeap = new QCamera3HeapMemory();
+    rc = mParamHeap->allocate(1, sizeof(metadata_buffer_t), false);
+    if(rc != OK) {
+        rc = NO_MEMORY;
+        ALOGE("Failed to allocate SETPARM Heap memory");
+        delete mParamHeap;
+        mParamHeap = NULL;
+        return rc;
+    }
+
+    //Map memory for parameters buffer
+    rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
+            CAM_MAPPING_BUF_TYPE_PARM_BUF,
+            mParamHeap->getFd(0),
+            sizeof(metadata_buffer_t));
+    if(rc < 0) {
+        ALOGE("%s:failed to map SETPARM buffer",__func__);
+        rc = FAILED_TRANSACTION;
+        mParamHeap->deallocate();
+        delete mParamHeap;
+        mParamHeap = NULL;
+        return rc;
+    }
+
+    mParameters = (metadata_buffer_t*) DATA_PTR(mParamHeap,0);
+
+    mPrevParameters = (metadata_buffer_t*)malloc(sizeof(metadata_buffer_t));
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinitParameters
+ *
+ * DESCRIPTION: de-initialize camera parameters
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::deinitParameters()
+{
+    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
+            CAM_MAPPING_BUF_TYPE_PARM_BUF);
+
+    mParamHeap->deallocate();
+    delete mParamHeap;
+    mParamHeap = NULL;
+
+    mParameters = NULL;
+
+    free(mPrevParameters);
+    mPrevParameters = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : calcMaxJpegSize
+ *
+ * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : max_jpeg_size
+ *==========================================================================*/
+int QCamera3HardwareInterface::calcMaxJpegSize()
+{
+    int32_t max_jpeg_size = 0;
+    int temp_width, temp_height;
+    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
+        temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
+        temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
+        if (temp_width * temp_height > max_jpeg_size ) {
+            max_jpeg_size = temp_width * temp_height;
+        }
+    }
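+    // Budget 1.5 bytes per pixel of the largest supported picture size, plus
+    // room for the trailing camera3_jpeg_blob_t header.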
+    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
+    return max_jpeg_size;
+}
+
+/*===========================================================================
+ * FUNCTION   : initStaticMetadata
+ *
+ * DESCRIPTION: initialize the static metadata
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::initStaticMetadata(int cameraId)
+{
+    int rc = 0;
+    CameraMetadata staticInfo;
+
+    /* android.info: hardware level */
+    uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
+    staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
+        &supportedHardwareLevel, 1);
+
+    int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK;
+    /*HAL 3 only*/
+    staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+                    &gCamCapability[cameraId]->min_focus_distance, 1);
+
+    staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
+                    &gCamCapability[cameraId]->hyper_focal_distance, 1);
+
+    /*should be using focal lengths but sensor doesn't provide that info now*/
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+                      &gCamCapability[cameraId]->focal_length,
+                      1);
+
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+                      gCamCapability[cameraId]->apertures,
+                      gCamCapability[cameraId]->apertures_count);
+
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
+                gCamCapability[cameraId]->filter_densities,
+                gCamCapability[cameraId]->filter_densities_count);
+
+    staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+                      (uint8_t*)gCamCapability[cameraId]->optical_stab_modes,
+                      gCamCapability[cameraId]->optical_stab_modes_count);
+
+    staticInfo.update(ANDROID_LENS_POSITION,
+                      gCamCapability[cameraId]->lens_position,
+                      sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float));
+
+    int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width,
+                                       gCamCapability[cameraId]->lens_shading_map_size.height};
+    staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
+                      lens_shading_map_size,
+                      sizeof(lens_shading_map_size)/sizeof(int32_t));
+
+    staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
+            gCamCapability[cameraId]->sensor_physical_size, 2);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
+            gCamCapability[cameraId]->exposure_time_range, 2);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+            &gCamCapability[cameraId]->max_frame_duration, 1);
+
+    camera_metadata_rational baseGainFactor = {
+            gCamCapability[cameraId]->base_gain_factor.numerator,
+            gCamCapability[cameraId]->base_gain_factor.denominator};
+    staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
+                      &baseGainFactor, 1);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+                     (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1);
+
+    int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
+                                  gCamCapability[cameraId]->pixel_array_size.height};
+    staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+                      pixel_array_size, 2);
+
+    int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
+                                                gCamCapability[cameraId]->active_array_size.top,
+                                                gCamCapability[cameraId]->active_array_size.width,
+                                                gCamCapability[cameraId]->active_array_size.height};
+    staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+                      active_array_size, 4);
+
+    staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
+            &gCamCapability[cameraId]->white_level, 1);
+
+    staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
+            gCamCapability[cameraId]->black_level_pattern, 4);
+
+    staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
+                      &gCamCapability[cameraId]->flash_charge_duration, 1);
+
+    staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
+                      &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
+
+    int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
+    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
+                      (int32_t*)&maxFaces, 1);
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
+                      &gCamCapability[cameraId]->histogram_size, 1);
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
+            &gCamCapability[cameraId]->max_histogram_count, 1);
+
+    int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width,
+                                    gCamCapability[cameraId]->sharpness_map_size.height};
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
+            sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
+
+    staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
+            &gCamCapability[cameraId]->max_sharpness_map_value, 1);
+
+    int32_t scalar_formats[] = {
+            ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
+            ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
+            ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
+            ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
+            HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
+    int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t);
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
+                      scalar_formats,
+                      scalar_formats_count);
+
+    int32_t available_processed_sizes[CAM_FORMAT_MAX * 2];
+    makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
+              gCamCapability[cameraId]->picture_sizes_tbl_cnt,
+              available_processed_sizes);
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
+                available_processed_sizes,
+                (gCamCapability[cameraId]->picture_sizes_tbl_cnt) * 2);
+
+    int32_t available_raw_sizes[CAM_FORMAT_MAX * 2];
+    makeTable(gCamCapability[cameraId]->raw_dim,
+              gCamCapability[cameraId]->supported_raw_dim_cnt,
+              available_raw_sizes);
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
+                available_raw_sizes,
+                gCamCapability[cameraId]->supported_raw_dim_cnt * 2);
+
+    int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
+    makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
+                 gCamCapability[cameraId]->fps_ranges_tbl_cnt,
+                 available_fps_ranges);
+    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+            available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) );
+
+    camera_metadata_rational exposureCompensationStep = {
+            gCamCapability[cameraId]->exp_compensation_step.numerator,
+            gCamCapability[cameraId]->exp_compensation_step.denominator};
+    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
+                      &exposureCompensationStep, 1);
+
+    /*TO DO*/
+    uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF};
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+                      availableVstabModes, sizeof(availableVstabModes));
+
+    /** Quirk for urgent 3A state until final interface is worked out */
+    uint8_t usePartialResultQuirk = 1;
+    staticInfo.update(ANDROID_QUIRKS_USE_PARTIAL_RESULT,
+                      &usePartialResultQuirk, 1);
+
+    /*HAL 1 and HAL 3 common*/
+    float maxZoom = 4;
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+            &maxZoom, 1);
+
+    uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_FREEFORM;
+    staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
+
+    int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
+    if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
+        max3aRegions[2] = 0; /* AF not supported */
+    staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
+            max3aRegions, 3);
+
+    uint8_t availableFaceDetectModes[] = {
+            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,
+            ANDROID_STATISTICS_FACE_DETECT_MODE_FULL };
+    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+                      availableFaceDetectModes,
+                      sizeof(availableFaceDetectModes));
+
+    int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min,
+                                           gCamCapability[cameraId]->exposure_compensation_max};
+    staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
+            exposureCompensationRange,
+            sizeof(exposureCompensationRange)/sizeof(int32_t));
+
+    uint8_t lensFacing = (facingBack) ?
+            ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
+    staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
+
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
+                available_processed_sizes,
+                (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2));
+
+    staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                      available_thumbnail_sizes,
+                      sizeof(available_thumbnail_sizes)/sizeof(int32_t));
+
+    /*android.scaler.availableStreamConfigurations*/
+    int32_t max_stream_configs_size =
+            gCamCapability[cameraId]->picture_sizes_tbl_cnt *
+            sizeof(scalar_formats)/sizeof(int32_t) * 4;
+    int32_t available_stream_configs[max_stream_configs_size];
+    int idx = 0;
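+    /* Each stream configuration entry is a (format, width, height, direction)
+     * quadruple; RAW formats iterate the raw dimension table, all other
+     * formats the processed picture size table. */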
+    for (int j = 0; j < scalar_formats_count; j++) {
+        switch (scalar_formats[j]) {
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
+            for (int i = 0;
+                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
+                available_stream_configs[idx] = scalar_formats[j];
+                available_stream_configs[idx+1] =
+                    gCamCapability[cameraId]->raw_dim[i].width;
+                available_stream_configs[idx+2] =
+                    gCamCapability[cameraId]->raw_dim[i].height;
+                available_stream_configs[idx+3] =
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
+                idx+=4;
+            }
+            break;
+        default:
+            for (int i = 0;
+                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
+                available_stream_configs[idx] = scalar_formats[j];
+                available_stream_configs[idx+1] =
+                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
+                available_stream_configs[idx+2] =
+                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
+                available_stream_configs[idx+3] =
+                    ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT;
+                idx+=4;
+            }
+            break;
+        }
+    }
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+                      available_stream_configs, idx);
+
+    /* android.scaler.availableMinFrameDurations */
+    int64_t available_min_durations[max_stream_configs_size];
+    idx = 0;
+    for (int j = 0; j < scalar_formats_count; j++) {
+        switch (scalar_formats[j]) {
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
+        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
+            for (int i = 0;
+                i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
+                available_min_durations[idx] = scalar_formats[j];
+                available_min_durations[idx+1] =
+                    gCamCapability[cameraId]->raw_dim[i].width;
+                available_min_durations[idx+2] =
+                    gCamCapability[cameraId]->raw_dim[i].height;
+                available_min_durations[idx+3] =
+                    gCamCapability[cameraId]->raw_min_duration[i];
+                idx+=4;
+            }
+            break;
+        default:
+            for (int i = 0;
+                i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
+                available_min_durations[idx] = scalar_formats[j];
+                available_min_durations[idx+1] =
+                    gCamCapability[cameraId]->picture_sizes_tbl[i].width;
+                available_min_durations[idx+2] =
+                    gCamCapability[cameraId]->picture_sizes_tbl[i].height;
+                available_min_durations[idx+3] =
+                    gCamCapability[cameraId]->picture_min_duration[i];
+                idx+=4;
+            }
+            break;
+        }
+    }
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
+                      &available_min_durations[0], idx);
+
+    int32_t max_jpeg_size = 0;
+    int temp_width, temp_height;
+    for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
+        temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
+        temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
+        if (temp_width * temp_height > max_jpeg_size ) {
+            max_jpeg_size = temp_width * temp_height;
+        }
+    }
+    max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
+    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
+                      &max_jpeg_size, 1);
+
+    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
+    size_t size = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) {
+        int32_t val = lookupFwkName(EFFECT_MODES_MAP,
+                                   sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
+                                   gCamCapability[cameraId]->supported_effects[i]);
+        if (val != NAME_NOT_FOUND) {
+            avail_effects[size] = (uint8_t)val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
+                      avail_effects,
+                      size);
+
+    uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
+    uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
+    int32_t supported_scene_modes_cnt = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) {
+        int32_t val = lookupFwkName(SCENE_MODES_MAP,
+                                sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
+                                gCamCapability[cameraId]->supported_scene_modes[i]);
+        if (val != NAME_NOT_FOUND) {
+            avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
+            supported_indexes[supported_scene_modes_cnt] = i;
+            supported_scene_modes_cnt++;
+        }
+    }
+
+    staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+                      avail_scene_modes,
+                      supported_scene_modes_cnt);
+
+    uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
+    makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
+                      supported_scene_modes_cnt,
+                      scene_mode_overrides,
+                      supported_indexes,
+                      cameraId);
+    staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
+                      scene_mode_overrides,
+                      supported_scene_modes_cnt*3);
+
+    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
+    size = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) {
+        int32_t val = lookupFwkName(ANTIBANDING_MODES_MAP,
+                                 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
+                                 gCamCapability[cameraId]->supported_antibandings[i]);
+        if (val != NAME_NOT_FOUND) {
+            avail_antibanding_modes[size] = (uint8_t)val;
+            size++;
+        }
+
+    }
+    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+                      avail_antibanding_modes,
+                      size);
+
+    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
+    size = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) {
+        int32_t val = lookupFwkName(FOCUS_MODES_MAP,
+                                sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
+                                gCamCapability[cameraId]->supported_focus_modes[i]);
+        if (val != NAME_NOT_FOUND) {
+            avail_af_modes[size] = (uint8_t)val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
+                      avail_af_modes,
+                      size);
+
+    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
+    size = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_white_balances_cnt; i++) {
+        int32_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                                    sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
+                                    gCamCapability[cameraId]->supported_white_balances[i]);
+        if (val != NAME_NOT_FOUND) {
+            avail_awb_modes[size] = (uint8_t)val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
+                      avail_awb_modes,
+                      size);
+
+    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
+    for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++)
+      available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i];
+
+    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
+            available_flash_levels,
+            gCamCapability[cameraId]->supported_flash_firing_level_cnt);
+
+    uint8_t flashAvailable;
+    if (gCamCapability[cameraId]->flash_available)
+        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
+    else
+        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
+    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
+            &flashAvailable, 1);
+
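+    /* Advertise the AE modes reported by the sensor and, when a flash unit is
+     * present, append the three flash-assisted AE modes. */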
+    uint8_t avail_ae_modes[5];
+    size = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) {
+        avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i];
+        size++;
+    }
+    if (flashAvailable) {
+        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH;
+        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+        avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+    }
+    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
+                      avail_ae_modes,
+                      size);
+
+    int32_t sensitivity_range[2];
+    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
+    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
+    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
+                      sensitivity_range,
+                      sizeof(sensitivity_range) / sizeof(int32_t));
+
+    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
+                      &gCamCapability[cameraId]->max_analog_sensitivity,
+                      1);
+
+    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
+    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
+                      &sensor_orientation,
+                      1);
+
+    int32_t max_output_streams[3] = {1, 3, 1};
+    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
+                      max_output_streams,
+                      3);
+
+    uint8_t avail_leds = 0;
+    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
+                      &avail_leds, 0);
+
+    uint8_t focus_dist_calibrated;
+    int32_t val = lookupFwkName(FOCUS_CALIBRATION_MAP,
+            sizeof(FOCUS_CALIBRATION_MAP)/sizeof(FOCUS_CALIBRATION_MAP[0]),
+            gCamCapability[cameraId]->focus_dist_calibrated);
+    if (val != NAME_NOT_FOUND) {
+        focus_dist_calibrated = (uint8_t)val;
+        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
+                     &focus_dist_calibrated, 1);
+    }
+
+    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
+    size = 0;
+    for (int i = 0; i < gCamCapability[cameraId]->supported_test_pattern_modes_cnt;
+            i++) {
+        int32_t val = lookupFwkName(TEST_PATTERN_MAP,
+                                    sizeof(TEST_PATTERN_MAP)/sizeof(TEST_PATTERN_MAP[0]),
+                                    gCamCapability[cameraId]->supported_test_pattern_modes[i]);
+        if (val != NAME_NOT_FOUND) {
+            avail_testpattern_modes[size] = val;
+            size++;
+        }
+    }
+    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+                      avail_testpattern_modes,
+                      size);
+
+    uint8_t max_pipeline_depth = kMaxInFlight + EMPTY_PIPELINE_DELAY;
+    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
+                      &max_pipeline_depth,
+                      1);
+
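+    /* Two partial results per capture: the urgent 3A metadata followed by the
+     * final metadata buffer (see ANDROID_QUIRKS_USE_PARTIAL_RESULT above). */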
+    int32_t partial_result_count = 2;
+    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+                      &partial_result_count,
+                       1);
+
+    uint8_t available_capabilities[MAX_AVAILABLE_CAPABILITIES];
+    uint8_t available_capabilities_count = 0;
+    available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE;
+    available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR;
+    available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING;
+
+    if (facingBack) {
+        available_capabilities[available_capabilities_count++] = ANDROID_REQUEST_AVAILABLE_CAPABILITIES_DNG;
+    }
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+                      available_capabilities,
+                      available_capabilities_count);
+
+    int32_t max_input_streams = 0;
+    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+                      &max_input_streams,
+                      1);
+
+    int32_t io_format_map[] = {};
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
+                      io_format_map, 0);
+
+    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
+    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
+                      &max_latency,
+                      1);
+
+    float optical_axis_angle[2];
+    optical_axis_angle[0] = 0; //need to verify
+    optical_axis_angle[1] = 0; //need to verify
+    staticInfo.update(ANDROID_LENS_OPTICAL_AXIS_ANGLE,
+                      optical_axis_angle,
+                      2);
+
+    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST};
+    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
+                      available_hot_pixel_modes,
+                      1);
+
+    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
+                                      ANDROID_EDGE_MODE_FAST};
+    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+                      available_edge_modes,
+                      2);
+
+    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
+                                           ANDROID_NOISE_REDUCTION_MODE_FAST};
+    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                      available_noise_red_modes,
+                      2);
+
+    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
+                                         ANDROID_TONEMAP_MODE_FAST};
+    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
+                      available_tonemap_modes,
+                      2);
+
+    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
+    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
+                      available_hot_pixel_map_modes,
+                      1);
+
+    uint8_t fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
+        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
+        gCamCapability[cameraId]->reference_illuminant1);
+    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
+                      &fwkReferenceIlluminant, 1);
+
+    fwkReferenceIlluminant = lookupFwkName(REFERENCE_ILLUMINANT_MAP,
+        sizeof(REFERENCE_ILLUMINANT_MAP) / sizeof(REFERENCE_ILLUMINANT_MAP[0]),
+        gCamCapability[cameraId]->reference_illuminant2);
+    staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
+                      &fwkReferenceIlluminant, 1);
+
+    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1,
+                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix1,
+                      3*3);
+
+    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2,
+                      (camera_metadata_rational_t*)gCamCapability[cameraId]->forward_matrix2,
+                      3*3);
+
+    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1,
+                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform1,
+                      3*3);
+
+    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2,
+                   (camera_metadata_rational_t*) gCamCapability[cameraId]->color_transform2,
+                      3*3);
+
+    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
+                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform1,
+                      3*3);
+
+    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
+                   (camera_metadata_rational_t*) gCamCapability[cameraId]->calibration_transform2,
+                      3*3);
+
+    int32_t available_request_keys[] = {ANDROID_COLOR_CORRECTION_MODE,
+       ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
+       ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+       ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
+       ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
+       ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
+       ANDROID_CONTROL_AF_REGIONS, ANDROID_CONTROL_AF_TRIGGER,
+       ANDROID_CONTROL_AWB_LOCK, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
+       ANDROID_CONTROL_CAPTURE_INTENT, ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
+       ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+       ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE, ANDROID_EDGE_STRENGTH,
+       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
+       ANDROID_JPEG_GPS_COORDINATES,
+       ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
+       ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
+       ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
+       ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
+       ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
+       ANDROID_NOISE_REDUCTION_STRENGTH, ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
+       ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
+       ANDROID_SENSOR_FRAME_DURATION,
+       ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
+       ANDROID_SHADING_STRENGTH, ANDROID_STATISTICS_FACE_DETECT_MODE,
+       ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
+       ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
+       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
+       ANDROID_BLACK_LEVEL_LOCK };
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
+                      available_request_keys,
+                      sizeof(available_request_keys)/sizeof(int32_t));
+
+    int32_t available_result_keys[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
+       ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
+       ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE, ANDROID_CONTROL_AF_REGIONS,
+       ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_AWB_REGIONS,
+       ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
+       ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
+       ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
+       ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
+       ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
+       ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
+       ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
+       ANDROID_NOISE_REDUCTION_MODE, ANDROID_QUIRKS_PARTIAL_RESULT, ANDROID_REQUEST_ID,
+       ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
+       ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
+       ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
+       ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
+       ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
+       ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
+       ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
+       ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
+       ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_IDS,
+       ANDROID_STATISTICS_FACE_LANDMARKS, ANDROID_STATISTICS_FACE_RECTANGLES,
+       ANDROID_STATISTICS_FACE_SCORES};
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+                      available_result_keys,
+                      sizeof(available_result_keys)/sizeof(int32_t));
+
+    int32_t available_characteristics_keys[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+       ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+       ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
+       ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
+       ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
+       ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+       ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
+       ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
+       ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+       ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
+       ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
+       ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+       ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
+       ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
+       ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
+       ANDROID_LENS_FACING, ANDROID_LENS_OPTICAL_AXIS_ANGLE,ANDROID_LENS_POSITION,
+       ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
+       ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
+       ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
+       ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
+       ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+       ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
+       ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+       /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
+       ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
+       ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
+       ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
+       ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
+       ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+       ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
+       ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
+       ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
+       ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
+       ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
+       ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+       ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+       ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
+       ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
+       ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
+       ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
+       ANDROID_EDGE_AVAILABLE_EDGE_MODES,
+       ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+       ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
+       ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
+       ANDROID_TONEMAP_MAX_CURVE_POINTS, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL };
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+                      available_characteristics_keys,
+                      sizeof(available_characteristics_keys)/sizeof(int32_t));
+
+    /*available stall durations depend on the hw + sw and will be different for different devices */
+    /*have to add for raw after implementation*/
+    int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
+    size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
+
+    size_t available_stall_size = gCamCapability[cameraId]->picture_sizes_tbl_cnt * 4;
+    int64_t available_stall_durations[available_stall_size];
+    idx = 0;
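+    /* Pack each entry as four int64 values (format, width, height, stall
+     * duration), the layout expected by ANDROID_SCALER_AVAILABLE_STALL_DURATIONS. */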
+    for (uint32_t j = 0; j < stall_formats_count; j++) {
+       if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
+          for (uint32_t i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) {
+             available_stall_durations[idx]   = stall_formats[j];
+             available_stall_durations[idx+1] = gCamCapability[cameraId]->picture_sizes_tbl[i].width;
+             available_stall_durations[idx+2] = gCamCapability[cameraId]->picture_sizes_tbl[i].height;
+             available_stall_durations[idx+3] = gCamCapability[cameraId]->jpeg_stall_durations[i];
+             idx+=4;
+          }
+       } else {
+          for (uint32_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
+             available_stall_durations[idx]   = stall_formats[j];
+             available_stall_durations[idx+1] = gCamCapability[cameraId]->raw_dim[i].width;
+             available_stall_durations[idx+2] = gCamCapability[cameraId]->raw_dim[i].height;
+             available_stall_durations[idx+3] = gCamCapability[cameraId]->raw16_stall_durations[i];
+             idx+=4;
+          }
+       }
+    }
+    staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
+                      available_stall_durations,
+                      idx);
+    //QCAMERA3_OPAQUE_RAW
+    uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
+    cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
+    switch (gCamCapability[cameraId]->opaque_raw_fmt) {
+    case LEGACY_RAW:
+        if (gCamCapability[cameraId]->white_level == (1<<8)-1)
+            fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
+            fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
+            fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
+        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
+        break;
+    case MIPI_RAW:
+        if (gCamCapability[cameraId]->white_level == (1<<8)-1)
+            fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == (1<<10)-1)
+            fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
+        else if (gCamCapability[cameraId]->white_level == (1<<12)-1)
+            fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
+        raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
+        break;
+    default:
+        ALOGE("%s: unknown opaque_raw_format %d", __func__,
+                gCamCapability[cameraId]->opaque_raw_fmt);
+        break;
+    }
+    staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
+
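+    /* Report the opaque RAW layout as (width, height, stride) triples,
+     * one triple per supported RAW dimension, via QCAMERA3_OPAQUE_RAW_STRIDES. */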
+    int32_t strides[3*gCamCapability[cameraId]->supported_raw_dim_cnt];
+    for (size_t i = 0; i < gCamCapability[cameraId]->supported_raw_dim_cnt; i++) {
+        cam_stream_buf_plane_info_t buf_planes;
+        strides[i*3] = gCamCapability[cameraId]->raw_dim[i].width;
+        strides[i*3+1] = gCamCapability[cameraId]->raw_dim[i].height;
+        mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
+            &gCamCapability[cameraId]->padding_info, &buf_planes);
+        strides[i*3+2] = buf_planes.plane_info.mp[0].stride;
+    }
+    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides,
+            3*gCamCapability[cameraId]->supported_raw_dim_cnt);
+
+    gStaticMetadata[cameraId] = staticInfo.release();
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : makeTable
+ *
+ * DESCRIPTION: make a table of sizes
+ *
+ * PARAMETERS :
+ *   @dimTable  : table of dimensions from the backend
+ *   @size      : number of entries in dimTable
+ *   @sizeTable : output array of (width, height) pairs
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size,
+                                          int32_t* sizeTable)
+{
+    int j = 0;
+    for (int i = 0; i < size; i++) {
+        sizeTable[j] = dimTable[i].width;
+        sizeTable[j+1] = dimTable[i].height;
+        j+=2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : makeFPSTable
+ *
+ * DESCRIPTION: make a table of fps ranges
+ *
+ * PARAMETERS :
+ *   @fpsTable       : table of fps ranges from the backend
+ *   @size           : number of entries in fpsTable
+ *   @fpsRangesTable : output array of (min_fps, max_fps) pairs
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
+                                          int32_t* fpsRangesTable)
+{
+    int j = 0;
+    for (int i = 0; i < size; i++) {
+        fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
+        fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
+        j+=2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : makeOverridesList
+ *
+ * DESCRIPTION: make a list of scene mode overrides
+ *
+ * PARAMETERS :
+ *   @overridesTable    : table of scene mode overrides from the backend
+ *   @size              : number of supported scene modes
+ *   @overridesList     : output list of (ae, awb, af) overrides per scene mode
+ *   @supported_indexes : backend indexes of the scene modes supported by the fwk
+ *   @camera_id         : camera Id
+ *
+ *==========================================================================*/
+void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable,
+                                                  uint8_t size, uint8_t* overridesList,
+                                                  uint8_t* supported_indexes,
+                                                  int camera_id)
+{
+    /*daemon will give a list of overrides for all scene modes.
+      However we should send the fwk only the overrides for the scene modes
+      supported by the framework*/
+    int j = 0, index = 0, supt = 0;
+    uint8_t focus_override;
+    for (int i = 0; i < size; i++) {
+        supt = 0;
+        index = supported_indexes[i];
+        overridesList[j] = gCamCapability[camera_id]->flash_available ? ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH:ANDROID_CONTROL_AE_MODE_ON;
+        overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP,
+                                 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
+                                                    overridesTable[index].awb_mode);
+        focus_override = (uint8_t)overridesTable[index].af_mode;
+        for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) {
+           if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
+              supt = 1;
+              break;
+           }
+        }
+        if (supt) {
+           overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP,
+                                              sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
+                                              focus_override);
+        } else {
+           overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
+        }
+        j+=3;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getScalarFormat
+ *
+ * DESCRIPTION: convert the format to type recognized by framework
+ *
+ * PARAMETERS : format : the format from backend
+ *
+ * RETURN     : format recognized by framework
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format)
+{
+    int32_t halPixelFormat;
+
+    switch (format) {
+    case CAM_FORMAT_YUV_420_NV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        halPixelFormat = HAL_PIXEL_FORMAT_YV12;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+    default:
+        halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+        break;
+    }
+    return halPixelFormat;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSensorSensitivity
+ *
+ * DESCRIPTION: convert iso_mode to an integer value
+ *
+ * PARAMETERS : iso_mode : the iso_mode supported by sensor
+ *
+ * RETURN     : sensitivity supported by sensor
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
+{
+    int32_t sensitivity;
+
+    switch (iso_mode) {
+    case CAM_ISO_MODE_100:
+        sensitivity = 100;
+        break;
+    case CAM_ISO_MODE_200:
+        sensitivity = 200;
+        break;
+    case CAM_ISO_MODE_400:
+        sensitivity = 400;
+        break;
+    case CAM_ISO_MODE_800:
+        sensitivity = 800;
+        break;
+    case CAM_ISO_MODE_1600:
+        sensitivity = 1600;
+        break;
+    default:
+        sensitivity = -1;
+        break;
+    }
+    return sensitivity;
+}
+
+/*===========================================================================
+ * FUNCTION   : AddSetMetaEntryToBatch
+ *
+ * DESCRIPTION: add set parameter entry into batch
+ *
+ * PARAMETERS :
+ *   @p_table     : ptr to parameter buffer
+ *   @paramType   : parameter type
+ *   @paramLength : length of parameter value
+ *   @paramValue  : ptr to parameter value
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
+                                                          unsigned int paramType,
+                                                          uint32_t paramLength,
+                                                          void *paramValue)
+{
+    int position = paramType;
+    int current, next;
+
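+    /* Valid entries in p_table form a singly-linked list sorted by parameter
+     * ID (see GET_FIRST_PARAM_ID / GET_NEXT_PARAM_ID); insert 'position' so
+     * that this ordering is preserved. */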
+    /*************************************************************************
+    *                 Code to take care of linking next flags                *
+    *************************************************************************/
+    current = GET_FIRST_PARAM_ID(p_table);
+    if (position == current){
+        //DO NOTHING
+    } else if (position < current){
+        SET_NEXT_PARAM_ID(position, p_table, current);
+        SET_FIRST_PARAM_ID(p_table, position);
+    } else {
+        /* Search for the position in the linked list where we need to slot in*/
+        while (position > GET_NEXT_PARAM_ID(current, p_table))
+            current = GET_NEXT_PARAM_ID(current, p_table);
+
+        /*If node already exists no need to alter linking*/
+        if (position != GET_NEXT_PARAM_ID(current, p_table)) {
+            next = GET_NEXT_PARAM_ID(current, p_table);
+            SET_NEXT_PARAM_ID(current, p_table, position);
+            SET_NEXT_PARAM_ID(position, p_table, next);
+        }
+    }
+
+    /*************************************************************************
+    *                   Copy contents into entry                             *
+    *************************************************************************/
+
+    if (paramLength > sizeof(parm_type_t)) {
+        ALOGE("%s:Size of input larger than max entry size",__func__);
+        return BAD_VALUE;
+    }
+    memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength);
+    SET_PARM_VALID_BIT(paramType,p_table,1);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupFwkName
+ *
+ * DESCRIPTION: In case the enum is not the same in fwk and backend,
+ *              make sure the parameter is correctly propagated
+ *
+ * PARAMETERS  :
+ *   @arr      : map between the two enums
+ *   @len      : len of the map
+ *   @hal_name : name of the hal_parm to map
+ *
+ * RETURN     : int type of status
+ *              fwk_name  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[],
+                                             int len, int hal_name)
+{
+
+    for (int i = 0; i < len; i++) {
+        if (arr[i].hal_name == hal_name)
+            return arr[i].fwk_name;
+    }
+
+    /* Not able to find matching framework type is not necessarily
+     * an error case. This happens when mm-camera supports more attributes
+     * than the frameworks do */
+    ALOGD("%s: Cannot find matching framework type", __func__);
+    return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION   : lookupHalName
+ *
+ * DESCRIPTION: In case the enum is not the same in fwk and backend,
+ *              make sure the parameter is correctly propagated
+ *
+ * PARAMETERS  :
+ *   @arr      : map between the two enums
+ *   @len      : len of the map
+ *   @fwk_name : name of the hal_parm to map
+ *
+ * RETURN     : int32_t type of status
+ *              hal_name  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[],
+                                             int len, unsigned int fwk_name)
+{
+    for (int i = 0; i < len; i++) {
+       if (arr[i].fwk_name == fwk_name)
+           return arr[i].hal_name;
+    }
+    ALOGE("%s: Cannot find matching hal type", __func__);
+    return NAME_NOT_FOUND;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCamInfo
+ *
+ * DESCRIPTION: query camera capabilities
+ *
+ * PARAMETERS :
+ *   @cameraId  : camera Id
+ *   @info      : camera info struct to be filled in with camera capabilities
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::getCamInfo(int cameraId,
+                                    struct camera_info *info)
+{
+    int rc = 0;
+
+    if (NULL == gCamCapability[cameraId]) {
+        rc = initCapabilities(cameraId);
+        if (rc < 0) {
+            //pthread_mutex_unlock(&g_camlock);
+            return rc;
+        }
+    }
+
+    if (NULL == gStaticMetadata[cameraId]) {
+        rc = initStaticMetadata(cameraId);
+        if (rc < 0) {
+            return rc;
+        }
+    }
+
+    switch(gCamCapability[cameraId]->position) {
+    case CAM_POSITION_BACK:
+        info->facing = CAMERA_FACING_BACK;
+        break;
+
+    case CAM_POSITION_FRONT:
+        info->facing = CAMERA_FACING_FRONT;
+        break;
+
+    default:
+        ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId);
+        rc = -1;
+        break;
+    }
+
+
+    info->orientation = gCamCapability[cameraId]->sensor_mount_angle;
+    info->device_version = CAMERA_DEVICE_API_VERSION_3_2;
+    info->static_camera_characteristics = gStaticMetadata[cameraId];
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : translateCapabilityToMetadata
+ *
+ * DESCRIPTION: translate the capability into camera_metadata_t
+ *
+ * PARAMETERS :
+ *   @type : type of the request
+ *
+ * RETURN     : success: camera_metadata_t*
+ *              failure: NULL
+ *
+ *==========================================================================*/
+camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
+{
+    pthread_mutex_lock(&mMutex);
+
+    if (mDefaultMetadata[type] != NULL) {
+        pthread_mutex_unlock(&mMutex);
+        return mDefaultMetadata[type];
+    }
+    //first time we are handling this request
+    //fill up the metadata structure using the wrapper class
+    CameraMetadata settings;
+    //translate from cam_capability_t to camera_metadata_tag_t
+    static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
+    settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
+    int32_t defaultRequestID = 0;
+    settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
+
+    uint8_t controlIntent = 0;
+    uint8_t focusMode = ANDROID_CONTROL_AF_MODE_OFF; /* default for templates not listed below */
+    switch (type) {
+      case CAMERA3_TEMPLATE_PREVIEW:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
+        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+        break;
+      case CAMERA3_TEMPLATE_STILL_CAPTURE:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+        break;
+      case CAMERA3_TEMPLATE_VIDEO_RECORD:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+        break;
+      case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
+        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+        break;
+      case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
+        focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+        break;
+      case CAMERA3_TEMPLATE_MANUAL:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
+        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
+        break;
+      default:
+        controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
+        break;
+    }
+    settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
+
+    if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
+        focusMode = ANDROID_CONTROL_AF_MODE_OFF;
+    }
+    settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
+
+    settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
+            &gCamCapability[mCameraId]->exposure_compensation_default, 1);
+
+    static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
+    settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
+
+    static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
+    settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
+
+    static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
+    settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
+
+    static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
+    settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
+
+    static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
+    settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
+
+    static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
+    settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
+
+    static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
+    settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
+
+    /*flash*/
+    static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
+    settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
+
+    static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
+    settings.update(ANDROID_FLASH_FIRING_POWER,
+            &flashFiringLevel, 1);
+
+    /* lens */
+    float default_aperture = gCamCapability[mCameraId]->apertures[0];
+    settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
+
+    if (gCamCapability[mCameraId]->filter_densities_count) {
+        float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
+        settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
+                        gCamCapability[mCameraId]->filter_densities_count);
+    }
+
+    float default_focal_length = gCamCapability[mCameraId]->focal_length;
+    settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
+
+    float default_focus_distance = 0;
+    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
+
+    static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
+    settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
+
+    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
+    settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
+
+    static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
+    settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
+
+    static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_FULL;
+    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
+
+    static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
+    settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
+
+    static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
+    settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
+
+    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
+    settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
+
+    static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
+
+    static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
+    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
+
+    /* Exposure time(Update the Min Exposure Time)*/
+    int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
+    settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
+
+    /* frame duration */
+    static const int64_t default_frame_duration = NSEC_PER_33MSEC;
+    settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
+
+    /* sensitivity */
+    static const int32_t default_sensitivity = 100;
+    settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
+
+    /*edge mode*/
+    static const uint8_t edge_mode = ANDROID_EDGE_MODE_FAST;
+    settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
+
+    /*noise reduction mode*/
+    static const uint8_t noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
+    settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
+
+    /*color correction mode*/
+    static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
+    settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
+
+    /*transform matrix mode*/
+    static const uint8_t tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
+    settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
+
+    uint8_t edge_strength = (uint8_t)gCamCapability[mCameraId]->sharpness_ctrl.def_value;
+    settings.update(ANDROID_EDGE_STRENGTH, &edge_strength, 1);
+
+    int32_t scaler_crop_region[4];
+    scaler_crop_region[0] = 0;
+    scaler_crop_region[1] = 0;
+    scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
+    scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
+    settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
+
+    static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ;
+    settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
+
+    static const uint8_t vs_mode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+    settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vs_mode, 1);
+
+    uint8_t opt_stab_mode = (gCamCapability[mCameraId]->optical_stab_modes_count == 2)?
+                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON :
+                             ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
+    settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &opt_stab_mode, 1);
+
+    /*focus distance*/
+    float focus_distance = 0.0;
+    settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
+
+    /*target fps range: use maximum range for picture, and maximum fixed range for video*/
+    float max_range = 0.0;
+    float max_fixed_fps = 0.0;
+    int32_t fps_range[2] = {0, 0};
+    for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
+            i++) {
+        float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
+            gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
+        if (type == CAMERA3_TEMPLATE_PREVIEW ||
+                type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
+                type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
+            if (range > max_range) {
+                fps_range[0] =
+                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
+                fps_range[1] =
+                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
+                max_range = range;
+            }
+        } else {
+            if (range < 0.01 && max_fixed_fps <
+                    gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
+                fps_range[0] =
+                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
+                fps_range[1] =
+                    (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
+                max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
+            }
+        }
+    }
+    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
+
+    /*precapture trigger*/
+    uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
+    settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
+
+    /*af trigger*/
+    uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
+    settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
+
+    /* ae & af regions */
+    int32_t active_region[] = {
+            gCamCapability[mCameraId]->active_array_size.left,
+            gCamCapability[mCameraId]->active_array_size.top,
+            gCamCapability[mCameraId]->active_array_size.left +
+                    gCamCapability[mCameraId]->active_array_size.width,
+            gCamCapability[mCameraId]->active_array_size.top +
+                    gCamCapability[mCameraId]->active_array_size.height,
+            1};
+    settings.update(ANDROID_CONTROL_AE_REGIONS, active_region, 5);
+    settings.update(ANDROID_CONTROL_AF_REGIONS, active_region, 5);
+
+    /* black level lock */
+    uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
+    settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
+
+    /* face detect mode */
+    uint8_t facedetect_mode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
+    settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &facedetect_mode, 1);
+
+    /* lens shading map mode */
+    uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
+    settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
+
+    //special defaults for manual template
+    if (type == CAMERA3_TEMPLATE_MANUAL) {
+        static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
+        settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
+
+        static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
+        settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
+
+        static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
+        settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
+
+        static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
+        settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
+
+        static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
+        settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
+
+        static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
+        settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
+    }
+    mDefaultMetadata[type] = settings.release();
+
+    pthread_mutex_unlock(&mMutex);
+    return mDefaultMetadata[type];
+}
+
+/*===========================================================================
+ * FUNCTION   : setFrameParameters
+ *
+ * DESCRIPTION: set parameters per frame as requested in the metadata from
+ *              framework
+ *
+ * PARAMETERS :
+ *   @request   : request that needs to be serviced
+ *   @streamID : Stream ID of all the requested streams
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure: non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::setFrameParameters(
+                    camera3_capture_request_t *request,
+                    cam_stream_ID_t streamID)
+{
+    /*translate from camera_metadata_t type to parm_type_t*/
+    int rc = 0;
+    int32_t hal_version = CAM_HAL_V3;
+    if (mRepeatingRequest == true) {
+       //chain of repeating request
+       ALOGV("%s: chain of repeating request", __func__);
+    } else {
+       memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
+    }
+
+    memset(mParameters, 0, sizeof(metadata_buffer_t));
+    mParameters->first_flagged_entry = CAM_INTF_PARM_MAX;
+    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
+                sizeof(hal_version), &hal_version);
+    if (rc < 0) {
+        ALOGE("%s: Failed to set hal version in the parameters", __func__);
+        return BAD_VALUE;
+    }
+
+    /*we need to update the frame number in the parameters*/
+    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER,
+                                sizeof(request->frame_number), &(request->frame_number));
+    if (rc < 0) {
+        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
+        return BAD_VALUE;
+    }
+
+    /* Update stream id of all the requested buffers */
+    rc = AddSetMetaEntryToBatch(mParameters, CAM_INTF_META_STREAM_ID,
+                                sizeof(cam_stream_ID_t), &streamID);
+
+    if (rc < 0) {
+        ALOGE("%s: Failed to set stream type mask in the parameters", __func__);
+        return BAD_VALUE;
+    }
+
+    if(request->settings != NULL){
+        mRepeatingRequest = false;
+        rc = translateToHalMetadata(request, mParameters);
+    } else {
+       mRepeatingRequest = true;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : setReprocParameters
+ *
+ * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
+ *              queue it to picture channel for reprocessing.
+ *
+ * PARAMETERS :
+ *   @request   : request that needs to be serviced
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure: non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::setReprocParameters(
+        camera3_capture_request_t *request)
+{
+    /*translate from camera_metadata_t type to parm_type_t*/
+    int rc = 0;
+    metadata_buffer_t *reprocParam = NULL;
+
+    if (request->settings == NULL) {
+        ALOGE("%s: Reprocess settings cannot be NULL", __func__);
+        return BAD_VALUE;
+    }
+    reprocParam = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
+    if (!reprocParam) {
+        ALOGE("%s: Failed to allocate reprocessing metadata buffer", __func__);
+        return NO_MEMORY;
+    }
+    memset(reprocParam, 0, sizeof(metadata_buffer_t));
+    reprocParam->first_flagged_entry = CAM_INTF_PARM_MAX;
+
+    /*we need to update the frame number in the parameters*/
+    rc = AddSetMetaEntryToBatch(reprocParam, CAM_INTF_META_FRAME_NUMBER,
+                                sizeof(request->frame_number), &(request->frame_number));
+    if (rc < 0) {
+        ALOGE("%s: Failed to set the frame number in the parameters", __func__);
+        return BAD_VALUE;
+    }
+
+
+    rc = translateToHalMetadata(request, reprocParam);
+    if (rc < 0) {
+        ALOGE("%s: Failed to translate reproc request", __func__);
+        free(reprocParam);
+        return rc;
+    }
+    /*queue metadata for reprocessing*/
+    rc = mPictureChannel->queueReprocMetadata(reprocParam);
+    if (rc < 0) {
+        ALOGE("%s: Failed to queue reprocessing metadata", __func__);
+        free(reprocParam);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : translateToHalMetadata
+ *
+ * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
+ *
+ *
+ * PARAMETERS :
+ *   @request  : request sent from framework
+ *
+ *
+ * RETURN     : success: NO_ERROR
+ *              failure: non-zero failure code
+ *==========================================================================*/
+int QCamera3HardwareInterface::translateToHalMetadata
+                                  (const camera3_capture_request_t *request,
+                                   metadata_buffer_t *hal_metadata)
+{
+    int rc = 0;
+    CameraMetadata frame_settings;
+    frame_settings = request->settings;
+
+    /* Do not change the order of the following list unless you know what you are
+     * doing.
+     * The order is laid out in such a way that parameters in the front of the table
+     * may be used to override the parameters later in the table. Examples are:
+     * 1. META_MODE should precede AEC/AWB/AF MODE
+     * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
+     * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
+     * 4. Any mode should precede its corresponding settings
+     */
+    if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
+        uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_MODE,
+                sizeof(metaMode), &metaMode);
+        if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
+           uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
+           uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP,
+                                             sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
+                                             fwk_sceneMode);
+           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
+                sizeof(sceneMode), &sceneMode);
+        } else if (metaMode == ANDROID_CONTROL_MODE_OFF) {
+           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
+           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
+                sizeof(sceneMode), &sceneMode);
+        } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) {
+           uint8_t sceneMode = CAM_SCENE_MODE_OFF;
+           rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_BESTSHOT_MODE,
+                sizeof(sceneMode), &sceneMode);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
+        uint8_t fwk_aeMode =
+            frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
+        uint8_t aeMode;
+        int32_t redeye;
+
+        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
+            aeMode = CAM_AE_MODE_OFF;
+        } else {
+            aeMode = CAM_AE_MODE_ON;
+        }
+        if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
+            redeye = 1;
+        } else {
+            redeye = 0;
+        }
+
+        int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP,
+                                          sizeof(AE_FLASH_MODE_MAP)/sizeof(AE_FLASH_MODE_MAP[0]),
+                                          fwk_aeMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_MODE,
+                sizeof(aeMode), &aeMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
+                sizeof(flashMode), &flashMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION,
+                sizeof(redeye), &redeye);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
+        uint8_t fwk_whiteLevel =
+            frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
+        uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP,
+                sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]),
+                fwk_whiteLevel);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE,
+                sizeof(whiteLevel), &whiteLevel);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
+        uint8_t fwk_focusMode =
+            frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
+        uint8_t focusMode;
+        focusMode = lookupHalName(FOCUS_MODES_MAP,
+                                   sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]),
+                                   fwk_focusMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FOCUS_MODE,
+                sizeof(focusMode), &focusMode);
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
+        float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_LENS_FOCUS_DISTANCE,
+                sizeof(focalDistance), &focalDistance);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
+        uint8_t fwk_antibandingMode =
+            frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
+        uint8_t hal_antibandingMode = lookupHalName(ANTIBANDING_MODES_MAP,
+                     sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]),
+                     fwk_antibandingMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
+                sizeof(hal_antibandingMode), &hal_antibandingMode);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
+        int32_t expCompensation = frame_settings.find(
+            ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
+        if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
+            expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
+        if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
+            expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV,
+          sizeof(expCompensation), &expCompensation);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
+        uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AEC_LOCK,
+                sizeof(aeLock), &aeLock);
+    }
+    if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
+        cam_fps_range_t fps_range;
+        fps_range.min_fps =
+            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
+        fps_range.max_fps =
+            frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_FPS_RANGE,
+                sizeof(fps_range), &fps_range);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
+        uint8_t awbLock =
+            frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_AWB_LOCK,
+                sizeof(awbLock), &awbLock);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
+        uint8_t fwk_effectMode =
+            frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
+        uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP,
+                sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]),
+                fwk_effectMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EFFECT,
+                sizeof(effectMode), &effectMode);
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
+        uint8_t colorCorrectMode =
+            frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
+        rc =
+            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
+                    sizeof(colorCorrectMode), &colorCorrectMode);
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
+        cam_color_correct_gains_t colorCorrectGains;
+        for (int i = 0; i < 4; i++) {
+            colorCorrectGains.gains[i] =
+                frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
+        }
+        rc =
+            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
+                    sizeof(colorCorrectGains), &colorCorrectGains);
+    }
+
+    if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
+        cam_color_correct_matrix_t colorCorrectTransform;
+        cam_rational_type_t transform_elem;
+        int num = 0;
+        for (int i = 0; i < 3; i++) {
+           for (int j = 0; j < 3; j++) {
+              transform_elem.numerator =
+                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
+              transform_elem.denominator =
+                 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
+              colorCorrectTransform.transform_matrix[i][j] = transform_elem;
+              num++;
+           }
+        }
+        rc =
+            AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
+                    sizeof(colorCorrectTransform), &colorCorrectTransform);
+    }
+
+    cam_trigger_t aecTrigger;
+    aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
+    aecTrigger.trigger_id = -1;
+    if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
+        frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
+        aecTrigger.trigger =
+            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
+        aecTrigger.trigger_id =
+            frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
+                sizeof(aecTrigger), &aecTrigger);
+    }
+    /*af_trigger must come with a trigger id*/
+    if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
+        frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
+        cam_trigger_t af_trigger;
+        af_trigger.trigger =
+            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
+        af_trigger.trigger_id =
+            frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger);
+    }
+
+    if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
+        int32_t demosaic =
+            frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_DEMOSAIC,
+                sizeof(demosaic), &demosaic);
+    }
+
+    if (frame_settings.exists(ANDROID_EDGE_MODE)) {
+        cam_edge_application_t edge_application;
+        edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
+        if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
+            edge_application.sharpness = 0;
+        } else {
+            if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) {
+                uint8_t edgeStrength =
+                    frame_settings.find(ANDROID_EDGE_STRENGTH).data.u8[0];
+                edge_application.sharpness = (int32_t)edgeStrength;
+            } else {
+                edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
+            }
+        }
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_EDGE_MODE,
+                sizeof(edge_application), &edge_application);
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_MODE)) {
+        int32_t respectFlashMode = 1;
+        if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
+            uint8_t fwk_aeMode =
+                frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
+            if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
+                respectFlashMode = 0;
+                ALOGV("%s: AE Mode controls flash, ignore android.flash.mode",
+                    __func__);
+            }
+        }
+        if (respectFlashMode) {
+            uint8_t flashMode =
+                frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
+            flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP,
+                                          sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]),
+                                          flashMode);
+            ALOGV("%s: flash mode after mapping %d", __func__, flashMode);
+            // To check: CAM_INTF_META_FLASH_MODE usage
+            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_LED_MODE,
+                          sizeof(flashMode), &flashMode);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
+        uint8_t flashPower =
+            frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_FLASH_POWER,
+                sizeof(flashPower), &flashPower);
+    }
+
+    if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
+        int64_t flashFiringTime =
+            frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime);
+    }
+
+    if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
+        uint8_t hotPixelMode =
+            frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
+                sizeof(hotPixelMode), &hotPixelMode);
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
+        float lensAperture =
+            frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_APERTURE,
+                sizeof(lensAperture), &lensAperture);
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
+        float filterDensity =
+            frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
+                sizeof(filterDensity), &filterDensity);
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
+        float focalLength =
+            frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_LENS_FOCAL_LENGTH,
+                sizeof(focalLength), &focalLength);
+    }
+
+    if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
+        uint8_t optStabMode =
+            frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_LENS_OPT_STAB_MODE,
+                sizeof(optStabMode), &optStabMode);
+    }
+
+    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
+        uint8_t noiseRedMode =
+            frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_NOISE_REDUCTION_MODE,
+                sizeof(noiseRedMode), &noiseRedMode);
+    }
+
+    if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) {
+        uint8_t noiseRedStrength =
+            frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
+                sizeof(noiseRedStrength), &noiseRedStrength);
+    }
+
+    cam_crop_region_t scalerCropRegion;
+    bool scalerCropSet = false;
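+    /* Remember the crop region (if any) so the AE/AF/AWB regions parsed
+     * further below can be checked against it via resetIfNeededROI(). */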
+    if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
+        scalerCropRegion.left =
+            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
+        scalerCropRegion.top =
+            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
+        scalerCropRegion.width =
+            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
+        scalerCropRegion.height =
+            frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_SCALER_CROP_REGION,
+                sizeof(scalerCropRegion), &scalerCropRegion);
+        scalerCropSet = true;
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
+        int64_t sensorExpTime =
+            frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
+        ALOGV("%s: setting sensorExpTime %lld", __func__, sensorExpTime);
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_SENSOR_EXPOSURE_TIME,
+                sizeof(sensorExpTime), &sensorExpTime);
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
+        int64_t sensorFrameDuration =
+            frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
+        int64_t minFrameDuration = getMinFrameDuration(request);
+        sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
+        if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
+            sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
+        ALOGV("%s: clamp sensorFrameDuration to %lld", __func__, sensorFrameDuration);
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_SENSOR_FRAME_DURATION,
+                sizeof(sensorFrameDuration), &sensorFrameDuration);
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
+        int32_t sensorSensitivity =
+            frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
+        if (sensorSensitivity <
+                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
+            sensorSensitivity =
+                gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
+        if (sensorSensitivity >
+                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
+            sensorSensitivity =
+                gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
+        ALOGV("%s: clamp sensorSensitivity to %d", __func__, sensorSensitivity);
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_SENSOR_SENSITIVITY,
+                sizeof(sensorSensitivity), &sensorSensitivity);
+    }
+
+    if (frame_settings.exists(ANDROID_SHADING_MODE)) {
+        int32_t shadingMode =
+            frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_MODE,
+                sizeof(shadingMode), &shadingMode);
+    }
+
+    if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) {
+        uint8_t shadingStrength =
+            frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_SHADING_STRENGTH,
+                sizeof(shadingStrength), &shadingStrength);
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
+        uint8_t fwk_facedetectMode =
+            frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
+        uint8_t facedetectMode =
+            lookupHalName(FACEDETECT_MODES_MAP,
+                sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), fwk_facedetectMode);
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_STATS_FACEDETECT_MODE,
+                sizeof(facedetectMode), &facedetectMode);
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
+        uint8_t histogramMode =
+            frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_STATS_HISTOGRAM_MODE,
+                sizeof(histogramMode), &histogramMode);
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
+        uint8_t sharpnessMapMode =
+            frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
+                sizeof(sharpnessMapMode), &sharpnessMapMode);
+    }
+
+    if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
+        uint8_t tonemapMode =
+            frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_TONEMAP_MODE,
+                sizeof(tonemapMode), &tonemapMode);
+    }
+    /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
+    /*All tonemap channels will have the same number of points*/
+    if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
+        frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
+        frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
+        cam_rgb_tonemap_curves tonemapCurves;
+        tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
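+        /* Each curve arrives from the framework as a flat array of (Pin, Pout)
+         * float pairs, hence the point count is count / 2. */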
+
+        /* ch0 = G*/
+        int point = 0;
+        cam_tonemap_curve_t tonemapCurveGreen;
+        for (int i = 0; i < tonemapCurves.tonemap_points_cnt ; i++) {
+            for (int j = 0; j < 2; j++) {
+               tonemapCurveGreen.tonemap_points[i][j] =
+                  frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
+               point++;
+            }
+        }
+        tonemapCurves.curves[0] = tonemapCurveGreen;
+
+        /* ch 1 = B */
+        point = 0;
+        cam_tonemap_curve_t tonemapCurveBlue;
+        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
+            for (int j = 0; j < 2; j++) {
+               tonemapCurveBlue.tonemap_points[i][j] =
+                  frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
+               point++;
+            }
+        }
+        tonemapCurves.curves[1] = tonemapCurveBlue;
+
+        /* ch 2 = R */
+        point = 0;
+        cam_tonemap_curve_t tonemapCurveRed;
+        for (int i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
+            for (int j = 0; j < 2; j++) {
+               tonemapCurveRed.tonemap_points[i][j] =
+                  frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
+               point++;
+            }
+        }
+        tonemapCurves.curves[2] = tonemapCurveRed;
+
+        rc = AddSetMetaEntryToBatch(hal_metadata,
+                CAM_INTF_META_TONEMAP_CURVES,
+                sizeof(tonemapCurves), &tonemapCurves);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
+        uint8_t captureIntent =
+            frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
+                sizeof(captureIntent), &captureIntent);
+    }
+
+    if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
+        uint8_t blackLevelLock =
+            frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
+                sizeof(blackLevelLock), &blackLevelLock);
+    }
+
+    if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
+        uint8_t lensShadingMapMode =
+            frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
+                sizeof(lensShadingMapMode), &lensShadingMapMode);
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
+        cam_area_t roi;
+        bool reset = true;
+        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
+        if (scalerCropSet) {
+            reset = resetIfNeededROI(&roi, &scalerCropRegion);
+        }
+        if (reset) {
+            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AEC_ROI,
+                    sizeof(roi), &roi);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
+        cam_area_t roi;
+        bool reset = true;
+        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
+        if (scalerCropSet) {
+            reset = resetIfNeededROI(&roi, &scalerCropRegion);
+        }
+        if (reset) {
+            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AF_ROI,
+                    sizeof(roi), &roi);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) {
+        cam_area_t roi;
+        bool reset = true;
+        convertFromRegions(&roi, request->settings, ANDROID_CONTROL_AWB_REGIONS);
+        if (scalerCropSet) {
+            reset = resetIfNeededROI(&roi, &scalerCropRegion);
+        }
+        if (reset) {
+            rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_AWB_REGIONS,
+                    sizeof(roi), &roi);
+        }
+    }
+
+    if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
+        cam_test_pattern_data_t testPatternData;
+        uint32_t fwk_testPatternMode = frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
+        uint8_t testPatternMode = lookupHalName(TEST_PATTERN_MAP,
+               sizeof(TEST_PATTERN_MAP), fwk_testPatternMode);
+
+        memset(&testPatternData, 0, sizeof(testPatternData));
+        testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
+        if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
+                frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
+            int32_t* fwk_testPatternData = frame_settings.find(
+                    ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
+            testPatternData.r = fwk_testPatternData[0];
+            testPatternData.b = fwk_testPatternData[3];
+            switch (gCamCapability[mCameraId]->color_arrangement) {
+            case CAM_FILTER_ARRANGEMENT_RGGB:
+            case CAM_FILTER_ARRANGEMENT_GRBG:
+                testPatternData.gr = fwk_testPatternData[1];
+                testPatternData.gb = fwk_testPatternData[2];
+                break;
+            case CAM_FILTER_ARRANGEMENT_GBRG:
+            case CAM_FILTER_ARRANGEMENT_BGGR:
+                testPatternData.gr = fwk_testPatternData[2];
+                testPatternData.gb = fwk_testPatternData[1];
+                break;
+            default:
+                ALOGE("%s: color arrangement %d is not supported", __func__,
+                    gCamCapability[mCameraId]->color_arrangement);
+                break;
+            }
+        }
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
+            sizeof(testPatternData), &testPatternData);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
+        double *gps_coords =
+            frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d;
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES, sizeof(double)*3, gps_coords);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
+        char gps_methods[GPS_PROCESSING_METHOD_SIZE];
+        const char *gps_methods_src = (const char *)
+                frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
+        uint32_t count = frame_settings.find(
+                ANDROID_JPEG_GPS_PROCESSING_METHOD).count;
+        memset(gps_methods, 0, sizeof(gps_methods));
+        strncpy(gps_methods, gps_methods_src, sizeof(gps_methods) - 1);
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS, sizeof(gps_methods), gps_methods);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
+        int64_t gps_timestamp =
+            frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP, sizeof(int64_t), &gps_timestamp);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
+        int32_t orientation =
+            frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, sizeof(orientation), &orientation);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
+        int8_t quality =
+            frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_QUALITY, sizeof(quality), &quality);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
+        int8_t thumb_quality =
+            frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY, sizeof(thumb_quality), &thumb_quality);
+    }
+
+    if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
+        cam_dimension_t dim;
+        dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
+        dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, sizeof(dim), &dim);
+    }
+
+    // Internal metadata
+    if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
+        uint8_t* privatedata =
+            frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS).data.u8;
+        rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
+            sizeof(uint8_t) * MAX_METADATA_PAYLOAD_SIZE, privatedata);
+    }
+
+    // EV step
+    rc = AddSetMetaEntryToBatch(hal_metadata, CAM_INTF_PARM_EV_STEP,
+            sizeof(cam_rational_type_t), &(gCamCapability[mCameraId]->exp_compensation_step));
+
+    return rc;
+}
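The tonemap handling above reshapes one flat framework array of interleaved (Pin, Pout) pairs into a per-channel point table. A minimal standalone sketch of that reshaping, using simplified stand-in structs rather than the real cam_rgb_tonemap_curves definition (array size and field names here are illustrative):

#include <cstdio>

// Simplified stand-ins for the camera interface structs (assumption:
// the real structs carry the same per-channel 2-column point layout).
struct TonemapCurve     { float points[64][2]; };
struct RgbTonemapCurves {
    int tonemap_points_cnt;
    TonemapCurve curves[3];      // 0 = G, 1 = B, 2 = R
};

// Reshape a flat framework array of interleaved (in, out) pairs into
// one channel of the HAL table, exactly like the loops above.
static void fillChannel(TonemapCurve &dst, const float *flat, int cnt) {
    int point = 0;
    for (int i = 0; i < cnt; i++)
        for (int j = 0; j < 2; j++)
            dst.points[i][j] = flat[point++];
}

int main() {
    const float green[] = {0.0f, 0.0f, 0.5f, 0.6f, 1.0f, 1.0f};  // 3 pairs
    RgbTonemapCurves curves = {};
    curves.tonemap_points_cnt = sizeof(green) / sizeof(green[0]) / 2;
    fillChannel(curves.curves[0], green, curves.tonemap_points_cnt);
    printf("G curve point 1: (%.1f, %.1f)\n",
           curves.curves[0].points[1][0], curves.curves[0].points[1][1]);
    return 0;
}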
+
+/*===========================================================================
+ * FUNCTION   : captureResultCb
+ *
+ * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
+ *
+ * PARAMETERS :
+ *   @metadata : metadata buffer from mm-camera-interface
+ *   @buffer   : actual gralloc buffer to be returned to framework. NULL if metadata.
+ *   @frame_number : frame number of the request
+ *   @userdata : user data pointer (the QCamera3HardwareInterface instance)
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
+                camera3_stream_buffer_t *buffer,
+                uint32_t frame_number, void *userdata)
+{
+    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
+    if (hw == NULL) {
+        ALOGE("%s: Invalid hw %p", __func__, hw);
+        return;
+    }
+
+    hw->captureResultCb(metadata, buffer, frame_number);
+    return;
+}
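This wrapper is the usual C-callback trampoline: the C interface only carries a void* userdata, so the static function recovers the object pointer and forwards to the member function. A minimal sketch of the same pattern outside the HAL (all names here are illustrative):

#include <cstdio>

class Handler {
public:
    // Member function that actually does the work.
    void onEvent(int value) { printf("got %d\n", value); }

    // Static trampoline matching a C-style callback signature.
    static void onEventCb(int value, void *userdata) {
        Handler *self = static_cast<Handler *>(userdata);
        if (self == nullptr) return;   // same NULL guard as above
        self->onEvent(value);
    }
};

// A C-style API that only knows about a function pointer plus userdata.
typedef void (*event_cb)(int value, void *userdata);
static void fire(event_cb cb, void *userdata) { cb(42, userdata); }

int main() {
    Handler h;
    fire(&Handler::onEventCb, &h);   // prints "got 42"
    return 0;
}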
+
+
+/*===========================================================================
+ * FUNCTION   : initialize
+ *
+ * DESCRIPTION: Pass framework callback pointers to HAL
+ *
+ * PARAMETERS :
+ *   @device       : camera3 device handle
+ *   @callback_ops : callback ops table supplied by the framework
+ *
+ * RETURN     : Success : 0
+ *              Failure: -ENODEV
+ *==========================================================================*/
+
+int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
+                                  const camera3_callback_ops_t *callback_ops)
+{
+    ALOGV("%s: E", __func__);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return -ENODEV;
+    }
+
+    int rc = hw->initialize(callback_ops);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : configure_streams
+ *
+ * DESCRIPTION: Set up the streams requested by the framework
+ *
+ * PARAMETERS :
+ *   @device      : camera3 device handle
+ *   @stream_list : stream configuration requested by the framework
+ *
+ * RETURN     : Success: 0
+ *              Failure: -EINVAL (if stream configuration is invalid)
+ *                       -ENODEV (fatal error)
+ *==========================================================================*/
+
+int QCamera3HardwareInterface::configure_streams(
+        const struct camera3_device *device,
+        camera3_stream_configuration_t *stream_list)
+{
+    ALOGV("%s: E", __func__);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return -ENODEV;
+    }
+    int rc = hw->configureStreams(stream_list);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : register_stream_buffers
+ *
+ * DESCRIPTION: Register stream buffers with the device
+ *
+ * PARAMETERS :
+ *   @device     : camera3 device handle
+ *   @buffer_set : buffers for a single stream, allocated by the framework
+ *
+ * RETURN     : Success: 0
+ *              Failure: -ENODEV, or error from registerStreamBuffers()
+ *==========================================================================*/
+int QCamera3HardwareInterface::register_stream_buffers(
+        const struct camera3_device *device,
+        const camera3_stream_buffer_set_t *buffer_set)
+{
+    ALOGV("%s: E", __func__);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return -ENODEV;
+    }
+    int rc = hw->registerStreamBuffers(buffer_set);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : construct_default_request_settings
+ *
+ * DESCRIPTION: Configure a settings buffer to meet the required use case
+ *
+ * PARAMETERS :
+ *   @device : camera3 device handle
+ *   @type   : request template type (preview, still capture, video, ...)
+ *
+ * RETURN     : Success: Return valid metadata
+ *              Failure: Return NULL
+ *==========================================================================*/
+const camera_metadata_t* QCamera3HardwareInterface::
+    construct_default_request_settings(const struct camera3_device *device,
+                                        int type)
+{
+
+    ALOGV("%s: E", __func__);
+    camera_metadata_t* fwk_metadata = NULL;
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return NULL;
+    }
+
+    fwk_metadata = hw->translateCapabilityToMetadata(type);
+
+    ALOGV("%s: X", __func__);
+    return fwk_metadata;
+}
+
+/*===========================================================================
+ * FUNCTION   : process_capture_request
+ *
+ * DESCRIPTION: Queue one capture request to the HAL
+ *
+ * PARAMETERS :
+ *   @device  : camera3 device handle
+ *   @request : capture request with settings and output buffers
+ *
+ * RETURN     : Success: 0
+ *              Failure: -EINVAL, or error from processCaptureRequest()
+ *==========================================================================*/
+int QCamera3HardwareInterface::process_capture_request(
+                    const struct camera3_device *device,
+                    camera3_capture_request_t *request)
+{
+    ALOGV("%s: E", __func__);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return -EINVAL;
+    }
+
+    int rc = hw->processCaptureRequest(request);
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dump
+ *
+ * DESCRIPTION: Dump HAL state into the given file descriptor (for bugreports)
+ *
+ * PARAMETERS :
+ *   @device : camera3 device handle
+ *   @fd     : file descriptor to write the dump to
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+
+void QCamera3HardwareInterface::dump(
+                const struct camera3_device *device, int fd)
+{
+    ALOGV("%s: E", __func__);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return;
+    }
+
+    hw->dump(fd);
+    ALOGV("%s: X", __func__);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: Flush all in-flight requests and return their buffers
+ *
+ * PARAMETERS :
+ *   @device : camera3 device handle
+ *
+ * RETURN     : Success: 0
+ *              Failure: -EINVAL, or error from flush()
+ *==========================================================================*/
+
+int QCamera3HardwareInterface::flush(
+                const struct camera3_device *device)
+{
+    int rc;
+    ALOGV("%s: E", __func__);
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
+    if (!hw) {
+        ALOGE("%s: NULL camera device", __func__);
+        return -EINVAL;
+    }
+
+    rc = hw->flush();
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : close_camera_device
+ *
+ * DESCRIPTION: Close the camera device and release the camera session
+ *
+ * PARAMETERS :
+ *   @device : hw device handle of the camera to close
+ *
+ * RETURN     : Success: 0
+ *              Failure: BAD_VALUE (NULL device)
+ *==========================================================================*/
+int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
+{
+    ALOGV("%s: E", __func__);
+    int ret = NO_ERROR;
+    QCamera3HardwareInterface *hw =
+        reinterpret_cast<QCamera3HardwareInterface *>(
+            reinterpret_cast<camera3_device_t *>(device)->priv);
+    if (!hw) {
+        ALOGE("NULL camera device");
+        return BAD_VALUE;
+    }
+    delete hw;
+
+    pthread_mutex_lock(&mCameraSessionLock);
+    mCameraSessionActive = 0;
+    pthread_mutex_unlock(&mCameraSessionLock);
+    ALOGV("%s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getWaveletDenoiseProcessPlate
+ *
+ * DESCRIPTION: query wavelet denoise process plate
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : WNR process plate value
+ *==========================================================================*/
+cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.denoise.process.plates", prop, "0");
+    int processPlate = atoi(prop);
+    switch(processPlate) {
+    case 0:
+        return CAM_WAVELET_DENOISE_YCBCR_PLANE;
+    case 1:
+        return CAM_WAVELET_DENOISE_CBCR_ONLY;
+    case 2:
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    case 3:
+        return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
+    default:
+        return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
+    }
+}
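The plate selection simply maps a persisted integer property onto an enum, defaulting to the streamlined YCbCr plate for anything out of range. A hedged sketch of the same mapping, using getenv and a hypothetical DENOISE_PLATE variable in place of the Android property_get call:

#include <cstdlib>
#include <cstdio>

enum DenoisePlate {              // stand-in for cam_denoise_process_type_t
    PLATE_YCBCR,
    PLATE_CBCR_ONLY,
    PLATE_STREAMLINE_YCBCR,
    PLATE_STREAMLINED_CBCR,
};

// Map "0".."3" to a plate, falling back to the streamlined YCbCr plate,
// mirroring the switch in getWaveletDenoiseProcessPlate().
static DenoisePlate plateFromString(const char *s) {
    switch (s ? atoi(s) : 0) {
    case 0:  return PLATE_YCBCR;
    case 1:  return PLATE_CBCR_ONLY;
    case 2:  return PLATE_STREAMLINE_YCBCR;
    case 3:  return PLATE_STREAMLINED_CBCR;
    default: return PLATE_STREAMLINE_YCBCR;
    }
}

int main() {
    // DENOISE_PLATE is a hypothetical environment variable standing in
    // for persist.denoise.process.plates.
    printf("plate = %d\n", plateFromString(getenv("DENOISE_PLATE")));
    return 0;
}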
+
+/*===========================================================================
+ * FUNCTION   : needRotationReprocess
+ *
+ * DESCRIPTION: check whether rotation needs to be done by reprocess in pp
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera3HardwareInterface::needRotationReprocess()
+{
+    if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
+        // current rotation is not zero, and pp has the capability to process rotation
+        ALOGD("%s: need do reprocess for rotation", __func__);
+        return true;
+    }
+
+    return false;
+}
+
+/*===========================================================================
+ * FUNCTION   : needReprocess
+ *
+ * DESCRIPTION: check whether reprocess is needed
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : true: needed
+ *              false: no need
+ *==========================================================================*/
+bool QCamera3HardwareInterface::needReprocess()
+{
+    if (gCamCapability[mCameraId]->min_required_pp_mask > 0) {
+        // TODO: add for ZSL HDR later
+        // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
+        ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__);
+        return true;
+    }
+    return needRotationReprocess();
+}
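Both checks are plain bitmask tests against the capability the backend advertises. A small standalone sketch of the same test with illustrative mask values (the real CAM_QCOM_FEATURE_* constants live in the camera interface headers):

#include <cstdint>
#include <cstdio>

// Illustrative bit values only; the real masks come from the HAL headers.
static const uint32_t FEATURE_ROTATION  = 1u << 0;
static const uint32_t FEATURE_DENOISE2D = 1u << 1;

static bool needRotationReprocess(uint32_t supported_mask) {
    // Reprocess is required only if the pp block can rotate at all.
    return (supported_mask & FEATURE_ROTATION) != 0;
}

static bool needReprocess(uint32_t supported_mask, uint32_t min_pp_mask) {
    if (min_pp_mask > 0)                 // backend insists on some pp
        return true;
    return needRotationReprocess(supported_mask);
}

int main() {
    printf("%d\n", needReprocess(FEATURE_ROTATION | FEATURE_DENOISE2D, 0));
    return 0;
}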
+
+/*===========================================================================
+ * FUNCTION   : addOfflineReprocChannel
+ *
+ * DESCRIPTION: add a reprocess channel that will do reprocess on frames
+ *              coming from input channel
+ *
+ * PARAMETERS :
+ *   @pInputChannel : ptr to input channel whose frames will be post-processed
+ *
+ * RETURN     : Ptr to the newly created channel obj. NULL if failed.
+ *==========================================================================*/
+QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
+              QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata)
+{
+    int32_t rc = NO_ERROR;
+    QCamera3ReprocessChannel *pChannel = NULL;
+    if (pInputChannel == NULL) {
+        ALOGE("%s: input channel obj is NULL", __func__);
+        return NULL;
+    }
+
+    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
+            mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle);
+    if (NULL == pChannel) {
+        ALOGE("%s: no mem for reprocess channel", __func__);
+        return NULL;
+    }
+
+    rc = pChannel->initialize();
+    if (rc != NO_ERROR) {
+        ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc);
+        delete pChannel;
+        return NULL;
+    }
+
+    // pp feature config
+    cam_pp_feature_config_t pp_config;
+    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
+
+    if (IS_PARM_VALID(CAM_INTF_META_EDGE_MODE, metadata)) {
+        cam_edge_application_t *edge = (cam_edge_application_t *)
+                POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata);
+        if (edge->edge_mode != CAM_EDGE_MODE_OFF) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
+            pp_config.sharpness = edge->sharpness;
+        }
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
+        uint8_t *noise_mode = (uint8_t *)POINTER_OF(
+                CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
+        if (*noise_mode != CAM_NOISE_REDUCTION_MODE_OFF) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
+            pp_config.denoise2d.denoise_enable = 1;
+            pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate();
+        }
+    }
+
+    if (IS_PARM_VALID(CAM_INTF_META_JPEG_ORIENTATION, metadata)) {
+        int32_t *rotation = (int32_t *)POINTER_OF(
+                CAM_INTF_META_JPEG_ORIENTATION, metadata);
+
+        if (needRotationReprocess()) {
+            pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION;
+            if (*rotation == 0) {
+                pp_config.rotation = ROTATE_0;
+            } else if (*rotation == 90) {
+                pp_config.rotation = ROTATE_90;
+            } else if (*rotation == 180) {
+                pp_config.rotation = ROTATE_180;
+            } else if (*rotation == 270) {
+                pp_config.rotation = ROTATE_270;
+            }
+        }
+    }
+
+    rc = pChannel->addReprocStreamsFromSource(pp_config,
+                                             pInputChannel,
+                                             mMetadataChannel);
+
+    if (rc != NO_ERROR) {
+        delete pChannel;
+        return NULL;
+    }
+    return pChannel;
+}
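The JPEG orientation handling above is a straight degrees-to-enum mapping folded into the post-processing feature mask. A standalone sketch of that mapping, with stand-in names for the rotation enum and the feature bit:

#include <cstdint>
#include <cstdio>

enum Rotation { ROT_0, ROT_90, ROT_180, ROT_270 };   // stand-in for ROTATE_*

struct PpConfig {                    // simplified cam_pp_feature_config_t
    uint32_t feature_mask;
    Rotation rotation;
};

static const uint32_t FEATURE_ROTATION = 1u << 0;    // illustrative bit

// Translate the JPEG orientation (degrees) into the pp config, as the
// CAM_INTF_META_JPEG_ORIENTATION branch above does.
static void applyRotation(PpConfig &cfg, int degrees) {
    cfg.feature_mask |= FEATURE_ROTATION;
    switch (degrees) {
    case 90:  cfg.rotation = ROT_90;  break;
    case 180: cfg.rotation = ROT_180; break;
    case 270: cfg.rotation = ROT_270; break;
    default:  cfg.rotation = ROT_0;   break;
    }
}

int main() {
    PpConfig cfg = {};
    applyRotation(cfg, 270);
    printf("mask=0x%x rotation=%d\n", cfg.feature_mask, cfg.rotation);
    return 0;
}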
+
+}; //end namespace qcamera
diff --git a/camera/QCamera2/HAL3/QCamera3HWI.h b/camera/QCamera2/HAL3/QCamera3HWI.h
new file mode 100644
index 0000000..cfb9586
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3HWI.h
@@ -0,0 +1,306 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3HARDWAREINTERFACE_H__
+#define __QCAMERA3HARDWAREINTERFACE_H__
+
+#include <pthread.h>
+#include <utils/List.h>
+#include <utils/KeyedVector.h>
+#include <hardware/camera3.h>
+#include <camera/CameraMetadata.h>
+#include "QCamera3HALHeader.h"
+#include "QCamera3Channel.h"
+
+#include <hardware/power.h>
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+/* Time related macros */
+typedef int64_t nsecs_t;
+#define NSEC_PER_SEC 1000000000LL
+#define NSEC_PER_USEC 1000
+#define NSEC_PER_33MSEC 33000000LL
+
+class QCamera3MetadataChannel;
+class QCamera3PicChannel;
+class QCamera3HeapMemory;
+class QCamera3Exif;
+
+typedef struct {
+    camera3_stream_t *stream;
+    stream_status_t status;
+    QCamera3Channel *channel;
+} stream_info_t;
+
+class QCamera3HardwareInterface {
+public:
+    /* static variable and functions accessed by camera service */
+    static camera3_device_ops_t mCameraOps;
+    static int initialize(const struct camera3_device *,
+                const camera3_callback_ops_t *callback_ops);
+    static int configure_streams(const struct camera3_device *,
+                camera3_stream_configuration_t *stream_list);
+    static int register_stream_buffers(const struct camera3_device *,
+                const camera3_stream_buffer_set_t *buffer_set);
+    static const camera_metadata_t* construct_default_request_settings(
+                                const struct camera3_device *, int type);
+    static int process_capture_request(const struct camera3_device *,
+                                camera3_capture_request_t *request);
+
+    static void dump(const struct camera3_device *, int fd);
+    static int flush(const struct camera3_device *);
+    static int close_camera_device(struct hw_device_t* device);
+
+public:
+    QCamera3HardwareInterface(int cameraId,
+            const camera_module_callbacks_t *callbacks);
+    virtual ~QCamera3HardwareInterface();
+    int openCamera(struct hw_device_t **hw_device);
+    int getMetadata(int type);
+    camera_metadata_t* translateCapabilityToMetadata(int type);
+
+    static int getCamInfo(int cameraId, struct camera_info *info);
+    static int initCapabilities(int cameraId);
+    static int initStaticMetadata(int cameraId);
+    static void makeTable(cam_dimension_t* dimTable, uint8_t size, int32_t* sizeTable);
+    static void makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size,
+                                          int32_t* fpsRangesTable);
+    static void makeOverridesList(cam_scene_mode_overrides_t* overridesTable, uint8_t size,
+                                   uint8_t* overridesList, uint8_t* supported_indexes, int camera_id);
+    static void convertToRegions(cam_rect_t rect, int32_t* region, int weight);
+    static void convertFromRegions(cam_area_t* roi, const camera_metadata_t *settings,
+                                   uint32_t tag);
+    static bool resetIfNeededROI(cam_area_t* roi, const cam_crop_region_t* scalerCropRegion);
+    static void convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks);
+    static void postproc_channel_cb_routine(mm_camera_super_buf_t *recvd_frame,
+                                            void *userdata);
+    static int32_t getScalarFormat(int32_t format);
+    static int32_t getSensorSensitivity(int32_t iso_mode);
+    static void captureResultCb(mm_camera_super_buf_t *metadata,
+                camera3_stream_buffer_t *buffer, uint32_t frame_number,
+                void *userdata);
+
+    int initialize(const camera3_callback_ops_t *callback_ops);
+    int configureStreams(camera3_stream_configuration_t *stream_list);
+    int registerStreamBuffers(const camera3_stream_buffer_set_t *buffer_set);
+    int processCaptureRequest(camera3_capture_request_t *request);
+    void dump(int fd);
+    int flush();
+
+    int setFrameParameters(camera3_capture_request_t *request,
+            cam_stream_ID_t streamID);
+    int setReprocParameters(camera3_capture_request_t *request);
+    int translateToHalMetadata(const camera3_capture_request_t *request,
+            metadata_buffer_t *parm);
+    camera_metadata_t* translateCbUrgentMetadataToResultMetadata (
+                             metadata_buffer_t *metadata);
+
+    camera_metadata_t* translateFromHalMetadata(metadata_buffer_t *metadata,
+                            nsecs_t timestamp, int32_t request_id,
+                            const CameraMetadata& jpegMetadata, uint8_t pipeline_depth);
+    int getJpegSettings(const camera_metadata_t *settings);
+    int initParameters();
+    void deinitParameters();
+    QCamera3ReprocessChannel *addOfflineReprocChannel(QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle, metadata_buffer_t *metadata);
+    bool needRotationReprocess();
+    bool needReprocess();
+    bool isWNREnabled();
+    cam_denoise_process_type_t getWaveletDenoiseProcessPlate();
+
+    void captureResultCb(mm_camera_super_buf_t *metadata,
+                camera3_stream_buffer_t *buffer, uint32_t frame_number);
+
+    typedef struct {
+        uint32_t fwk_name;
+        uint8_t hal_name;
+    } QCameraMap;
+
+private:
+
+    int openCamera();
+    int closeCamera();
+    int AddSetMetaEntryToBatch(metadata_buffer_t *p_table,
+                               unsigned int paramType,
+                               uint32_t paramLength,
+                               void *paramValue);
+    static int8_t lookupHalName(const QCameraMap arr[],
+                      int len, unsigned int fwk_name);
+    static int32_t lookupFwkName(const QCameraMap arr[],
+                      int len, int hal_name);
+
+    int validateCaptureRequest(camera3_capture_request_t *request);
+
+    void deriveMinFrameDuration();
+    int64_t getMinFrameDuration(const camera3_capture_request_t *request);
+
+    void handleMetadataWithLock(mm_camera_super_buf_t *metadata_buf);
+    void handleBufferWithLock(camera3_stream_buffer_t *buffer,
+        uint32_t frame_number);
+    void unblockRequestIfNecessary();
+    void dumpMetadataToFile(tuning_params_t &meta,
+                            uint32_t &dumpFrameCount,
+                            int32_t enabled,
+                            const char *type,
+                            uint32_t frameNumber);
+
+    void cleanAndSortStreamInfo();
+    void extractJpegMetadata(CameraMetadata& jpegMetadata,
+            const camera3_capture_request_t *request);
+public:
+
+    bool needOnlineRotation();
+    int getJpegQuality();
+    int calcMaxJpegSize();
+    QCamera3Exif *getExifData();
+public:
+    static int kMaxInFlight;
+private:
+    camera3_device_t   mCameraDevice;
+    uint8_t            mCameraId;
+    mm_camera_vtbl_t  *mCameraHandle;
+    bool               mCameraOpened;
+    bool               mCameraInitialized;
+    camera_metadata_t *mDefaultMetadata[CAMERA3_TEMPLATE_COUNT];
+    int mBlobRequest;
+
+    const camera3_callback_ops_t *mCallbackOps;
+
+    camera3_stream_t *mInputStream;
+    QCamera3MetadataChannel *mMetadataChannel;
+    QCamera3PicChannel *mPictureChannel;
+    QCamera3RawChannel *mRawChannel;
+    QCamera3SupportChannel *mSupportChannel;
+
+    // First request yet to be processed after configureStreams
+    bool mFirstRequest;
+    bool mRepeatingRequest;
+    QCamera3HeapMemory *mParamHeap;
+    metadata_buffer_t* mParameters;
+    metadata_buffer_t* mPrevParameters;
+    bool m_bWNROn;
+
+    /* Data structure to store pending request */
+    typedef struct {
+        camera3_stream_t *stream;
+        camera3_stream_buffer_t *buffer;
+    } RequestedBufferInfo;
+    typedef struct {
+        uint32_t frame_number;
+        uint32_t num_buffers;
+        int32_t request_id;
+        List<RequestedBufferInfo> buffers;
+        int blob_request;
+        nsecs_t timestamp;
+        uint8_t bNotified;
+        int input_buffer_present;
+        CameraMetadata jpegMetadata;
+        uint8_t pipeline_depth;
+    } PendingRequestInfo;
+    typedef struct {
+        uint32_t frame_number;
+        uint32_t stream_ID;
+    } PendingFrameDropInfo;
+
+    // Store the Pending buffers for Flushing
+    typedef struct {
+        // Frame number pertaining to the buffer
+        uint32_t frame_number;
+        camera3_stream_t *stream;
+        // Buffer handle
+        buffer_handle_t *buffer;
+    } PendingBufferInfo;
+
+    typedef struct {
+        // Total number of buffer requests pending
+        uint32_t num_buffers;
+        // List of pending buffers
+        List<PendingBufferInfo> mPendingBufferList;
+    } PendingBuffersMap;
+
+    List<PendingRequestInfo> mPendingRequestsList;
+    List<PendingFrameDropInfo> mPendingFrameDropList;
+    PendingBuffersMap mPendingBuffersMap;
+    pthread_cond_t mRequestCond;
+    int mPendingRequest;
+    int32_t mCurrentRequestId;
+    camera3_capture_result_t *mLoopBackResult;
+    nsecs_t mLoopBackTimestamp;
+
+    //mutex for serialized access to camera3_device_ops_t functions
+    pthread_mutex_t mMutex;
+
+    List<stream_info_t*> mStreamInfo;
+
+    int64_t mMinProcessedFrameDuration;
+    int64_t mMinJpegFrameDuration;
+    int64_t mMinRawFrameDuration;
+
+    power_module_t *m_pPowerModule;   // power module
+
+#ifdef HAS_MULTIMEDIA_HINTS
+    bool mHdrHint;
+#endif
+    uint32_t mMetaFrameCount;
+    const camera_module_callbacks_t *mCallbacks;
+
+    static const QCameraMap EFFECT_MODES_MAP[];
+    static const QCameraMap WHITE_BALANCE_MODES_MAP[];
+    static const QCameraMap SCENE_MODES_MAP[];
+    static const QCameraMap FOCUS_MODES_MAP[];
+    static const QCameraMap ANTIBANDING_MODES_MAP[];
+    static const QCameraMap AE_FLASH_MODE_MAP[];
+    static const QCameraMap FLASH_MODES_MAP[];
+    static const QCameraMap FACEDETECT_MODES_MAP[];
+    static const QCameraMap FOCUS_CALIBRATION_MAP[];
+    static const QCameraMap TEST_PATTERN_MAP[];
+    static const QCameraMap REFERENCE_ILLUMINANT_MAP[];
+
+    static pthread_mutex_t mCameraSessionLock;
+    static unsigned int mCameraSessionActive;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3HARDWAREINTERFACE_H__ */
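The QCameraMap tables declared above (EFFECT_MODES_MAP, TEST_PATTERN_MAP, ...) pair a framework enum value with the HAL-side value, and lookupHalName/lookupFwkName walk them linearly. A minimal sketch of that lookup, assuming only the two-field layout of the nested typedef (table contents are illustrative):

#include <cstdint>
#include <cstdio>

struct QCameraMap {            // same shape as the nested typedef above
    uint32_t fwk_name;
    uint8_t  hal_name;
};

// Example table with illustrative values only.
static const QCameraMap kExampleMap[] = {
    { 0, 10 },
    { 1, 11 },
    { 2, 12 },
};

// Linear search from framework value to HAL value; -1 if not found,
// mirroring how lookupHalName is used in QCamera3HWI.cpp.
static int lookupHal(const QCameraMap *arr, int len, uint32_t fwk) {
    for (int i = 0; i < len; i++)
        if (arr[i].fwk_name == fwk)
            return arr[i].hal_name;
    return -1;
}

int main() {
    int n = sizeof(kExampleMap) / sizeof(kExampleMap[0]);
    printf("fwk 2 -> hal %d\n", lookupHal(kExampleMap, n, 2));
    return 0;
}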
diff --git a/camera/QCamera2/HAL3/QCamera3Hal.cpp b/camera/QCamera2/HAL3/QCamera3Hal.cpp
new file mode 100644
index 0000000..673dc62
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Hal.cpp
@@ -0,0 +1,53 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#include "QCamera3Factory.h"
+#include "QCamera3VendorTags.h"
+
+static hw_module_t camera_common = {
+    tag: HARDWARE_MODULE_TAG,
+    module_api_version: CAMERA_MODULE_API_VERSION_2_3,
+    hal_api_version: HARDWARE_HAL_API_VERSION,
+    id: CAMERA_HARDWARE_MODULE_ID,
+    name: "QCamera Module",
+    author: "Qualcomm Innovation Center Inc",
+    methods: &qcamera::QCamera3Factory::mModuleMethods,
+    dso: NULL,
+    reserved:  {0},
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+    common: camera_common,
+    get_number_of_cameras: qcamera::QCamera3Factory::get_number_of_cameras,
+    get_camera_info: qcamera::QCamera3Factory::get_camera_info,
+    set_callbacks: qcamera::QCamera3Factory::set_callbacks,
+    get_vendor_tag_ops: qcamera::QCamera3VendorTags::get_vendor_tag_ops,
+    open_legacy: qcamera::QCamera3Factory::open_legacy,
+    reserved: {0}
+};
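HAL_MODULE_INFO_SYM is the symbol the HAL loader resolves; a client such as camera service reaches it through libhardware's hw_get_module(). A hedged sketch of how a caller would locate this module and open a camera device (error handling trimmed; the camera id string "0" and the helper name open_camera0 are illustrative):

// Sketch only; compiles against the Android headers already used in this
// tree and goes through QCamera3Factory::mModuleMethods declared above.
#include <hardware/hardware.h>
#include <hardware/camera3.h>

static camera3_device_t *open_camera0() {
    const hw_module_t *mod = NULL;
    if (hw_get_module(CAMERA_HARDWARE_MODULE_ID, &mod) != 0)
        return NULL;                       // module not found

    hw_device_t *dev = NULL;
    // methods->open() lands in the factory's module methods above.
    if (mod->methods->open(mod, "0", &dev) != 0)
        return NULL;

    return reinterpret_cast<camera3_device_t *>(dev);
}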
diff --git a/camera/QCamera2/HAL3/QCamera3Mem.cpp b/camera/QCamera2/HAL3/QCamera3Mem.cpp
new file mode 100644
index 0000000..bb98563
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Mem.cpp
@@ -0,0 +1,854 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#define LOG_TAG "QCamera3HWI_Mem"
+
+#include <string.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include <gralloc_priv.h>
+#include "QCamera3Mem.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+using namespace android;
+
+namespace qcamera {
+
+// QCamera3Memory base class
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Memory
+ *
+ * DESCRIPTION: default constructor of QCamera3Memory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Memory::QCamera3Memory()
+{
+    mBufferCount = 0;
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i++) {
+        mMemInfo[i].fd = 0;
+        mMemInfo[i].main_ion_fd = 0;
+        mMemInfo[i].handle = NULL;
+        mMemInfo[i].size = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Memory
+ *
+ * DESCRIPTION: destructor of QCamera3Memory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Memory::~QCamera3Memory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOpsInternal
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *   @vaddr   : ptr to the virtual address
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3Memory::cacheOpsInternal(int index, unsigned int cmd, void *vaddr)
+{
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = OK;
+
+    if (index >= mBufferCount) {
+        ALOGE("%s: index %d out of bound [0, %d)", __func__, index, mBufferCount);
+        return BAD_INDEX;
+    }
+
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = vaddr;
+    cache_inv_data.fd = mMemInfo[index].fd;
+    cache_inv_data.handle = mMemInfo[index].handle;
+    cache_inv_data.length = mMemInfo[index].size;
+    custom_data.cmd = cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    ALOGV("%s: addr = %p, fd = %d, handle = %p length = %d, ION Fd = %d",
+         __func__, cache_inv_data.vaddr, cache_inv_data.fd,
+         cache_inv_data.handle, cache_inv_data.length,
+         mMemInfo[index].main_ion_fd);
+    ret = ioctl(mMemInfo[index].main_ion_fd, ION_IOC_CUSTOM, &custom_data);
+    if (ret < 0)
+        ALOGE("%s: Cache Invalidate failed: %s\n", __func__, strerror(errno));
+
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFd
+ *
+ * DESCRIPTION: return file descriptor of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : file descriptor
+ *==========================================================================*/
+int QCamera3Memory::getFd(int index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return mMemInfo[index].fd;
+}
+
+/*===========================================================================
+ * FUNCTION   : getSize
+ *
+ * DESCRIPTION: return buffer size of the indexed buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer size
+ *==========================================================================*/
+int QCamera3Memory::getSize(int index) const
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+
+    return (int)mMemInfo[index].size;
+}
+
+/*===========================================================================
+ * FUNCTION   : getCnt
+ *
+ * DESCRIPTION: query number of buffers allocated
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : number of buffers allocated
+ *==========================================================================*/
+int QCamera3Memory::getCnt() const
+{
+    return mBufferCount;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufDef
+ *
+ * DESCRIPTION: query detailed buffer information
+ *
+ * PARAMETERS :
+ *   @offset  : [input] frame buffer offset
+ *   @bufDef  : [output] reference to struct to store buffer definition
+ *   @index   : [input] index of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Memory::getBufDef(const cam_frame_len_offset_t &offset,
+        mm_camera_buf_def_t &bufDef, int index) const
+{
+    if (!mBufferCount) {
+        ALOGE("Memory not allocated");
+        return NO_INIT;
+    }
+
+    bufDef.fd = mMemInfo[index].fd;
+    bufDef.frame_len = mMemInfo[index].size;
+    bufDef.mem_info = (void *)this;
+    bufDef.num_planes = offset.num_planes;
+    bufDef.buffer = getPtr(index);
+    bufDef.buf_idx = index;
+
+    /* Plane 0 needs to be set separately. Set other planes in a loop */
+    bufDef.planes[0].length = offset.mp[0].len;
+    bufDef.planes[0].m.userptr = mMemInfo[index].fd;
+    bufDef.planes[0].data_offset = offset.mp[0].offset;
+    bufDef.planes[0].reserved[0] = 0;
+    for (int i = 1; i < bufDef.num_planes; i++) {
+         bufDef.planes[i].length = offset.mp[i].len;
+         bufDef.planes[i].m.userptr = mMemInfo[i].fd;
+         bufDef.planes[i].data_offset = offset.mp[i].offset;
+         bufDef.planes[i].reserved[0] =
+                 bufDef.planes[i-1].reserved[0] +
+                 bufDef.planes[i-1].length;
+    }
+
+    return NO_ERROR;
+}
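getBufDef packs one ION buffer into the mm-camera buf_def, with plane i's reserved[0] carrying the running byte offset of that plane inside the buffer. A small sketch of just that accumulation, with stand-in structs and illustrative plane sizes:

#include <cstdio>

struct Plane { int length; int offset_in_buf; };

// Accumulate per-plane start offsets the same way the loop above fills
// planes[i].reserved[0]: each plane starts where the previous one ended.
static void fillOffsets(Plane *planes, const int *lengths, int num) {
    planes[0].length = lengths[0];
    planes[0].offset_in_buf = 0;
    for (int i = 1; i < num; i++) {
        planes[i].length = lengths[i];
        planes[i].offset_in_buf =
            planes[i - 1].offset_in_buf + planes[i - 1].length;
    }
}

int main() {
    const int lengths[3] = {4096, 2048, 2048};   // e.g. Y + Cb + Cr planes
    Plane planes[3];
    fillOffsets(planes, lengths, 3);
    printf("plane 2 starts at %d\n", planes[2].offset_in_buf);   // 6144
    return 0;
}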
+
+/*===========================================================================
+ * FUNCTION   : QCamera3HeapMemory
+ *
+ * DESCRIPTION: constructor of QCamera3HeapMemory for ion memory used internally in HAL
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HeapMemory::QCamera3HeapMemory()
+    : QCamera3Memory()
+{
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++)
+        mPtr[i] = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3HeapMemory
+ *
+ * DESCRIPTION: destructor of QCamera3HeapMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3HeapMemory::~QCamera3HeapMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : alloc
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @heap_id : heap id to indicate where the buffers will be allocated from
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::alloc(int count, int size, int heap_id)
+{
+    int rc = OK;
+    if (count > MM_CAMERA_MAX_NUM_FRAMES) {
+        ALOGE("Buffer count %d out of bound. Max is %d", count, MM_CAMERA_MAX_NUM_FRAMES);
+        return BAD_INDEX;
+    }
+    if (mBufferCount) {
+        ALOGE("Allocating a already allocated heap memory");
+        return INVALID_OPERATION;
+    }
+
+    for (int i = 0; i < count; i ++) {
+        rc = allocOneBuffer(mMemInfo[i], heap_id, size);
+        if (rc < 0) {
+            ALOGE("AllocateIonMemory failed");
+            for (int j = i-1; j >= 0; j--)
+                deallocOneBuffer(mMemInfo[j]);
+            break;
+        }
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dealloc
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3HeapMemory::dealloc()
+{
+    for (int i = 0; i < mBufferCount; i++)
+        deallocOneBuffer(mMemInfo[i]);
+}
+
+/*===========================================================================
+ * FUNCTION   : allocOneBuffer
+ *
+ * DESCRIPTION: impl of allocating one buffer of a certain size
+ *
+ * PARAMETERS :
+ *   @memInfo : [output] reference to struct to store additional memory allocation info
+ *   @heap    : [input] heap id to indicate where the buffers will be allocated from
+ *   @size    : [input] length of the buffer to be allocated
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::allocOneBuffer(QCamera3MemInfo &memInfo, int heap_id, int size)
+{
+    int rc = OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data alloc;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = 0;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd <= 0) {
+        ALOGE("Ion dev open failed: %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095) & (~4095);
+    alloc.align = 4096;
+    alloc.flags = ION_FLAG_CACHED;
+    alloc.heap_id_mask = heap_id;
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        ALOGE("ION allocation for len %d failed: %s\n", alloc.len,
+            strerror(errno));
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        ALOGE("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    memInfo.main_ion_fd = main_ion_fd;
+    memInfo.fd = ion_info_fd.fd;
+    memInfo.handle = ion_info_fd.handle;
+    memInfo.size = alloc.len;
+    return OK;
+
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return NO_MEMORY;
+}
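The allocation rounds the requested length up to a 4 KiB page boundary with (len + 4095) & ~4095 before asking ION for memory. A tiny worked example of that rounding:

#include <cstdio>

// Round len up to the next multiple of 4096, as allocOneBuffer() does.
static unsigned long pageAlign(unsigned long len) {
    return (len + 4095UL) & ~4095UL;
}

int main() {
    printf("%lu -> %lu\n", 1UL,    pageAlign(1));      // 1 -> 4096
    printf("%lu -> %lu\n", 4096UL, pageAlign(4096));   // stays 4096
    printf("%lu -> %lu\n", 6000UL, pageAlign(6000));   // 6000 -> 8192
    return 0;
}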
+
+/*===========================================================================
+ * FUNCTION   : deallocOneBuffer
+ *
+ * DESCRIPTION: impl of deallocating one buffer
+ *
+ * PARAMETERS :
+ *   @memInfo : reference to struct that stores additional memory allocation info
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3HeapMemory::deallocOneBuffer(QCamera3MemInfo &memInfo)
+{
+    struct ion_handle_data handle_data;
+
+    if (memInfo.fd > 0) {
+        close(memInfo.fd);
+        memInfo.fd = 0;
+    }
+
+    if (memInfo.main_ion_fd > 0) {
+        memset(&handle_data, 0, sizeof(handle_data));
+        handle_data.handle = memInfo.handle;
+        ioctl(memInfo.main_ion_fd, ION_IOC_FREE, &handle_data);
+        close(memInfo.main_ion_fd);
+        memInfo.main_ion_fd = 0;
+    }
+    memInfo.handle = NULL;
+    memInfo.size = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCamera3HeapMemory::getPtr(int index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : allocate
+ *
+ * DESCRIPTION: allocate requested number of buffers of certain size
+ *
+ * PARAMETERS :
+ *   @count   : number of buffers to be allocated
+ *   @size    : length of the buffer to be allocated
+ *   @queueAll: whether to queue all allocated buffers at the beginning
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::allocate(int count, int size, bool queueAll)
+{
+    int heap_mask = 0x1 << ION_IOMMU_HEAP_ID;
+    int rc = alloc(count, size, heap_mask);
+    if (rc < 0)
+        return rc;
+
+    for (int i = 0; i < count; i ++) {
+        void *vaddr = mmap(NULL,
+                    mMemInfo[i].size,
+                    PROT_READ | PROT_WRITE,
+                    MAP_SHARED,
+                    mMemInfo[i].fd, 0);
+        if (vaddr == MAP_FAILED) {
+            // Unmap whatever was mapped so far and bail out
+            for (int j = i-1; j >= 0; j--) {
+                munmap(mPtr[j], mMemInfo[j].size);
+                mPtr[j] = NULL;
+            }
+            rc = NO_MEMORY;
+            break;
+        } else
+            mPtr[i] = vaddr;
+    }
+    if (rc == 0)
+        mBufferCount = count;
+
+    mQueueAll = queueAll;
+    return rc;
+}
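Putting the heap allocator together, the expected call sequence is allocate, then getPtr/getFd/getSize per buffer, then deallocate. A hedged usage sketch against the class as defined here (buffer count and size are illustrative; it builds as part of the HAL and needs an accessible /dev/ion at runtime):

// Usage sketch only; the helper name exerciseHeap is illustrative.
#include <cstdio>
#include "QCamera3Mem.h"

static int exerciseHeap() {
    qcamera::QCamera3HeapMemory heap;

    // Two 64 KiB buffers, not queued up front (queueAll = false).
    int rc = heap.allocate(2, 64 * 1024, false);
    if (rc != 0)
        return rc;

    for (int i = 0; i < heap.getCnt(); i++) {
        printf("buf %d: ptr=%p fd=%d size=%d\n",
               i, heap.getPtr(i), heap.getFd(i), heap.getSize(i));
    }

    heap.deallocate();          // munmap + ION free for every buffer
    return 0;
}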
+
+/*===========================================================================
+ * FUNCTION   : deallocate
+ *
+ * DESCRIPTION: deallocate buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3HeapMemory::deallocate()
+{
+    for (int i = 0; i < mBufferCount; i++) {
+        munmap(mPtr[i], mMemInfo[i].size);
+        mPtr[i] = NULL;
+    }
+    dealloc();
+    mBufferCount = 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::cacheOps(int index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mPtr[index]);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3HeapMemory::getRegFlags(uint8_t * regFlags) const
+{
+    int i;
+    for (i = 0; i < mBufferCount; i ++)
+        regFlags[i] = (mQueueAll ? 1 : 0);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by object ptr
+ *
+ * PARAMETERS :
+ *   @object  : object ptr
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCamera3HeapMemory::getMatchBufIndex(void * /*object*/)
+{
+
+/*
+    TODO for HEAP memory type, would there be an equivalent requirement?
+
+    int index = -1;
+    buffer_handle_t *key = (buffer_handle_t*) object;
+    if (!key) {
+        return BAD_VALUE;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mBufferHandle[i] == key) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+*/
+    ALOGE("%s: FATAL: Not supposed to come here", __func__);
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3GrallocMemory
+ *
+ * DESCRIPTION: constructor of QCamera3GrallocMemory
+ *              preview stream buffers are allocated from the gralloc native window
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3GrallocMemory::QCamera3GrallocMemory()
+        : QCamera3Memory()
+{
+    for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++) {
+        mBufferHandle[i] = NULL;
+        mPrivateHandle[i] = NULL;
+        mCurrentFrameNumbers[i] = -1;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3GrallocMemory
+ *
+ * DESCRIPTION: destructor of QCamera3GrallocMemory
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+QCamera3GrallocMemory::~QCamera3GrallocMemory()
+{
+}
+
+/*===========================================================================
+ * FUNCTION   : registerBuffer
+ *
+ * DESCRIPTION: registers frameworks-allocated gralloc buffer_handle_t
+ *
+ * PARAMETERS :
+ *   @buffers : buffer_handle_t pointer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3GrallocMemory::registerBuffer(buffer_handle_t *buffer)
+{
+    status_t ret = NO_ERROR;
+    struct ion_fd_data ion_info_fd;
+    void *vaddr = NULL;
+    ALOGV(" %s : E ", __FUNCTION__);
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+
+    if (mBufferCount >= (MM_CAMERA_MAX_NUM_FRAMES - 1)) {
+        ALOGE("%s: Number of buffers %d greater than what's supported %d",
+            __func__, mBufferCount, MM_CAMERA_MAX_NUM_FRAMES);
+        return -EINVAL;
+    }
+
+    if (0 <= getMatchBufIndex((void *) buffer)) {
+        ALOGV("%s: Buffer already registered", __func__);
+        return ALREADY_EXISTS;
+    }
+
+    mBufferHandle[mBufferCount] = buffer;
+    mPrivateHandle[mBufferCount] =
+        (struct private_handle_t *)(*mBufferHandle[mBufferCount]);
+    mMemInfo[mBufferCount].main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (mMemInfo[mBufferCount].main_ion_fd < 0) {
+        ALOGE("%s: failed: could not open ion device", __func__);
+        ret = NO_MEMORY;
+        goto end;
+    } else {
+        ion_info_fd.fd = mPrivateHandle[mBufferCount]->fd;
+        if (ioctl(mMemInfo[mBufferCount].main_ion_fd,
+                  ION_IOC_IMPORT, &ion_info_fd) < 0) {
+            ALOGE("%s: ION import failed\n", __func__);
+            close(mMemInfo[mBufferCount].main_ion_fd);
+            ret = NO_MEMORY;
+            goto end;
+        }
+    }
+    ALOGV("%s: idx = %d, fd = %d, size = %d, offset = %d",
+            __func__, mBufferCount, mPrivateHandle[mBufferCount]->fd,
+            mPrivateHandle[mBufferCount]->size,
+            mPrivateHandle[mBufferCount]->offset);
+    mMemInfo[mBufferCount].fd =
+            mPrivateHandle[mBufferCount]->fd;
+    mMemInfo[mBufferCount].size =
+            mPrivateHandle[mBufferCount]->size;
+    mMemInfo[mBufferCount].handle = ion_info_fd.handle;
+
+    vaddr = mmap(NULL,
+            mMemInfo[mBufferCount].size,
+            PROT_READ | PROT_WRITE,
+            MAP_SHARED,
+            mMemInfo[mBufferCount].fd, 0);
+    if (vaddr == MAP_FAILED) {
+        ret = NO_MEMORY;
+    } else {
+        mPtr[mBufferCount] = vaddr;
+        mBufferCount++;
+    }
+
+end:
+    ALOGV(" %s : X ",__func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : unregisterBuffers
+ *
+ * DESCRIPTION: unregister buffers
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3GrallocMemory::unregisterBuffers()
+{
+    ALOGV("%s: E ", __FUNCTION__);
+
+    for (int cnt = 0; cnt < mBufferCount; cnt++) {
+        munmap(mPtr[cnt], mMemInfo[cnt].size);
+        mPtr[cnt] = NULL;
+
+        struct ion_handle_data ion_handle;
+        memset(&ion_handle, 0, sizeof(ion_handle));
+        ion_handle.handle = mMemInfo[cnt].handle;
+        if (ioctl(mMemInfo[cnt].main_ion_fd, ION_IOC_FREE, &ion_handle) < 0) {
+            ALOGE("ion free failed");
+        }
+        close(mMemInfo[cnt].main_ion_fd);
+        ALOGV("put buffer %d successfully", cnt);
+    }
+    mBufferCount = 0;
+    ALOGV(" %s : X ",__FUNCTION__);
+}
+
+/*===========================================================================
+ * FUNCTION   : markFrameNumber
+ *
+ * DESCRIPTION: Called from the request path to mark a buffer with the frame
+ *              number it is intended for. This info is used later when giving
+ *              out the callback, and it is the duty of PP to ensure that data
+ *              for that particular frameNumber/request is written to this
+ *              buffer.
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @frame#  : Frame number from the framework
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::markFrameNumber(int index, uint32_t frameNumber)
+{
+    if(index >= mBufferCount || index >= MM_CAMERA_MAX_NUM_FRAMES) {
+        ALOGE("%s: Index out of bounds",__func__);
+        return BAD_INDEX;
+    }
+    mCurrentFrameNumbers[index] = frameNumber;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameNumber
+ *
+ * DESCRIPTION: We use this to fetch the frameNumber for the request with which
+ *              this buffer was given to HAL
+ *
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : int32_t frameNumber
+ *              positive/zero -- success
+ *              negative      -- failure
+ *==========================================================================*/
+int32_t QCamera3GrallocMemory::getFrameNumber(int index)
+{
+    if(index >= mBufferCount || index >= MM_CAMERA_MAX_NUM_FRAMES) {
+        ALOGE("%s: Index out of bounds",__func__);
+        return -1;
+    }
+
+    return mCurrentFrameNumbers[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : cacheOps
+ *
+ * DESCRIPTION: ion related memory cache operations
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *   @cmd     : cache ops command
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3GrallocMemory::cacheOps(int index, unsigned int cmd)
+{
+    if (index >= mBufferCount)
+        return BAD_INDEX;
+    return cacheOpsInternal(index, cmd, mPtr[index]);
+}
+
+/*===========================================================================
+ * FUNCTION   : getRegFlags
+ *
+ * DESCRIPTION: query initial reg flags
+ *
+ * PARAMETERS :
+ *   @regFlags: initial reg flags of the allocated buffers
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int QCamera3GrallocMemory::getRegFlags(uint8_t *regFlags) const
+{
+    int i;
+    for (i = 0; i < mBufferCount; i ++)
+        regFlags[i] = 0;
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMatchBufIndex
+ *
+ * DESCRIPTION: query buffer index by object ptr
+ *
+ * PARAMETERS :
+ *   @opaque  : opaque ptr
+ *
+ * RETURN     : buffer index if match found,
+ *              -1 if failed
+ *==========================================================================*/
+int QCamera3GrallocMemory::getMatchBufIndex(void *object)
+{
+    int index = -1;
+    buffer_handle_t *key = (buffer_handle_t*) object;
+    if (!key) {
+        return BAD_VALUE;
+    }
+    for (int i = 0; i < mBufferCount; i++) {
+        if (mBufferHandle[i] == key) {
+            index = i;
+            break;
+        }
+    }
+    return index;
+}
+
+/*===========================================================================
+ * FUNCTION   : getPtr
+ *
+ * DESCRIPTION: return buffer pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr
+ *==========================================================================*/
+void *QCamera3GrallocMemory::getPtr(int index) const
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return (void *)BAD_INDEX;
+    }
+    return mPtr[index];
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufferHandle
+ *
+ * DESCRIPTION: return framework pointer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer
+ *
+ * RETURN     : buffer ptr if match found
+ *              NULL if failed
+ *==========================================================================*/
+void *QCamera3GrallocMemory::getBufferHandle(int index)
+{
+    if (index >= mBufferCount) {
+        ALOGE("index out of bound");
+        return NULL;
+    }
+    return mBufferHandle[index];
+}
+
+}; //namespace qcamera
diff --git a/camera/QCamera2/HAL3/QCamera3Mem.h b/camera/QCamera2/HAL3/QCamera3Mem.h
new file mode 100644
index 0000000..d275df4
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Mem.h
@@ -0,0 +1,126 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3HWI_MEM_H__
+#define __QCAMERA3HWI_MEM_H__
+#include <hardware/camera3.h>
+#include <utils/Mutex.h>
+
+extern "C" {
+#include <sys/types.h>
+#include <linux/msm_ion.h>
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+// Base class for all memory types. Abstract.
+class QCamera3Memory {
+
+public:
+    int cleanCache(int index) {return cacheOps(index, ION_IOC_CLEAN_CACHES);}
+    int invalidateCache(int index) {return cacheOps(index, ION_IOC_INV_CACHES);}
+    int cleanInvalidateCache(int index) {return cacheOps(index, ION_IOC_CLEAN_INV_CACHES);}
+    int getFd(int index) const;
+    int getSize(int index) const;
+    int getCnt() const;
+
+    virtual int cacheOps(int index, unsigned int cmd) = 0;
+    virtual int getRegFlags(uint8_t *regFlags) const = 0;
+    virtual int getMatchBufIndex(void *object) = 0;
+    virtual void *getPtr(int index) const = 0;
+
+    QCamera3Memory();
+    virtual ~QCamera3Memory();
+
+    int32_t getBufDef(const cam_frame_len_offset_t &offset,
+                mm_camera_buf_def_t &bufDef, int index) const;
+
+protected:
+    struct QCamera3MemInfo {
+        int fd;
+        int main_ion_fd;
+        struct ion_handle *handle;
+        uint32_t size;
+    };
+
+    int cacheOpsInternal(int index, unsigned int cmd, void *vaddr);
+
+    int mBufferCount;
+    struct QCamera3MemInfo mMemInfo[MM_CAMERA_MAX_NUM_FRAMES];
+    void *mPtr[MM_CAMERA_MAX_NUM_FRAMES];
+};
+
+// Internal heap memory is used for buffers that are used internally by the
+// HAL. They are allocated from /dev/ion. Examples are: capabilities,
+// parameters, metadata, and internal YUV data for jpeg encoding.
+class QCamera3HeapMemory : public QCamera3Memory {
+public:
+    QCamera3HeapMemory();
+    virtual ~QCamera3HeapMemory();
+
+    int allocate(int count, int size, bool queueAll);
+    void deallocate();
+
+    virtual int cacheOps(int index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual int getMatchBufIndex(void *object);
+    virtual void *getPtr(int index) const;
+private:
+    int alloc(int count, int size, int heap_id);
+    void dealloc();
+
+    int allocOneBuffer(struct QCamera3MemInfo &memInfo, int heap_id, int size);
+    void deallocOneBuffer(struct QCamera3MemInfo &memInfo);
+    bool mQueueAll;
+};
+
+// Gralloc Memory shared with frameworks
+class QCamera3GrallocMemory : public QCamera3Memory {
+public:
+    QCamera3GrallocMemory();
+    virtual ~QCamera3GrallocMemory();
+
+    int registerBuffer(buffer_handle_t *buffer);
+    void unregisterBuffers();
+    virtual int cacheOps(int index, unsigned int cmd);
+    virtual int getRegFlags(uint8_t *regFlags) const;
+    virtual int getMatchBufIndex(void *object);
+    virtual void *getPtr(int index) const;
+    int32_t markFrameNumber(int index, uint32_t frameNumber);
+    int32_t getFrameNumber(int index);
+    void *getBufferHandle(int index);
+private:
+    buffer_handle_t *mBufferHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    struct private_handle_t *mPrivateHandle[MM_CAMERA_MAX_NUM_FRAMES];
+    uint32_t mCurrentFrameNumbers[MM_CAMERA_MAX_NUM_FRAMES];
+};
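+
+// Illustrative usage sketch (editor's note, not part of the HAL build): a
+// plausible gralloc buffer lifecycle as driven by a stream/channel, assuming
+// a framework-supplied buffer_handle_t *handle and a request frame number fn.
+// The method names are the ones declared above; the surrounding flow is an
+// assumption for illustration only.
+//
+//   QCamera3GrallocMemory mem;
+//   if (mem.registerBuffer(handle) == NO_ERROR) {   // ION_IOC_IMPORT + mmap
+//       int idx = mem.getMatchBufIndex(handle);     // index of this handle
+//       mem.markFrameNumber(idx, fn);               // tag buffer with request
+//       mem.cleanInvalidateCache(idx);              // ion cache maintenance
+//       // ... later, the callback path can call mem.getFrameNumber(idx)
+//       // to recover which request this buffer belongs to ...
+//   }
+//   mem.unregisterBuffers();                        // munmap + ION_IOC_FREE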
+
+};
+#endif
diff --git a/camera/QCamera2/HAL3/QCamera3PostProc.cpp b/camera/QCamera2/HAL3/QCamera3PostProc.cpp
new file mode 100644
index 0000000..a529e04
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -0,0 +1,1458 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3PostProc"
+//#define LOG_NDEBUG 0
+
+#include <stdlib.h>
+#include <utils/Errors.h>
+
+#include "QCamera3PostProc.h"
+#include "QCamera3HWI.h"
+#include "QCamera3Channel.h"
+#include "QCamera3Stream.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCamera3PostProcessor
+ *
+ * DESCRIPTION: constructor of QCamera3PostProcessor.
+ *
+ * PARAMETERS :
+ *   @cam_ctrl : ptr to HWI object
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3PostProcessor::QCamera3PostProcessor(QCamera3PicChannel* ch_ctrl)
+    : m_parent(ch_ctrl),
+      mJpegCB(NULL),
+      mJpegUserData(NULL),
+      mJpegClientHandle(0),
+      mJpegSessionId(0),
+      m_pJpegExifObj(NULL),
+      m_bThumbnailNeeded(TRUE),
+      m_pReprocChannel(NULL),
+      m_inputPPQ(releasePPInputData, this),
+      m_ongoingPPQ(releaseOngoingPPData, this),
+      m_inputJpegQ(releaseJpegData, this),
+      m_ongoingJpegQ(releaseJpegData, this),
+      m_inputRawQ(releasePPInputData, this),
+      m_inputMetaQ(releaseMetaData, this),
+      m_jpegSettingsQ(releaseJpegSetting, this)
+{
+    memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    pthread_mutex_init(&mReprocJobLock, NULL);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3PostProcessor
+ *
+ * DESCRIPTION: destructor of QCamera3PostProcessor.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3PostProcessor::~QCamera3PostProcessor()
+{
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    pthread_mutex_destroy(&mReprocJobLock);
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialization of postprocessor
+ *
+ * PARAMETERS :
+ *   @mMemory      : memory object holding the JPEG output buffers
+ *   @jpeg_cb      : callback to handle jpeg event from mm-camera-interface
+ *   @user_data    : user data ptr for jpeg callback
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::init(QCamera3Memory* mMemory,
+                                    jpeg_encode_callback_t jpeg_cb, void *user_data)
+{
+    mJpegCB = jpeg_cb;
+    mJpegUserData = user_data;
+    mJpegMem = mMemory;
+    mJpegClientHandle = jpeg_open(&mJpegHandle);
+    if(!mJpegClientHandle) {
+        ALOGE("%s : jpeg_open did not work", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    m_dataProcTh.launch(dataProcessRoutine, this);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : deinit
+ *
+ * DESCRIPTION: de-initialization of postprocessor
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::deinit()
+{
+    m_dataProcTh.exit();
+
+    if (m_pReprocChannel != NULL) {
+        m_pReprocChannel->stop();
+        delete m_pReprocChannel;
+        m_pReprocChannel = NULL;
+    }
+
+    if(mJpegClientHandle > 0) {
+        int rc = mJpegHandle.close(mJpegClientHandle);
+        ALOGD("%s: Jpeg closed, rc = %d, mJpegClientHandle = %x",
+              __func__, rc, mJpegClientHandle);
+        mJpegClientHandle = 0;
+        memset(&mJpegHandle, 0, sizeof(mJpegHandle));
+    }
+
+    mJpegMem = NULL;
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start postprocessor. The data process thread will be signaled
+ *              to start handling jobs.
+ *
+ * PARAMETERS :
+ *   @pInputChannel : Input channel obj ptr that possibly needs reprocess
+ *   @metadata      : metadata for the reprocessing
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : if any reprocess is needed, a reprocess channel/stream
+ *              will be started.
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::start(QCamera3Channel *pInputChannel,
+                                     metadata_buffer_t *metadata)
+{
+    int32_t rc = NO_ERROR;
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+
+    if (hal_obj->needReprocess()) {
+
+        if (m_pReprocChannel != NULL) {
+            // return any pending metadata buffers before tearing down the
+            // previous reprocess channel; guard against a NULL channel
+            while (!m_inputMetaQ.isEmpty()) {
+                m_pReprocChannel->metadataBufDone(
+                        (mm_camera_super_buf_t *)m_inputMetaQ.dequeue());
+            }
+            m_pReprocChannel->stop();
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+        }
+
+        // if reprocess is needed, start reprocess channel
+        ALOGV("%s: Setting input channel as pInputChannel", __func__);
+        m_pReprocChannel = hal_obj->addOfflineReprocChannel(pInputChannel, m_parent, metadata);
+        if (m_pReprocChannel == NULL) {
+            ALOGE("%s: cannot add reprocess channel", __func__);
+            return UNKNOWN_ERROR;
+        }
+
+        rc = m_pReprocChannel->start();
+        if (rc != 0) {
+            ALOGE("%s: cannot start reprocess channel", __func__);
+            delete m_pReprocChannel;
+            m_pReprocChannel = NULL;
+            return rc;
+        }
+    }
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop postprocessor. The data process thread will be stopped.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : reprocess channel will be stopped and deleted if there is any
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::stop()
+{
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegEncodeConfig
+ *
+ * DESCRIPTION: function to prepare encoding job information
+ *
+ * PARAMETERS :
+ *   @encode_parm   : param to be filled with encoding configuration
+ *   @main_stream   : stream object where the input buffer comes from
+ *   @jpeg_settings : jpeg settings to be applied for encoding
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::getJpegEncodeConfig(
+                mm_jpeg_encode_params_t& encode_parm,
+                QCamera3Stream *main_stream,
+                jpeg_settings_t *jpeg_settings)
+{
+    ALOGV("%s : E", __func__);
+    int32_t ret = NO_ERROR;
+
+    encode_parm.jpeg_cb = mJpegCB;
+    encode_parm.userdata = mJpegUserData;
+
+    if (jpeg_settings->thumbnail_size.width > 0 &&
+            jpeg_settings->thumbnail_size.height > 0)
+        m_bThumbnailNeeded = TRUE;
+    else
+        m_bThumbnailNeeded = FALSE;
+    encode_parm.encode_thumbnail = m_bThumbnailNeeded;
+
+    // get color format
+    cam_format_t img_fmt = CAM_FORMAT_YUV_420_NV12;  //default value
+    main_stream->getFormat(img_fmt);
+    encode_parm.color_format = getColorfmtFromImgFmt(img_fmt);
+
+    // get jpeg quality
+    encode_parm.quality = jpeg_settings->jpeg_quality;
+
+    // get jpeg thumbnail quality
+    encode_parm.thumb_quality = jpeg_settings->jpeg_thumb_quality;
+
+    cam_frame_len_offset_t main_offset;
+    memset(&main_offset, 0, sizeof(cam_frame_len_offset_t));
+    main_stream->getFrameOffset(main_offset);
+
+    // src buf config
+    //Pass input main image buffer info to encoder.
+    QCamera3Memory *pStreamMem = main_stream->getStreamBufs();
+    if (pStreamMem == NULL) {
+        ALOGE("%s: cannot get stream bufs from main stream", __func__);
+        ret = BAD_VALUE;
+        goto on_error;
+    }
+    encode_parm.num_src_bufs = pStreamMem->getCnt();
+    for (uint32_t i = 0; i < encode_parm.num_src_bufs; i++) {
+        if (pStreamMem != NULL) {
+            encode_parm.src_main_buf[i].index = i;
+            encode_parm.src_main_buf[i].buf_size = pStreamMem->getSize(i);
+            encode_parm.src_main_buf[i].buf_vaddr = (uint8_t *)pStreamMem->getPtr(i);
+            encode_parm.src_main_buf[i].fd = pStreamMem->getFd(i);
+            encode_parm.src_main_buf[i].format = MM_JPEG_FMT_YUV;
+            encode_parm.src_main_buf[i].offset = main_offset;
+        }
+    }
+
+    //Pass input thumbnail buffer info to encoder.
+    //Note: In this version thumb_stream = main_stream
+    if (m_bThumbnailNeeded == TRUE) {
+        pStreamMem = main_stream->getStreamBufs();
+        if (pStreamMem == NULL) {
+            ALOGE("%s: cannot get stream bufs from thumb stream", __func__);
+            ret = BAD_VALUE;
+            goto on_error;
+        }
+        cam_frame_len_offset_t thumb_offset;
+        memset(&thumb_offset, 0, sizeof(cam_frame_len_offset_t));
+        main_stream->getFrameOffset(thumb_offset);
+        encode_parm.num_tmb_bufs = pStreamMem->getCnt();
+        for (int i = 0; i < pStreamMem->getCnt(); i++) {
+            if (pStreamMem != NULL) {
+                encode_parm.src_thumb_buf[i].index = i;
+                encode_parm.src_thumb_buf[i].buf_size = pStreamMem->getSize(i);
+                encode_parm.src_thumb_buf[i].buf_vaddr = (uint8_t *)pStreamMem->getPtr(i);
+                encode_parm.src_thumb_buf[i].fd = pStreamMem->getFd(i);
+                encode_parm.src_thumb_buf[i].format = MM_JPEG_FMT_YUV;
+                encode_parm.src_thumb_buf[i].offset = thumb_offset;
+            }
+        }
+    }
+
+    //Pass output jpeg buffer info to encoder.
+    //mJpegMem is allocated by framework.
+    encode_parm.num_dst_bufs = 1;
+    encode_parm.dest_buf[0].index = 0;
+    encode_parm.dest_buf[0].buf_size = mJpegMem->getSize(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].buf_vaddr = (uint8_t *)mJpegMem->getPtr(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].fd = mJpegMem->getFd(
+            jpeg_settings->out_buf_index);
+    encode_parm.dest_buf[0].format = MM_JPEG_FMT_YUV;
+    encode_parm.dest_buf[0].offset = main_offset;
+
+    ALOGV("%s : X", __func__);
+    return NO_ERROR;
+
+on_error:
+
+    ALOGV("%s : X with error %d", __func__, ret);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : processData
+ *
+ * DESCRIPTION: enqueue data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : depending on whether offline reprocess is needed, the received
+ *              frame will be sent to either the postprocess input queue or the
+ *              jpeg encoding input queue
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processData(mm_camera_super_buf_t *frame)
+{
+    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+    if (hal_obj->needReprocess()) {
+        pthread_mutex_lock(&mReprocJobLock);
+        // enqueue to post proc input queue
+        m_inputPPQ.enqueue((void *)frame);
+        if (!(m_inputMetaQ.isEmpty())) {
+           ALOGV("%s: meta queue is not empty, do next job", __func__);
+           m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+        }
+        pthread_mutex_unlock(&mReprocJobLock);
+    } else {
+        ALOGD("%s: no offline reprocess needed, sending to jpeg encoding", __func__);
+        qcamera_jpeg_data_t *jpeg_job =
+            (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+        if (jpeg_job == NULL) {
+            ALOGE("%s: No memory for jpeg job", __func__);
+            return NO_MEMORY;
+        }
+
+        memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+        jpeg_job->src_frame = frame;
+
+        // enqueue to jpeg input queue
+        m_inputJpegQ.enqueue((void *)jpeg_job);
+        m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    }
+
+    return NO_ERROR;
+}
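+
+// Illustrative call sequence (editor's note, not part of the HAL build): a
+// plausible way a picture channel could drive this postprocessor around a
+// capture request. The names m_postprocessor, jpegMem, jpegEvtHandle,
+// inputChannel, metadata, super_frame, reproc_meta and jpeg_settings are
+// assumptions for illustration; the methods are the ones implemented here.
+//
+//   m_postprocessor.init(jpegMem, jpegEvtHandle, this);  // open jpeg client
+//   m_postprocessor.start(inputChannel, metadata);       // start data proc
+//   m_postprocessor.processJpegSettingData(jpeg_settings);
+//   m_postprocessor.processData(super_frame);            // reprocess or encode
+//   m_postprocessor.processPPMetadata(reproc_meta);      // pair meta with frame
+//   // ... the jpeg callback delivers the encoded output ...
+//   m_postprocessor.stop();
+//   m_postprocessor.deinit();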
+
+/*===========================================================================
+ * FUNCTION   : processPPMetadata
+ *
+ * DESCRIPTION: enqueue metadata into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process metadata frame received from pic channel
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processPPMetadata(metadata_buffer_t *reproc_meta)
+{
+   pthread_mutex_lock(&mReprocJobLock);
+    // enqueue to metadata input queue
+    m_inputMetaQ.enqueue((void *)reproc_meta);
+    if (!(m_inputPPQ.isEmpty())) {
+       ALOGI("%s: pp queue is not empty, do next job", __func__);
+       m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+       ALOGI("%s: pp queue is empty, not calling do next job", __func__);
+    }
+    pthread_mutex_unlock(&mReprocJobLock);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processJpegSettingData
+ *
+ * DESCRIPTION: enqueue jpegSetting into dataProc thread
+ *
+ * PARAMETERS :
+ *   @jpeg_settings : jpeg settings data received from pic channel
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processJpegSettingData(
+        jpeg_settings_t *jpeg_settings)
+{
+    if (!jpeg_settings) {
+        ALOGE("%s: invalid jpeg settings pointer", __func__);
+        return -EINVAL;
+    }
+    return m_jpegSettingsQ.enqueue((void *)jpeg_settings);
+}
+
+/*===========================================================================
+ * FUNCTION   : processRawData
+ *
+ * DESCRIPTION: enqueue raw data into dataProc thread
+ *
+ * PARAMETERS :
+ *   @frame   : process frame received from mm-camera-interface
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processRawData(mm_camera_super_buf_t *frame)
+{
+    // enqueue to raw input queue
+    m_inputRawQ.enqueue((void *)frame);
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : processPPData
+ *
+ * DESCRIPTION: process received frame after reprocess.
+ *
+ * PARAMETERS :
+ *   @frame   : received frame from reprocess channel.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *
+ * NOTE       : The frame after reprocess needs to be sent to jpeg encoding.
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::processPPData(mm_camera_super_buf_t *frame)
+{
+    qcamera_pp_data_t *job = (qcamera_pp_data_t *)m_ongoingPPQ.dequeue();
+    jpeg_settings_t *jpeg_settings = (jpeg_settings_t *)m_jpegSettingsQ.dequeue();
+
+    if (job == NULL || job->src_frame == NULL) {
+        ALOGE("%s: Cannot find reprocess job", __func__);
+        return BAD_VALUE;
+    }
+    if (jpeg_settings == NULL) {
+        ALOGE("%s: Cannot find jpeg settings", __func__);
+        return BAD_VALUE;
+    }
+
+    qcamera_jpeg_data_t *jpeg_job =
+        (qcamera_jpeg_data_t *)malloc(sizeof(qcamera_jpeg_data_t));
+    if (jpeg_job == NULL) {
+        ALOGE("%s: No memory for jpeg job", __func__);
+        return NO_MEMORY;
+    }
+
+    memset(jpeg_job, 0, sizeof(qcamera_jpeg_data_t));
+    jpeg_job->src_frame = frame;
+    jpeg_job->src_reproc_frame = job->src_frame;
+    jpeg_job->metadata = job->metadata;
+    jpeg_job->jpeg_settings = jpeg_settings;
+
+    // free pp job buf
+    free(job);
+
+    // enqueue reprocessed frame to jpeg input queue
+    m_inputJpegQ.enqueue((void *)jpeg_job);
+
+    // wake up data proc thread
+    m_dataProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : findJpegJobByJobId
+ *
+ * DESCRIPTION: find a jpeg job from ongoing Jpeg queue by its job ID
+ *
+ * PARAMETERS :
+ *   @jobId   : job Id of the job
+ *
+ * RETURN     : ptr to a jpeg job struct. NULL if not found.
+ *
+ * NOTE       : Currently only one job is sent to mm-jpeg-interface for jpeg
+ *              encoding at a time. Therefore simply dequeuing from the ongoing
+ *              Jpeg Queue serves the purpose of finding the jpeg job.
+ *==========================================================================*/
+qcamera_jpeg_data_t *QCamera3PostProcessor::findJpegJobByJobId(uint32_t jobId)
+{
+    qcamera_jpeg_data_t * job = NULL;
+    if (jobId == 0) {
+        ALOGE("%s: not a valid jpeg jobId", __func__);
+        return NULL;
+    }
+
+    // currently only one jpeg job ongoing, so simply dequeue the head
+    job = (qcamera_jpeg_data_t *)m_ongoingJpegQ.dequeue();
+    return job;
+}
+
+/*===========================================================================
+ * FUNCTION   : releasePPInputData
+ *
+ * DESCRIPTION: callback function to release post process input data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releasePPInputData(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    if (NULL != pme) {
+        pme->releaseSuperBuf((mm_camera_super_buf_t *)data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseMetaData
+ *
+ * DESCRIPTION: callback function to release meta data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseMetaData(void *data, void * /*user_data*/)
+{
+    metadata_buffer_t *metadata = (metadata_buffer_t *)data;
+    if (metadata != NULL)
+        free(metadata);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegSetting
+ *
+ * DESCRIPTION: callback function to release jpeg settings node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseJpegSetting(void *data, void * /*user_data*/)
+{
+    jpeg_settings_t *jpegSetting = (jpeg_settings_t *)data;
+    if (jpegSetting != NULL)
+        free(jpegSetting);
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegData
+ *
+ * DESCRIPTION: callback function to release jpeg job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing jpeg job data
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseJpegData(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    if (NULL != pme) {
+        pme->releaseJpegJobData((qcamera_jpeg_data_t *)data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseOngoingPPData
+ *
+ * DESCRIPTION: callback function to release ongoing postprocess job node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to ongoing postprocess job
+ *   @user_data : user data ptr (QCamera3Reprocessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseOngoingPPData(void *data, void *user_data)
+{
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)user_data;
+    if (NULL != pme) {
+        qcamera_pp_data_t *pp_job = (qcamera_pp_data_t *)data;
+        if (NULL != pp_job->src_frame) {
+            pme->releaseSuperBuf(pp_job->src_frame);
+            free(pp_job->src_frame);
+            free(pp_job->metadata);
+            pp_job->src_frame = NULL;
+            pp_job->metadata = NULL;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseSuperBuf
+ *
+ * DESCRIPTION: function to release a superbuf frame by returning it to the kernel
+ *
+ * PARAMETERS :
+ *   @super_buf : ptr to the superbuf frame
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseSuperBuf(mm_camera_super_buf_t *super_buf)
+{
+    if (NULL != super_buf) {
+        if (m_parent != NULL) {
+            m_parent->bufDone(super_buf);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseJpegJobData
+ *
+ * DESCRIPTION: function to release internal resources in jpeg job struct
+ *
+ * PARAMETERS :
+ *   @job     : ptr to jpeg job struct
+ *
+ * RETURN     : None
+ *
+ * NOTE       : the original source frame needs to be queued back to the kernel
+ *              for future use. The output buf of the jpeg job needs to be
+ *              released since it is allocated for each job. The exif object
+ *              needs to be deleted.
+ *==========================================================================*/
+void QCamera3PostProcessor::releaseJpegJobData(qcamera_jpeg_data_t *job)
+{
+    ALOGV("%s: E", __func__);
+    if (NULL != job) {
+        if (NULL != job->src_reproc_frame) {
+            free(job->src_reproc_frame);
+            job->src_reproc_frame = NULL;
+        }
+
+        if (NULL != job->src_frame) {
+            free(job->src_frame);
+            job->src_frame = NULL;
+        }
+
+        if (NULL != job->metadata) {
+            free(job->metadata);
+            job->metadata = NULL;
+        }
+
+        if (NULL != job->jpeg_settings) {
+            free(job->jpeg_settings);
+            job->jpeg_settings = NULL;
+        }
+    }
+    ALOGV("%s: X", __func__);
+}
+
+/*===========================================================================
+ * FUNCTION   : getColorfmtFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg color format based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : jpeg color format that can be understood by the omx lib
+ *==========================================================================*/
+mm_jpeg_color_format QCamera3PostProcessor::getColorfmtFromImgFmt(cam_format_t img_fmt)
+{
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    case CAM_FORMAT_YUV_420_NV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_420_YV12:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2;
+    case CAM_FORMAT_YUV_422_NV61:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1;
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1;
+    default:
+        return MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getJpegImgTypeFromImgFmt
+ *
+ * DESCRIPTION: function to return jpeg encode image type based on its image format
+ *
+ * PARAMETERS :
+ *   @img_fmt : image format
+ *
+ * RETURN     : return jpeg source image format (YUV or Bitstream)
+ *==========================================================================*/
+mm_jpeg_format_t QCamera3PostProcessor::getJpegImgTypeFromImgFmt(cam_format_t img_fmt)
+{
+    switch (img_fmt) {
+    case CAM_FORMAT_YUV_420_NV21:
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_YV12:
+    case CAM_FORMAT_YUV_422_NV61:
+    case CAM_FORMAT_YUV_422_NV16:
+        return MM_JPEG_FMT_YUV;
+    default:
+        return MM_JPEG_FMT_YUV;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : encodeData
+ *
+ * DESCRIPTION: function to prepare encoding job information and send to
+ *              mm-jpeg-interface to do the encoding job
+ *
+ * PARAMETERS :
+ *   @jpeg_job_data : ptr to a struct saving job related information
+ *   @needNewSess   : flag to indicate if a new jpeg encoding session needs
+ *                    to be created. After creation, this flag will be toggled
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3PostProcessor::encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                          uint8_t &needNewSess)
+{
+    ALOGV("%s : E", __func__);
+    int32_t ret = NO_ERROR;
+    mm_jpeg_job_t jpg_job;
+    uint32_t jobId = 0;
+    QCamera3Stream *main_stream = NULL;
+    mm_camera_buf_def_t *main_frame = NULL;
+    QCamera3Stream *thumb_stream = NULL;
+    mm_camera_buf_def_t *thumb_frame = NULL;
+    QCamera3Channel *srcChannel = NULL;
+    mm_camera_super_buf_t *recvd_frame = NULL;
+    metadata_buffer_t *metadata = NULL;
+    jpeg_settings_t *jpeg_settings = NULL;
+    QCamera3HardwareInterface* hal_obj = NULL;
+
+
+    hal_obj = (QCamera3HardwareInterface*)m_parent->mUserData;
+    recvd_frame = jpeg_job_data->src_frame;
+    metadata = jpeg_job_data->metadata;
+    jpeg_settings = jpeg_job_data->jpeg_settings;
+
+    QCamera3Channel *pChannel = NULL;
+    // first check picture channel
+    if (m_parent != NULL &&
+        m_parent->getMyHandle() == recvd_frame->ch_id) {
+        pChannel = m_parent;
+    }
+    // check reprocess channel if not found
+    if (pChannel == NULL) {
+        if (m_pReprocChannel != NULL &&
+            m_pReprocChannel->getMyHandle() == recvd_frame->ch_id) {
+            pChannel = m_pReprocChannel;
+        }
+    }
+
+    srcChannel = pChannel;
+
+    if (srcChannel == NULL) {
+        ALOGE("%s: No corresponding channel (ch_id = %d) exist, return here",
+              __func__, recvd_frame->ch_id);
+        return BAD_VALUE;
+    }
+
+    // find snapshot frame and thumbnail frame
+    //Note: In this version we will receive only snapshot frame.
+    for (int i = 0; i < recvd_frame->num_bufs; i++) {
+        QCamera3Stream *srcStream =
+            srcChannel->getStreamByHandle(recvd_frame->bufs[i]->stream_id);
+        if (srcStream != NULL) {
+            switch (srcStream->getMyType()) {
+            case CAM_STREAM_TYPE_SNAPSHOT:
+            case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+            case CAM_STREAM_TYPE_OFFLINE_PROC:
+                main_stream = srcStream;
+                main_frame = recvd_frame->bufs[i];
+                break;
+            case CAM_STREAM_TYPE_PREVIEW:
+            case CAM_STREAM_TYPE_POSTVIEW:
+                thumb_stream = srcStream;
+                thumb_frame = recvd_frame->bufs[i];
+                break;
+            default:
+                break;
+            }
+        }
+    }
+
+    if(NULL == main_frame){
+       ALOGE("%s : Main frame is NULL", __func__);
+       return BAD_VALUE;
+    }
+
+    QCamera3Memory *memObj = (QCamera3Memory *)main_frame->mem_info;
+    if (NULL == memObj) {
+        ALOGE("%s : Memeory Obj of main frame is NULL", __func__);
+        return NO_MEMORY;
+    }
+
+    // clean and invalidate cache ops through mem obj of the frame
+    memObj->cleanInvalidateCache(main_frame->buf_idx);
+
+    if (thumb_frame != NULL) {
+        QCamera3Memory *thumb_memObj = (QCamera3Memory *)thumb_frame->mem_info;
+        if (NULL != thumb_memObj) {
+            // clean and invalidate cache ops through mem obj of the frame
+            thumb_memObj->cleanInvalidateCache(thumb_frame->buf_idx);
+        }
+    }
+
+    if (mJpegClientHandle <= 0) {
+        ALOGE("%s: Error: bug here, mJpegClientHandle is 0", __func__);
+        return UNKNOWN_ERROR;
+    }
+
+    ALOGD("%s: Need new session?:%d",__func__, needNewSess);
+    if (needNewSess) {
+        //creating a new session, so we must destroy the old one
+        if ( 0 < mJpegSessionId ) {
+            ret = mJpegHandle.destroy_session(mJpegSessionId);
+            if (ret != NO_ERROR) {
+                ALOGE("%s: Error destroying an old jpeg encoding session, id = %d",
+                      __func__, mJpegSessionId);
+                return ret;
+            }
+            mJpegSessionId = 0;
+        }
+        // create jpeg encoding session
+        mm_jpeg_encode_params_t encodeParam;
+        memset(&encodeParam, 0, sizeof(mm_jpeg_encode_params_t));
+
+        getJpegEncodeConfig(encodeParam, main_stream, jpeg_settings);
+        ALOGD("%s: #src bufs:%d # tmb bufs:%d #dst_bufs:%d", __func__,
+                     encodeParam.num_src_bufs,encodeParam.num_tmb_bufs,encodeParam.num_dst_bufs);
+        ret = mJpegHandle.create_session(mJpegClientHandle, &encodeParam, &mJpegSessionId);
+        if (ret != NO_ERROR) {
+            ALOGE("%s: Error creating a new jpeg encoding session, ret = %d", __func__, ret);
+            return ret;
+        }
+        needNewSess = FALSE;
+    }
+
+    // Fill in new job
+    memset(&jpg_job, 0, sizeof(mm_jpeg_job_t));
+    jpg_job.job_type = JPEG_JOB_TYPE_ENCODE;
+    jpg_job.encode_job.session_id = mJpegSessionId;
+    jpg_job.encode_job.src_index = main_frame->buf_idx;
+    jpg_job.encode_job.dst_index = 0;
+
+    cam_rect_t crop;
+    memset(&crop, 0, sizeof(cam_rect_t));
+    //TBD_later - Zoom event removed in stream
+    //main_stream->getCropInfo(crop);
+
+    cam_dimension_t src_dim;
+    memset(&src_dim, 0, sizeof(cam_dimension_t));
+    main_stream->getFrameDimension(src_dim);
+
+    cam_dimension_t dst_dim;
+    memset(&dst_dim, 0, sizeof(cam_dimension_t));
+    srcChannel->getStreamByIndex(0)->getFrameDimension(dst_dim);
+
+    // main dim
+    jpg_job.encode_job.main_dim.src_dim = src_dim;
+    jpg_job.encode_job.main_dim.dst_dim = dst_dim;
+    jpg_job.encode_job.main_dim.crop = crop;
+
+    // get exif data
+    if (m_pJpegExifObj != NULL) {
+        delete m_pJpegExifObj;
+        m_pJpegExifObj = NULL;
+    }
+    m_pJpegExifObj = m_parent->getExifData(metadata, jpeg_settings);
+    if (m_pJpegExifObj != NULL) {
+        jpg_job.encode_job.exif_info.exif_data = m_pJpegExifObj->getEntries();
+        jpg_job.encode_job.exif_info.numOfEntries =
+          m_pJpegExifObj->getNumOfEntries();
+    }
+    // thumbnail dim
+    ALOGD("%s: Thumbnail needed:%d",__func__, m_bThumbnailNeeded);
+    if (m_bThumbnailNeeded == TRUE) {
+        if (thumb_stream == NULL) {
+            // need jpeg thumbnail, but no postview/preview stream exists
+            // we use the main stream/frame to encode thumbnail
+            thumb_stream = main_stream;
+            thumb_frame = main_frame;
+        }
+        memset(&crop, 0, sizeof(cam_rect_t));
+        //TBD_later - Zoom event removed in stream
+        //thumb_stream->getCropInfo(crop);
+        jpg_job.encode_job.thumb_dim.dst_dim =
+                jpeg_settings->thumbnail_size;
+
+        if (!hal_obj->needRotationReprocess()) {
+            memset(&src_dim, 0, sizeof(cam_dimension_t));
+            thumb_stream->getFrameDimension(src_dim);
+            jpg_job.encode_job.rotation =
+                    jpeg_settings->jpeg_orientation;
+            ALOGD("%s: jpeg rotation is set to %d", __func__,
+                    jpg_job.encode_job.rotation);
+        } else if (jpeg_settings->jpeg_orientation  == 90 ||
+                jpeg_settings->jpeg_orientation == 270) {
+           //swap the thumbnail destination width and height if it has already been rotated
+            int temp = jpg_job.encode_job.thumb_dim.dst_dim.width;
+            jpg_job.encode_job.thumb_dim.dst_dim.width =
+                    jpg_job.encode_job.thumb_dim.dst_dim.height;
+            jpg_job.encode_job.thumb_dim.dst_dim.height = temp;
+        }
+        jpg_job.encode_job.thumb_dim.src_dim = src_dim;
+        jpg_job.encode_job.thumb_dim.crop = crop;
+        jpg_job.encode_job.thumb_index = thumb_frame->buf_idx;
+    }
+
+    if (metadata != NULL) {
+       //Fill in the metadata passed as parameter
+       jpg_job.encode_job.p_metadata_v3 = metadata;
+    } else {
+       ALOGE("%s: Metadata is null", __func__);
+    }
+    //Not required here
+    //jpg_job.encode_job.cam_exif_params = m_parent->mExifParams;
+    //Start jpeg encoding
+    ret = mJpegHandle.start_job(&jpg_job, &jobId);
+    if (ret == NO_ERROR) {
+        // remember job info
+        jpeg_job_data->jobId = jobId;
+    }
+
+    ALOGV("%s : X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcessRoutine
+ *
+ * DESCRIPTION: data process routine that handles input data either from input
+ *              Jpeg Queue to do jpeg encoding, or from input PP Queue to do
+ *              reprocess.
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr (QCamera3PostProcessor)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void *QCamera3PostProcessor::dataProcessRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    uint8_t is_active = FALSE;
+    uint8_t needNewSess = TRUE;
+    mm_camera_super_buf_t *pp_frame = NULL;
+    metadata_buffer_t *meta_buffer = NULL;
+    ALOGV("%s: E", __func__);
+    QCamera3PostProcessor *pme = (QCamera3PostProcessor *)data;
+    QCameraCmdThread *cmdThread = &pme->m_dataProcTh;
+    cmdThread->setName("cam_data_proc");
+
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_START_DATA_PROC:
+            ALOGD("%s: start data proc", __func__);
+            is_active = TRUE;
+            needNewSess = TRUE;
+            break;
+        case CAMERA_CMD_TYPE_STOP_DATA_PROC:
+            {
+                ALOGD("%s: stop data proc", __func__);
+                is_active = FALSE;
+
+                // cancel all ongoing jpeg jobs
+                qcamera_jpeg_data_t *jpeg_job =
+                    (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                while (jpeg_job != NULL) {
+                    pme->mJpegHandle.abort_job(jpeg_job->jobId);
+
+                    pme->releaseJpegJobData(jpeg_job);
+                    free(jpeg_job);
+
+                    jpeg_job = (qcamera_jpeg_data_t *)pme->m_ongoingJpegQ.dequeue();
+                }
+
+                // destroy jpeg encoding session
+                if ( 0 < pme->mJpegSessionId ) {
+                    pme->mJpegHandle.destroy_session(pme->mJpegSessionId);
+                    pme->mJpegSessionId = 0;
+                }
+
+                // free jpeg exif obj
+                if (pme->m_pJpegExifObj != NULL) {
+                    delete pme->m_pJpegExifObj;
+                    pme->m_pJpegExifObj = NULL;
+                }
+                needNewSess = TRUE;
+
+                // flush ongoing postproc Queue
+                pme->m_ongoingPPQ.flush();
+
+                // flush input jpeg Queue
+                pme->m_inputJpegQ.flush();
+
+                // flush input Postproc Queue
+                pme->m_inputPPQ.flush();
+
+                // flush input raw Queue
+                pme->m_inputRawQ.flush();
+
+                pme->m_inputMetaQ.flush();
+
+                // signal cmd is completed
+                cam_sem_post(&cmdThread->sync_sem);
+            }
+            break;
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                ALOGD("%s: Do next job, active is %d", __func__, is_active);
+                if (is_active == TRUE) {
+                    // check if there is any ongoing jpeg jobs
+                    if (pme->m_ongoingJpegQ.isEmpty()) {
+                       ALOGI("%s: ongoing jpeg queue is empty so doing the jpeg job", __func__);
+                        // no ongoing jpeg job, we are fine to send jpeg encoding job
+                        qcamera_jpeg_data_t *jpeg_job =
+                            (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+
+                        if (NULL != jpeg_job) {
+                            // add into ongoing jpeg job Q
+                            pme->m_ongoingJpegQ.enqueue((void *)jpeg_job);
+                            ret = pme->encodeData(jpeg_job, needNewSess);
+                            if (NO_ERROR != ret) {
+                                // dequeue the last one
+                                pme->m_ongoingJpegQ.dequeue(false);
+
+                                pme->releaseJpegJobData(jpeg_job);
+                                free(jpeg_job);
+                            }
+                        }
+                    }
+                    ALOGD("%s: dequeuing pp frame", __func__);
+                    pp_frame =
+                        (mm_camera_super_buf_t *)pme->m_inputPPQ.dequeue();
+                    meta_buffer =
+                        (metadata_buffer_t *)pme->m_inputMetaQ.dequeue();
+                    if (NULL != pp_frame && NULL != meta_buffer) {
+                        qcamera_pp_data_t *pp_job =
+                            (qcamera_pp_data_t *)malloc(sizeof(qcamera_pp_data_t));
+                        if (pp_job != NULL) {
+                            memset(pp_job, 0, sizeof(qcamera_pp_data_t));
+                            if (pme->m_pReprocChannel != NULL) {
+                                // add into ongoing PP job Q
+                                pp_job->src_frame = pp_frame;
+                                pp_job->metadata = meta_buffer;
+                                pme->m_ongoingPPQ.enqueue((void *)pp_job);
+                                ret = pme->m_pReprocChannel->doReprocessOffline(pp_frame, meta_buffer);
+                                if (NO_ERROR != ret) {
+                                    // remove from ongoing PP job Q
+                                    pme->m_ongoingPPQ.dequeue(false);
+                                }
+                            } else {
+                                ALOGE("%s: Reprocess channel is NULL", __func__);
+                                ret = -1;
+                            }
+                        } else {
+                            ALOGE("%s: no mem for qcamera_pp_data_t", __func__);
+                            ret = -1;
+                        }
+
+                        if (0 != ret) {
+                            // free pp_job
+                            if (pp_job != NULL) {
+                                free(pp_job);
+                            }
+                            // free frame
+                            if (pp_frame != NULL) {
+                                pme->releaseSuperBuf(pp_frame);
+                                free(pp_frame);
+                            }
+                        }
+                    }
+                } else {
+                    // not active, simply return buf and do no op
+                    mm_camera_super_buf_t *super_buf;
+                    qcamera_jpeg_data_t *jpeg_job =
+                        (qcamera_jpeg_data_t *)pme->m_inputJpegQ.dequeue();
+                    if (NULL != jpeg_job) {
+                        free(jpeg_job);
+                    }
+                    super_buf = (mm_camera_super_buf_t *)pme->m_inputRawQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                    super_buf = (mm_camera_super_buf_t *)pme->m_inputPPQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                    super_buf = (mm_camera_super_buf_t *)pme->m_inputMetaQ.dequeue();
+                    if (NULL != super_buf) {
+                        pme->releaseSuperBuf(super_buf);
+                        free(super_buf);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    ALOGV("%s: X", __func__);
+    return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Exif
+ *
+ * DESCRIPTION: constructor of QCamera3Exif
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Exif::QCamera3Exif()
+    : m_nNumEntries(0)
+{
+    memset(m_Entries, 0, sizeof(m_Entries));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Exif
+ *
+ * DESCRIPTION: destructor of QCamera3Exif. Will release internal memory ptr.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Exif::~QCamera3Exif()
+{
+    for (uint32_t i = 0; i < m_nNumEntries; i++) {
+        switch (m_Entries[i].tag_entry.type) {
+            case EXIF_BYTE:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._bytes != NULL) {
+                        free(m_Entries[i].tag_entry.data._bytes);
+                        m_Entries[i].tag_entry.data._bytes = NULL;
+                    }
+                }
+                break;
+            case EXIF_ASCII:
+                {
+                    if (m_Entries[i].tag_entry.data._ascii != NULL) {
+                        free(m_Entries[i].tag_entry.data._ascii);
+                        m_Entries[i].tag_entry.data._ascii = NULL;
+                    }
+                }
+                break;
+            case EXIF_SHORT:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._shorts != NULL) {
+                        free(m_Entries[i].tag_entry.data._shorts);
+                        m_Entries[i].tag_entry.data._shorts = NULL;
+                    }
+                }
+                break;
+            case EXIF_LONG:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._longs != NULL) {
+                        free(m_Entries[i].tag_entry.data._longs);
+                        m_Entries[i].tag_entry.data._longs = NULL;
+                    }
+                }
+                break;
+            case EXIF_RATIONAL:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._rats != NULL) {
+                        free(m_Entries[i].tag_entry.data._rats);
+                        m_Entries[i].tag_entry.data._rats = NULL;
+                    }
+                }
+                break;
+            case EXIF_UNDEFINED:
+                {
+                    if (m_Entries[i].tag_entry.data._undefined != NULL) {
+                        free(m_Entries[i].tag_entry.data._undefined);
+                        m_Entries[i].tag_entry.data._undefined = NULL;
+                    }
+                }
+                break;
+            case EXIF_SLONG:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._slongs != NULL) {
+                        free(m_Entries[i].tag_entry.data._slongs);
+                        m_Entries[i].tag_entry.data._slongs = NULL;
+                    }
+                }
+                break;
+            case EXIF_SRATIONAL:
+                {
+                    if (m_Entries[i].tag_entry.count > 1 &&
+                            m_Entries[i].tag_entry.data._srats != NULL) {
+                        free(m_Entries[i].tag_entry.data._srats);
+                        m_Entries[i].tag_entry.data._srats = NULL;
+                    }
+                }
+                break;
+            default:
+                ALOGE("%s: Error, unknown type", __func__);
+                break;
+        }
+    }
+}
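The destructor above depends on an ownership convention that is easy to miss: entries with count > 1 hold a heap-allocated array in the union (freed here), while single values are stored inline in the union and need no free. Below is a minimal, self-contained C++ sketch of that convention; the names (ShortEntry, setShorts, clearShorts) are invented for illustration and are not the real QEXIF types.

    #include <cstdint>
    #include <cstdlib>
    #include <cstring>

    // Inline-scalar vs. heap-array storage keyed on `count`, mirroring the
    // convention the QCamera3Exif destructor depends on (illustration only).
    struct ShortEntry {
        uint32_t count;
        union { uint16_t value; uint16_t *values; } data;
    };

    static bool setShorts(ShortEntry &e, const uint16_t *src, uint32_t count) {
        e.count = count;
        if (count > 1) {
            e.data.values = (uint16_t *)malloc(count * sizeof(uint16_t));
            if (e.data.values == NULL)
                return false;
            memcpy(e.data.values, src, count * sizeof(uint16_t));
        } else {
            e.data.value = src[0];      // stored inline, nothing to free later
        }
        return true;
    }

    static void clearShorts(ShortEntry &e) {
        if (e.count > 1 && e.data.values != NULL) {
            free(e.data.values);        // only the array case owns memory
            e.data.values = NULL;
        }
    }

    int main() {
        uint16_t v[3] = {1, 2, 3};
        ShortEntry e;
        setShorts(e, v, 3);
        clearShorts(e);
        return 0;
    }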
+
+/*===========================================================================
+ * FUNCTION   : addEntry
+ *
+ * DESCRIPTION: function to add an entry to exif data
+ *
+ * PARAMETERS :
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data elements, in units of the given type
+ *   @data    : input data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Exif::addEntry(exif_tag_id_t tagid,
+                              exif_tag_type_t type,
+                              uint32_t count,
+                              void *data)
+{
+    int32_t rc = NO_ERROR;
+    if (m_nNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return NO_MEMORY;
+    }
+
+    m_Entries[m_nNumEntries].tag_id = tagid;
+    m_Entries[m_nNumEntries].tag_entry.type = type;
+    m_Entries[m_nNumEntries].tag_entry.count = count;
+    m_Entries[m_nNumEntries].tag_entry.copy = 1;
+    switch (type) {
+        case EXIF_BYTE:
+            {
+                if (count > 1) {
+                    uint8_t *values = (uint8_t *)malloc(count);
+                    if (values == NULL) {
+                        ALOGE("%s: No memory for byte array", __func__);
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count);
+                        m_Entries[m_nNumEntries].tag_entry.data._bytes = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._byte =
+                        *(uint8_t *)data;
+                }
+            }
+            break;
+        case EXIF_ASCII:
+            {
+                char *str = NULL;
+                str = (char *)malloc(count + 1);
+                if (str == NULL) {
+                    ALOGE("%s: No memory for ascii string", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memset(str, 0, count + 1);
+                    memcpy(str, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._ascii = str;
+                }
+            }
+            break;
+        case EXIF_SHORT:
+            {
+                if (count > 1) {
+                    uint16_t *values =
+                        (uint16_t *)malloc(count * sizeof(uint16_t));
+                    if (values == NULL) {
+                        ALOGE("%s: No memory for short array", __func__);
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count * sizeof(uint16_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._shorts = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._short =
+                        *(uint16_t *)data;
+                }
+            }
+            break;
+        case EXIF_LONG:
+            {
+                if (count > 1) {
+                    uint32_t *values =
+                        (uint32_t *)malloc(count * sizeof(uint32_t));
+                    if (values == NULL) {
+                        ALOGE("%s: No memory for long array", __func__);
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count * sizeof(uint32_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._longs = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._long =
+                        *(uint32_t *)data;
+                }
+            }
+            break;
+        case EXIF_RATIONAL:
+            {
+                if (count > 1) {
+                    rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+                    if (values == NULL) {
+                        ALOGE("%s: No memory for rational array", __func__);
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count * sizeof(rat_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._rats = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._rat =
+                        *(rat_t *)data;
+                }
+            }
+            break;
+        case EXIF_UNDEFINED:
+            {
+                uint8_t *values = (uint8_t *)malloc(count);
+                if (values == NULL) {
+                    ALOGE("%s: No memory for undefined array", __func__);
+                    rc = NO_MEMORY;
+                } else {
+                    memcpy(values, data, count);
+                    m_Entries[m_nNumEntries].tag_entry.data._undefined = values;
+                }
+            }
+            break;
+        case EXIF_SLONG:
+            {
+                if (count > 1) {
+                    int32_t *values =
+                        (int32_t *)malloc(count * sizeof(int32_t));
+                    if (values == NULL) {
+                        ALOGE("%s: No memory for signed long array", __func__);
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count * sizeof(int32_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._slongs = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._slong =
+                        *(int32_t *)data;
+                }
+            }
+            break;
+        case EXIF_SRATIONAL:
+            {
+                if (count > 1) {
+                    srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+                    if (values == NULL) {
+                        ALOGE("%s: No memory for signed rational array", __func__);
+                        rc = NO_MEMORY;
+                    } else {
+                        memcpy(values, data, count * sizeof(srat_t));
+                        m_Entries[m_nNumEntries].tag_entry.data._srats = values;
+                    }
+                } else {
+                    m_Entries[m_nNumEntries].tag_entry.data._srat =
+                        *(srat_t *)data;
+                }
+            }
+            break;
+        default:
+            ALOGE("%s: Error, unknown type", __func__);
+            break;
+    }
+
+    // Increase number of entries only if the entry was populated successfully
+    if (rc == NO_ERROR) {
+        m_nNumEntries++;
+    }
+    return rc;
+}
+
+}; // namespace qcamera
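To see how the class above is meant to be driven, the following sketch populates a small table and exposes it to the encoder. The QCamera3Exif calls match the declarations in QCamera3PostProc.h below; the tag IDs (EXIFTAGID_ORIENTATION, EXIFTAGID_MAKE) are assumed to come from the vendor EXIF headers and are illustrative only.

    // Illustrative sketch; assumes the vendor EXIF tag IDs and <string.h>.
    QCamera3Exif exif;

    uint16_t orientation = 1;                 // single SHORT entry, stored inline
    exif.addEntry(EXIFTAGID_ORIENTATION, EXIF_SHORT, 1, &orientation);

    const char make[] = "LGE";                // ASCII entry; count includes the NUL
    exif.addEntry(EXIFTAGID_MAKE, EXIF_ASCII, strlen(make) + 1, (void *)make);

    // The JPEG encode parameters consume the raw table:
    uint32_t numEntries = exif.getNumOfEntries();
    QEXIF_INFO_DATA *entries = exif.getEntries();
    // ... fill the mm_jpeg encode params with `entries` / `numEntries` ...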
diff --git a/camera/QCamera2/HAL3/QCamera3PostProc.h b/camera/QCamera2/HAL3/QCamera3PostProc.h
new file mode 100644
index 0000000..4e4221a
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3PostProc.h
@@ -0,0 +1,164 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCamera3_POSTPROC_H__
+#define __QCamera3_POSTPROC_H__
+
+extern "C" {
+#include <mm_camera_interface.h>
+#include <mm_jpeg_interface.h>
+}
+//#include "QCamera3HWI.h"
+#include "QCameraQueue.h"
+#include "QCameraCmdThread.h"
+#include "QCamera3HALHeader.h"
+
+namespace qcamera {
+
+class QCamera3Exif;
+class QCamera3Channel;
+class QCamera3PicChannel;
+class QCamera3ReprocessChannel;
+class QCamera3Stream;
+class QCamera3Memory;
+
+typedef struct {
+    uint32_t jobId;                  // job ID
+    uint32_t client_hdl;             // handle of jpeg client (obtained when open jpeg)
+    mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel after done)
+    mm_camera_super_buf_t *src_reproc_frame; // original source frame for reproc if not NULL
+    metadata_buffer_t *metadata;
+    jpeg_settings_t *jpeg_settings;
+} qcamera_jpeg_data_t;
+
+typedef struct {
+    uint32_t jobId;                  // job ID
+    mm_camera_super_buf_t *src_frame;// source frame (need to be returned back to kernel after done)
+    metadata_buffer_t *metadata;
+} qcamera_pp_data_t;
+
+typedef struct {
+    mm_camera_super_buf_t *frame;    // source frame that needs post process
+} qcamera_pp_request_t;
+
+typedef struct {
+    uint32_t jobId;                  // job ID (obtained when start_jpeg_job)
+    jpeg_job_status_t status;        // jpeg encoding status
+    mm_jpeg_output_t out_data;         // ptr to jpeg output buf
+} qcamera_jpeg_evt_payload_t;
+
+#define MAX_EXIF_TABLE_ENTRIES 22
+class QCamera3Exif
+{
+public:
+    QCamera3Exif();
+    virtual ~QCamera3Exif();
+
+    int32_t addEntry(exif_tag_id_t tagid,
+                     exif_tag_type_t type,
+                     uint32_t count,
+                     void *data);
+    uint32_t getNumOfEntries() {return m_nNumEntries;};
+    QEXIF_INFO_DATA *getEntries() {return m_Entries;};
+
+private:
+    QEXIF_INFO_DATA m_Entries[MAX_EXIF_TABLE_ENTRIES];  // exif tags for JPEG encoder
+    uint32_t  m_nNumEntries;                            // number of valid entries
+};
+
+class QCamera3PostProcessor
+{
+public:
+    QCamera3PostProcessor(QCamera3PicChannel *ch_ctrl);
+    virtual ~QCamera3PostProcessor();
+
+    int32_t init(QCamera3Memory *mMemory,
+                 jpeg_encode_callback_t jpeg_cb, void *user_data);
+    int32_t deinit();
+    int32_t start(QCamera3Channel *pInputChannel,
+                  metadata_buffer_t *metadata);
+    int32_t stop();
+    int32_t processData(mm_camera_super_buf_t *frame);
+    int32_t processRawData(mm_camera_super_buf_t *frame);
+    int32_t processPPData(mm_camera_super_buf_t *frame);
+    int32_t processPPMetadata(metadata_buffer_t *reproc_meta);
+    int32_t processJpegSettingData(jpeg_settings_t *jpeg_settings);
+    int32_t processJpegEvt(qcamera_jpeg_evt_payload_t *evt);
+    qcamera_jpeg_data_t *findJpegJobByJobId(uint32_t jobId);
+    void releaseJpegJobData(qcamera_jpeg_data_t *job);
+
+private:
+    int32_t sendEvtNotify(int32_t msg_type, int32_t ext1, int32_t ext2);
+    mm_jpeg_color_format getColorfmtFromImgFmt(cam_format_t img_fmt);
+    mm_jpeg_format_t getJpegImgTypeFromImgFmt(cam_format_t img_fmt);
+    int32_t getJpegEncodeConfig(mm_jpeg_encode_params_t& encode_parm,
+                                  QCamera3Stream *main_stream,
+                                  jpeg_settings_t *jpeg_settings);
+    int32_t encodeData(qcamera_jpeg_data_t *jpeg_job_data,
+                       uint8_t &needNewSess);
+    void releaseSuperBuf(mm_camera_super_buf_t *super_buf);
+    static void releaseNotifyData(void *user_data, void *cookie);
+    int32_t processRawImageImpl(mm_camera_super_buf_t *recvd_frame);
+
+    static void releaseJpegData(void *data, void *user_data);
+    static void releasePPInputData(void *data, void *user_data);
+    static void releaseOngoingPPData(void *data, void *user_data);
+    static void releaseMetaData(void *data, void *user_data);
+    static void releaseJpegSetting(void *data, void *user_data);
+
+    static void *dataProcessRoutine(void *data);
+
+private:
+    QCamera3PicChannel         *m_parent;
+    jpeg_encode_callback_t     mJpegCB;
+    void *                     mJpegUserData;
+    mm_jpeg_ops_t              mJpegHandle;
+    uint32_t                   mJpegClientHandle;
+    uint32_t                   mJpegSessionId;
+
+    QCamera3Exif *             m_pJpegExifObj;
+    int8_t                     m_bThumbnailNeeded;
+    QCamera3Memory             *mJpegMem;
+    QCamera3ReprocessChannel *  m_pReprocChannel;
+
+    QCameraQueue m_inputPPQ;            // input queue for postproc
+    QCameraQueue m_ongoingPPQ;          // ongoing postproc queue
+    QCameraQueue m_inputJpegQ;          // input jpeg job queue
+    QCameraQueue m_ongoingJpegQ;        // ongoing jpeg job queue
+    QCameraQueue m_inputRawQ;           // input raw job queue
+    QCameraQueue m_inputMetaQ;          // input meta queue
+    QCameraQueue m_jpegSettingsQ;       // input jpeg settings queue
+    QCameraCmdThread m_dataProcTh;      // thread for data processing
+
+    pthread_mutex_t mReprocJobLock;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCamera3_POSTPROC_H__ */
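Read together, the declarations above imply roughly the following call order into QCamera3PostProcessor. Everything passed in (picChannel, mem, jpegEvtHandle, userData, inputChannel, metadata, superBuf) is a placeholder owned by the picture channel; only the QCamera3PostProcessor methods themselves are taken from this header.

    // Sketch of the intended lifecycle, with placeholder arguments:
    QCamera3PostProcessor postProc(picChannel);    // picChannel: QCamera3PicChannel*

    postProc.init(mem, jpegEvtHandle, userData);   // bind output memory + JPEG-done callback
    postProc.start(inputChannel, metadata);        // set up the (re)processing path

    postProc.processData(superBuf);                // queue a captured frame for JPEG
    // ... encoder completion arrives as a qcamera_jpeg_evt_payload_t and is
    //     fed back through postProc.processJpegEvt(&payload) ...

    postProc.stop();
    postProc.deinit();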
diff --git a/camera/QCamera2/HAL3/QCamera3Stream.cpp b/camera/QCamera2/HAL3/QCamera3Stream.cpp
new file mode 100644
index 0000000..793aa00
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Stream.cpp
@@ -0,0 +1,928 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3Stream"
+//#define LOG_NDEBUG 0
+
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include "QCamera3HWI.h"
+#include "QCamera3Stream.h"
+#include "QCamera3Channel.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : get_bufs
+ *
+ * DESCRIPTION: static function entry to allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (!stream) {
+        ALOGE("getBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->getBufs(offset, num_bufs, initial_reg_flag, bufs, ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : put_bufs
+ *
+ * DESCRIPTION: static function entry to deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (!stream) {
+        ALOGE("putBufs invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->putBufs(ops_tbl);
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidate_buf
+ *
+ * DESCRIPTION: static function entry to invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::invalidate_buf(int index, void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (!stream) {
+        ALOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->invalidateBuf(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : clean_invalidate_buf
+ *
+ * DESCRIPTION: static function entry to clean and invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index      : index of the stream buffer to invalidate
+ *   @user_data  : user data ptr of ops_tbl
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::clean_invalidate_buf(int index, void *user_data)
+{
+    QCamera3Stream *stream = reinterpret_cast<QCamera3Stream *>(user_data);
+    if (!stream) {
+        ALOGE("invalid stream pointer");
+        return NO_MEMORY;
+    }
+    return stream->cleanInvalidateBuf(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : QCamera3Stream
+ *
+ * DESCRIPTION: constructor of QCamera3Stream
+ *
+ * PARAMETERS :
+ *   @camHandle  : camera handle
+ *   @chId       : channel handle
+ *   @camOps     : ptr to camera ops table
+ *   @paddingInfo: ptr to padding info
+ *   @channel    : ptr to the channel that owns this stream
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Stream::QCamera3Stream(uint32_t camHandle,
+                             uint32_t chId,
+                             mm_camera_ops_t *camOps,
+                             cam_padding_info_t *paddingInfo,
+                             QCamera3Channel *channel) :
+        mCamHandle(camHandle),
+        mChannelHandle(chId),
+        mHandle(0),
+        mCamOps(camOps),
+        mStreamInfo(NULL),
+        mMemOps(NULL),
+        mNumBufs(0),
+        mDataCB(NULL),
+        mUserData(NULL),
+        mDataQ(releaseFrameData, this),
+        mStreamInfoBuf(NULL),
+        mStreamBufs(NULL),
+        mBufDefs(NULL),
+        mChannel(channel),
+        m_bActive(false)
+{
+    mMemVtbl.user_data = this;
+    mMemVtbl.get_bufs = get_bufs;
+    mMemVtbl.put_bufs = put_bufs;
+    mMemVtbl.invalidate_buf = invalidate_buf;
+    mMemVtbl.clean_invalidate_buf = clean_invalidate_buf;
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    memcpy(&mPaddingInfo, paddingInfo, sizeof(cam_padding_info_t));
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCamera3Stream
+ *
+ * DESCRIPTION: destructor of QCamera3Stream
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCamera3Stream::~QCamera3Stream()
+{
+    if (mStreamInfoBuf != NULL) {
+        int rc = mCamOps->unmap_stream_buf(mCamHandle,
+                    mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
+        if (rc < 0) {
+            ALOGE("Failed to unmap stream info buffer");
+        }
+        mStreamInfoBuf->deallocate();
+        delete mStreamInfoBuf;
+        mStreamInfoBuf = NULL;
+    }
+
+    // delete stream
+    if (mHandle > 0) {
+        mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+        mHandle = 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : init
+ *
+ * DESCRIPTION: initialize stream obj
+ *
+ * PARAMETERS :
+ *   @streamType   : type of stream
+ *   @streamFormat : format of stream buffers
+ *   @streamDim    : dimensions of the stream
+ *   @reprocess_config: reprocess config, NULL for non-reprocess streams
+ *   @minNumBuffers: minimum number of stream buffers
+ *   @stream_cb    : stream data notify callback. Can be NULL if not needed
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::init(cam_stream_type_t streamType,
+                            cam_format_t streamFormat,
+                            cam_dimension_t streamDim,
+                            cam_stream_reproc_config_t* reprocess_config,
+                            uint8_t minNumBuffers,
+                            stream_cb_routine stream_cb,
+                            void *userdata)
+{
+    int32_t rc = OK;
+    mm_camera_stream_config_t stream_config;
+
+    mHandle = mCamOps->add_stream(mCamHandle, mChannelHandle);
+    if (!mHandle) {
+        ALOGE("add_stream failed");
+        rc = UNKNOWN_ERROR;
+        goto done;
+    }
+
+    // allocate and map stream info memory
+    mStreamInfoBuf = new QCamera3HeapMemory();
+    if (mStreamInfoBuf == NULL) {
+        ALOGE("%s: no memory for stream info buf obj", __func__);
+        rc = -ENOMEM;
+        goto err1;
+    }
+    rc = mStreamInfoBuf->allocate(1, sizeof(cam_stream_info_t), false);
+    if (rc < 0) {
+        ALOGE("%s: no memory for stream info", __func__);
+        rc = -ENOMEM;
+        goto err2;
+    }
+
+    mStreamInfo =
+        reinterpret_cast<cam_stream_info_t *>(mStreamInfoBuf->getPtr(0));
+    memset(mStreamInfo, 0, sizeof(cam_stream_info_t));
+    mStreamInfo->stream_type = streamType;
+    mStreamInfo->fmt = streamFormat;
+    mStreamInfo->dim = streamDim;
+    mStreamInfo->num_bufs = minNumBuffers;
+
+    rc = mCamOps->map_stream_buf(mCamHandle,
+            mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+            0, -1, mStreamInfoBuf->getFd(0), mStreamInfoBuf->getSize(0));
+    if (rc < 0) {
+        ALOGE("Failed to map stream info buffer");
+        goto err3;
+    }
+
+    mNumBufs = minNumBuffers;
+    if (reprocess_config != NULL) {
+       mStreamInfo->reprocess_config = *reprocess_config;
+       mStreamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
+       //mStreamInfo->num_of_burst = reprocess_config->offline.num_of_bufs;
+       mStreamInfo->num_of_burst = 1;
+       ALOGI("%s: num_of_burst is %d", __func__, mStreamInfo->num_of_burst);
+    } else {
+       mStreamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    }
+
+    // Configure the stream
+    stream_config.stream_info = mStreamInfo;
+    stream_config.mem_vtbl = mMemVtbl;
+    stream_config.padding_info = mPaddingInfo;
+    stream_config.userdata = this;
+    stream_config.stream_cb = dataNotifyCB;
+
+    rc = mCamOps->config_stream(mCamHandle,
+            mChannelHandle, mHandle, &stream_config);
+    if (rc < 0) {
+        ALOGE("Failed to config stream, rc = %d", rc);
+        goto err4;
+    }
+
+    mDataCB = stream_cb;
+    mUserData = userdata;
+    return 0;
+
+err4:
+    mCamOps->unmap_stream_buf(mCamHandle,
+            mChannelHandle, mHandle, CAM_MAPPING_BUF_TYPE_STREAM_INFO, 0, -1);
+err3:
+    mStreamInfoBuf->deallocate();
+err2:
+    delete mStreamInfoBuf;
+    mStreamInfoBuf = NULL;
+    mStreamInfo = NULL;
+err1:
+    mCamOps->delete_stream(mCamHandle, mChannelHandle, mHandle);
+    mHandle = 0;
+    mNumBufs = 0;
+done:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : start
+ *
+ * DESCRIPTION: start stream. Will start main stream thread to handle stream
+ *              related ops.
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::start()
+{
+    int32_t rc = 0;
+    rc = mProcTh.launch(dataProcRoutine, this);
+    if (rc == NO_ERROR) {
+        m_bActive = true;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stop
+ *
+ * DESCRIPTION: stop stream. Will stop main stream thread
+ *
+ * PARAMETERS : none
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::stop()
+{
+    int32_t rc = 0;
+    rc = mProcTh.exit();
+    m_bActive = false;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : processDataNotify
+ *
+ * DESCRIPTION: process stream data notify
+ *
+ * PARAMETERS :
+ *   @frame   : stream frame received
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::processDataNotify(mm_camera_super_buf_t *frame)
+{
+    ALOGV("%s: E\n", __func__);
+    int32_t rc;
+    if (m_bActive) {
+        mDataQ.enqueue((void *)frame);
+        rc = mProcTh.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, FALSE, FALSE);
+    } else {
+        ALOGD("%s: Stream thread is not active, no ops here", __func__);
+        bufDone(frame->bufs[0]->buf_idx);
+        free(frame);
+        rc = NO_ERROR;
+    }
+    ALOGV("%s: X\n", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataNotifyCB
+ *
+ * DESCRIPTION: callback for data notify. This function is registered with
+ *              mm-camera-interface to handle data notify
+ *
+ * PARAMETERS :
+ *   @recvd_frame   : stream frame received
+ *   @userdata      : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void QCamera3Stream::dataNotifyCB(mm_camera_super_buf_t *recvd_frame,
+                                 void *userdata)
+{
+    ALOGV("%s: E\n", __func__);
+    QCamera3Stream* stream = (QCamera3Stream *)userdata;
+    if (stream == NULL ||
+        recvd_frame == NULL ||
+        recvd_frame->bufs[0] == NULL ||
+        recvd_frame->bufs[0]->stream_id != stream->getMyHandle()) {
+        ALOGE("%s: Not a valid stream to handle buf", __func__);
+        return;
+    }
+
+    mm_camera_super_buf_t *frame =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (frame == NULL) {
+        ALOGE("%s: No mem for mm_camera_super_buf_t", __func__);
+        stream->bufDone(recvd_frame->bufs[0]->buf_idx);
+        return;
+    }
+    *frame = *recvd_frame;
+    stream->processDataNotify(frame);
+    return;
+}
+
+/*===========================================================================
+ * FUNCTION   : dataProcRoutine
+ *
+ * DESCRIPTION: function to process data in the main stream thread
+ *
+ * PARAMETERS :
+ *   @data    : user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void *QCamera3Stream::dataProcRoutine(void *data)
+{
+    int running = 1;
+    int ret;
+    QCamera3Stream *pme = (QCamera3Stream *)data;
+    QCameraCmdThread *cmdThread = &pme->mProcTh;
+    cmdThread->setName("cam_stream_proc");
+
+    ALOGV("%s: E", __func__);
+    do {
+        do {
+            ret = cam_sem_wait(&cmdThread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                ALOGE("%s: cam_sem_wait error (%s)",
+                      __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        // we got notified about new cmd avail in cmd queue
+        camera_cmd_type_t cmd = cmdThread->getCmd();
+        switch (cmd) {
+        case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+            {
+                ALOGV("%s: Do next job", __func__);
+                mm_camera_super_buf_t *frame =
+                    (mm_camera_super_buf_t *)pme->mDataQ.dequeue();
+                if (NULL != frame) {
+                    if (pme->mDataCB != NULL) {
+                        pme->mDataCB(frame, pme, pme->mUserData);
+                    } else {
+                        // no data cb routine, return buf here
+                        pme->bufDone(frame->bufs[0]->buf_idx);
+                    }
+                }
+            }
+            break;
+        case CAMERA_CMD_TYPE_EXIT:
+            ALOGD("%s: Exit", __func__);
+            /* flush data buf queue */
+            pme->mDataQ.flush();
+            running = 0;
+            break;
+        default:
+            break;
+        }
+    } while (running);
+    ALOGV("%s: X", __func__);
+    return NULL;
+}
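dataProcRoutine above is the stock QCamera command-thread pattern: block on a semaphore, handle one command per wake-up, and flush the data queue on CAMERA_CMD_TYPE_EXIT. For readers without the vendor QCameraCmdThread/QCameraQueue classes, here is a self-contained sketch of the same pattern in portable C++11, with a condition variable standing in for cam_sem_wait; all names in the sketch are invented.

    #include <condition_variable>
    #include <cstdio>
    #include <deque>
    #include <mutex>
    #include <thread>

    // One worker drains a queue of "frames" and exits on request,
    // mirroring the DO_NEXT_JOB / EXIT handling above (illustration only).
    struct StreamWorker {
        std::mutex m;
        std::condition_variable cv;
        std::deque<int> frames;          // stand-in for mm_camera_super_buf_t*
        bool exitRequested = false;
        std::thread th{&StreamWorker::run, this};

        void enqueue(int frame) {        // plays the role of sendCmd(DO_NEXT_JOB)
            { std::lock_guard<std::mutex> l(m); frames.push_back(frame); }
            cv.notify_one();
        }
        void stop() {                    // plays the role of CAMERA_CMD_TYPE_EXIT
            { std::lock_guard<std::mutex> l(m); exitRequested = true; }
            cv.notify_one();
            th.join();
        }
        void run() {
            std::unique_lock<std::mutex> l(m);
            for (;;) {
                cv.wait(l, [this] { return exitRequested || !frames.empty(); });
                if (exitRequested) { frames.clear(); return; }   // flush on exit
                int frame = frames.front();
                frames.pop_front();
                l.unlock();
                std::printf("process frame %d\n", frame);        // data callback goes here
                l.lock();
            }
        }
    };

    int main() {
        StreamWorker w;
        for (int i = 0; i < 3; ++i)
            w.enqueue(i);
        w.stop();
        return 0;
    }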
+
+/*===========================================================================
+ * FUNCTION   : getInternalFormatBuffer
+ *
+ * DESCRIPTION: return buffer in the internal format structure
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be returned
+ *
+ * RETURN     : ptr to a heap-allocated copy of the buffer definition
+ *              NULL on failure (caller must free the returned ptr)
+ *==========================================================================*/
+mm_camera_buf_def_t* QCamera3Stream::getInternalFormatBuffer(int index)
+{
+    mm_camera_buf_def_t *rc = NULL;
+    if ((index >= mNumBufs) || (mBufDefs == NULL) ||
+            (NULL == mBufDefs[index].mem_info)) {
+        ALOGE("%s:Index out of range/no internal buffers yet", __func__);
+        return NULL;
+    }
+
+    rc = (mm_camera_buf_def_t*)malloc(sizeof(mm_camera_buf_def_t));
+    if (rc) {
+        memcpy(rc, &mBufDefs[index], sizeof(mm_camera_buf_def_t));
+    } else {
+        ALOGE("%s: Failed to allocate memory",__func__);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : bufDone
+ *
+ * DESCRIPTION: return stream buffer to kernel
+ *
+ * PARAMETERS :
+ *   @index   : index of buffer to be returned
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::bufDone(int index)
+{
+    int32_t rc = NO_ERROR;
+
+    if (index >= mNumBufs || mBufDefs == NULL)
+        return BAD_INDEX;
+
+    if (NULL == mBufDefs[index].mem_info) {
+        if (NULL == mMemOps) {
+            ALOGE("%s: Camera operations not initialized", __func__);
+            return NO_INIT;
+        }
+
+        rc = mMemOps->map_ops(index, -1, mStreamBufs->getFd(index),
+                mStreamBufs->getSize(index), mMemOps->userdata);
+        if (rc < 0) {
+            ALOGE("%s: Failed to map camera buffer %d", __func__, index);
+            return rc;
+        }
+
+        rc = mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[index], index);
+        if (NO_ERROR != rc) {
+            ALOGE("%s: Couldn't find camera buffer definition", __func__);
+            mMemOps->unmap_ops(index, -1, mMemOps->userdata);
+            return rc;
+        }
+    }
+
+    rc = mCamOps->qbuf(mCamHandle, mChannelHandle, &mBufDefs[index]);
+    if (rc < 0)
+        return FAILED_TRANSACTION;
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getBufs
+ *
+ * DESCRIPTION: allocate stream buffers
+ *
+ * PARAMETERS :
+ *   @offset     : offset info of stream buffers
+ *   @num_bufs   : number of buffers allocated
+ *   @initial_reg_flag: flag to indicate if buffer needs to be registered
+ *                      at kernel initially
+ *   @bufs       : output of allocated buffers
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    uint8_t *regFlags;
+
+    if (!ops_tbl) {
+        ALOGE("%s: ops_tbl is NULL", __func__);
+        return INVALID_OPERATION;
+    }
+
+    mFrameLenOffset = *offset;
+    mMemOps = ops_tbl;
+
+    mStreamBufs = mChannel->getStreamBufs(mFrameLenOffset.frame_len);
+    if (!mStreamBufs) {
+        ALOGE("%s: Failed to allocate stream buffers", __func__);
+        return NO_MEMORY;
+    }
+
+    int registeredBuffers = mStreamBufs->getCnt();
+    for (int i = 0; i < registeredBuffers; i++) {
+        rc = ops_tbl->map_ops(i, -1, mStreamBufs->getFd(i),
+                mStreamBufs->getSize(i), ops_tbl->userdata);
+        if (rc < 0) {
+            ALOGE("%s: map_stream_buf failed: %d", __func__, rc);
+            for (int j = 0; j < i; j++) {
+                ops_tbl->unmap_ops(j, -1, ops_tbl->userdata);
+            }
+            return INVALID_OPERATION;
+        }
+    }
+
+    //regFlags array is allocated by us, but consumed and freed by mm-camera-interface
+    regFlags = (uint8_t *)malloc(sizeof(uint8_t) * mNumBufs);
+    if (!regFlags) {
+        ALOGE("%s: Out of memory", __func__);
+        for (int i = 0; i < registeredBuffers; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        return NO_MEMORY;
+    }
+    memset(regFlags, 0, sizeof(uint8_t) * mNumBufs);
+
+    mBufDefs = (mm_camera_buf_def_t *)malloc(mNumBufs * sizeof(mm_camera_buf_def_t));
+    if (mBufDefs == NULL) {
+        ALOGE("%s: Failed to allocate mm_camera_buf_def_t %d", __func__, rc);
+        for (int i = 0; i < registeredBuffers; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+    memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
+    for (int i = 0; i < registeredBuffers; i++) {
+        mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+    }
+
+    rc = mStreamBufs->getRegFlags(regFlags);
+    if (rc < 0) {
+        ALOGE("%s: getRegFlags failed %d", __func__, rc);
+        for (int i = 0; i < registeredBuffers; i++) {
+            ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+        }
+        free(mBufDefs);
+        mBufDefs = NULL;
+        free(regFlags);
+        regFlags = NULL;
+        return INVALID_OPERATION;
+    }
+
+    *num_bufs = mNumBufs;
+    *initial_reg_flag = regFlags;
+    *bufs = mBufDefs;
+    return NO_ERROR;
+}
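getBufs above has three resources to unwind when a later step fails (the per-buffer kernel mappings, regFlags and mBufDefs), which is why each error path repeats the unmap loop. The standalone sketch below isolates that acquire-then-unwind idea; mapBuffer/unmapBuffer are invented stand-ins for ops_tbl->map_ops/unmap_ops.

    #include <cstdio>
    #include <vector>

    // Map n buffers; if any later step fails, unmap exactly the ones
    // that were mapped (what the error paths above do by hand).
    static bool mapBuffer(int i)   { std::printf("map %d\n", i);   return true; }
    static void unmapBuffer(int i) { std::printf("unmap %d\n", i); }

    static bool setupBuffers(int n, bool laterStepFails) {
        std::vector<int> mapped;
        for (int i = 0; i < n; ++i) {
            if (!mapBuffer(i))
                break;
            mapped.push_back(i);
        }
        if ((int)mapped.size() == n && !laterStepFails)
            return true;                        // success: keep the mappings
        for (int i : mapped)
            unmapBuffer(i);                     // failure: release what we acquired
        return false;
    }

    int main() {
        setupBuffers(4, /*laterStepFails=*/true);
        return 0;
    }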
+
+/*===========================================================================
+ * FUNCTION   : putBufs
+ *
+ * DESCRIPTION: deallocate stream buffers
+ *
+ * PARAMETERS :
+ *   @ops_tbl    : ptr to buf mapping/unmapping ops
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl)
+{
+    int rc = NO_ERROR;
+    for (int i = 0; i < mNumBufs; i++) {
+        if (NULL != mBufDefs[i].mem_info) {
+            rc = ops_tbl->unmap_ops(i, -1, ops_tbl->userdata);
+            if (rc < 0) {
+                ALOGE("%s: unmap_ops failed: %d", __func__, rc);
+            }
+        }
+    }
+    mBufDefs = NULL; // mBufDefs just keeps a ptr to the buffers;
+                     // mm-camera-interface owns them, so no need to free here
+    memset(&mFrameLenOffset, 0, sizeof(mFrameLenOffset));
+    mChannel->putStreamBufs();
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : invalidateBuf
+ *
+ * DESCRIPTION: invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::invalidateBuf(int index)
+{
+    return mStreamBufs->invalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : cleanInvalidateBuf
+ *
+ * DESCRIPTION: clean and invalidate a specific stream buffer
+ *
+ * PARAMETERS :
+ *   @index   : index of the buffer to invalidate
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::cleanInvalidateBuf(int index)
+{
+    return mStreamBufs->cleanInvalidateCache(index);
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameOffset
+ *
+ * DESCRIPTION: query stream buffer frame offset info
+ *
+ * PARAMETERS :
+ *   @offset  : reference to struct to store the queried frame offset info
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getFrameOffset(cam_frame_len_offset_t &offset)
+{
+    offset = mFrameLenOffset;
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFrameDimension
+ *
+ * DESCRIPTION: query stream frame dimension info
+ *
+ * PARAMETERS :
+ *   @dim     : reference to struct to store the queried frame dimension
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getFrameDimension(cam_dimension_t &dim)
+{
+    if (mStreamInfo != NULL) {
+        dim = mStreamInfo->dim;
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getFormat
+ *
+ * DESCRIPTION: query stream format
+ *
+ * PARAMETERS :
+ *   @fmt     : reference to stream format
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::getFormat(cam_format_t &fmt)
+{
+    if (mStreamInfo != NULL) {
+        fmt = mStreamInfo->fmt;
+        return 0;
+    }
+    return -1;
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyServerID
+ *
+ * DESCRIPTION: query server stream ID
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : stream ID from server
+ *==========================================================================*/
+uint32_t QCamera3Stream::getMyServerID() {
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->stream_svr_id;
+    } else {
+        return 0;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : getMyType
+ *
+ * DESCRIPTION: query stream type
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : type of stream
+ *==========================================================================*/
+cam_stream_type_t QCamera3Stream::getMyType() const
+{
+    if (mStreamInfo != NULL) {
+        return mStreamInfo->stream_type;
+    } else {
+        return CAM_STREAM_TYPE_MAX;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mapBuf
+ *
+ * DESCRIPTION: map stream related buffer to backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *   @fd       : fd of the buffer
+ *   @size     : length of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::mapBuf(uint8_t buf_type,
+                              uint32_t buf_idx,
+                              int32_t plane_idx,
+                              int fd,
+                              uint32_t size)
+{
+    return mCamOps->map_stream_buf(mCamHandle, mChannelHandle,
+                                   mHandle, buf_type,
+                                   buf_idx, plane_idx,
+                                   fd, size);
+
+}
+
+/*===========================================================================
+ * FUNCTION   : unmapBuf
+ *
+ * DESCRIPTION: unmap stream related buffer from backend server
+ *
+ * PARAMETERS :
+ *   @buf_type : mapping type of buffer
+ *   @buf_idx  : index of buffer
+ *   @plane_idx: plane index
+ *
+ * RETURN     : int32_t type of status
+ *              non-zero failure code
+ *              none-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx)
+{
+    return mCamOps->unmap_stream_buf(mCamHandle, mChannelHandle,
+                                     mHandle, buf_type,
+                                     buf_idx, plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : setParameter
+ *
+ * DESCRIPTION: set stream based parameters
+ *
+ * PARAMETERS :
+ *   @param   : ptr to parameters to be set
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3Stream::setParameter(cam_stream_parm_buffer_t &param)
+{
+    int32_t rc = NO_ERROR;
+    mStreamInfo->parm_buf = param;
+    rc = mCamOps->set_stream_parms(mCamHandle,
+                                   mChannelHandle,
+                                   mHandle,
+                                   &mStreamInfo->parm_buf);
+    if (rc == NO_ERROR) {
+        param = mStreamInfo->parm_buf;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : releaseFrameData
+ *
+ * DESCRIPTION: callback function to release frame data node
+ *
+ * PARAMETERS :
+ *   @data      : ptr to post process input data
+ *   @user_data : user data ptr (QCamera3Stream)
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCamera3Stream::releaseFrameData(void *data, void *user_data)
+{
+    QCamera3Stream *pme = (QCamera3Stream *)user_data;
+    mm_camera_super_buf_t *frame = (mm_camera_super_buf_t *)data;
+    if (NULL != pme) {
+        pme->bufDone(frame->bufs[0]->buf_idx);
+    }
+}
+
+}; // namespace qcamera
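Taken as a whole, a channel drives QCamera3Stream roughly as sketched below. Only the QCamera3Stream calls come from this file; the handles, padding info and streamCbRoutine are placeholders the owning channel would supply, and the stream type, format and dimension values are merely plausible examples from the cam_intf headers.

    // Sketch of channel-side usage with placeholder values:
    QCamera3Stream *stream =
            new QCamera3Stream(camHandle, chHandle, camOps, &padding, channel);

    cam_dimension_t dim;
    dim.width  = 1920;                        // assumed field names
    dim.height = 1080;

    stream->init(CAM_STREAM_TYPE_PREVIEW,     // example type
                 CAM_FORMAT_YUV_420_NV12,     // example format
                 dim,
                 NULL,                        // no reprocess config -> continuous mode
                 7,                           // minNumBuffers
                 streamCbRoutine,             // stream_cb_routine provided by the channel
                 channel);

    stream->start();                          // launches mProcTh
    // frames arrive in streamCbRoutine(); each one must eventually be
    // returned with stream->bufDone(frame->bufs[0]->buf_idx);
    stream->stop();
    delete stream;                            // unmaps stream info, deletes the backend stream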
diff --git a/camera/QCamera2/HAL3/QCamera3Stream.h b/camera/QCamera2/HAL3/QCamera3Stream.h
new file mode 100644
index 0000000..aab2a30
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3Stream.h
@@ -0,0 +1,138 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA3_STREAM_H__
+#define __QCAMERA3_STREAM_H__
+
+#include <hardware/camera3.h>
+#include "QCameraCmdThread.h"
+#include "QCamera3Mem.h"
+
+extern "C" {
+#include <mm_camera_interface.h>
+}
+
+namespace qcamera {
+
+class QCamera3Stream;
+class QCamera3Channel;
+
+typedef void (*stream_cb_routine)(mm_camera_super_buf_t *frame,
+                                  QCamera3Stream *stream,
+                                  void *userdata);
+
+class QCamera3Stream
+{
+public:
+    QCamera3Stream(uint32_t camHandle,
+                  uint32_t chId,
+                  mm_camera_ops_t *camOps,
+                  cam_padding_info_t *paddingInfo,
+                  QCamera3Channel *channel);
+    virtual ~QCamera3Stream();
+    virtual int32_t init(cam_stream_type_t streamType,
+                         cam_format_t streamFormat,
+                         cam_dimension_t streamDim,
+                         cam_stream_reproc_config_t* reprocess_config,
+                         uint8_t minStreamBufNum,
+                         stream_cb_routine stream_cb,
+                         void *userdata);
+    virtual int32_t bufDone(int index);
+    virtual int32_t processDataNotify(mm_camera_super_buf_t *bufs);
+    virtual int32_t start();
+    virtual int32_t stop();
+
+    static void dataNotifyCB(mm_camera_super_buf_t *recvd_frame, void *userdata);
+    static void *dataProcRoutine(void *data);
+    uint32_t getMyHandle() const {return mHandle;}
+    cam_stream_type_t getMyType() const;
+    int32_t getFrameOffset(cam_frame_len_offset_t &offset);
+    int32_t getFrameDimension(cam_dimension_t &dim);
+    int32_t getFormat(cam_format_t &fmt);
+    mm_camera_buf_def_t* getInternalFormatBuffer(int index);
+    QCamera3Memory *getStreamBufs() {return mStreamBufs;};
+    uint32_t getMyServerID();
+
+    int32_t mapBuf(uint8_t buf_type, uint32_t buf_idx,
+                   int32_t plane_idx, int fd, uint32_t size);
+    int32_t unmapBuf(uint8_t buf_type, uint32_t buf_idx, int32_t plane_idx);
+    int32_t setParameter(cam_stream_parm_buffer_t &param);
+
+    static void releaseFrameData(void *data, void *user_data);
+
+private:
+    uint32_t mCamHandle;
+    uint32_t mChannelHandle;
+    uint32_t mHandle; // stream handle from mm-camera-interface
+    mm_camera_ops_t *mCamOps;
+    cam_stream_info_t *mStreamInfo; // ptr to stream info buf
+    mm_camera_stream_mem_vtbl_t mMemVtbl;
+    mm_camera_map_unmap_ops_tbl_t *mMemOps;
+    uint8_t mNumBufs;
+    stream_cb_routine mDataCB;
+    void *mUserData;
+
+    QCameraQueue     mDataQ;
+    QCameraCmdThread mProcTh; // thread for dataCB
+
+    QCamera3HeapMemory *mStreamInfoBuf;
+    QCamera3Memory *mStreamBufs;
+    mm_camera_buf_def_t *mBufDefs;
+    cam_frame_len_offset_t mFrameLenOffset;
+    cam_padding_info_t mPaddingInfo;
+    QCamera3Channel *mChannel;
+    bool m_bActive; // if stream mProcTh is active
+
+    static int32_t get_bufs(
+                     cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+    static int32_t put_bufs(
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                     void *user_data);
+    static int32_t invalidate_buf(int index, void *user_data);
+    static int32_t clean_invalidate_buf(int index, void *user_data);
+
+    int32_t getBufs(cam_frame_len_offset_t *offset,
+                     uint8_t *num_bufs,
+                     uint8_t **initial_reg_flag,
+                     mm_camera_buf_def_t **bufs,
+                     mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t putBufs(mm_camera_map_unmap_ops_tbl_t *ops_tbl);
+    int32_t invalidateBuf(int index);
+    int32_t cleanInvalidateBuf(int index);
+
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3_STREAM_H__ */
diff --git a/camera/QCamera2/HAL3/QCamera3VendorTags.cpp b/camera/QCamera2/HAL3/QCamera3VendorTags.cpp
new file mode 100644
index 0000000..3ded922
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3VendorTags.cpp
@@ -0,0 +1,283 @@
+/* Copyright (c) 2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#define LOG_TAG "QCamera3VendorTags"
+//#define LOG_NDEBUG 0
+
+#include <hardware/camera3.h>
+#include <utils/Log.h>
+#include <utils/Errors.h>
+#include "QCamera3VendorTags.h"
+
+using namespace android;
+
+namespace qcamera {
+
+const int QCAMERA3_SECTION_COUNT = QCAMERA3_SECTIONS_END - VENDOR_SECTION;
+
+enum qcamera3_ext_tags qcamera3_ext3_section_bounds[QCAMERA3_SECTIONS_END -
+    VENDOR_SECTION] = {
+        QCAMERA3_PRIVATEDATA_END,
+        QCAMERA3_OPAQUE_RAW_END
+};
+
+typedef struct vendor_tag_info {
+    const char *tag_name;
+    uint8_t     tag_type;
+} vendor_tag_info_t;
+
+const char *qcamera3_ext_section_names[QCAMERA3_SECTIONS_END -
+        VENDOR_SECTION] = {
+    "org.codeaurora.qcamera3.privatedata",
+    "org.codeaurora.qcamera3.opaque_raw"
+};
+
+vendor_tag_info_t qcamera3_privatedata[QCAMERA3_PRIVATEDATA_END - QCAMERA3_PRIVATEDATA_START] = {
+    { "privatedata_reprocess", TYPE_BYTE }
+};
+
+vendor_tag_info_t qcamera3_opaque_raw[QCAMERA3_OPAQUE_RAW_END - QCAMERA3_OPAQUE_RAW_START] = {
+    { "opaque_raw_strides", TYPE_INT32 },
+    { "opaque_raw_format", TYPE_BYTE }
+};
+
+vendor_tag_info_t *qcamera3_tag_info[QCAMERA3_SECTIONS_END -
+        VENDOR_SECTION] = {
+    qcamera3_privatedata,
+    qcamera3_opaque_raw
+};
+
+uint32_t qcamera3_all_tags[] = {
+    // QCAMERA3_PRIVATEDATA
+    (uint32_t)QCAMERA3_PRIVATEDATA_REPROCESS,
+
+    // QCAMERA3_OPAQUE_RAW
+    (uint32_t)QCAMERA3_OPAQUE_RAW_STRIDES,
+    (uint32_t)QCAMERA3_OPAQUE_RAW_FORMAT
+};
+
+const vendor_tag_ops_t* QCamera3VendorTags::Ops = NULL;
+
+/*===========================================================================
+ * FUNCTION   : get_vendor_tag_ops
+ *
+ * DESCRIPTION: Get the metadata vendor tag function pointers
+ *
+ * PARAMETERS :
+ *    @ops   : function pointer table to be filled by HAL
+ *
+ *
+ * RETURN     : NONE
+ *==========================================================================*/
+void QCamera3VendorTags::get_vendor_tag_ops(
+                                vendor_tag_ops_t* ops)
+{
+    ALOGV("%s: E", __func__);
+
+    Ops = ops;
+
+    ops->get_tag_count = get_tag_count;
+    ops->get_all_tags = get_all_tags;
+    ops->get_section_name = get_section_name;
+    ops->get_tag_name = get_tag_name;
+    ops->get_tag_type = get_tag_type;
+    ops->reserved[0] = NULL;
+
+    ALOGV("%s: X", __func__);
+    return;
+}
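Once get_vendor_tag_ops() has filled the table, a caller (in practice the camera service) discovers the tags purely through those function pointers. A short sketch of that consumer side, using only the five ops assigned above; the surrounding code that would host this snippet is omitted.

    #include <vector>

    // Enumerate all vendor tags through the ops table filled above.
    vendor_tag_ops_t ops;
    QCamera3VendorTags::get_vendor_tag_ops(&ops);

    int count = ops.get_tag_count(&ops);
    std::vector<uint32_t> tags(count > 0 ? count : 0);
    ops.get_all_tags(&ops, tags.data());

    for (uint32_t tag : tags) {
        ALOGV("vendor tag 0x%x: %s.%s (type %d)", tag,
              ops.get_section_name(&ops, tag),
              ops.get_tag_name(&ops, tag),
              ops.get_tag_type(&ops, tag));
    }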
+
+/*===========================================================================
+ * FUNCTION   : get_tag_count
+ *
+ * DESCRIPTION: Get number of vendor tags supported
+ *
+ * PARAMETERS :
+ *    @ops   :  Vendor tag ops data structure
+ *
+ *
+ * RETURN     : Number of vendor tags supported
+ *==========================================================================*/
+
+int QCamera3VendorTags::get_tag_count(
+                const vendor_tag_ops_t * ops)
+{
+    int count = 0;
+    if (ops == Ops)
+        count = sizeof(qcamera3_all_tags)/sizeof(qcamera3_all_tags[0]);
+
+    ALOGV("%s: count is %d", __func__, count);
+    return count;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_all_tags
+ *
+ * DESCRIPTION: Fill array with all supported vendor tags
+ *
+ * PARAMETERS :
+ *    @ops      :  Vendor tag ops data structure
+ *    @tag_array:  array of metadata tags
+ *
+ * RETURN     : NONE
+ *              (the tag array is filled in place)
+ *==========================================================================*/
+void QCamera3VendorTags::get_all_tags(
+                const vendor_tag_ops_t * ops,
+                uint32_t *g_array)
+{
+    if (ops != Ops)
+        return;
+
+    for (size_t i = 0;
+            i < sizeof(qcamera3_all_tags)/sizeof(qcamera3_all_tags[0]);
+            i++) {
+        g_array[i] = qcamera3_all_tags[i];
+        ALOGV("%s: g_array[%zu] is %u", __func__, i, g_array[i]);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : get_section_name
+ *
+ * DESCRIPTION: Get section name for vendor tag
+ *
+ * PARAMETERS :
+ *    @ops   :  Vendor tag ops structure
+ *    @tag   :  Vendor specific tag
+ *
+ *
+ * RETURN     : Success: the section name of the specific tag
+ *              Failure: NULL
+ *==========================================================================*/
+
+const char* QCamera3VendorTags::get_section_name(
+                const vendor_tag_ops_t * ops,
+                uint32_t tag)
+{
+    ALOGV("%s: E", __func__);
+    if (ops != Ops)
+        return NULL;
+
+    const char *ret;
+    uint32_t section = tag >> 16;
+
+    if (section < VENDOR_SECTION || section >= QCAMERA3_SECTIONS_END)
+        ret = NULL;
+    else
+        ret = qcamera3_ext_section_names[section - VENDOR_SECTION];
+
+    if (ret)
+        ALOGV("%s: section_name[%d] is %s", __func__, tag, ret);
+    ALOGV("%s: X", __func__);
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_tag_name
+ *
+ * DESCRIPTION: Get name of a vendor specific tag
+ *
+ * PARAMETERS :
+ *    @tag   :  Vendor specific tag
+ *
+ *
+ * RETURN     : Success: the name of the specific tag
+ *              Failure: NULL
+ *==========================================================================*/
+const char* QCamera3VendorTags::get_tag_name(
+                const vendor_tag_ops_t * ops,
+                uint32_t tag)
+{
+    ALOGV("%s: E", __func__);
+    const char *ret;
+    uint32_t section = tag >> 16;
+    uint32_t section_index = section - VENDOR_SECTION;
+    uint32_t tag_index = tag & 0xFFFF;
+
+    if (ops != Ops) {
+        ret = NULL;
+        goto done;
+    }
+
+    if (section < VENDOR_SECTION || section >= QCAMERA3_SECTIONS_END)
+        ret = NULL;
+    else if (tag >= (uint32_t)qcamera3_ext3_section_bounds[section_index])
+        ret = NULL;
+    else
+        ret = qcamera3_tag_info[section_index][tag_index].tag_name;
+
+    if (ret)
+        ALOGV("%s: tag name for tag %d is %s", __func__, tag, ret);
+    ALOGV("%s: X", __func__);
+
+done:
+    return ret;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_tag_type
+ *
+ * DESCRIPTION: Get type of a vendor specific tag
+ *
+ * PARAMETERS :
+ *    @tag   :  Vendor specific tag
+ *
+ *
+ * RETURN     : Success: the type of the specific tag
+ *              Failure: -1
+ *==========================================================================*/
+int QCamera3VendorTags::get_tag_type(
+                const vendor_tag_ops_t *ops,
+                uint32_t tag)
+{
+    ALOGV("%s: E", __func__);
+    int ret;
+    uint32_t section = tag >> 16;
+    uint32_t section_index = section - VENDOR_SECTION;
+    uint32_t tag_index = tag & 0xFFFF;
+
+    if (ops != Ops) {
+        ret = -1;
+        goto done;
+    }
+    if (section < VENDOR_SECTION || section >= QCAMERA3_SECTIONS_END)
+        ret = -1;
+    else if (tag >= (uint32_t )qcamera3_ext3_section_bounds[section_index])
+        ret = -1;
+    else
+        ret = qcamera3_tag_info[section_index][tag_index].tag_type;
+
+    ALOGV("%s: tag type for tag %d is %d", __func__, tag, ret);
+    ALOGV("%s: X", __func__);
+done:
+    return ret;
+}
+
+}; //end namespace qcamera
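
Illustrative usage (editorial sketch, not part of the change above): a caller can drive the vendor_tag_ops_t table filled in by QCamera3VendorTags::get_vendor_tag_ops() as shown below. The queries only answer when made with the same ops pointer that was registered; dump_vendor_tags and the LOG_TAG value are invented for this sketch.

    #define LOG_TAG "VendorTagDump"            // hypothetical tag for this sketch
    #include <vector>
    #include <utils/Log.h>
    #include <hardware/camera3.h>
    #include "QCamera3VendorTags.h"

    static void dump_vendor_tags()
    {
        vendor_tag_ops_t ops;
        qcamera::QCamera3VendorTags::get_vendor_tag_ops(&ops);

        int count = ops.get_tag_count(&ops);   // 3 with the tables above
        if (count <= 0)
            return;

        std::vector<uint32_t> tags(count);
        ops.get_all_tags(&ops, tags.data());

        for (uint32_t tag : tags) {
            ALOGI("%s.%s (0x%x) type=%d",
                  ops.get_section_name(&ops, tag),
                  ops.get_tag_name(&ops, tag),
                  tag,
                  ops.get_tag_type(&ops, tag));
        }
    }
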
diff --git a/camera/QCamera2/HAL3/QCamera3VendorTags.h b/camera/QCamera2/HAL3/QCamera3VendorTags.h
new file mode 100644
index 0000000..016318a
--- /dev/null
+++ b/camera/QCamera2/HAL3/QCamera3VendorTags.h
@@ -0,0 +1,133 @@
+/* Copyright (c) 2014, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#ifndef __QCAMERA3VENDORTAGS_H__
+#define __QCAMERA3VENDORTAGS_H__
+
+namespace qcamera {
+
+enum qcamera3_ext_section {
+    QCAMERA3_PRIVATEDATA = VENDOR_SECTION,
+    QCAMERA3_OPAQUE_RAW,
+    QCAMERA3_SECTIONS_END
+};
+
+enum qcamera3_ext_section_ranges {
+    QCAMERA3_PRIVATEDATA_START = QCAMERA3_PRIVATEDATA << 16,
+    QCAMERA3_OPAQUE_RAW_START = QCAMERA3_OPAQUE_RAW << 16,
+};
+
+enum qcamera3_ext_tags {
+    QCAMERA3_PRIVATEDATA_REPROCESS = QCAMERA3_PRIVATEDATA_START,
+    QCAMERA3_PRIVATEDATA_END,
+
+    //Property Name:  org.codeaurora.qcamera3.opaque_raw.opaque_raw_strides
+    //
+    //Type: int32 * n * 3 [public]
+    //
+    //Description: Distance in bytes from the beginning of one row of opaque
+    //raw image data to the beginning of the next row.
+    //
+    //Details: The strides are listed as (raw_width, raw_height, stride)
+    //triplets. For each supported raw size, there will be a stride associated
+    //with it.
+    QCAMERA3_OPAQUE_RAW_STRIDES = QCAMERA3_OPAQUE_RAW_START,
+
+    //Property Name: org.codeaurora.qcamera3.opaque_raw.opaque_raw_format
+    //
+    //Type: byte(enum) [public]
+    //  * LEGACY - The legacy raw format where 8, 10, or 12-bit
+    //    raw data is packed into a 64-bit word.
+    //  * MIPI - raw format matching the data packing described
+    //    in MIPI CSI-2 specification. In memory, the data
+    //    is constructed by packing sequentially received pixels
+    //    into least significant parts of the words first.
+    //    Within each pixel, the least significant bits are also
+    //    placed towards the least significant part of the word.
+    //
+    //Details: Layout of opaque raw data in memory is decided by two factors:
+    //         opaque_raw_format and bit depth (implied by whiteLevel). Below
+    //         list illustrates their relationship:
+    //  LEGACY8:  P7(7:0) P6(7:0) P5(7:0) P4(7:0) P3(7:0) P2(7:0) P1(7:0) P0(7:0)
+    //            8 pixels occupy 8 bytes, no padding needed
+    //            min_stride = CEILING8(raw_width)
+    // LEGACY10:  0000 P5(9:0) P4(9:0) P3(9:0) P2(9:0) P1(9:0) P0(9:0)
+    //            6 pixels occupy 8 bytes, 4 bits padding at MSB
+    //            min_stride = (raw_width+5)/6 * 8
+    // LEGACY12:  0000 P4(11:0) P3(11:0) P2(11:0) P1(11:0) P0(11:0)
+    //            5 pixels occupy 8 bytes, 4 bits padding at MSB
+    //            min_stride = (raw_width+4)/5 * 8
+    //    MIPI8:  P0(7:0)
+    //            1 pixel occupies 1 byte, no padding needed
+    //            min_stride = raw_width
+    //   MIPI10:  P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2)
+    //            4 pixels occupy 5 bytes, no padding needed
+    //            min_stride = (raw_width+3)/4 * 5
+    //   MIPI12:  P1(3:0) P0(3:0) P1(11:4) P0(11:4)
+    //            2 pixels occupy 3 bytes, no padding needed
+    //            min_stride = (raw_width+1)/2 * 3
+    //Note that opaque_raw_stride needs to be at least the required minimum
+    //stride from the table above. ISP hardware may need more generous stride
+    //setting. For example, for LEGACY8, the actual stride may be
+    //CEILING16(raw_width) due to bus burst length requirement.
+    QCAMERA3_OPAQUE_RAW_FORMAT,
+    QCAMERA3_OPAQUE_RAW_END,
+};
+
+// QCAMERA3_OPAQUE_RAW_FORMAT
+typedef enum qcamera3_ext_opaque_raw_format {
+    QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY,
+    QCAMERA3_OPAQUE_RAW_FORMAT_MIPI
+} qcamera3_ext_opaque_raw_format_t;
+
+class QCamera3VendorTags {
+
+public:
+    static void get_vendor_tag_ops(vendor_tag_ops_t* ops);
+    static int get_tag_count(
+            const vendor_tag_ops_t *ops);
+    static void get_all_tags(
+            const vendor_tag_ops_t *ops,
+            uint32_t *tag_array);
+    static const char* get_section_name(
+            const vendor_tag_ops_t *ops,
+            uint32_t tag);
+    static const char* get_tag_name(
+            const vendor_tag_ops_t *ops,
+            uint32_t tag);
+    static int get_tag_type(
+            const vendor_tag_ops_t *ops,
+            uint32_t tag);
+
+    static const vendor_tag_ops_t *Ops;
+};
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA3VENDORTAGS_H__ */
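
Worked example (editorial sketch): the minimum-stride rules documented for QCAMERA3_OPAQUE_RAW_FORMAT above, expressed as a small helper. The function name, the explicit bpp parameter, and the ceiling8 helper are assumptions of this sketch; in practice the bit depth is implied by whiteLevel and the actual row strides are advertised through QCAMERA3_OPAQUE_RAW_STRIDES.

    #include <stdint.h>

    // Round up to a multiple of 8, standing in for the CEILING8() used above.
    static inline uint32_t ceiling8(uint32_t x) { return (x + 7) & ~(uint32_t)7; }

    // Minimum stride in bytes for one row of opaque raw data, per the table
    // documented for QCAMERA3_OPAQUE_RAW_FORMAT; returns 0 for unknown input.
    static uint32_t opaque_raw_min_stride(bool is_mipi, int bpp, uint32_t raw_width)
    {
        if (is_mipi) {
            switch (bpp) {
            case 8:  return raw_width;                 // 1 pixel per byte
            case 10: return (raw_width + 3) / 4 * 5;   // 4 pixels in 5 bytes
            case 12: return (raw_width + 1) / 2 * 3;   // 2 pixels in 3 bytes
            }
        } else {                                       // LEGACY: 64-bit word packing
            switch (bpp) {
            case 8:  return ceiling8(raw_width);       // 8 pixels in 8 bytes
            case 10: return (raw_width + 5) / 6 * 8;   // 6 pixels in 8 bytes
            case 12: return (raw_width + 4) / 5 * 8;   // 5 pixels in 8 bytes
            }
        }
        return 0;
    }

    // e.g. opaque_raw_min_stride(true, 10, 4208) == 5260 bytes; the stride the
    // ISP actually reports may be larger due to bus burst alignment.
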
diff --git a/camera/QCamera2/stack/Android.mk b/camera/QCamera2/stack/Android.mk
new file mode 100644
index 0000000..a357417
--- /dev/null
+++ b/camera/QCamera2/stack/Android.mk
@@ -0,0 +1,5 @@
+LOCAL_PATH:= $(call my-dir)
+include $(LOCAL_PATH)/mm-camera-interface/Android.mk
+include $(LOCAL_PATH)/mm-jpeg-interface/Android.mk
+include $(LOCAL_PATH)/mm-jpeg-interface/test/Android.mk
+include $(LOCAL_PATH)/mm-camera-test/Android.mk
diff --git a/camera/QCamera2/stack/common/cam_intf.h b/camera/QCamera2/stack/common/cam_intf.h
new file mode 100644
index 0000000..99a139c
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_intf.h
@@ -0,0 +1,559 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+#include <media/msmb_isp.h>
+#include "cam_types.h"
+
+#define CAM_PRIV_IOCTL_BASE (V4L2_CID_PRIVATE_BASE + 14)
+typedef enum {
+    /* session based parameters */
+    CAM_PRIV_PARM = CAM_PRIV_IOCTL_BASE,
+    /* session based action: do auto focus. TRIGGER_AF in HAL3 request */
+    CAM_PRIV_DO_AUTO_FOCUS,
+    /* session based action: cancel auto focus. TRIGGER_AF in HAL3 request. */
+    CAM_PRIV_CANCEL_AUTO_FOCUS,
+    /* session based action: prepare for snapshot. TRIGGER_AE_PREPARE in HAL3*/
+    CAM_PRIV_PREPARE_SNAPSHOT,
+    /* sync stream info. Used for configure_streams */
+    CAM_PRIV_STREAM_INFO_SYNC,
+    /* stream based parameters*/
+    CAM_PRIV_STREAM_PARM,
+    /* start ZSL snapshot.*/
+    CAM_PRIV_START_ZSL_SNAPSHOT,
+    /* stop ZSL snapshot.*/
+    CAM_PRIV_STOP_ZSL_SNAPSHOT,
+} cam_private_ioctl_enum_t;
+
+/* capability struct definition for HAL 1 */
+typedef struct{
+    cam_hal_version_t version;
+
+    cam_position_t position;                                /* sensor position: front, back */
+
+    uint16_t isWnrSupported;
+    /* supported iso modes */
+    uint8_t supported_iso_modes_cnt;
+    cam_iso_mode_type supported_iso_modes[CAM_ISO_MODE_MAX];
+
+    /* supported flash modes */
+    uint8_t supported_flash_modes_cnt;
+    cam_flash_mode_t supported_flash_modes[CAM_FLASH_MODE_MAX];
+
+    uint8_t zoom_ratio_tbl_cnt;                             /* table size for zoom ratios */
+    int zoom_ratio_tbl[MAX_ZOOMS_CNT];                      /* zoom ratios table */
+
+    int qcom_supported_feature_mask;      /* mask of qcom specific features supported:
+                                           * such as CAM_QCOM_FEATURE_SUPPORTED_FACE_DETECTION*/
+
+    /* supported effect modes */
+    uint8_t supported_effects_cnt;
+    cam_effect_mode_type supported_effects[CAM_EFFECT_MODE_MAX];
+
+    /* supported scene modes */
+    uint8_t supported_scene_modes_cnt;
+    cam_scene_mode_type supported_scene_modes[CAM_SCENE_MODE_MAX];
+
+    /* supported auto exposure modes */
+    uint8_t supported_aec_modes_cnt;
+    cam_auto_exposure_mode_type supported_aec_modes[CAM_AEC_MODE_MAX];
+
+    uint8_t fps_ranges_tbl_cnt;                             /* fps ranges table size */
+    cam_fps_range_t fps_ranges_tbl[MAX_SIZES_CNT];          /* fps ranges table */
+
+    /* supported antibanding modes */
+    uint8_t supported_antibandings_cnt;
+    cam_antibanding_mode_type supported_antibandings[CAM_ANTIBANDING_MODE_MAX];
+
+    /* supported white balance modes */
+    uint8_t supported_white_balances_cnt;
+    cam_wb_mode_type supported_white_balances[CAM_WB_MODE_MAX];
+
+    /* supported focus modes */
+    uint8_t supported_focus_modes_cnt;
+    cam_focus_mode_type supported_focus_modes[CAM_FOCUS_MODE_MAX];
+
+    int exposure_compensation_min;       /* min value of exposure compensation index */
+    int exposure_compensation_max;       /* max value of exposure compensation index */
+    int exposure_compensation_default;   /* default value of exposure compensation index */
+    float exposure_compensation_step;
+    cam_rational_type_t exp_compensation_step;    /* exposure compensation step value */
+
+    uint8_t video_stablization_supported; /* flag if video stabilization is supported */
+
+    uint8_t picture_sizes_tbl_cnt;                          /* picture sizes table size */
+    cam_dimension_t picture_sizes_tbl[MAX_SIZES_CNT];       /* picture sizes table */
+    /* The minimum frame duration that is supported for each
+     * resolution in availableProcessedSizes. Should correspond
+     * to the frame duration when only that processed stream
+     * is active, with all processing set to FAST */
+    int64_t picture_min_duration[MAX_SIZES_CNT];
+
+    /* capabilities specific to HAL 1 */
+
+    int modes_supported;                                    /* mask of modes supported: 2D, 3D */
+    uint32_t sensor_mount_angle;                            /* sensor mount angle */
+
+    float focal_length;                                     /* focal length */
+    float hor_view_angle;                                   /* horizontal view angle */
+    float ver_view_angle;                                   /* vertical view angle */
+
+    uint8_t preview_sizes_tbl_cnt;                          /* preview sizes table size */
+    cam_dimension_t preview_sizes_tbl[MAX_SIZES_CNT];       /* preview sizes table */
+
+    uint8_t video_sizes_tbl_cnt;                            /* video sizes table size */
+    cam_dimension_t video_sizes_tbl[MAX_SIZES_CNT];         /* video sizes table */
+
+
+    uint8_t livesnapshot_sizes_tbl_cnt;                     /* livesnapshot sizes table size */
+    cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
+
+    uint8_t hfr_tbl_cnt;                                    /* table size for HFR */
+    cam_hfr_info_t hfr_tbl[CAM_HFR_MODE_MAX];               /* HFR table */
+
+    /* supported preview formats */
+    uint8_t supported_preview_fmt_cnt;
+    cam_format_t supported_preview_fmts[CAM_FORMAT_MAX];
+
+    /* supported picture formats */
+    uint8_t supported_picture_fmt_cnt;
+    cam_format_t supported_picture_fmts[CAM_FORMAT_MAX];
+
+    /* dimension and supported output format of raw dump from camif */
+    uint8_t supported_raw_dim_cnt;
+    cam_dimension_t raw_dim[MAX_SIZES_CNT];
+    uint8_t supported_raw_fmt_cnt;
+    cam_format_t supported_raw_fmts[CAM_FORMAT_MAX];
+    /* The minimum frame duration that is supported for above
+       raw resolution */
+    int64_t raw_min_duration[MAX_SIZES_CNT];
+
+    /* supported focus algorithms */
+    uint8_t supported_focus_algos_cnt;
+    cam_focus_algorithm_type supported_focus_algos[CAM_FOCUS_ALGO_MAX];
+
+
+    uint8_t auto_wb_lock_supported;       /* flag if auto white balance lock is supported */
+    uint8_t zoom_supported;               /* flag if zoom is supported */
+    uint8_t smooth_zoom_supported;        /* flag if smooth zoom is supported */
+    uint8_t auto_exposure_lock_supported; /* flag if auto exposure lock is supported */
+    uint8_t video_snapshot_supported;     /* flag if video snapshot is supported */
+
+    uint8_t max_num_roi;                  /* max number of roi can be detected */
+    uint8_t max_num_focus_areas;          /* max num of focus areas */
+    uint8_t max_num_metering_areas;       /* max num of metering areas */
+    uint8_t max_zoom_step;                /* max zoom step value */
+
+    /* QCOM specific control */
+    cam_control_range_t brightness_ctrl;  /* brightness */
+    cam_control_range_t sharpness_ctrl;   /* sharpness */
+    cam_control_range_t contrast_ctrl;    /* contrast */
+    cam_control_range_t saturation_ctrl;  /* saturation */
+    cam_control_range_t sce_ctrl;         /* skintone enhancement factor */
+
+    cam_padding_info_t padding_info;      /* padding information from PP */
+    int8_t min_num_hdr_bufs;              /* minimum number of buffers needed for HDR by imaging module */
+    int8_t min_num_pp_bufs;               /* minimum number of buffers needed by postproc module */
+    uint32_t min_required_pp_mask;        /* min required pp feature masks for ZSL.
+                                           * depends on hardware limitation, i.e. for 8974,
+                                           * sharpness is required for all ZSL snapshot frames */
+
+    /* capabilities specific to HAL 3 */
+
+    float min_focus_distance;
+    float hyper_focal_distance;
+
+    float focal_lengths[CAM_FOCAL_LENGTHS_MAX];
+    uint8_t focal_lengths_count;
+
+    /* Needs to be regular f number instead of APEX */
+    float apertures[CAM_APERTURES_MAX];
+    uint8_t apertures_count;
+
+    float filter_densities[CAM_FILTER_DENSITIES_MAX];
+    uint8_t filter_densities_count;
+
+    uint8_t optical_stab_modes[CAM_OPT_STAB_MAX];
+    uint8_t optical_stab_modes_count;
+
+    cam_dimension_t lens_shading_map_size;
+    float lens_shading_map[3 * CAM_MAX_MAP_WIDTH *
+              CAM_MAX_MAP_HEIGHT];
+
+    cam_dimension_t geo_correction_map_size;
+    float geo_correction_map[2 * 3 * CAM_MAX_MAP_WIDTH *
+              CAM_MAX_MAP_HEIGHT];
+
+    float lens_position[3];
+
+    /* nano seconds */
+    int64_t exposure_time_range[2];
+
+    /* nano seconds */
+    int64_t max_frame_duration;
+
+    cam_color_filter_arrangement_t color_arrangement;
+
+    float sensor_physical_size[2];
+
+    /* Dimensions of full pixel array, possibly including
+       black calibration pixels */
+    cam_dimension_t pixel_array_size;
+    /* Area of raw data which corresponds to only active
+       pixels; smaller or equal to pixelArraySize. */
+    cam_rect_t active_array_size;
+
+    /* Maximum raw value output by sensor */
+    int32_t white_level;
+
+    /* A fixed black level offset for each of the Bayer
+       mosaic channels */
+    int32_t black_level_pattern[4];
+
+    /* Time taken before flash can fire again in nano secs */
+    int64_t flash_charge_duration;
+
+    /* flash firing power */
+    uint8_t supported_flash_firing_level_cnt;
+    cam_format_t supported_firing_levels[CAM_FLASH_FIRING_LEVEL_MAX];
+
+    /* Flash Firing Time */
+    int64_t flash_firing_time;
+
+    /* Flash Color Temperature */
+    uint8_t flash_color_temp;
+
+    /* Flash max Energy */
+    uint8_t flash_max_energy;
+
+    /* Maximum number of supported points in the tonemap
+       curve */
+    int32_t max_tone_map_curve_points;
+
+    /* supported formats */
+    uint8_t supported_scalar_format_cnt;
+    cam_format_t supported_scalar_fmts[CAM_FORMAT_MAX];
+
+    uint32_t max_face_detection_count;
+
+    /* Number of histogram buckets supported */
+    int32_t histogram_size;
+    /* Maximum value possible for a histogram bucket */
+    int32_t max_histogram_count;
+
+    cam_dimension_t sharpness_map_size;
+
+    /* Maximum value possible for a sharpness map region */
+    int32_t max_sharpness_map_value;
+
+    cam_scene_mode_overrides_t scene_mode_overrides[CAM_SCENE_MODE_MAX];
+
+    /*Autoexposure modes for camera 3 api*/
+    uint8_t supported_ae_modes_cnt;
+    cam_ae_mode_type supported_ae_modes[CAM_AE_MODE_MAX];
+
+
+    cam_sensitivity_range_t sensitivity_range;
+    int32_t max_analog_sensitivity;
+
+    uint8_t flash_available;
+
+    cam_rational_type_t base_gain_factor;    /* sensor base gain factor */
+
+    uint8_t focus_dist_calibrated;
+
+    uint8_t supported_test_pattern_modes_cnt;
+    cam_test_pattern_mode_t supported_test_pattern_modes[MAX_TEST_PATTERN_CNT];
+
+    int64_t jpeg_stall_durations[MAX_SIZES_CNT];
+    int64_t raw16_stall_durations[MAX_SIZES_CNT];
+    cam_illuminant_t reference_illuminant1;
+    cam_illuminant_t reference_illuminant2;
+    cam_rational_type_t forward_matrix1[3][3];
+    cam_rational_type_t forward_matrix2[3][3];
+    cam_rational_type_t color_transform1[3][3];
+    cam_rational_type_t color_transform2[3][3];
+    cam_rational_type_t calibration_transform1[3][3];
+    cam_rational_type_t calibration_transform2[3][3];
+
+    cam_opaque_raw_format_t opaque_raw_fmt;
+} cam_capability_t;
+
+typedef enum {
+    CAM_STREAM_PARAM_TYPE_DO_REPROCESS = CAM_INTF_PARM_DO_REPROCESS,
+    CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO = CAM_INTF_PARM_SET_BUNDLE,
+    CAM_STREAM_PARAM_TYPE_MAX
+} cam_stream_param_type_e;
+
+typedef struct {
+    uint8_t buf_index;            /* buf index to the source frame buffer that needs reprocess,
+                                    (assume buffer is already mapped)*/
+    uint32_t frame_idx;           /* frame id of source frame to be reprocessed */
+    int32_t ret_val;              /* return value from reprocess. Could have different meanings.
+                                     i.e., faceID in the case of face registration. */
+    uint8_t meta_present;         /* if there is meta data associated with this reprocess frame */
+
+    uint32_t meta_stream_handle;  /* meta data stream ID. only valid if meta_present != 0 */
+    uint8_t meta_buf_index;       /* buf index to meta data buffer. only valid if meta_present != 0 */
+
+    /* opaque metadata required for reprocessing */
+    char private_data[MAX_METADATA_PAYLOAD_SIZE];
+
+    cam_rect_t crop_rect;
+} cam_reprocess_param;
+
+typedef struct {
+    cam_stream_param_type_e type;
+    union {
+        cam_reprocess_param reprocess;  /* do reprocess */
+        cam_bundle_config_t bundleInfo; /* set bundle info*/
+    };
+} cam_stream_parm_buffer_t;
+
+/* stream info */
+typedef struct {
+    /* stream ID from server */
+    uint32_t stream_svr_id;
+
+    /* stream type */
+    cam_stream_type_t stream_type;
+
+    /* image format */
+    cam_format_t fmt;
+
+    /* image dimension */
+    cam_dimension_t dim;
+
+    /* buffer plane information, will be calc based on stream_type, fmt,
+       dim, and padding_info(from stream config). Info including:
+       offset_x, offset_y, stride, scanline, plane offset */
+    cam_stream_buf_plane_info_t buf_planes;
+
+    /* number of stream bufs will be allocated */
+    uint8_t num_bufs;
+
+    /* streaming type */
+    cam_streaming_mode_t streaming_mode;
+    /* num of frames needs to be generated.
+     * only valid when streaming_mode = CAM_STREAMING_MODE_BURST */
+    uint8_t num_of_burst;
+
+    /* stream specific pp config */
+    cam_pp_feature_config_t pp_config;
+
+    /* this section is valid if offline reprocess type stream */
+    cam_stream_reproc_config_t reprocess_config;
+
+    cam_stream_parm_buffer_t parm_buf;    /* stream based parameters */
+} cam_stream_info_t;
+
+/*****************************************************************************
+ *                 Code for Domain Socket Based Parameters                   *
+ ****************************************************************************/
+
+#define POINTER_OF(PARAM_ID,TABLE_PTR)    \
+        (&(TABLE_PTR->entry[PARAM_ID].data))
+
+#define SET_PARM_VALID_BIT(PARAM_ID,TABLE_PTR,VALID_BIT) \
+        (TABLE_PTR->entry[PARAM_ID].valid=VALID_BIT)
+
+#define IS_PARM_VALID(PARAM_ID,TABLE_PTR) \
+        (TABLE_PTR->entry[PARAM_ID].valid)
+
+#define GET_FIRST_PARAM_ID(TABLE_PTR)     \
+        (TABLE_PTR->first_flagged_entry)
+
+#define SET_FIRST_PARAM_ID(TABLE_PTR,PARAM_ID)     \
+        TABLE_PTR->first_flagged_entry=PARAM_ID
+
+#define GET_NEXT_PARAM_ID(CURRENT_PARAM_ID,TABLE_PTR)    \
+        (TABLE_PTR->entry[CURRENT_PARAM_ID].next_flagged_entry)
+
+#define SET_NEXT_PARAM_ID(CURRENT_PARAM_ID,TABLE_PTR,NEXT_PARAM_ID)    \
+        TABLE_PTR->entry[CURRENT_PARAM_ID].next_flagged_entry=NEXT_PARAM_ID;
+
+#define INCLUDE(PARAM_ID,DATATYPE,COUNT)  \
+        DATATYPE member_variable_##PARAM_ID[ COUNT ]
+
+typedef union {
+/**************************************************************************************
+ *  ID from (cam_intf_metadata_type_t)                DATATYPE                     COUNT
+ **************************************************************************************/
+    /* common between HAL1 and HAL3 */
+    INCLUDE(CAM_INTF_PARM_HAL_VERSION,              	int32_t,                     1);
+    INCLUDE(CAM_INTF_META_STREAM_INFO,              	cam_stream_size_info_t,      1);
+    INCLUDE(CAM_INTF_META_STREAM_ID,                	cam_stream_ID_t,             1);
+    INCLUDE(CAM_INTF_META_HISTOGRAM,                    cam_hist_stats_t,            1);
+    INCLUDE(CAM_INTF_META_FACE_DETECTION,               cam_face_detection_data_t,   1);
+    INCLUDE(CAM_INTF_META_AUTOFOCUS_DATA,               cam_auto_focus_data_t,       1);
+    INCLUDE(CAM_INTF_META_CROP_DATA,                    cam_crop_data_t,             1);
+
+    /* Specific to HAL1 */
+    INCLUDE(CAM_INTF_META_PREP_SNAPSHOT_DONE,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_GOOD_FRAME_IDX_RANGE,         cam_frame_idx_range_t,       1);
+    INCLUDE(CAM_INTF_PARM_ANTIBANDING,                  int8_t,                      1);
+    /* Specific to HAL3 */
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER_VALID,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,    int32_t,                     1);
+    INCLUDE(CAM_INTF_META_FRAME_DROPPED,                cam_frame_dropped_t,         1);
+    INCLUDE(CAM_INTF_META_PENDING_REQUESTS,             uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_FRAME_NUMBER,                 uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_URGENT_FRAME_NUMBER,          uint32_t,                    1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_MODE,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AWB_REGIONS,                  cam_area_t,                  5);
+    /* HAL1 only control */
+    INCLUDE(CAM_INTF_PARM_SHARPNESS,                	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_CONTRAST,                 	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_SATURATION,              	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_BRIGHTNESS,               	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ISO,                      	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ZOOM,                     	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ROLLOFF,                  	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_MODE,                     	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ALGO_TYPE,            	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_ALGO_TYPE,          	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_AEC_ROI,                  	cam_set_aec_roi_t,           1);
+    INCLUDE(CAM_INTF_PARM_AF_ROI,                   	cam_roi_info_t,              1);
+    INCLUDE(CAM_INTF_PARM_SCE_FACTOR,               	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FD,                       	cam_fd_set_parm_t,           1);
+    INCLUDE(CAM_INTF_PARM_MCE,                      	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HFR,                      	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WAVELET_DENOISE,          	cam_denoise_param_t,         1);
+    INCLUDE(CAM_INTF_PARM_HISTOGRAM,                	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ASD_ENABLE,               	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_RECORDING_HINT,           	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HDR,                      	cam_exp_bracketing_t,        1);
+    INCLUDE(CAM_INTF_PARM_FRAMESKIP,                	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_ZSL_MODE,                 	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_HDR_NEED_1X,              	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LOCK_CAF,                 	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_VIDEO_HDR,                	int32_t,                     1);
+
+    /* HAL3 external control */
+    INCLUDE(CAM_INTF_PARM_BESTSHOT_MODE,                uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_PRECAPTURE_TRIGGER,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AF_TRIGGER_NOTICE,            uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_REDEYE_REDUCTION,             int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EV,                       	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EV_STEP,                  	cam_rational_type_t,         1);
+    INCLUDE(CAM_INTF_PARM_AEC_LOCK,                 	uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FPS_RANGE,                	cam_fps_range_t,             1);
+    INCLUDE(CAM_INTF_PARM_AWB_LOCK,                 	uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_EFFECT,                   	int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,   	cam_trigger_t,               1);
+    INCLUDE(CAM_INTF_META_AF_TRIGGER,               	cam_trigger_t,               1);
+    INCLUDE(CAM_INTF_META_DEMOSAIC,                 	int32_t,                     1);
+    INCLUDE(CAM_INTF_PARM_LED_MODE,                 	int32_t,                     1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 	int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SHADING_STRENGTH,         	uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_TONEMAP_MODE,             	uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_TONEMAP_CURVES,           	cam_rgb_tonemap_curves,      1);
+    INCLUDE(CAM_INTF_META_CAPTURE_INTENT,           	uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_LENS_SHADING_MAP_MODE,    	uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_DIS_ENABLE,               	int32_t,                     1);
+    /* HAL3 external metadata */
+    INCLUDE(CAM_INTF_META_BLACK_LEVEL_LOCK,             uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM,      cam_color_correct_matrix_t,  1);
+    INCLUDE(CAM_INTF_META_COLOR_CORRECT_GAINS,          cam_color_correct_gains_t,   1);
+    INCLUDE(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, cam_color_correct_matrix_t,  1);
+    INCLUDE(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS,     cam_color_correct_gains_t,   1);
+    INCLUDE(CAM_INTF_META_AEC_PRECAPTURE_ID,            int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AEC_MODE,                     uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AEC_ROI,                      cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AEC_STATE,                    uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_FOCUS_MODE,                   uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_AF_ROI,                       cam_area_t,                  5);
+    INCLUDE(CAM_INTF_META_AF_STATE,                     uint8_t,                     1);
+    INCLUDE(CAM_INTF_PARM_WHITE_BALANCE,                int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AWB_STATE,                    uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_MODE,                         uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_EDGE_MODE,                    cam_edge_application_t,      1);
+    INCLUDE(CAM_INTF_META_FLASH_POWER,                  uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_FIRING_TIME,            int64_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_MODE,                   uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_FLASH_STATE,                  int32_t,                     1);
+    INCLUDE(CAM_INTF_META_HOTPIXEL_MODE,                uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_GPS_COORDINATES,         double,                      3);
+    INCLUDE(CAM_INTF_META_JPEG_GPS_PROC_METHODS,        uint8_t,                     GPS_PROCESSING_METHOD_SIZE);
+    INCLUDE(CAM_INTF_META_JPEG_GPS_TIMESTAMP,           int64_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_ORIENTATION,             int32_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_QUALITY,                 uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_THUMB_QUALITY,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_JPEG_THUMB_SIZE,              cam_dimension_t,             1);
+    INCLUDE(CAM_INTF_META_LENS_APERTURE,                float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FILTERDENSITY,           float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCAL_LENGTH,            float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_DISTANCE,          float,                       1);
+    INCLUDE(CAM_INTF_META_LENS_FOCUS_RANGE,             float,                       2);
+    INCLUDE(CAM_INTF_META_LENS_STATE,                   uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_LENS_OPT_STAB_MODE,           uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_NOISE_REDUCTION_MODE,         uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_SCALER_CROP_REGION,           cam_crop_region_t,           1);
+    INCLUDE(CAM_INTF_META_SENSOR_EXPOSURE_TIME,         int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_FRAME_DURATION,        int64_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_SENSITIVITY,           int32_t,                     1);
+    INCLUDE(CAM_INTF_META_SENSOR_TIMESTAMP,             struct timeval,              1);
+    INCLUDE(CAM_INTF_META_SHADING_MODE,                 uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_FACEDETECT_MODE,        uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_SCENE_FLICKER,                uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_HISTOGRAM_MODE,         uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,     uint8_t,                     1);
+    INCLUDE(CAM_INTF_META_STATS_SHARPNESS_MAP,          cam_sharpness_map_t,         3);
+    INCLUDE(CAM_INTF_META_LENS_SHADING_MAP,             cam_lens_shading_map_t,      1);
+    /* HAL internal metadata */
+    INCLUDE(CAM_INTF_META_AEC_INFO,                     cam_3a_params_t,             1);
+    INCLUDE(CAM_INTF_META_TEST_PATTERN_DATA,            cam_test_pattern_data_t,     1);
+    INCLUDE(CAM_INTF_META_OTP_WB_GRGB,                  float,                       1);
+    INCLUDE(CAM_INTF_META_PROFILE_TONE_CURVE,           cam_profile_tone_curve,      1);
+    INCLUDE(CAM_INTF_META_NEUTRAL_COL_POINT,            cam_neutral_col_point_t,     1);
+    INCLUDE(CAM_INTF_META_PRIVATE_DATA,                 char,                        MAX_METADATA_PAYLOAD_SIZE);
+} metadata_type_t;
+
+/****************************DO NOT MODIFY BELOW THIS LINE!!!!*********************/
+
+typedef struct {
+    metadata_type_t data;
+    uint8_t valid;
+    uint8_t next_flagged_entry;
+} metadata_entry_type_t;
+
+typedef struct {
+    uint8_t first_flagged_entry;
+    metadata_entry_type_t entry[CAM_INTF_PARM_MAX];
+    /*Tuning Data */
+    uint8_t is_tuning_params_valid;
+    tuning_params_t tuning_params;
+} metadata_buffer_t;
+
+typedef metadata_buffer_t parm_buffer_t;
+typedef metadata_type_t parm_type_t;
+
+#endif /* __QCAMERA_INTF_H__ */
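
Editorial sketch of how the flagged-entry parameter table above is typically driven (not part of the change). parm_table_init and parm_table_set_i32 are invented helpers; treating CAM_INTF_PARM_MAX as the empty-list sentinel and prepending new entries are assumptions of this sketch rather than rules enforced by the header.

    #include <string.h>
    #include "cam_intf.h"

    // Start with an empty table: nothing valid, no flagged entries.
    static void parm_table_init(parm_buffer_t *tbl)
    {
        memset(tbl, 0, sizeof(*tbl));
        SET_FIRST_PARAM_ID(tbl, CAM_INTF_PARM_MAX);   /* assumed end-of-list sentinel */
    }

    // Store a 32-bit parameter and flag it, so the receiver only walks
    // the entries that actually changed.
    static void parm_table_set_i32(parm_buffer_t *tbl, int param_id, int32_t value)
    {
        memcpy(POINTER_OF(param_id, tbl), &value, sizeof(value));
        if (!IS_PARM_VALID(param_id, tbl)) {
            SET_PARM_VALID_BIT(param_id, tbl, 1);
            SET_NEXT_PARAM_ID(param_id, tbl, GET_FIRST_PARAM_ID(tbl));
            SET_FIRST_PARAM_ID(tbl, param_id);
        }
    }

    // Usage: parm_table_set_i32(&buf, CAM_INTF_PARM_ZSL_MODE, 1);
    // The consumer iterates GET_FIRST_PARAM_ID()/GET_NEXT_PARAM_ID() until it
    // reaches the sentinel, reading each flagged value through POINTER_OF().
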
diff --git a/camera/QCamera2/stack/common/cam_list.h b/camera/QCamera2/stack/common/cam_list.h
new file mode 100755
index 0000000..36379af
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_list.h
@@ -0,0 +1,83 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+/* This file is a copy of /vendor/qcom/proprietary/mm-camera/common;
+ * please do not modify it directly here. */
+
+#ifndef __CAMLIST_H
+#define __CAMLIST_H
+
+#include <stddef.h>
+
+#define member_of(ptr, type, member) ({ \
+  const typeof(((type *)0)->member) *__mptr = (ptr); \
+  (type *)((char *)__mptr - offsetof(type,member));})
+
+struct cam_list {
+  struct cam_list *next, *prev;
+};
+
+static inline void cam_list_init(struct cam_list *ptr)
+{
+  ptr->next = ptr;
+  ptr->prev = ptr;
+}
+
+static inline void cam_list_add_tail_node(struct cam_list *item,
+  struct cam_list *head)
+{
+  struct cam_list *prev = head->prev;
+
+  head->prev = item;
+  item->next = head;
+  item->prev = prev;
+  prev->next = item;
+}
+
+static inline void cam_list_insert_before_node(struct cam_list *item,
+  struct cam_list *node)
+{
+  item->next = node;
+  item->prev = node->prev;
+  item->prev->next = item;
+  node->prev = item;
+}
+
+static inline void cam_list_del_node(struct cam_list *ptr)
+{
+  struct cam_list *prev = ptr->prev;
+  struct cam_list *next = ptr->next;
+
+  next->prev = ptr->prev;
+  prev->next = ptr->next;
+  ptr->next = ptr;
+  ptr->prev = ptr;
+}
+
+#endif /* __CAMLIST_H */
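
Editorial sketch (not part of the change): embedding the intrusive list above in a payload type and walking it with member_of(). frame_node and its fields are invented for illustration.

    #include <stdlib.h>
    #include "cam_list.h"

    struct frame_node {
        int frame_id;
        struct cam_list list;      /* embedded link */
    };

    static void frame_list_example(void)
    {
        struct cam_list head;
        cam_list_init(&head);      /* an empty list points at itself */

        struct frame_node *n = (struct frame_node *)malloc(sizeof(*n));
        n->frame_id = 42;
        cam_list_add_tail_node(&n->list, &head);

        /* walk the links and recover the enclosing node with member_of() */
        for (struct cam_list *pos = head.next; pos != &head; pos = pos->next) {
            struct frame_node *fn = member_of(pos, struct frame_node, list);
            (void)fn->frame_id;
        }

        cam_list_del_node(&n->list);
        free(n);
    }
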
diff --git a/camera/QCamera2/stack/common/cam_queue.h b/camera/QCamera2/stack/common/cam_queue.h
new file mode 100755
index 0000000..a23c622
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_queue.h
@@ -0,0 +1,130 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "cam_list.h"
+
+typedef struct {
+    struct cam_list list;
+    void *data;
+} cam_node_t;
+
+typedef struct {
+    cam_node_t head; /* dummy head */
+    uint32_t size;
+    pthread_mutex_t lock;
+} cam_queue_t;
+
+static inline int32_t cam_queue_init(cam_queue_t *queue)
+{
+    pthread_mutex_init(&queue->lock, NULL);
+    cam_list_init(&queue->head.list);
+    queue->size = 0;
+    return 0;
+}
+
+static inline int32_t cam_queue_enq(cam_queue_t *queue, void *data)
+{
+    cam_node_t *node =
+        (cam_node_t *)malloc(sizeof(cam_node_t));
+    if (NULL == node) {
+        return -1;
+    }
+
+    memset(node, 0, sizeof(cam_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    cam_list_add_tail_node(&node->list, &queue->head.list);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+}
+
+static inline void *cam_queue_deq(cam_queue_t *queue)
+{
+    cam_node_t *node = NULL;
+    void *data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        cam_list_del_node(&node->list);
+        queue->size--;
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
+static inline int32_t cam_queue_flush(cam_queue_t *queue)
+{
+    cam_node_t *node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->size--;
+
+        /* TODO: later, consider pointers nested inside data. */
+        /* For now we assume data holds no nested pointers,
+         * so we free it directly. */
+        if (NULL != node->data) {
+            free(node->data);
+        }
+        free(node);
+
+    }
+    queue->size = 0;
+    pthread_mutex_unlock(&queue->lock);
+    return 0;
+}
+
+static inline int32_t cam_queue_deinit(cam_queue_t *queue)
+{
+    cam_queue_flush(queue);
+    pthread_mutex_destroy(&queue->lock);
+    return 0;
+}
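
Editorial usage sketch (not part of the change). Payloads must be heap-allocated: cam_queue_flush() free()s whatever is still queued, while data returned by cam_queue_deq() becomes the caller's to free. queue_example and the int payload are invented.

    #include <stdlib.h>
    #include <string.h>
    #include <pthread.h>
    #include "cam_queue.h"

    static void queue_example(void)
    {
        cam_queue_t q;
        cam_queue_init(&q);

        int *evt = (int *)malloc(sizeof(*evt));
        *evt = 1;
        cam_queue_enq(&q, evt);            /* queue now owns evt */

        int *out = (int *)cam_queue_deq(&q);
        if (out != NULL) {
            /* ... handle *out ... */
            free(out);                     /* dequeued data is caller-owned */
        }

        cam_queue_deinit(&q);              /* flushes and frees anything left */
    }
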
diff --git a/camera/QCamera2/stack/common/cam_semaphore.h b/camera/QCamera2/stack/common/cam_semaphore.h
new file mode 100644
index 0000000..a52f907
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_semaphore.h
@@ -0,0 +1,85 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_SEMAPHORE_H__
+#define __QCAMERA_SEMAPHORE_H__
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Implement a semaphore with a mutex and a condition variable.
+ * Reason being, POSIX semaphores on Android are not widely used
+ * or well tested.
+ */
+
+typedef struct {
+    int val;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond;
+} cam_semaphore_t;
+
+static inline void cam_sem_init(cam_semaphore_t *s, int n)
+{
+    pthread_mutex_init(&(s->mutex), NULL);
+    pthread_cond_init(&(s->cond), NULL);
+    s->val = n;
+}
+
+static inline void cam_sem_post(cam_semaphore_t *s)
+{
+    pthread_mutex_lock(&(s->mutex));
+    s->val++;
+    pthread_cond_signal(&(s->cond));
+    pthread_mutex_unlock(&(s->mutex));
+}
+
+static inline int cam_sem_wait(cam_semaphore_t *s)
+{
+    int rc = 0;
+    pthread_mutex_lock(&(s->mutex));
+    while (s->val == 0)
+        rc = pthread_cond_wait(&(s->cond), &(s->mutex));
+    s->val--;
+    pthread_mutex_unlock(&(s->mutex));
+    return rc;
+}
+
+static inline void cam_sem_destroy(cam_semaphore_t *s)
+{
+    pthread_mutex_destroy(&(s->mutex));
+    pthread_cond_destroy(&(s->cond));
+    s->val = 0;
+}
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __QCAMERA_SEMAPHORE_H__ */
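
Editorial sketch (not part of the change): a minimal worker thread blocking on the semaphore above. The worker/start_worker functions and g_cmd_sem are invented for illustration.

    #include <pthread.h>
    #include "cam_semaphore.h"

    static cam_semaphore_t g_cmd_sem;

    static void *worker(void *arg)
    {
        (void)arg;
        for (;;) {
            cam_sem_wait(&g_cmd_sem);      /* sleeps until a post arrives */
            /* ... pop one command from a queue and handle it ... */
        }
        return NULL;
    }

    static void start_worker(void)
    {
        pthread_t tid;
        cam_sem_init(&g_cmd_sem, 0);       /* start with no pending work */
        pthread_create(&tid, NULL, worker, NULL);

        /* producer side: enqueue a command somewhere, then wake the worker */
        cam_sem_post(&g_cmd_sem);
    }
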
diff --git a/camera/QCamera2/stack/common/cam_types.h b/camera/QCamera2/stack/common/cam_types.h
new file mode 100644
index 0000000..8535ca8
--- /dev/null
+++ b/camera/QCamera2/stack/common/cam_types.h
@@ -0,0 +1,1406 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_TYPES_H__
+#define __QCAMERA_TYPES_H__
+
+#include <stdint.h>
+#include <pthread.h>
+#include <inttypes.h>
+#include <media/msmb_camera.h>
+
+#define CAM_MAX_NUM_BUFS_PER_STREAM 24
+#define MAX_METADATA_PAYLOAD_SIZE 1024
+
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X)  (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X)  (((X) + 0x0001) & 0xFFFE)
+
+#define MAX_ZOOMS_CNT 64
+#define MAX_SIZES_CNT 24
+#define MAX_EXP_BRACKETING_LENGTH 32
+#define MAX_ROI 5
+#define MAX_STREAM_NUM_IN_BUNDLE 4
+#define MAX_NUM_STREAMS          8
+#define MAX_TEST_PATTERN_CNT     8
+#define MAX_AVAILABLE_CAPABILITIES 4
+
+#define GPS_PROCESSING_METHOD_SIZE 33
+
+typedef enum {
+    CAM_HAL_V1 = 1,
+    CAM_HAL_V3 = 3
+} cam_hal_version_t;
+
+typedef enum {
+    CAM_STATUS_SUCCESS,       /* Operation succeeded */
+    CAM_STATUS_FAILED,        /* Failure in doing operation */
+    CAM_STATUS_INVALID_PARM,  /* Invalid parameter provided */
+    CAM_STATUS_NOT_SUPPORTED, /* Parameter/operation not supported */
+    CAM_STATUS_ACCEPTED,      /* Parameter accepted */
+    CAM_STATUS_MAX,
+} cam_status_t;
+
+typedef enum {
+    CAM_POSITION_BACK,
+    CAM_POSITION_FRONT
+} cam_position_t;
+
+typedef enum {
+    CAM_FLICKER_NONE,
+    CAM_FLICKER_50_HZ,
+    CAM_FLICKER_60_HZ
+} cam_flicker_t;
+
+typedef enum {
+    CAM_FORMAT_JPEG = 0,
+    CAM_FORMAT_YUV_420_NV12 = 1,
+    CAM_FORMAT_YUV_420_NV21,
+    CAM_FORMAT_YUV_420_NV21_ADRENO,
+    CAM_FORMAT_YUV_420_YV12,
+    CAM_FORMAT_YUV_422_NV16,
+    CAM_FORMAT_YUV_422_NV61,
+    CAM_FORMAT_YUV_420_NV12_VENUS,
+
+    /* Please note below are the definitions for raw image.
+     * Any format other than raw image format should be declared
+     * before this line!!!!!!!!!!!!! */
+
+    /* Note: For all raw formats, each scanline needs to be 16 bytes aligned */
+
+    /* Packed YUV/YVU raw format, 16 bpp: 8 bits Y and 8 bits UV.
+     * U and V are interleaved with Y: YUYV or YVYV */
+    CAM_FORMAT_YUV_RAW_8BIT_YUYV,
+    CAM_FORMAT_YUV_RAW_8BIT_YVYU,
+    CAM_FORMAT_YUV_RAW_8BIT_UYVY,
+    CAM_FORMAT_YUV_RAW_8BIT_VYUY,
+
+    /* QCOM RAW formats where data is packed into 64bit word.
+     * 8BPP: 1 64-bit word contains 8 pixels p0 - p7, where p0 is
+     *       stored at LSB.
+     * 10BPP: 1 64-bit word contains 6 pixels p0 - p5, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB.
+     * 12BPP: 1 64-bit word contains 5 pixels p0 - p4, where most
+     *       significant 4 bits are set to 0. P0 is stored at LSB. */
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB,
+    CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR,
+    /* MIPI RAW formats based on the MIPI CSI-2 specification.
+     * 8BPP: Each pixel occupies one byte, starting at LSB.
+     *       Output width of image has no restrictions.
+     * 10BPP: Four pixels are held in every 5 bytes. The output
+     *       width of image must be a multiple of 4 pixels.
+     * 12BPP: Two pixels are held in every 3 bytes. The output
+     *       width of image must be a multiple of 2 pixels. */
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB,
+    CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR,
+    /* Ideal raw formats where image data has gone through black
+     * correction, lens rolloff, demux/channel gain, bad pixel
+     * correction, and ABF.
+     * Ideal raw formats could output any of QCOM_RAW and MIPI_RAW
+     * formats, plus plain8 8bpp, plain16 8bpp, plain16 10bpp, and
+     * plain16 12bpp */
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB,
+    CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR,
+
+    /* generic 8-bit raw */
+    CAM_FORMAT_JPEG_RAW_8BIT,
+    CAM_FORMAT_META_RAW_8BIT,
+
+    CAM_FORMAT_MAX
+} cam_format_t;
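+
+/* Illustrative sketch (not part of the original interface): the row-size
+ * arithmetic implied by the packing comments above. For QCOM RAW 10BPP,
+ * six pixels are packed into each 64-bit word; for MIPI RAW 10BPP, four
+ * pixels occupy five bytes and the width must be a multiple of 4 pixels. */
+static inline uint32_t example_qcom_raw10_row_bytes(uint32_t width_pixels)
+{
+    /* round up to whole 64-bit words of 6 pixels, 8 bytes per word */
+    return ((width_pixels + 5) / 6) * 8;
+}
+
+static inline uint32_t example_mipi_raw10_row_bytes(uint32_t width_pixels)
+{
+    /* width is assumed to be a multiple of 4; 4 pixels occupy 5 bytes */
+    return (width_pixels / 4) * 5;
+}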
+
+typedef enum {
+    /* applies to HAL 1 */
+    CAM_STREAM_TYPE_DEFAULT,       /* default stream type */
+    CAM_STREAM_TYPE_PREVIEW,       /* preview */
+    CAM_STREAM_TYPE_POSTVIEW,      /* postview */
+    CAM_STREAM_TYPE_SNAPSHOT,      /* snapshot */
+    CAM_STREAM_TYPE_VIDEO,         /* video */
+
+    /* applies to HAL 3 */
+    CAM_STREAM_TYPE_CALLBACK,      /* app requested callback */
+    CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT, /* non zsl snapshot */
+    CAM_STREAM_TYPE_IMPL_DEFINED, /* opaque format: could be display, video enc, ZSL YUV */
+
+    /* applies to both HAL 1 and HAL 3 */
+    CAM_STREAM_TYPE_METADATA,      /* meta data */
+    CAM_STREAM_TYPE_RAW,           /* raw dump from camif */
+    CAM_STREAM_TYPE_OFFLINE_PROC,  /* offline process */
+    CAM_STREAM_TYPE_MAX,
+} cam_stream_type_t;
+
+typedef enum {
+    CAM_PAD_NONE = 1,
+    CAM_PAD_TO_2 = 2,
+    CAM_PAD_TO_4 = 4,
+    CAM_PAD_TO_WORD = CAM_PAD_TO_4,
+    CAM_PAD_TO_8 = 8,
+    CAM_PAD_TO_16 = 16,
+    CAM_PAD_TO_32 = 32,
+    CAM_PAD_TO_64 = 64,
+    CAM_PAD_TO_1K = 1024,
+    CAM_PAD_TO_2K = 2048,
+    CAM_PAD_TO_4K = 4096,
+    CAM_PAD_TO_8K = 8192
+} cam_pad_format_t;
+
+typedef enum {
+    /* the following are per camera */
+    CAM_MAPPING_BUF_TYPE_CAPABILITY,  /* mapping camera capability buffer */
+    CAM_MAPPING_BUF_TYPE_PARM_BUF,    /* mapping parameters buffer */
+
+    /* the following are per stream */
+    CAM_MAPPING_BUF_TYPE_STREAM_BUF,        /* mapping stream buffers */
+    CAM_MAPPING_BUF_TYPE_STREAM_INFO,       /* mapping stream information buffer */
+    CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, /* mapping offline process input buffer */
+    CAM_MAPPING_BUF_TYPE_MAX
+} cam_mapping_buf_type;
+
+typedef struct {
+    cam_mapping_buf_type type;
+    uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
+    uint32_t frame_idx;   /* frame index: valid if type is STREAM_BUF */
+    int32_t plane_idx;    /* plane index. valid if type is STREAM_BUF.
+                           * -1 means all planes share the same fd;
+                           * otherwise, each plane has its own fd */
+    unsigned long cookie; /* could be job_id(uint32_t) to identify mapping job */
+    int fd;               /* origin fd */
+    uint32_t size;        /* size of the buffer */
+} cam_buf_map_type;
+
+typedef struct {
+    cam_mapping_buf_type type;
+    uint32_t stream_id;   /* stream id: valid if STREAM_BUF */
+    uint32_t frame_idx;   /* frame index: valid if STREAM_BUF or HIST_BUF */
+    int32_t plane_idx;    /* plane index. valid if type is STREAM_BUF.
+                           * -1 means all planes share the same fd;
+                           * otherwise, each plane has its own fd */
+    unsigned long cookie; /* could be job_id(uint32_t) to identify unmapping job */
+} cam_buf_unmap_type;
+
+typedef enum {
+    CAM_MAPPING_TYPE_FD_MAPPING,
+    CAM_MAPPING_TYPE_FD_UNMAPPING,
+    CAM_MAPPING_TYPE_MAX
+} cam_mapping_type;
+
+typedef struct {
+    cam_mapping_type msg_type;
+    union {
+        cam_buf_map_type buf_map;
+        cam_buf_unmap_type buf_unmap;
+    } payload;
+} cam_sock_packet_t;
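+
+/* Illustrative sketch (not part of the original interface): filling a
+ * mapping request for a stream buffer whose planes share a single fd,
+ * before sending it over the domain socket. The field values chosen here
+ * are assumptions made for the example only. */
+static inline void example_fill_map_packet(cam_sock_packet_t *packet,
+                                           uint32_t stream_id,
+                                           uint32_t frame_idx,
+                                           int fd,
+                                           uint32_t size)
+{
+    packet->msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet->payload.buf_map.type = CAM_MAPPING_BUF_TYPE_STREAM_BUF;
+    packet->payload.buf_map.stream_id = stream_id;
+    packet->payload.buf_map.frame_idx = frame_idx;
+    packet->payload.buf_map.plane_idx = -1; /* all planes share one fd */
+    packet->payload.buf_map.cookie = 0;
+    packet->payload.buf_map.fd = fd;
+    packet->payload.buf_map.size = size;
+}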
+
+typedef enum {
+    CAM_MODE_2D = (1<<0),
+    CAM_MODE_3D = (1<<1)
+} cam_mode_t;
+
+typedef struct {
+    uint32_t len;
+    uint32_t y_offset;
+    uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+    uint32_t len;
+    uint32_t offset;
+    int32_t offset_x;
+    int32_t offset_y;
+    int32_t stride;
+    int32_t scanline;
+} cam_mp_len_offset_t;
+
+typedef struct {
+    uint32_t width_padding;
+    uint32_t height_padding;
+    uint32_t plane_padding;
+} cam_padding_info_t;
+
+typedef struct {
+    int num_planes;
+    union {
+        cam_sp_len_offset_t sp;
+        cam_mp_len_offset_t mp[VIDEO_MAX_PLANES];
+    };
+    uint32_t frame_len;
+} cam_frame_len_offset_t;
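+
+/* Illustrative sketch (not part of the original interface): summing the
+ * per-plane lengths of a multi-planar layout; relating frame_len to the
+ * mp[] entries this way is an assumption of the example. */
+static inline uint32_t example_total_plane_len(const cam_frame_len_offset_t *offset)
+{
+    uint32_t total = 0;
+    int i;
+    for (i = 0; i < offset->num_planes && i < VIDEO_MAX_PLANES; i++)
+        total += offset->mp[i].len;
+    return total;
+}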
+
+typedef struct {
+    int32_t width;
+    int32_t height;
+} cam_dimension_t;
+
+typedef struct {
+    cam_frame_len_offset_t plane_info;
+} cam_stream_buf_plane_info_t;
+
+typedef struct {
+    float min_fps;
+    float max_fps;
+} cam_fps_range_t;
+
+typedef struct {
+    int32_t min_sensitivity;
+    int32_t max_sensitivity;
+} cam_sensitivity_range_t;
+
+typedef enum {
+    CAM_HFR_MODE_OFF,
+    CAM_HFR_MODE_60FPS,
+    CAM_HFR_MODE_90FPS,
+    CAM_HFR_MODE_120FPS,
+    CAM_HFR_MODE_150FPS,
+    CAM_HFR_MODE_MAX
+} cam_hfr_mode_t;
+
+typedef struct {
+    cam_hfr_mode_t mode;
+    cam_dimension_t dim;
+    uint8_t frame_skip;
+    uint8_t livesnapshot_sizes_tbl_cnt;                     /* livesnapshot sizes table size */
+    cam_dimension_t livesnapshot_sizes_tbl[MAX_SIZES_CNT];  /* livesnapshot sizes table */
+} cam_hfr_info_t;
+
+typedef enum {
+    CAM_WB_MODE_AUTO,
+    CAM_WB_MODE_CUSTOM,
+    CAM_WB_MODE_INCANDESCENT,
+    CAM_WB_MODE_FLUORESCENT,
+    CAM_WB_MODE_WARM_FLUORESCENT,
+    CAM_WB_MODE_DAYLIGHT,
+    CAM_WB_MODE_CLOUDY_DAYLIGHT,
+    CAM_WB_MODE_TWILIGHT,
+    CAM_WB_MODE_SHADE,
+    CAM_WB_MODE_OFF,
+    CAM_WB_MODE_MAX
+} cam_wb_mode_type;
+
+typedef enum {
+    CAM_ANTIBANDING_MODE_OFF,
+    CAM_ANTIBANDING_MODE_60HZ,
+    CAM_ANTIBANDING_MODE_50HZ,
+    CAM_ANTIBANDING_MODE_AUTO,
+    CAM_ANTIBANDING_MODE_AUTO_50HZ,
+    CAM_ANTIBANDING_MODE_AUTO_60HZ,
+    CAM_ANTIBANDING_MODE_MAX,
+} cam_antibanding_mode_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+    CAM_ISO_MODE_AUTO,
+    CAM_ISO_MODE_DEBLUR,
+    CAM_ISO_MODE_100,
+    CAM_ISO_MODE_200,
+    CAM_ISO_MODE_400,
+    CAM_ISO_MODE_800,
+    CAM_ISO_MODE_1600,
+    CAM_ISO_MODE_MAX
+} cam_iso_mode_type;
+
+typedef enum {
+    CAM_AEC_MODE_FRAME_AVERAGE,
+    CAM_AEC_MODE_CENTER_WEIGHTED,
+    CAM_AEC_MODE_SPOT_METERING,
+    CAM_AEC_MODE_SMART_METERING,
+    CAM_AEC_MODE_USER_METERING,
+    CAM_AEC_MODE_SPOT_METERING_ADV,
+    CAM_AEC_MODE_CENTER_WEIGHTED_ADV,
+    CAM_AEC_MODE_MAX
+} cam_auto_exposure_mode_type;
+
+typedef enum {
+    CAM_AE_MODE_OFF,
+    CAM_AE_MODE_ON,
+    CAM_AE_MODE_MAX
+} cam_ae_mode_type;
+
+typedef enum {
+    CAM_FOCUS_ALGO_AUTO,
+    CAM_FOCUS_ALGO_SPOT,
+    CAM_FOCUS_ALGO_CENTER_WEIGHTED,
+    CAM_FOCUS_ALGO_AVERAGE,
+    CAM_FOCUS_ALGO_MAX
+} cam_focus_algorithm_type;
+
+/* Auto focus mode */
+typedef enum {
+    CAM_FOCUS_MODE_OFF,
+    CAM_FOCUS_MODE_AUTO,
+    CAM_FOCUS_MODE_INFINITY,
+    CAM_FOCUS_MODE_MACRO,
+    CAM_FOCUS_MODE_FIXED,
+    CAM_FOCUS_MODE_EDOF,
+    CAM_FOCUS_MODE_CONTINOUS_VIDEO,
+    CAM_FOCUS_MODE_CONTINOUS_PICTURE,
+    CAM_FOCUS_MODE_MAX
+} cam_focus_mode_type;
+
+typedef enum {
+    CAM_SCENE_MODE_OFF,
+    CAM_SCENE_MODE_AUTO,
+    CAM_SCENE_MODE_LANDSCAPE,
+    CAM_SCENE_MODE_SNOW,
+    CAM_SCENE_MODE_BEACH,
+    CAM_SCENE_MODE_SUNSET,
+    CAM_SCENE_MODE_NIGHT,
+    CAM_SCENE_MODE_PORTRAIT,
+    CAM_SCENE_MODE_BACKLIGHT,
+    CAM_SCENE_MODE_SPORTS,
+    CAM_SCENE_MODE_ANTISHAKE,
+    CAM_SCENE_MODE_FLOWERS,
+    CAM_SCENE_MODE_CANDLELIGHT,
+    CAM_SCENE_MODE_FIREWORKS,
+    CAM_SCENE_MODE_PARTY,
+    CAM_SCENE_MODE_NIGHT_PORTRAIT,
+    CAM_SCENE_MODE_THEATRE,
+    CAM_SCENE_MODE_ACTION,
+    CAM_SCENE_MODE_AR,
+    CAM_SCENE_MODE_FACE_PRIORITY,
+    CAM_SCENE_MODE_BARCODE,
+    CAM_SCENE_MODE_MAX
+} cam_scene_mode_type;
+
+typedef enum {
+    CAM_EFFECT_MODE_OFF,
+    CAM_EFFECT_MODE_MONO,
+    CAM_EFFECT_MODE_NEGATIVE,
+    CAM_EFFECT_MODE_SOLARIZE,
+    CAM_EFFECT_MODE_SEPIA,
+    CAM_EFFECT_MODE_POSTERIZE,
+    CAM_EFFECT_MODE_WHITEBOARD,
+    CAM_EFFECT_MODE_BLACKBOARD,
+    CAM_EFFECT_MODE_AQUA,
+    CAM_EFFECT_MODE_EMBOSS,
+    CAM_EFFECT_MODE_SKETCH,
+    CAM_EFFECT_MODE_NEON,
+    CAM_EFFECT_MODE_MAX
+} cam_effect_mode_type;
+
+typedef enum {
+    CAM_FLASH_MODE_OFF,
+    CAM_FLASH_MODE_AUTO,
+    CAM_FLASH_MODE_ON,
+    CAM_FLASH_MODE_TORCH,
+    CAM_FLASH_MODE_SINGLE,
+    CAM_FLASH_MODE_MAX
+} cam_flash_mode_t;
+
+// Flash States
+typedef enum {
+    CAM_FLASH_STATE_UNAVAILABLE,
+    CAM_FLASH_STATE_CHARGING,
+    CAM_FLASH_STATE_READY,
+    CAM_FLASH_STATE_FIRED,
+    CAM_FLASH_STATE_PARTIAL,
+    CAM_FLASH_STATE_MAX
+} cam_flash_state_t;
+
+typedef enum {
+    CAM_FLASH_FIRING_LEVEL_0,
+    CAM_FLASH_FIRING_LEVEL_1,
+    CAM_FLASH_FIRING_LEVEL_2,
+    CAM_FLASH_FIRING_LEVEL_3,
+    CAM_FLASH_FIRING_LEVEL_4,
+    CAM_FLASH_FIRING_LEVEL_5,
+    CAM_FLASH_FIRING_LEVEL_6,
+    CAM_FLASH_FIRING_LEVEL_7,
+    CAM_FLASH_FIRING_LEVEL_8,
+    CAM_FLASH_FIRING_LEVEL_9,
+    CAM_FLASH_FIRING_LEVEL_10,
+    CAM_FLASH_FIRING_LEVEL_MAX
+} cam_flash_firing_level_t;
+
+
+typedef enum {
+    CAM_AEC_TRIGGER_IDLE,
+    CAM_AEC_TRIGGER_START
+} cam_aec_trigger_type_t;
+
+typedef enum {
+    CAM_AF_TRIGGER_IDLE,
+    CAM_AF_TRIGGER_START,
+    CAM_AF_TRIGGER_CANCEL
+} cam_af_trigger_type_t;
+
+typedef enum {
+    CAM_AE_STATE_INACTIVE,
+    CAM_AE_STATE_SEARCHING,
+    CAM_AE_STATE_CONVERGED,
+    CAM_AE_STATE_LOCKED,
+    CAM_AE_STATE_FLASH_REQUIRED,
+    CAM_AE_STATE_PRECAPTURE
+} cam_ae_state_t;
+
+typedef enum {
+    CAM_NOISE_REDUCTION_MODE_OFF,
+    CAM_NOISE_REDUCTION_MODE_FAST,
+    CAM_NOISE_REDUCTION_MODE_HIGH_QUALITY
+} cam_noise_reduction_mode_t;
+
+typedef enum {
+    CAM_EDGE_MODE_OFF,
+    CAM_EDGE_MODE_FAST,
+    CAM_EDGE_MODE_HIGH_QUALITY,
+} cam_edge_mode_t;
+
+typedef struct {
+   uint8_t edge_mode;
+   int32_t sharpness;
+} cam_edge_application_t;
+
+typedef enum {
+    CAM_BLACK_LEVEL_LOCK_OFF,
+    CAM_BLACK_LEVEL_LOCK_ON,
+} cam_black_level_lock_t;
+
+typedef enum {
+    CAM_LENS_SHADING_MAP_MODE_OFF,
+    CAM_LENS_SHADING_MAP_MODE_ON,
+} cam_lens_shading_map_mode_t;
+
+typedef enum {
+    CAM_LENS_SHADING_MODE_OFF,
+    CAM_LENS_SHADING_MODE_FAST,
+    CAM_LENS_SHADING_MODE_HIGH_QUALITY,
+} cam_lens_shading_mode_t;
+
+typedef enum {
+    CAM_FACE_DETECT_MODE_OFF,
+    CAM_FACE_DETECT_MODE_SIMPLE,
+    CAM_FACE_DETECT_MODE_FULL,
+} cam_face_detect_mode_t;
+
+typedef enum {
+    CAM_TONEMAP_MODE_CONTRAST_CURVE,
+    CAM_TONEMAP_MODE_FAST,
+    CAM_TONEMAP_MODE_HIGH_QUALITY,
+} cam_tonemap_mode_t;
+
+typedef struct  {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} cam_rect_t;
+
+typedef struct  {
+    cam_rect_t rect;
+    int32_t weight; /* weight of the area, valid for focusing/metering areas */
+} cam_area_t;
+
+typedef enum {
+    CAM_STREAMING_MODE_CONTINUOUS, /* continuous streaming */
+    CAM_STREAMING_MODE_BURST,      /* burst streaming */
+    CAM_STREAMING_MODE_MAX
+} cam_streaming_mode_t;
+
+#define CAM_REPROCESS_MASK_TYPE_WNR (1<<0)
+
+/* event from server */
+typedef enum {
+    CAM_EVENT_TYPE_MAP_UNMAP_DONE  = (1<<0),
+    CAM_EVENT_TYPE_AUTO_FOCUS_DONE = (1<<1),
+    CAM_EVENT_TYPE_ZOOM_DONE       = (1<<2),
+    CAM_EVENT_TYPE_DAEMON_DIED     = (1<<3),
+    CAM_EVENT_TYPE_MAX
+} cam_event_type_t;
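+
+/* Illustrative sketch (not part of the original interface): the event types
+ * above are defined as individual bits, so membership in a combined mask can
+ * be tested as below. Whether events are ever delivered as a mask is an
+ * assumption of the example, not something this header specifies. */
+static inline int example_event_bit_set(uint32_t event_mask,
+                                        cam_event_type_t evt)
+{
+    return (event_mask & (uint32_t)evt) != 0;
+}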
+
+typedef enum {
+    CAM_EXP_BRACKETING_OFF,
+    CAM_EXP_BRACKETING_ON
+} cam_bracket_mode;
+
+typedef struct {
+    cam_bracket_mode mode;
+    char values[MAX_EXP_BRACKETING_LENGTH];  /* user defined values */
+} cam_exp_bracketing_t;
+
+typedef enum {
+    CAM_AEC_ROI_OFF,
+    CAM_AEC_ROI_ON
+} cam_aec_roi_ctrl_t;
+
+typedef enum {
+    CAM_AEC_ROI_BY_INDEX,
+    CAM_AEC_ROI_BY_COORDINATE,
+} cam_aec_roi_type_t;
+
+typedef struct {
+    uint32_t x;
+    uint32_t y;
+} cam_coordinate_type_t;
+
+typedef struct {
+    int32_t numerator;
+    int32_t denominator;
+} cam_rational_type_t;
+
+typedef struct {
+    cam_aec_roi_ctrl_t aec_roi_enable;
+    cam_aec_roi_type_t aec_roi_type;
+    union {
+        cam_coordinate_type_t coordinate[MAX_ROI];
+        uint32_t aec_roi_idx[MAX_ROI];
+    } cam_aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+    uint32_t frm_id;
+    uint8_t num_roi;
+    cam_rect_t roi[MAX_ROI];
+    int32_t weight[MAX_ROI];
+    uint8_t is_multiwindow;
+} cam_roi_info_t;
+
+typedef enum {
+    CAM_WAVELET_DENOISE_YCBCR_PLANE,
+    CAM_WAVELET_DENOISE_CBCR_ONLY,
+    CAM_WAVELET_DENOISE_STREAMLINE_YCBCR,
+    CAM_WAVELET_DENOISE_STREAMLINED_CBCR
+} cam_denoise_process_type_t;
+
+typedef struct {
+    int denoise_enable;
+    cam_denoise_process_type_t process_plates;
+} cam_denoise_param_t;
+
+#define CAM_FACE_PROCESS_MASK_DETECTION    (1<<0)
+#define CAM_FACE_PROCESS_MASK_RECOGNITION  (1<<1)
+typedef struct {
+    int fd_mode;               /* mask of face process */
+    int num_fd;
+} cam_fd_set_parm_t;
+
+typedef struct {
+    int8_t face_id;            /* unique id for face tracking within view unless view changes */
+    int8_t score;              /* score of confidence (0, -100) */
+    cam_rect_t face_boundary;  /* boundary of face detected */
+    cam_coordinate_type_t left_eye_center;  /* coordinate of center of left eye */
+    cam_coordinate_type_t right_eye_center; /* coordinate of center of right eye */
+    cam_coordinate_type_t mouth_center;     /* coordinate of center of mouth */
+    uint8_t smile_degree;      /* smile degree (0, -100) */
+    uint8_t smile_confidence;  /* smile confidence (0, 100) */
+    uint8_t face_recognised;   /* if face is recognised */
+    int8_t gaze_angle;         /* -90 -45 0 45 90 for head left to right tilt */
+    int8_t updown_dir;         /* up down direction (-90, 90) */
+    int8_t leftright_dir;      /* left right direction (-90, 90) */
+    int8_t roll_dir;           /* roll direction (-90, 90) */
+    int8_t left_right_gaze;    /* left right gaze degree (-50, 50) */
+    int8_t top_bottom_gaze;    /* up down gaze degree (-50, 50) */
+    uint8_t blink_detected;    /* if blink is detected */
+    uint8_t left_blink;        /* left eye blink degree (0, -100) */
+    uint8_t right_blink;       /* right eye blink degree (0, -100) */
+} cam_face_detection_info_t;
+
+typedef struct {
+    uint32_t frame_id;                         /* frame index of which faces are detected */
+    uint8_t num_faces_detected;                /* number of faces detected */
+    cam_face_detection_info_t faces[MAX_ROI];  /* detailed information of faces detected */
+} cam_face_detection_data_t;
+
+#define CAM_HISTOGRAM_STATS_SIZE 256
+typedef struct {
+    uint32_t max_hist_value;
+    uint32_t hist_buf[CAM_HISTOGRAM_STATS_SIZE]; /* buf holding histogram stats data */
+} cam_histogram_data_t;
+
+typedef struct {
+    cam_histogram_data_t r_stats;
+    cam_histogram_data_t b_stats;
+    cam_histogram_data_t gr_stats;
+    cam_histogram_data_t gb_stats;
+} cam_bayer_hist_stats_t;
+
+typedef enum {
+    CAM_HISTOGRAM_TYPE_BAYER,
+    CAM_HISTOGRAM_TYPE_YUV
+} cam_histogram_type_t;
+
+typedef struct {
+    cam_histogram_type_t type;
+    union {
+        cam_bayer_hist_stats_t bayer_stats;
+        cam_histogram_data_t yuv_stats;
+    };
+} cam_hist_stats_t;
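+
+/* Illustrative sketch (not part of the original interface): selecting the
+ * matching member of the anonymous union based on the histogram type. */
+static inline uint32_t example_max_hist_value(const cam_hist_stats_t *stats)
+{
+    if (stats->type == CAM_HISTOGRAM_TYPE_BAYER)
+        return stats->bayer_stats.gr_stats.max_hist_value;
+    return stats->yuv_stats.max_hist_value;
+}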
+
+enum cam_focus_distance_index{
+  CAM_FOCUS_DISTANCE_NEAR_INDEX,  /* 0 */
+  CAM_FOCUS_DISTANCE_OPTIMAL_INDEX,
+  CAM_FOCUS_DISTANCE_FAR_INDEX,
+  CAM_FOCUS_DISTANCE_MAX_INDEX
+};
+
+typedef struct {
+  float focus_distance[CAM_FOCUS_DISTANCE_MAX_INDEX];
+} cam_focus_distances_info_t;
+
+/* Different autofocus cycle when calling do_autoFocus
+ * CAM_AF_COMPLETE_EXISTING_SWEEP: Complete existing sweep
+ * if one is ongoing, and lock.
+ * CAM_AF_DO_ONE_FULL_SWEEP: Do one full sweep, regardless
+ * of the current state, and lock.
+ * CAM_AF_START_CONTINUOUS_SWEEP: Start continuous sweep.
+ * After do_autoFocus, HAL receives an event: CAM_AF_FOCUSED,
+ * or CAM_AF_NOT_FOCUSED.
+ * cancel_autoFocus stops any lens movement.
+ * Each do_autoFocus call only produces 1 FOCUSED/NOT_FOCUSED
+ * event, not both.
+ */
+typedef enum {
+    CAM_AF_COMPLETE_EXISTING_SWEEP,
+    CAM_AF_DO_ONE_FULL_SWEEP,
+    CAM_AF_START_CONTINUOUS_SWEEP
+} cam_autofocus_cycle_t;
+
+typedef enum {
+    CAM_AF_SCANNING,
+    CAM_AF_FOCUSED,
+    CAM_AF_NOT_FOCUSED
+} cam_autofocus_state_t;
+
+typedef struct {
+    cam_autofocus_state_t focus_state;           /* state of focus */
+    cam_focus_distances_info_t focus_dist;       /* focus distance */
+} cam_auto_focus_data_t;
+
+typedef struct {
+    uint32_t stream_id;
+    cam_rect_t crop;
+} cam_stream_crop_info_t;
+
+typedef struct {
+    uint8_t num_of_streams;
+    cam_stream_crop_info_t crop_info[MAX_NUM_STREAMS];
+} cam_crop_data_t;
+
+typedef enum {
+    DO_NOT_NEED_FUTURE_FRAME,
+    NEED_FUTURE_FRAME,
+} cam_prep_snapshot_state_t;
+
+typedef struct {
+    float gains[4];
+} cam_color_correct_gains_t;
+
+typedef struct {
+    uint32_t min_frame_idx;
+    uint32_t max_frame_idx;
+} cam_frame_idx_range_t;
+
+
+typedef  struct {
+   float aperture_value;
+   /* Store current LED flash state */
+   cam_flash_mode_t         flash_mode;
+   cam_flash_state_t        flash_state;
+} cam_sensor_params_t;
+
+typedef struct {
+    float exp_time;
+    int iso_value;
+    cam_wb_mode_type wb_mode;
+} cam_3a_params_t;
+
+typedef struct {
+    cam_dimension_t stream_sizes[MAX_NUM_STREAMS];
+    uint32_t num_streams;
+    uint32_t type[MAX_NUM_STREAMS];
+} cam_stream_size_info_t;
+
+typedef struct {
+    uint32_t num_streams;
+    uint32_t streamID[MAX_NUM_STREAMS];
+} cam_stream_ID_t;
+
+typedef  struct {
+    uint8_t is_stats_valid;               /* if histogram data is valid */
+    cam_hist_stats_t stats_data;          /* histogram data */
+
+    uint8_t is_faces_valid;               /* if face detection data is valid */
+    cam_face_detection_data_t faces_data; /* face detection result */
+
+    uint8_t is_focus_valid;               /* if focus data is valid */
+    cam_auto_focus_data_t focus_data;     /* focus data */
+
+    uint8_t is_crop_valid;                /* if crop data is valid */
+    cam_crop_data_t crop_data;            /* crop data */
+
+    uint8_t is_prep_snapshot_done_valid;  /* if prep snapshot done is valid */
+    cam_prep_snapshot_state_t prep_snapshot_done_state;  /* prepare snapshot done state */
+
+    /* if good frame idx range is valid */
+    uint8_t is_good_frame_idx_range_valid;
+    /* good frame idx range, make sure:
+     * 1. good_frame_idx_range.min_frame_idx > current_frame_idx
+     * 2. good_frame_idx_range.min_frame_idx - current_frame_idx < 100 */
+    cam_frame_idx_range_t good_frame_idx_range;
+
+    char private_metadata[MAX_METADATA_PAYLOAD_SIZE];
+
+    /* AE parameters */
+    uint8_t is_3a_params_valid;
+    cam_3a_params_t cam_3a_params;
+    /* sensor parameters */
+    uint8_t is_sensor_params_valid;
+    cam_sensor_params_t sensor_params;
+} cam_metadata_info_t;
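+
+/* Illustrative sketch (not part of the original interface): each payload in
+ * cam_metadata_info_t is guarded by its is_*_valid flag, so a consumer
+ * checks the flag before touching the data. */
+static inline uint8_t example_num_faces(const cam_metadata_info_t *meta)
+{
+    return meta->is_faces_valid ? meta->faces_data.num_faces_detected : 0;
+}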
+
+#define TUNING_DATA_VERSION        1
+#define TUNING_SENSOR_DATA_MAX     0x10000 /*(need value from sensor team)*/
+#define TUNING_VFE_DATA_MAX        0x10000 /*(need value from vfe team)*/
+#define TUNING_CPP_DATA_MAX        0x10000 /*(need value from pproc team)*/
+#define TUNING_CAC_DATA_MAX        0x10000 /*(need value from imglib team)*/
+#define TUNING_DATA_MAX            (TUNING_SENSOR_DATA_MAX + \
+                                    TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX + \
+                                    TUNING_CAC_DATA_MAX)
+
+#define TUNING_SENSOR_DATA_OFFSET  0
+#define TUNING_VFE_DATA_OFFSET     TUNING_SENSOR_DATA_MAX
+#define TUNING_CPP_DATA_OFFSET     (TUNING_SENSOR_DATA_MAX + TUNING_VFE_DATA_MAX)
+#define TUNING_CAC_DATA_OFFSET     (TUNING_SENSOR_DATA_MAX + \
+                                    TUNING_VFE_DATA_MAX + TUNING_CPP_DATA_MAX)
+
+typedef struct {
+    uint32_t tuning_data_version;
+    uint32_t tuning_sensor_data_size;
+    uint32_t tuning_vfe_data_size;
+    uint32_t tuning_cpp_data_size;
+    uint32_t tuning_cac_data_size;
+    uint8_t  data[TUNING_DATA_MAX];
+} tuning_params_t;
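+
+/* Illustrative sketch (not part of the original interface): the per-module
+ * tuning payloads are laid out back to back inside data[] at the fixed
+ * TUNING_*_DATA_OFFSET values defined above. */
+static inline uint8_t *example_vfe_tuning_data(tuning_params_t *params)
+{
+    return &params->data[TUNING_VFE_DATA_OFFSET];
+}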
+
+typedef enum {
+    CAM_INTF_PARM_HAL_VERSION,
+
+    /* Overall mode of 3A control routines. We need to have this parameter
+     * because not all android.control.* have an OFF option, for example,
+     * AE_FPS_Range, aePrecaptureTrigger */
+    CAM_INTF_META_MODE,
+    /* Whether AE is currently updating the sensor exposure and sensitivity
+     * fields */
+    CAM_INTF_META_AEC_MODE,
+    CAM_INTF_PARM_WHITE_BALANCE,
+    CAM_INTF_PARM_FOCUS_MODE,
+
+    /* common between HAL1 and HAL3 */
+    CAM_INTF_PARM_ANTIBANDING,
+    CAM_INTF_PARM_EV,
+    CAM_INTF_PARM_EV_STEP,
+    CAM_INTF_PARM_AEC_LOCK,
+    CAM_INTF_PARM_FPS_RANGE,
+    CAM_INTF_PARM_AWB_LOCK,
+    CAM_INTF_PARM_EFFECT,
+    CAM_INTF_PARM_BESTSHOT_MODE,
+    CAM_INTF_PARM_DIS_ENABLE,
+    CAM_INTF_PARM_LED_MODE,
+    CAM_INTF_META_HISTOGRAM, /* 10 */
+    CAM_INTF_META_FACE_DETECTION,
+
+    /* specific to HAL1 */
+    CAM_INTF_META_AUTOFOCUS_DATA,
+    CAM_INTF_PARM_QUERY_FLASH4SNAP,
+    CAM_INTF_PARM_SHARPNESS,
+    CAM_INTF_PARM_CONTRAST,
+    CAM_INTF_PARM_SATURATION,
+    CAM_INTF_PARM_BRIGHTNESS,
+    CAM_INTF_PARM_ISO,
+    CAM_INTF_PARM_ZOOM, /* 20 */
+    CAM_INTF_PARM_ROLLOFF,
+    CAM_INTF_PARM_MODE,             /* camera mode */
+    CAM_INTF_PARM_AEC_ALGO_TYPE,    /* auto exposure algorithm */
+    CAM_INTF_PARM_FOCUS_ALGO_TYPE,  /* focus algorithm */
+    CAM_INTF_PARM_AEC_ROI,
+    CAM_INTF_PARM_AF_ROI,
+    CAM_INTF_PARM_SCE_FACTOR,
+    CAM_INTF_PARM_FD,
+    CAM_INTF_PARM_MCE, /* 30 */
+    CAM_INTF_PARM_HFR,
+    CAM_INTF_PARM_REDEYE_REDUCTION,
+    CAM_INTF_PARM_WAVELET_DENOISE,
+    CAM_INTF_PARM_HISTOGRAM,
+    CAM_INTF_PARM_ASD_ENABLE,
+    CAM_INTF_PARM_RECORDING_HINT,
+    CAM_INTF_PARM_HDR,
+    CAM_INTF_PARM_FRAMESKIP,
+    CAM_INTF_PARM_ZSL_MODE,  /* indicating if it's running in ZSL mode */
+    CAM_INTF_PARM_HDR_NEED_1X, /* if HDR needs 1x output */ /* 40 */
+    CAM_INTF_PARM_LOCK_CAF,
+    CAM_INTF_PARM_VIDEO_HDR,
+    CAM_INTF_PARM_ROTATION,
+    CAM_INTF_META_CROP_DATA,
+    CAM_INTF_META_PREP_SNAPSHOT_DONE,
+    CAM_INTF_META_GOOD_FRAME_IDX_RANGE,
+
+    /* stream based parameters */
+    CAM_INTF_PARM_DO_REPROCESS,
+    CAM_INTF_PARM_SET_BUNDLE,
+
+    /* specific to HAL3 */
+    /* Whether the metadata maps to a valid frame number */
+    CAM_INTF_META_FRAME_NUMBER_VALID,
+    /* Whether the urgent metadata maps to a valid frame number */
+    CAM_INTF_META_URGENT_FRAME_NUMBER_VALID,
+    /* Whether the stream buffer corresponding this frame is dropped or not */
+    CAM_INTF_META_FRAME_DROPPED,
+    /* Number of pending requests yet to be processed */
+    CAM_INTF_META_PENDING_REQUESTS,
+    /* COLOR CORRECTION.*/
+    CAM_INTF_META_COLOR_CORRECT_MODE,
+    /* A transform matrix to chromatically adapt pixels in the CIE XYZ (1931)
+     * color space from the scene illuminant to the sRGB-standard D65-illuminant. */
+    CAM_INTF_META_COLOR_CORRECT_TRANSFORM, /* 50 */
+    /*Color channel gains in the Bayer raw domain in the order [RGeGoB]*/
+    CAM_INTF_META_COLOR_CORRECT_GAINS,
+    /*The best fit color transform matrix calculated by the stats*/
+    CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM,
+    /*The best fit color channels gains calculated by the stats*/
+    CAM_INTF_META_PRED_COLOR_CORRECT_GAINS,
+    /* CONTROL */
+    /* A frame counter set by the framework. Must be maintained unchanged in
+     * output frame. */
+    CAM_INTF_META_FRAME_NUMBER,
+    /* A frame counter set by the framework. Must be maintained unchanged in
+     * output frame. */
+    CAM_INTF_META_URGENT_FRAME_NUMBER,
+    /*Number of streams and size of streams in current configuration*/
+    CAM_INTF_META_STREAM_INFO,
+    /* List of areas to use for metering */
+    CAM_INTF_META_AEC_ROI,
+    /* Whether the HAL must trigger precapture metering. Used to sync trigger
+     * value and precapture ID */
+    CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
+    /* Used to report back the trigger value; the trigger is requested using
+     * CAM_INTF_META_AEC_PRECAPTURE_TRIGGER */
+    CAM_INTF_META_PRECAPTURE_TRIGGER,
+    /* The ID sent with the latest CAMERA2_TRIGGER_PRECAPTURE_METERING call */
+    CAM_INTF_META_AEC_PRECAPTURE_ID,
+    /* Current state of AE algorithm */
+    CAM_INTF_META_AEC_STATE,
+    /* List of areas to use for focus estimation */
+    CAM_INTF_META_AF_ROI,
+    /* Whether the HAL must trigger autofocus. */
+    CAM_INTF_META_AF_TRIGGER,
+    /* The ID sent with the latest CAMERA2_TRIGGER_AUTOFOCUS call */
+    CAM_INTF_META_AF_TRIGGER_ID,
+    /* Used to report back the AF trigger value; the trigger is requested using
+     * CAM_INTF_META_AF_TRIGGER and CAM_INTF_META_AF_TRIGGER_ID */
+    CAM_INTF_META_AF_TRIGGER_NOTICE,
+    /* Current state of AF algorithm */
+    CAM_INTF_META_AF_STATE,
+    /* List of areas to use for illuminant estimation */
+    CAM_INTF_META_AWB_REGIONS,
+    /* Current state of AWB algorithm */
+    CAM_INTF_META_AWB_STATE,
+    /*Whether black level compensation is frozen or free to vary*/
+    CAM_INTF_META_BLACK_LEVEL_LOCK,
+    /* Information to 3A routines about the purpose of this capture, to help
+     * decide optimal 3A strategy */
+    CAM_INTF_META_CAPTURE_INTENT,
+    /* DEMOSAIC */
+    /* Controls the quality of the demosaicing processing */
+    CAM_INTF_META_DEMOSAIC,
+    /* EDGE */
+    /* Operation mode for edge enhancement */
+    CAM_INTF_META_EDGE_MODE,
+    /* Control the amount of edge enhancement applied to the images.*/
+    /* 1-10; 10 is maximum sharpening */
+    CAM_INTF_META_SHARPNESS_STRENGTH,
+    /* FLASH */
+    /* Power for flash firing/torch, 10 is max power; 0 is no flash. Linear */
+    CAM_INTF_META_FLASH_POWER,
+    /* Firing time of flash relative to start of exposure, in nanoseconds*/
+    CAM_INTF_META_FLASH_FIRING_TIME,
+    /* Current state of the flash unit */
+    CAM_INTF_META_FLASH_STATE,
+    /* GEOMETRIC */
+    /* Operating mode of geometric correction */
+    CAM_INTF_META_GEOMETRIC_MODE,
+    /* Control the amount of shading correction applied to the images */
+    CAM_INTF_META_GEOMETRIC_STRENGTH,
+    /* HOT PIXEL */
+    /* Set operational mode for hot pixel correction */
+    CAM_INTF_META_HOTPIXEL_MODE,
+    /* LENS */
+    /* Size of the lens aperture */
+    CAM_INTF_META_LENS_APERTURE,
+    /* State of lens neutral density filter(s) */
+    CAM_INTF_META_LENS_FILTERDENSITY,
+    /* Lens optical zoom setting */
+    CAM_INTF_META_LENS_FOCAL_LENGTH,
+    /* Distance to plane of sharpest focus, measured from frontmost surface
+     * of the lens */
+    CAM_INTF_META_LENS_FOCUS_DISTANCE,
+    /* The range of scene distances that are in sharp focus (depth of field) */
+    CAM_INTF_META_LENS_FOCUS_RANGE,
+    /* Whether optical image stabilization is enabled. */
+    CAM_INTF_META_LENS_OPT_STAB_MODE,
+    /*Whether the hal needs to output the lens shading map*/
+    CAM_INTF_META_LENS_SHADING_MAP_MODE,
+    /* Current lens status */
+    CAM_INTF_META_LENS_STATE,
+    /* NOISE REDUCTION */
+    /* Mode of operation for the noise reduction algorithm */
+    CAM_INTF_META_NOISE_REDUCTION_MODE,
+   /* Control the amount of noise reduction applied to the images.
+    * 1-10; 10 is max noise reduction */
+    CAM_INTF_META_NOISE_REDUCTION_STRENGTH,
+    /* SCALER */
+    /* Top-left corner and width of the output region to select from the active
+     * pixel array */
+    CAM_INTF_META_SCALER_CROP_REGION,
+    /* The estimated scene illumination lighting frequency */
+    CAM_INTF_META_SCENE_FLICKER,
+    /* SENSOR */
+    /* Duration each pixel is exposed to light, in nanoseconds */
+    CAM_INTF_META_SENSOR_EXPOSURE_TIME,
+    /* Duration from start of frame exposure to start of next frame exposure,
+     * in nanoseconds */
+    CAM_INTF_META_SENSOR_FRAME_DURATION,
+    /* Gain applied to image data. Must be implemented through analog gain only
+     * if set to values below 'maximum analog sensitivity'. */
+    CAM_INTF_META_SENSOR_SENSITIVITY,
+    /* Time at start of exposure of first row */
+    CAM_INTF_META_SENSOR_TIMESTAMP,
+    /* SHADING */
+    /* Quality of lens shading correction applied to the image data */
+    CAM_INTF_META_SHADING_MODE,
+    /* Control the amount of shading correction applied to the images.
+     * unitless: 1-10; 10 is full shading compensation */
+    CAM_INTF_META_SHADING_STRENGTH,
+    /* STATISTICS */
+    /* State of the face detector unit */
+    CAM_INTF_META_STATS_FACEDETECT_MODE,
+    /* Operating mode for histogram generation */
+    CAM_INTF_META_STATS_HISTOGRAM_MODE,
+    /* Operating mode for sharpness map generation */
+    CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
+    /* A 3-channel sharpness map, based on the raw sensor data,
+     * If only a monochrome sharpness map is supported, all channels
+     * should have the same data
+     */
+    CAM_INTF_META_STATS_SHARPNESS_MAP,
+
+    /* TONEMAP */
+    /* Tone map mode */
+    CAM_INTF_META_TONEMAP_MODE,
+    /* Table mapping RGB input values to output values */
+    CAM_INTF_META_TONEMAP_CURVES,
+
+    CAM_INTF_META_FLASH_MODE,
+    /* 2D array of gain factors for each color channel that was used to
+     * compensate for lens shading for this frame */
+    CAM_INTF_META_LENS_SHADING_MAP,
+    CAM_INTF_META_PRIVATE_DATA,
+    /* Indicates streams ID of all the requested buffers */
+    CAM_INTF_META_STREAM_ID,
+    CAM_INTF_META_TEST_PATTERN_DATA,
+    /*AEC info for Exif*/
+    CAM_INTF_META_AEC_INFO,
+    CAM_INTF_META_JPEG_GPS_COORDINATES,
+    CAM_INTF_META_JPEG_GPS_PROC_METHODS,
+    CAM_INTF_META_JPEG_GPS_TIMESTAMP,
+    CAM_INTF_META_JPEG_ORIENTATION,
+    CAM_INTF_META_JPEG_QUALITY,
+    CAM_INTF_META_JPEG_THUMB_QUALITY,
+    CAM_INTF_META_JPEG_THUMB_SIZE,
+
+    /* OTP : WB gr/gb */
+    CAM_INTF_META_OTP_WB_GRGB,
+    /* DNG file support */
+    CAM_INTF_META_PROFILE_TONE_CURVE,
+    CAM_INTF_META_NEUTRAL_COL_POINT,
+
+    CAM_INTF_PARM_MAX
+} cam_intf_parm_type_t;
+
+/*****************************************************************************
+ *                 Code for HAL3 data types                                  *
+ ****************************************************************************/
+typedef enum {
+    CAM_INTF_METADATA_MAX
+} cam_intf_metadata_type_t;
+
+typedef enum {
+    CAM_INTENT_CUSTOM,
+    CAM_INTENT_PREVIEW,
+    CAM_INTENT_STILL_CAPTURE,
+    CAM_INTENT_VIDEO_RECORD,
+    CAM_INTENT_VIDEO_SNAPSHOT,
+    CAM_INTENT_ZERO_SHUTTER_LAG,
+    CAM_INTENT_MAX,
+} cam_intent_t;
+
+typedef enum {
+    /* Full application control of pipeline. All 3A routines are disabled,
+     * no other settings in android.control.* have any effect */
+    CAM_CONTROL_OFF,
+    /* Use settings for each individual 3A routine. Manual control of capture
+     * parameters is disabled. All controls in android.control.* besides sceneMode
+     * take effect */
+    CAM_CONTROL_AUTO,
+    /* Use specific scene mode. Enabling this disables control.aeMode,
+     * control.awbMode and control.afMode controls; the HAL must ignore those
+     * settings while USE_SCENE_MODE is active (except for FACE_PRIORITY scene mode).
+     * Other control entries are still active. This setting can only be used if
+     * availableSceneModes != UNSUPPORTED. TODO: Should we remove this and handle this
+     * in HAL ?*/
+    CAM_CONTROL_USE_SCENE_MODE,
+    CAM_CONTROL_MAX
+} cam_control_mode_t;
+
+typedef enum {
+    /* Use the android.colorCorrection.transform matrix to do color conversion */
+    CAM_COLOR_CORRECTION_TRANSFORM_MATRIX,
+    /* Must not slow down frame rate relative to raw bayer output */
+    CAM_COLOR_CORRECTION_FAST,
+    /* Frame rate may be reduced by high quality */
+    CAM_COLOR_CORRECTION_HIGH_QUALITY,
+} cam_color_correct_mode_t;
+
+typedef struct {
+    /* 3x3 float matrix in row-major order. each element is in range of (0, 1) */
+    cam_rational_type_t transform_matrix[3][3];
+} cam_color_correct_matrix_t;
+
+#define CAM_FOCAL_LENGTHS_MAX     1
+#define CAM_APERTURES_MAX         1
+#define CAM_FILTER_DENSITIES_MAX  1
+#define CAM_MAX_MAP_HEIGHT        6
+#define CAM_MAX_MAP_WIDTH         6
+#define CAM_MAX_SHADING_MAP_WIDTH 17
+#define CAM_MAX_SHADING_MAP_HEIGHT 13
+#define CAM_MAX_TONEMAP_CURVE_SIZE    128
+
+typedef struct {
+    /* A 1D array of pairs of floats.
+     * Mapping a 0-1 input range to a 0-1 output range.
+     * The input range must be monotonically increasing with N,
+     * and values between entries should be linearly interpolated.
+     * For example, if the array is: [0.0, 0.0, 0.3, 0.5, 1.0, 1.0],
+     * then the input->output mapping for a few sample points would be:
+     * 0 -> 0, 0.15 -> 0.25, 0.3 -> 0.5, 0.5 -> 0.64 */
+    float tonemap_points[CAM_MAX_TONEMAP_CURVE_SIZE][2];
+} cam_tonemap_curve_t;
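+
+/* Illustrative sketch (not part of the original interface): linear
+ * interpolation over tonemap_points as described above, assuming the first
+ * point_cnt entries are valid and monotonically increasing in input value. */
+static inline float example_tonemap_lookup(const cam_tonemap_curve_t *curve,
+                                           int point_cnt,
+                                           float in)
+{
+    int i;
+    if (point_cnt <= 0)
+        return in;
+    if (in <= curve->tonemap_points[0][0])
+        return curve->tonemap_points[0][1];
+    for (i = 1; i < point_cnt && i < CAM_MAX_TONEMAP_CURVE_SIZE; i++) {
+        float x0 = curve->tonemap_points[i - 1][0];
+        float y0 = curve->tonemap_points[i - 1][1];
+        float x1 = curve->tonemap_points[i][0];
+        float y1 = curve->tonemap_points[i][1];
+        if (in <= x1)
+            return (x1 == x0) ? y1 : y0 + (y1 - y0) * (in - x0) / (x1 - x0);
+    }
+    return curve->tonemap_points[point_cnt - 1][1];
+}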
+
+typedef struct {
+   int tonemap_points_cnt;
+   cam_tonemap_curve_t curves[3];
+} cam_rgb_tonemap_curves;
+
+typedef struct {
+   int tonemap_points_cnt;
+   cam_tonemap_curve_t curve;
+} cam_profile_tone_curve;
+
+typedef struct {
+    cam_rational_type_t neutral_col_point[3];
+} cam_neutral_col_point_t;
+
+typedef enum {
+    OFF,
+    FAST,
+    QUALITY,
+} cam_quality_preference_t;
+
+typedef enum {
+    CAM_FLASH_CTRL_OFF,
+    CAM_FLASH_CTRL_SINGLE,
+    CAM_FLASH_CTRL_TORCH
+} cam_flash_ctrl_t;
+
+typedef struct {
+    uint8_t frame_dropped; /*  This flag indicates whether any stream buffer is dropped or not */
+    cam_stream_ID_t cam_stream_ID; /* if dropped, Stream ID of dropped streams */
+} cam_frame_dropped_t;
+
+typedef struct {
+    uint8_t ae_mode;
+    uint8_t awb_mode;
+    uint8_t af_mode;
+} cam_scene_mode_overrides_t;
+
+typedef struct {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} cam_crop_region_t;
+
+typedef struct {
+    /* Estimated sharpness for each region of the input image.
+     * Normalized to be between 0 and maxSharpnessMapValue.
+     * Higher values mean sharper (better focused) */
+    int32_t sharpness[CAM_MAX_MAP_WIDTH][CAM_MAX_MAP_HEIGHT];
+} cam_sharpness_map_t;
+
+typedef struct {
+   float lens_shading[4*CAM_MAX_SHADING_MAP_HEIGHT*CAM_MAX_SHADING_MAP_WIDTH];
+} cam_lens_shading_map_t;
+
+typedef struct {
+    int32_t min_value;
+    int32_t max_value;
+    int32_t def_value;
+    int32_t step;
+} cam_control_range_t;
+
+#define CAM_QCOM_FEATURE_FACE_DETECTION (1<<0)
+#define CAM_QCOM_FEATURE_DENOISE2D      (1<<1)
+#define CAM_QCOM_FEATURE_CROP           (1<<2)
+#define CAM_QCOM_FEATURE_ROTATION       (1<<3)
+#define CAM_QCOM_FEATURE_FLIP           (1<<4)
+#define CAM_QCOM_FEATURE_HDR            (1<<5)
+#define CAM_QCOM_FEATURE_REGISTER_FACE  (1<<6)
+#define CAM_QCOM_FEATURE_SHARPNESS      (1<<7)
+#define CAM_QCOM_FEATURE_VIDEO_HDR      (1<<8)
+#define CAM_QCOM_FEATURE_CAC            (1<<9)
+
+// Counter-clockwise
+typedef enum {
+    ROTATE_0 = 1<<0,
+    ROTATE_90 = 1<<1,
+    ROTATE_180 = 1<<2,
+    ROTATE_270 = 1<<3,
+} cam_rotation_t;
+
+typedef enum {
+    FLIP_H = 1<<0,
+    FLIP_V = 1<<1,
+} cam_flip_t;
+
+typedef struct {
+    uint32_t bundle_id;                            /* bundle id */
+    uint8_t num_of_streams;                        /* number of streams in the bundle */
+    uint32_t stream_ids[MAX_STREAM_NUM_IN_BUNDLE]; /* array of stream ids to be bundled */
+} cam_bundle_config_t;
+
+typedef enum {
+    CAM_ONLINE_REPROCESS_TYPE,    /* online reprocess, frames from running streams */
+    CAM_OFFLINE_REPROCESS_TYPE,   /* offline reprocess, frames from external source */
+} cam_reprocess_type_enum_t;
+
+typedef struct {
+    /* reprocess feature mask */
+    uint32_t feature_mask;
+
+    /* individual setting for features to be reprocessed */
+    cam_denoise_param_t denoise2d;
+    cam_rect_t input_crop;
+    cam_rotation_t rotation;
+    uint32_t flip;
+    int32_t sharpness;
+    int32_t hdr_need_1x; /* when CAM_QCOM_FEATURE_HDR enabled, indicate if 1x is needed for output */
+} cam_pp_feature_config_t;
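+
+/* Illustrative sketch (not part of the original interface): composing a
+ * post-processing feature configuration that requests 2D denoise plus
+ * sharpness via the CAM_QCOM_FEATURE_* masks above. The values are
+ * arbitrary example settings; fields not set here are assumed to be
+ * zero-initialized by the caller. */
+static inline void example_fill_pp_config(cam_pp_feature_config_t *config)
+{
+    config->feature_mask = CAM_QCOM_FEATURE_DENOISE2D |
+                           CAM_QCOM_FEATURE_SHARPNESS;
+    config->denoise2d.denoise_enable = 1;
+    config->denoise2d.process_plates = CAM_WAVELET_DENOISE_YCBCR_PLANE;
+    config->sharpness = 2;
+    config->hdr_need_1x = 0;
+}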
+
+typedef struct {
+    uint32_t input_stream_id;
+    /* input source stream type */
+    cam_stream_type_t input_stream_type;
+} cam_pp_online_src_config_t;
+
+typedef struct {
+    /* image format */
+    cam_format_t input_fmt;
+
+    /* image dimension */
+    cam_dimension_t input_dim;
+
+    /* buffer plane information, will be calc based on stream_type, fmt,
+       dim, and padding_info(from stream config). Info including:
+       offset_x, offset_y, stride, scanline, plane offset */
+    cam_stream_buf_plane_info_t input_buf_planes;
+
+    /* number of input reprocess buffers */
+    uint8_t num_of_bufs;
+
+    cam_stream_type_t input_stream_type;
+
+} cam_pp_offline_src_config_t;
+
+/* reprocess stream input configuration */
+typedef struct {
+    /* input source config */
+    cam_reprocess_type_enum_t pp_type;
+    union {
+        cam_pp_online_src_config_t online;
+        cam_pp_offline_src_config_t offline;
+    };
+
+    /* pp feature config */
+    cam_pp_feature_config_t pp_feature_config;
+} cam_stream_reproc_config_t;
+
+typedef struct {
+    uint8_t trigger;
+    int32_t trigger_id;
+} cam_trigger_t;
+
+typedef enum {
+    CAM_OPT_STAB_OFF,
+    CAM_OPT_STAB_ON,
+    CAM_OPT_STAB_MAX
+} cam_optical_stab_modes_t;
+
+typedef enum {
+    CAM_FILTER_ARRANGEMENT_RGGB,
+    CAM_FILTER_ARRANGEMENT_GRBG,
+    CAM_FILTER_ARRANGEMENT_GBRG,
+    CAM_FILTER_ARRANGEMENT_BGGR,
+
+    /* Sensor is not Bayer; output has 3 16-bit values for each pixel,
+     * instead of just 1 16-bit value per pixel.*/
+    CAM_FILTER_ARRANGEMENT_RGB
+} cam_color_filter_arrangement_t;
+
+typedef enum {
+    CAM_AF_STATE_INACTIVE,
+    CAM_AF_STATE_PASSIVE_SCAN,
+    CAM_AF_STATE_PASSIVE_FOCUSED,
+    CAM_AF_STATE_ACTIVE_SCAN,
+    CAM_AF_STATE_FOCUSED_LOCKED,
+    CAM_AF_STATE_NOT_FOCUSED_LOCKED,
+    CAM_AF_STATE_PASSIVE_UNFOCUSED
+} cam_af_state_t;
+
+typedef enum {
+  CAM_AF_LENS_STATE_STATIONARY,
+  CAM_AF_LENS_STATE_MOVING,
+} cam_af_lens_state_t;
+
+typedef enum {
+    CAM_AWB_STATE_INACTIVE,
+    CAM_AWB_STATE_SEARCHING,
+    CAM_AWB_STATE_CONVERGED,
+    CAM_AWB_STATE_LOCKED
+} cam_awb_state_t;
+
+typedef enum {
+    CAM_FOCUS_UNCALIBRATED,
+    CAM_FOCUS_APPROXIMATE,
+    CAM_FOCUS_CALIBRATED
+} cam_focus_calibration_t;
+
+typedef enum {
+    CAM_TEST_PATTERN_OFF,
+    CAM_TEST_PATTERN_SOLID_COLOR,
+    CAM_TEST_PATTERN_COLOR_BARS,
+    CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY,
+    CAM_TEST_PATTERN_PN9,
+} cam_test_pattern_mode_t;
+
+typedef struct {
+    cam_test_pattern_mode_t mode;
+    int32_t r;
+    int32_t gr;
+    int32_t gb;
+    int32_t b;
+} cam_test_pattern_data_t;
+
+typedef enum {
+    CAM_AWB_D50,
+    CAM_AWB_D65,
+    CAM_AWB_D75,
+    CAM_AWB_A,
+    CAM_AWB_CUSTOM_A,
+    CAM_AWB_WARM_FLO,
+    CAM_AWB_COLD_FLO,
+    CAM_AWB_CUSTOM_FLO,
+    CAM_AWB_NOON,
+    CAM_AWB_CUSTOM_DAYLIGHT,
+    CAM_AWB_INVALID_ALL_LIGHT,
+} cam_illuminant_t;
+
+typedef enum {
+    LEGACY_RAW,
+    MIPI_RAW,
+} cam_opaque_raw_format_t;
+
+#endif /* __QCAMERA_TYPES_H__ */
diff --git a/camera/QCamera2/stack/common/mm_camera_interface.h b/camera/QCamera2/stack/common/mm_camera_interface.h
new file mode 100644
index 0000000..811f2f5
--- /dev/null
+++ b/camera/QCamera2/stack/common/mm_camera_interface.h
@@ -0,0 +1,662 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_INTERFACE_H__
+#define __MM_CAMERA_INTERFACE_H__
+#include <linux/msm_ion.h>
+#include <linux/videodev2.h>
+#include <media/msmb_camera.h>
+#include "cam_intf.h"
+#include "cam_queue.h"
+
+#define MM_CAMERA_MAX_NUM_SENSORS MSM_MAX_CAMERA_SENSORS
+#define MM_CAMERA_MAX_NUM_FRAMES CAM_MAX_NUM_BUFS_PER_STREAM
+/* num of channels allowed in a camera obj */
+#define MM_CAMERA_CHANNEL_MAX 16
+
+#define PAD_TO_SIZE(size, padding) ((size + padding - 1) & ~(padding - 1))
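+
+/* Illustrative sketch (not part of the original interface): PAD_TO_SIZE
+ * rounds size up to the next multiple of a power-of-two padding value,
+ * e.g. PAD_TO_SIZE(1000, 32) == 1024 and PAD_TO_SIZE(1024, 32) == 1024. */
+static inline uint32_t example_pad_stride_to_32(uint32_t stride_bytes)
+{
+    return PAD_TO_SIZE(stride_bytes, 32);
+}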
+
+/** mm_camera_buf_def_t: structure for stream frame buf
+*    @stream_id : stream handler to uniquely identify a stream
+*               object
+*    @buf_idx : index of the buf within the stream bufs, to be
+*               filled during mem allocation
+*    @timespec_ts : time stamp, to be filled when DQBUF is
+*                 called
+*    @frame_idx : frame sequence num, to be filled when DQBUF is called
+*    @num_planes : num of planes for the frame buffer, to be
+*               filled during mem allocation
+*    @planes : plane info for the frame buffer, to be filled
+*               during mem allocation
+*    @fd : file descriptor of the frame buffer, to be filled
+*        during mem allocation
+*    @buffer : pointer to the frame buffer, to be filled during
+*            mem allocation
+*    @frame_len : length of the whole frame, to be filled during
+*               mem allocation
+*    @mem_info : user specific pointer to additional mem info
+**/
+typedef struct {
+    uint32_t stream_id;
+    cam_stream_type_t stream_type;
+    int8_t buf_idx;
+    struct timespec ts;
+    uint32_t frame_idx;
+    int8_t num_planes;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    int fd;
+    void *buffer;
+    uint32_t frame_len;
+    void *mem_info;
+} mm_camera_buf_def_t;
+
+/** mm_camera_super_buf_t: super buf structure for bundled
+*   stream frames
+*    @camera_handle : camera handler to uniquely identify
+*              a camera object
+*    @ch_id : channel handler to uniquely identify a channel
+*           object
+*    @num_bufs : number of buffers in the super buf, should not
+*              exceed MAX_STREAM_NUM_IN_BUNDLE
+*    @bufs : array of buffers in the bundle
+**/
+typedef struct {
+    uint32_t camera_handle;
+    uint32_t ch_id;
+    uint8_t num_bufs;
+    mm_camera_buf_def_t* bufs[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_super_buf_t;
+
+/** mm_camera_event_t: structure for event
+*    @server_event_type : event type from server
+*    @status : status of an event, value could be
+*              CAM_STATUS_SUCCESS
+*              CAM_STATUS_FAILED
+**/
+typedef struct {
+    cam_event_type_t server_event_type;
+    uint32_t status;
+} mm_camera_event_t;
+
+/** mm_camera_event_notify_t: function definition for event
+*   notify handling
+*    @camera_handle : camera handler
+*    @evt : pointer to an event struct
+*    @user_data: user data pointer
+**/
+typedef void (*mm_camera_event_notify_t)(uint32_t camera_handle,
+                                         mm_camera_event_t *evt,
+                                         void *user_data);
+
+/** mm_camera_buf_notify_t: function definition for frame notify
+*   handling
+*    @mm_camera_super_buf_t : received frame buffers
+*    @user_data: user data pointer
+**/
+typedef void (*mm_camera_buf_notify_t) (mm_camera_super_buf_t *bufs,
+                                        void *user_data);
+
+/** map_stream_buf_op_t: function definition for operation of
+*   mapping stream buffers via domain socket
+*    @frame_idx : buffer index within stream buffers
+*    @plane_idx    : plane index. If all planes share the same
+*                   fd, plane_idx = -1; otherwise, plane_idx is
+*                   the index to plane (0..num_of_planes)
+*    @fd : file descriptor of the stream buffer
+*    @size: size of the stream buffer
+*    @userdata : user data pointer
+**/
+typedef int32_t (*map_stream_buf_op_t) (uint32_t frame_idx,
+                                        int32_t plane_idx,
+                                        int fd,
+                                        uint32_t size,
+                                        void *userdata);
+
+/** unmap_stream_buf_op_t: function definition for operation of
+*                          unmapping stream buffers via domain
+*                          socket
+*    @frame_idx : buffer index within stream buffers
+*    @plane_idx : plane index. If all planes share the same
+*                 fd, plane_idx = -1; otherwise, plane_idx is
+*                 the index to plane (0..num_of_planes)
+*    @userdata : user data pointer
+**/
+typedef int32_t (*unmap_stream_buf_op_t) (uint32_t frame_idx,
+                                          int32_t plane_idx,
+                                          void *userdata);
+
+/** mm_camera_map_unmap_ops_tbl_t: virtual table
+*                      for mapping/unmapping stream buffers via
+*                      domain socket
+*    @map_ops : operation for mapping
+*    @unmap_ops : operation for unmapping
+*    @userdata: user data pointer
+**/
+typedef struct {
+    map_stream_buf_op_t map_ops;
+    unmap_stream_buf_op_t unmap_ops;
+    void *userdata;
+} mm_camera_map_unmap_ops_tbl_t;
+
+/** mm_camera_stream_mem_vtbl_t: virtual table for stream
+*                      memory allocation and deallocation
+*    @get_bufs : function definition for allocating
+*                stream buffers
+*    @put_bufs : function definition for deallocating
+*                stream buffers
+*    @user_data: user data pointer
+**/
+typedef struct {
+  void *user_data;
+  int32_t (*get_bufs) (cam_frame_len_offset_t *offset,
+                       uint8_t *num_bufs,
+                       uint8_t **initial_reg_flag,
+                       mm_camera_buf_def_t **bufs,
+                       mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                       void *user_data);
+  int32_t (*put_bufs) (mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                       void *user_data);
+  int32_t (*invalidate_buf)(int index, void *user_data);
+  int32_t (*clean_invalidate_buf)(int index, void *user_data);
+} mm_camera_stream_mem_vtbl_t;
+
+/** mm_camera_stream_config_t: structure for stream
+*                              configuration
+*    @stream_info : pointer to a stream info structure
+*    @padding_info: padding info obtained from querycapability
+*    @mem_tbl : memory operation table for
+*              allocating/deallocating stream buffers
+*    @stream_cb : callback handling stream frame notify
+*    @userdata : user data pointer
+**/
+typedef struct {
+    cam_stream_info_t *stream_info;
+    cam_padding_info_t padding_info;
+    mm_camera_stream_mem_vtbl_t mem_vtbl;
+    mm_camera_buf_notify_t stream_cb;
+    void *userdata;
+} mm_camera_stream_config_t;
+
+/** mm_camera_super_buf_notify_mode_t: enum for super buffer
+*                                      notification mode
+*    @MM_CAMERA_SUPER_BUF_NOTIFY_BURST :
+*       ZSL use case: get burst of frames
+*    @MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS :
+*       get continuous frames: when the super buf is ready
+*       dispatch it to HAL
+**/
+typedef enum {
+    MM_CAMERA_SUPER_BUF_NOTIFY_BURST = 0,
+    MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS,
+    MM_CAMERA_SUPER_BUF_NOTIFY_MAX
+} mm_camera_super_buf_notify_mode_t;
+
+/** mm_camera_super_buf_priority_t: enum for super buffer
+*                                   matching priority
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL :
+*       Save the frame whether it is focused or not. Currently only
+*       this type is supported.
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS :
+*       only queue the frame that is focused. Will enable meta
+*       data header to carry focus info
+*    @MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING :
+*       after shutter, only queue matched exposure index
+**/
+typedef enum {
+    MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL = 0,
+    MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS,
+    MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING,
+    MM_CAMERA_SUPER_BUF_PRIORITY_MAX
+} mm_camera_super_buf_priority_t;
+
+/** mm_camera_channel_attr_t: structure for defining channel
+*                             attributes
+*    @notify_mode : notify mode: burst or continuous
+*    @water_mark : queue depth. Only valid for burst mode
+*    @look_back : look back how many frames from last buf.
+*                 Only valid for burst mode
+*    @post_frame_skip : after sending the first frame to HAL, how many
+*                     frames need to be skipped before the next
+*                     delivery. Only valid for burst mode
+*    @max_unmatched_frames : max number of unmatched frames in
+*                     queue
+*    @priority : save matched priority frames only
+**/
+typedef struct {
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    uint8_t water_mark;
+    uint8_t look_back;
+    uint8_t post_frame_skip;
+    uint8_t max_unmatched_frames;
+    mm_camera_super_buf_priority_t priority;
+} mm_camera_channel_attr_t;
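+
+/* Illustrative sketch (not part of the original interface): channel
+ * attributes for a small burst-mode queue. The numbers are arbitrary
+ * example values, not recommended settings. */
+static inline void example_fill_burst_attr(mm_camera_channel_attr_t *attr)
+{
+    attr->notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr->water_mark = 2;           /* queue depth of matched super bufs */
+    attr->look_back = 2;            /* frames to look back from the last buf */
+    attr->post_frame_skip = 1;      /* frames skipped between deliveries */
+    attr->max_unmatched_frames = 3; /* cap on unmatched frames in the queue */
+    attr->priority = MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL;
+}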
+
+typedef struct {
+    /** query_capability: function definition for querying static
+     *                    camera capabilities
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume cam_capability_t is already mapped
+     **/
+    int32_t (*query_capability) (uint32_t camera_handle);
+
+    /** register_event_notify: function definition for registering
+     *                         for event notification
+     *    @camera_handle : camera handler
+     *    @evt_cb : callback for event notify
+     *    @user_data : user data pointer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*register_event_notify) (uint32_t camera_handle,
+                                      mm_camera_event_notify_t evt_cb,
+                                      void *user_data);
+
+    /** close_camera: function definition for closing a camera
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*close_camera) (uint32_t camera_handle);
+
+    /** map_buf: function definition for mapping a camera buffer
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *                CAM_MAPPING_BUF_TYPE_CAPABILITY
+     *                CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+     *                CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+     *    @fd : file descriptor of the stream buffer
+     *    @size :  size of the stream buffer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_buf) (uint32_t camera_handle,
+                        uint8_t buf_type,
+                        int fd,
+                        uint32_t size);
+
+    /** unmap_buf: function definition for unmapping a camera buffer
+     *           via domain socket
+     *    @camera_handle : camera handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *                CAM_MAPPING_BUF_TYPE_CAPABILITY
+     *                CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+     *                CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*unmap_buf) (uint32_t camera_handle,
+                          uint8_t buf_type);
+
+    /** set_parms: function definition for setting camera
+     *             based parameters to server
+     *    @camera_handle : camera handler
+     *    @parms : batch for parameters to be set, stored in
+     *               parm_buffer_t
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm_buffer_t is already mapped, and the
+     *       corresponding parameter entries to be set are filled in the
+     *       buf before this call
+     **/
+    int32_t (*set_parms) (uint32_t camera_handle,
+                          parm_buffer_t *parms);
+
+    /** get_parms: function definition for querying camera
+     *             based parameters from server
+     *    @camera_handle : camera handler
+     *    @parms : batch for parameters to be queried, stored in
+     *               parm_buffer_t
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: would assume parm_buffer_t is already mapped, and the
+     *       corresponding parameter entries to be queried are filled in
+     *       the buf before this call
+     **/
+    int32_t (*get_parms) (uint32_t camera_handle,
+                          parm_buffer_t *parms);
+
+    /** do_auto_focus: function definition for performing auto focus
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: if this call succeeds, we will always assume there will
+     *        be an auto_focus event following up.
+     **/
+    int32_t (*do_auto_focus) (uint32_t camera_handle);
+
+    /** cancel_auto_focus: function definition for cancelling
+     *                     previous auto focus request
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*cancel_auto_focus) (uint32_t camera_handle);
+
+    /** prepare_snapshot: function definition for preparing hardware
+     *                    for snapshot.
+     *    @camera_handle : camera handler
+     *    @do_af_flag    : flag indicating if AF needs to be done
+     *                     0 -- no AF needed
+     *                     1 -- AF needed
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*prepare_snapshot) (uint32_t camera_handle,
+                                 int32_t do_af_flag);
+
+    /** start_zsl_snapshot: function definition for starting
+     *                    zsl snapshot.
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*start_zsl_snapshot) (uint32_t camera_handle);
+
+    /** stop_zsl_snapshot: function definition for stopping
+     *                    zsl snapshot.
+     *    @camera_handle : camera handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*stop_zsl_snapshot) (uint32_t camera_handle);
+
+    /** add_channel: function definition for adding a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @attr : pointer to channel attribute structure
+     *    @channel_cb : callback to handle bundled super buffer
+     *    @userdata : user data pointer
+     *  Return value: channel id, zero is invalid ch_id
+     * Note: attr, channel_cb, and userdata can be NULL if no
+     *       superbufCB is needed
+     **/
+    uint32_t (*add_channel) (uint32_t camera_handle,
+                             mm_camera_channel_attr_t *attr,
+                             mm_camera_buf_notify_t channel_cb,
+                             void *userdata);
+
+    /** delete_channel: function definition for deleting a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*delete_channel) (uint32_t camera_handle,
+                               uint32_t ch_id);
+
+    /** get_bundle_info: function definition for querying bundle
+     *  info of the channel
+     *    @camera_handle : camera handler
+     *    @ch_id         : channel handler
+     *    @bundle_info   : bundle info to be filled in
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*get_bundle_info) (uint32_t camera_handle,
+                                uint32_t ch_id,
+                                cam_bundle_config_t *bundle_info);
+
+    /** add_stream: function definition for adding a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: stream_id. zero is invalid stream_id
+     **/
+    uint32_t (*add_stream) (uint32_t camera_handle,
+                            uint32_t ch_id);
+
+    /** delete_stream: function definition for deleting a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*delete_stream) (uint32_t camera_handle,
+                              uint32_t ch_id,
+                              uint32_t stream_id);
+
+    /** config_stream: function definition for configuring a stream
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @config : pointer to a stream configuration structure
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*config_stream) (uint32_t camera_handle,
+                              uint32_t ch_id,
+                              uint32_t stream_id,
+                              mm_camera_stream_config_t *config);
+
+    /** map_stream_buf: function definition for mapping
+     *                 stream buffer via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *             CAM_MAPPING_BUF_TYPE_STREAM_BUF
+     *             CAM_MAPPING_BUF_TYPE_STREAM_INFO
+     *             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+     *    @buf_idx : buffer index within the stream buffers
+     *    @plane_idx : plane index. If all planes share the same fd,
+     *               plane_idx = -1; otherwise, plane_idx is the
+     *               index to plane (0..num_of_planes)
+     *    @fd : file descriptor of the stream buffer
+     *    @size :  size of the stream buffer
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*map_stream_buf) (uint32_t camera_handle,
+                               uint32_t ch_id,
+                               uint32_t stream_id,
+                               uint8_t buf_type,
+                               uint32_t buf_idx,
+                               int32_t plane_idx,
+                               int fd,
+                               uint32_t size);
+
+    /** unmap_stream_buf: function definition for unmapping
+     *                 stream buffer via domain socket
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @buf_type : type of mapping buffers, can be value of
+     *             CAM_MAPPING_BUF_TYPE_STREAM_BUF
+     *             CAM_MAPPING_BUF_TYPE_STREAM_INFO
+     *             CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+     *    @buf_idx : buffer index within the stream buffers
+     *    @plane_idx : plane index. If all planes share the same fd,
+     *               plane_idx = -1; otherwise, plane_idx is the
+     *               index to plane (0..num_of_planes)
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*unmap_stream_buf) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t stream_id,
+                                 uint8_t buf_type,
+                                 uint32_t buf_idx,
+                                 int32_t plane_idx);
+
+    /** set_stream_parms: function definition for setting stream
+     *                    specific parameters to server
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @parms : batch for parameters to be set
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: assumes the parm buffer is already mapped and the
+     *       corresponding parameter entries to be set are filled in
+     *       the buf before this call
+     **/
+    int32_t (*set_stream_parms) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t s_id,
+                                 cam_stream_parm_buffer_t *parms);
+
+    /** get_stream_parms: function definition for querying stream
+     *                    specific parameters from server
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @stream_id : stream handler
+     *    @parms : batch for parameters to be queried
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  Note: assumes the parm buffer is already mapped and the
+     *       corresponding parameter entries to be queried are filled
+     *       in the buf before this call
+     **/
+    int32_t (*get_stream_parms) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t s_id,
+                                 cam_stream_parm_buffer_t *parms);
+
+    /** start_channel: function definition for starting a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  This call will start all streams belonging to the channel
+     **/
+    int32_t (*start_channel) (uint32_t camera_handle,
+                              uint32_t ch_id);
+
+    /** stop_channel: function definition for stopping a channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     *  This call will stop all streams belonging to the channel
+     **/
+    int32_t (*stop_channel) (uint32_t camera_handle,
+                             uint32_t ch_id);
+
+    /** qbuf: function definition for queuing a frame buffer back to
+     *        kernel for reuse
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @buf : a frame buffer to be queued back to kernel
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*qbuf) (uint32_t camera_handle,
+                     uint32_t ch_id,
+                     mm_camera_buf_def_t *buf);
+
+    /** request_super_buf: function definition for requesting frames
+     *                     from superbuf queue in burst mode
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @num_buf_requested : number of super buffers requested
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*request_super_buf) (uint32_t camera_handle,
+                                  uint32_t ch_id,
+                                  uint32_t num_buf_requested);
+
+    /** cancel_super_buf_request: function definition for canceling
+     *                     frames dispatched from superbuf queue in
+     *                     burst mode
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*cancel_super_buf_request) (uint32_t camera_handle,
+                                         uint32_t ch_id);
+
+    /** flush_super_buf_queue: function definition for flushing out
+     *                     all frames in the superbuf queue up to frame_idx,
+     *                     even if frames with frame_idx come in later than
+     *                     this call.
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @frame_idx : frame index up until which all superbufs are flushed
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*flush_super_buf_queue) (uint32_t camera_handle,
+                                      uint32_t ch_id, uint32_t frame_idx);
+
+    /** configure_notify_mode: function definition for configuring the
+     *                         notification mode of channel
+     *    @camera_handle : camera handler
+     *    @ch_id : channel handler
+     *    @notify_mode : notification mode
+     *  Return value: 0 -- success
+     *                -1 -- failure
+     **/
+    int32_t (*configure_notify_mode) (uint32_t camera_handle,
+                                      uint32_t ch_id,
+                                      mm_camera_super_buf_notify_mode_t notify_mode);
+} mm_camera_ops_t;
+
+/** mm_camera_vtbl_t: virtual table for camera operations
+*    @camera_handle : camera handler which uniquely identifies a
+*                   camera object
+*    @ops : API call table
+**/
+typedef struct {
+    uint32_t camera_handle;
+    mm_camera_ops_t *ops;
+} mm_camera_vtbl_t;
+
+/* return number of cameras */
+uint8_t get_num_of_cameras();
+
+/* return reference pointer of camera vtbl */
+mm_camera_vtbl_t * camera_open(uint8_t camera_idx);
+
+/* helper functions */
+int32_t mm_stream_calc_offset_preview(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+                                  cam_dimension_t *dim,
+                                  cam_padding_info_t *padding,
+                                  cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+                                    cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes);
+
+#endif /*__MM_CAMERA_INTERFACE_H__*/
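
To make the call order implied by mm_camera_ops_t concrete, here is a minimal sketch of the channel/stream lifecycle against the declarations above. It is illustrative only: demo_start_preview is a hypothetical name, error checks on the returned channel/stream ids are largely elided, stream_config is assumed to be prepared by the caller, and closing the camera object itself is omitted.

#include "mm_camera_interface.h"

/* Hypothetical helper: open camera 0, run one channel with one stream, tear down. */
static int32_t demo_start_preview(mm_camera_stream_config_t *stream_config)
{
    if (get_num_of_cameras() == 0)
        return -1;

    mm_camera_vtbl_t *cam = camera_open(0);
    if (cam == NULL)
        return -1;

    /* NULL attr/cb/userdata: no bundled superbuf callback is needed here */
    uint32_t ch_id = cam->ops->add_channel(cam->camera_handle, NULL, NULL, NULL);
    uint32_t stream_id = cam->ops->add_stream(cam->camera_handle, ch_id);
    cam->ops->config_stream(cam->camera_handle, ch_id, stream_id, stream_config);

    cam->ops->start_channel(cam->camera_handle, ch_id);  /* starts all streams in the channel */
    /* ... frames are delivered through the registered buffer callbacks ... */
    cam->ops->stop_channel(cam->camera_handle, ch_id);

    cam->ops->delete_stream(cam->camera_handle, ch_id, stream_id);
    cam->ops->delete_channel(cam->camera_handle, ch_id);
    return 0;
}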
diff --git a/camera/QCamera2/stack/common/mm_jpeg_interface.h b/camera/QCamera2/stack/common/mm_jpeg_interface.h
new file mode 100644
index 0000000..b58b3a9
--- /dev/null
+++ b/camera/QCamera2/stack/common/mm_jpeg_interface.h
@@ -0,0 +1,205 @@
+/* Copyright (c) 2012, 2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_INTERFACE_H_
+#define MM_JPEG_INTERFACE_H_
+#include "QOMX_JpegExtensions.h"
+#include "cam_intf.h"
+
+#define MM_JPEG_MAX_PLANES 3
+#define MM_JPEG_MAX_BUF CAM_MAX_NUM_BUFS_PER_STREAM
+
+typedef enum {
+  MM_JPEG_FMT_YUV,
+  MM_JPEG_FMT_BITSTREAM
+} mm_jpeg_format_t;
+
+typedef struct {
+  cam_3a_params_t cam_3a_params;
+  cam_sensor_params_t sensor_params;
+} mm_jpeg_exif_params_t;
+typedef struct {
+  uint32_t sequence;          /* for jpeg bit streams, assembling is based on sequence. sequence starts from 0 */
+  uint8_t *buf_vaddr;        /* ptr to buf */
+  int fd;                    /* fd of buf */
+  uint32_t buf_size;         /* total size of buf (header + image) */
+  mm_jpeg_format_t format;   /* buffer format*/
+  cam_frame_len_offset_t offset; /* offset of all the planes */
+  int index; /* index used to identify the buffers */
+} mm_jpeg_buf_t;
+
+typedef struct {
+  uint8_t *buf_vaddr;        /* ptr to buf */
+  int fd;                    /* fd of buf */
+  uint32_t buf_filled_len;   /* used for output image. filled by the client */
+} mm_jpeg_output_t;
+
+typedef enum {
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2,
+  MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1,
+  MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V2,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H2V1,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V2,
+  MM_JPEG_COLOR_FORMAT_BITSTREAM_H1V1,
+  MM_JPEG_COLOR_FORMAT_MAX
+} mm_jpeg_color_format;
+
+typedef enum {
+  JPEG_JOB_STATUS_DONE = 0,
+  JPEG_JOB_STATUS_ERROR
+} jpeg_job_status_t;
+
+typedef void (*jpeg_encode_callback_t)(jpeg_job_status_t status,
+  uint32_t client_hdl,
+  uint32_t jobId,
+  mm_jpeg_output_t *p_output,
+  void *userData);
+
+typedef struct {
+  /* src img dimension */
+  cam_dimension_t src_dim;
+
+  /* jpeg output dimension */
+  cam_dimension_t dst_dim;
+
+  /* crop information */
+  cam_rect_t crop;
+} mm_jpeg_dim_t;
+
+typedef struct {
+  /* num of buf in src img */
+  uint32_t num_src_bufs;
+
+  /* num of src tmb bufs */
+  uint32_t num_tmb_bufs;
+
+  /* num of buf in src img */
+  uint32_t num_dst_bufs;
+
+  int8_t encode_thumbnail;
+
+  /* src img bufs */
+  mm_jpeg_buf_t src_main_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t src_thumb_buf[MM_JPEG_MAX_BUF];
+
+  /* this will be used only for bitstream */
+  mm_jpeg_buf_t dest_buf[MM_JPEG_MAX_BUF];
+
+  /* color format */
+  mm_jpeg_color_format color_format;
+
+  /* jpeg quality: range 0~100 */
+  uint32_t quality;
+
+  /* jpeg thumbnail quality: range 0~100 */
+  uint32_t thumb_quality;
+
+  /* buf to exif entries; the caller needs to take care of
+   * managing the memory of the inner ptrs */
+  QOMX_EXIF_INFO exif_info;
+
+  jpeg_encode_callback_t jpeg_cb;
+  void* userdata;
+
+} mm_jpeg_encode_params_t;
+
+typedef struct {
+  /* active indices of the buffers for encoding */
+  uint32_t src_index;
+  uint32_t dst_index;
+  uint32_t thumb_index;
+  mm_jpeg_dim_t thumb_dim;
+
+  /* rotation information */
+  int rotation;
+
+  /* main image dimension */
+  mm_jpeg_dim_t main_dim;
+
+  /*session id*/
+  uint32_t session_id;
+
+  /* Metadata from HAL version 1 */
+  cam_metadata_info_t *p_metadata_v1;
+
+  /*Metadata stream from HAL version 3*/
+  metadata_buffer_t *p_metadata_v3;
+
+  /* buf to exif entries; the caller needs to take care of
+   * managing the memory of the inner ptrs */
+  QOMX_EXIF_INFO exif_info;
+  /* 3a parameters */
+  mm_jpeg_exif_params_t cam_exif_params;
+} mm_jpeg_encode_job_t;
+
+typedef enum {
+  JPEG_JOB_TYPE_ENCODE,
+  JPEG_JOB_TYPE_MAX
+} mm_jpeg_job_type_t;
+
+typedef struct {
+  mm_jpeg_job_type_t job_type;
+  union {
+    mm_jpeg_encode_job_t encode_job;
+  };
+} mm_jpeg_job_t;
+
+typedef struct {
+  /* config a job -- async call */
+  int (*start_job)(mm_jpeg_job_t* job, uint32_t* job_id);
+
+  /* abort a job -- sync call */
+  int (*abort_job)(uint32_t job_id);
+
+  /* create a session */
+  int (*create_session)(uint32_t client_hdl,
+    mm_jpeg_encode_params_t *p_params, uint32_t *p_session_id);
+
+  /* destroy session */
+  int (*destroy_session)(uint32_t session_id);
+
+  /* close a jpeg client -- sync call */
+  int (*close) (uint32_t clientHdl);
+} mm_jpeg_ops_t;
+
+/* open a jpeg client -- sync call.
+ * returns client_handle;
+ * the open failed if client_handle is 0.
+ * jpeg ops tbl will be filled in if open succeeds */
+uint32_t jpeg_open(mm_jpeg_ops_t *ops);
+
+#endif /* MM_JPEG_INTERFACE_H_ */
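
The JPEG client declared above follows an open / create_session / start_job lifecycle. The sketch below is illustrative only: demo_encode and demo_jpeg_done are hypothetical names, buffer and EXIF setup inside p_params is assumed to be done elsewhere, and waiting for the asynchronous callback before teardown is only indicated by a comment.

#include <string.h>
#include "mm_jpeg_interface.h"

/* Hypothetical completion callback: p_output->buf_filled_len holds the encoded size on success. */
static void demo_jpeg_done(jpeg_job_status_t status, uint32_t client_hdl,
                           uint32_t jobId, mm_jpeg_output_t *p_output,
                           void *userData)
{
    (void)status; (void)client_hdl; (void)jobId; (void)p_output; (void)userData;
}

static int demo_encode(mm_jpeg_encode_params_t *p_params)
{
    mm_jpeg_ops_t ops;
    uint32_t client_hdl = jpeg_open(&ops);   /* fills ops on success */
    if (client_hdl == 0)
        return -1;

    p_params->jpeg_cb = demo_jpeg_done;

    uint32_t session_id = 0;
    ops.create_session(client_hdl, p_params, &session_id);

    mm_jpeg_job_t job;
    memset(&job, 0, sizeof(job));
    job.job_type = JPEG_JOB_TYPE_ENCODE;
    job.encode_job.session_id = session_id;

    uint32_t job_id = 0;
    ops.start_job(&job, &job_id);            /* async; result arrives via demo_jpeg_done */

    /* ... wait for the callback before tearing down ... */
    ops.destroy_session(session_id);
    ops.close(client_hdl);
    return 0;
}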
diff --git a/camera/QCamera2/stack/mm-camera-interface/Android.mk b/camera/QCamera2/stack/mm-camera-interface/Android.mk
new file mode 100755
index 0000000..f3a515b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/Android.mk
@@ -0,0 +1,44 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+MM_CAM_FILES := \
+        src/mm_camera_interface.c \
+        src/mm_camera.c \
+        src/mm_camera_channel.c \
+        src/mm_camera_stream.c \
+        src/mm_camera_thread.c \
+        src/mm_camera_sock.c
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+    LOCAL_CFLAGS += -DUSE_ION
+endif
+
+ifeq ($(call is-board-platform-in-list,msm8974 msm8226),true)
+    LOCAL_CFLAGS += -DVENUS_PRESENT
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS += ../common/cam_intf.h
+LOCAL_COPY_HEADERS += ../common/cam_types.h
+
+LOCAL_C_INCLUDES := \
+    $(LOCAL_PATH)/inc \
+    $(LOCAL_PATH)/../common
+
+LOCAL_C_INCLUDES += hardware/qcom/media/mm-core/inc
+
+LOCAL_CFLAGS += -Wall -Werror
+
+LOCAL_SRC_FILES := $(MM_CAM_FILES)
+
+LOCAL_MODULE           := libmmcamera_interface
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
new file mode 100644
index 0000000..4d7b511
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera.h
@@ -0,0 +1,581 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+#include <cam_semaphore.h>
+
+#include "mm_camera_interface.h"
+
+/**********************************************************************************
+* Data structure declare
+***********************************************************************************/
+/* num of callbacks allowed for an event type */
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+/* num of data callbacks allowed in a stream obj */
+#define MM_CAMERA_STREAM_BUF_CB_MAX 4
+/* num of data poll threads allowed in a channel obj */
+#define MM_CAMERA_CHANNEL_POLL_THREAD_MAX 1
+
+#define MM_CAMERA_DEV_NAME_LEN 32
+#define MM_CAMERA_DEV_OPEN_TRIES 20
+#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+struct mm_channel;
+struct mm_stream;
+struct mm_camera_obj;
+
+typedef enum
+{
+    MM_CAMERA_CMD_TYPE_DATA_CB,    /* dataCB CMD */
+    MM_CAMERA_CMD_TYPE_EVT_CB,     /* evtCB CMD */
+    MM_CAMERA_CMD_TYPE_EXIT,       /* EXIT */
+    MM_CAMERA_CMD_TYPE_REQ_DATA_CB,/* request data */
+    MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB,    /* superbuf dataCB CMD */
+    MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY, /* configure notify mode */
+    MM_CAMERA_CMD_TYPE_FLUSH_QUEUE, /* flush queue */
+    MM_CAMERA_CMD_TYPE_MAX
+} mm_camera_cmdcb_type_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint32_t frame_idx;
+    mm_camera_buf_def_t *buf; /* ref to buf */
+} mm_camera_buf_info_t;
+
+typedef struct {
+    uint32_t num_buf_requested;
+} mm_camera_req_buf_t;
+
+typedef struct {
+    mm_camera_cmdcb_type_t cmd_type;
+    union {
+        mm_camera_buf_info_t buf;    /* frame buf if dataCB */
+        mm_camera_event_t evt;       /* evt if evtCB */
+        mm_camera_super_buf_t superbuf; /* superbuf if superbuf dataCB*/
+        mm_camera_req_buf_t req_buf; /* num of buf requested */
+        uint32_t frame_idx; /* frame idx boundary for flush superbuf queue*/
+        mm_camera_super_buf_notify_mode_t notify_mode; /* notification mode */
+    } u;
+} mm_camera_cmdcb_t;
+
+typedef void (*mm_camera_cmd_cb_t)(mm_camera_cmdcb_t * cmd_cb, void* user_data);
+
+typedef struct {
+    cam_queue_t cmd_queue; /* cmd queue (queuing dataCB, asyncCB, or exitCMD) */
+    pthread_t cmd_pid;           /* cmd thread ID */
+    cam_semaphore_t cmd_sem;     /* semaphore for cmd thread */
+    mm_camera_cmd_cb_t cb;       /* cb for cmd */
+    void* user_data;             /* user_data for cb */
+} mm_camera_cmd_thread_t;
+
+typedef enum {
+    MM_CAMERA_POLL_TYPE_EVT,
+    MM_CAMERA_POLL_TYPE_DATA,
+    MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
+/* function ptr defined for poll notify CB,
+ * registered at poll thread with poll fd */
+typedef void (*mm_camera_poll_notify_t)(void *user_data);
+
+typedef struct {
+    int32_t fd;
+    mm_camera_poll_notify_t notify_cb;
+    uint32_t handler;
+    void* user_data;
+} mm_camera_poll_entry_t;
+
+typedef struct {
+    mm_camera_poll_thread_type_t poll_type;
+    /* array to store poll fd and cb info
+     * for MM_CAMERA_POLL_TYPE_EVT, only index 0 is valid;
+     * for MM_CAMERA_POLL_TYPE_DATA, depends on valid stream fd */
+    mm_camera_poll_entry_t poll_entries[MAX_STREAM_NUM_IN_BUNDLE];
+    int32_t pfds[2];
+    pthread_t pid;
+    int32_t state;
+    int timeoutms;
+    uint32_t cmd;
+    struct pollfd poll_fds[MAX_STREAM_NUM_IN_BUNDLE + 1];
+    uint8_t num_fds;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond_v;
+    int32_t status;
+    //void *my_obj;
+} mm_camera_poll_thread_t;
+
+/* mm_stream */
+typedef enum {
+    MM_STREAM_STATE_NOTUSED = 0,      /* not used */
+    MM_STREAM_STATE_INITED,           /* inited  */
+    MM_STREAM_STATE_ACQUIRED,         /* acquired, fd opened  */
+    MM_STREAM_STATE_CFG,              /* fmt & dim configured */
+    MM_STREAM_STATE_BUFFED,           /* buf allocated */
+    MM_STREAM_STATE_REG,              /* buf regged, stream off */
+    MM_STREAM_STATE_ACTIVE,           /* active */
+    MM_STREAM_STATE_MAX
+} mm_stream_state_type_t;
+
+typedef enum {
+    MM_STREAM_EVT_ACQUIRE,
+    MM_STREAM_EVT_RELEASE,
+    MM_STREAM_EVT_SET_FMT,
+    MM_STREAM_EVT_GET_BUF,
+    MM_STREAM_EVT_PUT_BUF,
+    MM_STREAM_EVT_REG_BUF,
+    MM_STREAM_EVT_UNREG_BUF,
+    MM_STREAM_EVT_START,
+    MM_STREAM_EVT_STOP,
+    MM_STREAM_EVT_QBUF,
+    MM_STREAM_EVT_SET_PARM,
+    MM_STREAM_EVT_GET_PARM,
+    MM_STREAM_EVT_DO_ACTION,
+    MM_STREAM_EVT_MAX
+} mm_stream_evt_type_t;
+
+typedef struct {
+    mm_camera_buf_notify_t cb;
+    void *user_data;
+    /* cb_count = -1: infinite
+     * cb_count > 0: register only for required times */
+    int8_t cb_count;
+} mm_stream_data_cb_t;
+
+typedef struct {
+    /* buf reference count */
+    uint8_t buf_refcnt;
+
+    /* This flag is to indicate if after allocation,
+     * the corresponding buf needs to qbuf into kernel
+     * (e.g. for preview usecase, display needs to hold two bufs,
+     * so no need to qbuf these two bufs initially) */
+    uint8_t initial_reg_flag;
+
+    /* indicate if buf is in kernel(1) or client(0) */
+    uint8_t in_kernel;
+} mm_stream_buf_status_t;
+
+typedef struct mm_stream {
+    uint32_t my_hdl; /* local stream id */
+    uint32_t server_stream_id; /* stream id from server */
+    int32_t fd;
+    mm_stream_state_type_t state;
+
+    /* stream info*/
+    cam_stream_info_t *stream_info;
+
+    /* padding info */
+    cam_padding_info_t padding_info;
+
+    /* offset */
+    cam_frame_len_offset_t frame_offset;
+
+    mm_camera_cmd_thread_t cmd_thread;
+
+    /* dataCB registered on this stream obj */
+    pthread_mutex_t cb_lock; /* cb lock to protect buf_cb */
+    mm_stream_data_cb_t buf_cb[MM_CAMERA_STREAM_BUF_CB_MAX];
+
+    /* stream buffer management */
+    pthread_mutex_t buf_lock;
+    uint8_t buf_num; /* num of buffers allocated */
+    mm_camera_buf_def_t* buf; /* ptr to buf array */
+    mm_stream_buf_status_t* buf_status; /* ptr to buf status array */
+
+    /* reference to parent channel_obj */
+    struct mm_channel* ch_obj;
+
+    uint8_t is_bundled; /* flag if stream is bundled */
+
+    mm_camera_stream_mem_vtbl_t mem_vtbl; /* mem ops tbl */
+
+    mm_camera_map_unmap_ops_tbl_t map_ops;
+
+    int8_t queued_buffer_count;
+} mm_stream_t;
+
+/* mm_channel */
+typedef enum {
+    MM_CHANNEL_STATE_NOTUSED = 0,   /* not used */
+    MM_CHANNEL_STATE_STOPPED,       /* stopped */
+    MM_CHANNEL_STATE_ACTIVE,        /* active, at least one stream active */
+    MM_CHANNEL_STATE_PAUSED,        /* paused */
+    MM_CHANNEL_STATE_MAX
+} mm_channel_state_type_t;
+
+typedef enum {
+    MM_CHANNEL_EVT_ADD_STREAM,
+    MM_CHANNEL_EVT_DEL_STREAM,
+    MM_CHANNEL_EVT_CONFIG_STREAM,
+    MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+    MM_CHANNEL_EVT_START,
+    MM_CHANNEL_EVT_STOP,
+    MM_CHANNEL_EVT_PAUSE,
+    MM_CHANNEL_EVT_RESUME,
+    MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+    MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+    MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+    MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+    MM_CHANNEL_EVT_MAP_STREAM_BUF,
+    MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+    MM_CHANNEL_EVT_SET_STREAM_PARM,
+    MM_CHANNEL_EVT_GET_STREAM_PARM,
+    MM_CHANNEL_EVT_DO_STREAM_ACTION,
+    MM_CHANNEL_EVT_DELETE,
+} mm_channel_evt_type_t;
+
+typedef struct {
+    uint32_t stream_id;
+    mm_camera_stream_config_t *config;
+} mm_evt_paylod_config_stream_t;
+
+typedef struct {
+    uint32_t stream_id;
+    cam_stream_parm_buffer_t *parms;
+} mm_evt_paylod_set_get_stream_parms_t;
+
+typedef struct {
+    uint32_t stream_id;
+    void *actions;
+} mm_evt_paylod_do_stream_action_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint8_t buf_type;
+    uint32_t buf_idx;
+    int32_t plane_idx;
+    int fd;
+    uint32_t size;
+} mm_evt_paylod_map_stream_buf_t;
+
+typedef struct {
+    uint32_t stream_id;
+    uint8_t buf_type;
+    uint32_t buf_idx;
+    int32_t plane_idx;
+} mm_evt_paylod_unmap_stream_buf_t;
+
+typedef struct {
+    uint8_t num_of_bufs;
+    mm_camera_buf_info_t super_buf[MAX_STREAM_NUM_IN_BUNDLE];
+    uint8_t matched;
+    uint32_t frame_idx;
+} mm_channel_queue_node_t;
+
+typedef struct {
+    cam_queue_t que;
+    uint8_t num_streams;
+    /* container for bundled stream handlers */
+    uint32_t bundled_streams[MAX_STREAM_NUM_IN_BUNDLE];
+    mm_camera_channel_attr_t attr;
+    uint32_t expected_frame_id;
+    uint32_t match_cnt;
+} mm_channel_queue_t;
+
+typedef struct {
+    uint8_t is_active; /* flag to indicate if bundle is valid */
+    /* queue to store bundled super buffers */
+    mm_channel_queue_t superbuf_queue;
+    mm_camera_buf_notify_t super_buf_notify_cb;
+    void *user_data;
+} mm_channel_bundle_t;
+
+typedef struct mm_channel {
+    uint32_t my_hdl;
+    mm_channel_state_type_t state;
+    pthread_mutex_t ch_lock; /* channel lock */
+
+    /* stream bundle info in the channel */
+    mm_channel_bundle_t bundle;
+
+    /* num of pending superbuffers */
+    uint32_t pending_cnt;
+
+    /* cmd thread for superbuffer dataCB and async stop*/
+    mm_camera_cmd_thread_t cmd_thread;
+
+    /* cb thread for sending data cb */
+    mm_camera_cmd_thread_t cb_thread;
+
+    /* data poll thread
+     * currently one data poll thread per channel;
+     * could be extended to support one data poll thread per stream in the channel */
+    mm_camera_poll_thread_t poll_thread[MM_CAMERA_CHANNEL_POLL_THREAD_MAX];
+
+    /* container for all streams in channel */
+    mm_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+
+    /* reference to parent cam_obj */
+    struct mm_camera_obj* cam_obj;
+} mm_channel_t;
+
+/* struct to store information about pp cookie*/
+typedef struct {
+    uint32_t cam_hdl;
+    uint32_t ch_hdl;
+    uint32_t stream_hdl;
+    mm_channel_queue_node_t* super_buf;
+} mm_channel_pp_info_t;
+
+/* mm_camera */
+typedef struct {
+    mm_camera_event_notify_t evt_cb;
+    void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+    mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+    /* reg_count <=0: infinite
+     * reg_count > 0: register only for required times */
+    int reg_count;
+} mm_camera_evt_obj_t;
+
+typedef struct mm_camera_obj {
+    uint32_t my_hdl;
+    int ref_count;
+    int32_t ctrl_fd;
+    int32_t ds_fd; /* domain socket fd */
+    pthread_mutex_t cam_lock;
+    pthread_mutex_t cb_lock; /* lock for evt cb */
+    mm_channel_t ch[MM_CAMERA_CHANNEL_MAX];
+    mm_camera_evt_obj_t evt;
+    mm_camera_poll_thread_t evt_poll_thread; /* evt poll thread */
+    mm_camera_cmd_thread_t evt_thread;       /* thread for evt CB */
+    mm_camera_vtbl_t vtbl;
+
+    pthread_mutex_t evt_lock;
+    pthread_cond_t evt_cond;
+    mm_camera_event_t evt_rcvd;
+
+    pthread_mutex_t msg_lock; /* lock for sending msg through socket */
+} mm_camera_obj_t;
+
+typedef struct {
+    int8_t num_cam;
+    char video_dev_name[MM_CAMERA_MAX_NUM_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+    mm_camera_obj_t *cam_obj[MM_CAMERA_MAX_NUM_SENSORS];
+} mm_camera_ctrl_t;
+
+typedef enum {
+    mm_camera_async_call,
+    mm_camera_sync_call
+} mm_camera_call_type_t;
+
+/**********************************************************************************
+* external function declare
+***********************************************************************************/
+/* utility functions */
+/* set int32_t value */
+extern int32_t mm_camera_util_s_ctrl(int32_t fd,
+                                     uint32_t id,
+                                     int32_t *value);
+
+/* get int32_t value */
+extern int32_t mm_camera_util_g_ctrl(int32_t fd,
+                                     uint32_t id,
+                                     int32_t *value);
+
+/* send msg through domain socket for fd mapping */
+extern int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+                                      void *msg,
+                                      uint32_t buf_size,
+                                      int sendfd);
+/* Check if hardware target is A family */
+uint8_t mm_camera_util_chip_is_a_family(void);
+
+/* mm-camera */
+extern int32_t mm_camera_open(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                               mm_camera_event_notify_t evt_cb,
+                                               void * user_data);
+extern int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id,
+                              mm_camera_buf_def_t *buf);
+extern int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+                                   parm_buffer_t *parms);
+extern int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+                                   parm_buffer_t *parms);
+extern int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+                                 uint8_t buf_type,
+                                 int fd,
+                                 uint32_t size);
+extern int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                                   uint8_t buf_type);
+extern int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                          int32_t do_af_flag);
+extern int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj);
+extern uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+                                      mm_camera_channel_attr_t *attr,
+                                      mm_camera_buf_notify_t channel_cb,
+                                      void *userdata);
+extern int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                                     uint32_t ch_id);
+extern int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+                                         uint32_t ch_id,
+                                         cam_bundle_config_t *bundle_info);
+extern uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                                     uint32_t ch_id);
+extern int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                                    uint32_t ch_id,
+                                    uint32_t stream_id);
+extern int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                       uint32_t ch_id,
+                                       uint32_t stream_id,
+                                       mm_camera_stream_config_t *config);
+extern int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
+                                       uint32_t ch_id);
+extern int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+                                      uint32_t ch_id);
+extern int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+                                           uint32_t ch_id,
+                                           uint32_t num_buf_requested);
+extern int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj,
+                                                  uint32_t ch_id);
+extern int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj,
+                                               uint32_t ch_id,
+                                               uint32_t frame_idx);
+extern int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+                                               uint32_t ch_id,
+                                               mm_camera_super_buf_notify_mode_t notify_mode);
+extern int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t s_id,
+                                          cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t s_id,
+                                          cam_stream_parm_buffer_t *parms);
+extern int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+                                                        mm_camera_event_notify_t evt_cb,
+                                                        void * user_data);
+extern int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+                                        uint32_t ch_id,
+                                        uint32_t stream_id,
+                                        uint8_t buf_type,
+                                        uint32_t buf_idx,
+                                        int32_t plane_idx,
+                                        int fd,
+                                        uint32_t size);
+extern int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t stream_id,
+                                          uint8_t buf_type,
+                                          uint32_t buf_idx,
+                                          int32_t plane_idx);
+extern int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+                                          uint32_t ch_id,
+                                          uint32_t stream_id,
+                                          void *actions);
+
+/* mm_channel */
+extern int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+                                 mm_channel_evt_type_t evt,
+                                 void * in_val,
+                                 void * out_val);
+extern int32_t mm_channel_init(mm_channel_t *my_obj,
+                               mm_camera_channel_attr_t *attr,
+                               mm_camera_buf_notify_t channel_cb,
+                               void *userdata);
+/* qbuf is a special case that does not go through the state machine.
+ * This avoids a deadlock when trying to acquire ch_lock from the
+ * context of dataCB while async stop is holding ch_lock */
+extern int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                               mm_camera_buf_def_t *buf);
+
+/* mm_stream */
+extern int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+                                mm_stream_evt_type_t evt,
+                                void * in_val,
+                                void * out_val);
+/* Allow another stream to register a dataCB on a certain stream.
+ * This is for the video-sized live snapshot use case,
+ * where the snapshot stream needs to register a one-time CB on the video stream.
+ * ext_image_mode and sensor_idx are used to identify the destination stream
+ * to be registered with the dataCB. */
+extern int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+                                    mm_stream_data_cb_t *val);
+extern int32_t mm_stream_map_buf(mm_stream_t *my_obj,
+                                 uint8_t buf_type,
+                                 uint32_t frame_idx,
+                                 int32_t plane_idx,
+                                 int fd,
+                                 uint32_t size);
+extern int32_t mm_stream_unmap_buf(mm_stream_t *my_obj,
+                                   uint8_t buf_type,
+                                   uint32_t frame_idx,
+                                   int32_t plane_idx);
+
+
+/* utility functions declared in mm-camera-interface2.c
+ * and needed by mm-camera and below */
+uint32_t mm_camera_util_generate_handler(uint8_t index);
+const char * mm_camera_util_get_dev_name(uint32_t cam_handler);
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler);
+
+/* poll/cmd thread functions */
+extern int32_t mm_camera_poll_thread_launch(
+                                mm_camera_poll_thread_t * poll_cb,
+                                mm_camera_poll_thread_type_t poll_type);
+extern int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb);
+extern int32_t mm_camera_poll_thread_add_poll_fd(
+                                mm_camera_poll_thread_t * poll_cb,
+                                uint32_t handler,
+                                int32_t fd,
+                                mm_camera_poll_notify_t nofity_cb,
+                                void *userdata,
+                                mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_del_poll_fd(
+                                mm_camera_poll_thread_t * poll_cb,
+                                uint32_t handler,
+                                mm_camera_call_type_t);
+extern int32_t mm_camera_poll_thread_commit_updates(
+        mm_camera_poll_thread_t * poll_cb);
+extern int32_t mm_camera_cmd_thread_launch(
+                                mm_camera_cmd_thread_t * cmd_thread,
+                                mm_camera_cmd_cb_t cb,
+                                void* user_data);
+extern int32_t mm_camera_cmd_thread_name(const char* name);
+extern int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread);
+
+#endif /* __MM_CAMERA_H__ */
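
As a quick illustration of the poll-thread helpers declared above, the sketch below launches a data poll thread and registers a stream fd with it. The demo_* names are hypothetical and the synchronous call type is chosen purely for brevity.

#include "mm_camera.h"

/* Hypothetical callback: invoked from the poll thread when the fd becomes readable. */
static void demo_on_frame_ready(void *user_data)
{
    (void)user_data;
}

static int32_t demo_poll_setup(mm_camera_poll_thread_t *poll_cb,
                               uint32_t stream_hdl, int32_t stream_fd,
                               void *user_data)
{
    int32_t rc = mm_camera_poll_thread_launch(poll_cb, MM_CAMERA_POLL_TYPE_DATA);
    if (rc != 0)
        return rc;
    return mm_camera_poll_thread_add_poll_fd(poll_cb, stream_hdl, stream_fd,
                                             demo_on_frame_ready, user_data,
                                             mm_camera_sync_call);
}

static void demo_poll_teardown(mm_camera_poll_thread_t *poll_cb, uint32_t stream_hdl)
{
    mm_camera_poll_thread_del_poll_fd(poll_cb, stream_hdl, mm_camera_sync_call);
    mm_camera_poll_thread_release(poll_cb);
}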
diff --git a/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
new file mode 100755
index 0000000..4f3c94f
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_dbg.h
@@ -0,0 +1,71 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+//#define LOG_DEBUG 1
+
+#ifndef LOG_DEBUG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera-intf"
+    #include <utils/Log.h>
+  #else
+    #include <stdio.h>
+    #define ALOGE CDBG
+  #endif
+  #undef CDBG
+  #define CDBG(fmt, args...) do{}while(0)
+  #define CDBG_ERROR(fmt, args...) ALOGE(fmt, ##args)
+#else
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera-intf"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+  #else
+    #include <stdio.h>
+    #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+    #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+  #endif
+#endif
+
+#ifdef _ANDROID_
+  #define CDBG_HIGH(fmt, args...)  ALOGD(fmt, ##args)
+  #define CDBG_ERROR(fmt, args...)  ALOGE(fmt, ##args)
+#else
+  #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_CAMERA_DBG_H__ */
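
For reference, a small hypothetical helper showing how these macros are typically used (mirroring the logging calls in mm_camera.c further down): CDBG output only appears when LOG_DEBUG is defined, while CDBG_ERROR always logs.

#include <errno.h>
#include <string.h>
#include "mm_camera_dbg.h"

static void demo_log_open_result(const char *dev_name, int fd)
{
    if (fd > 0) {
        CDBG("%s: opened %s, fd = %d", __func__, dev_name, fd);  /* compiled out unless LOG_DEBUG */
    } else {
        CDBG_ERROR("%s: cannot open %s (%s)", __func__, dev_name, strerror(errno));
    }
}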
diff --git a/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
new file mode 100755
index 0000000..8889e3c
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/inc/mm_camera_sock.h
@@ -0,0 +1,57 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+#include <inttypes.h>
+
+typedef enum {
+    MM_CAMERA_SOCK_TYPE_UDP,
+    MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int sendfd);
+
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd);
+
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
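A brief sketch of the call sequence this socket interface supports. demo_sock_send is a hypothetical helper; note that mm_camera.c below keeps the domain socket (ds_fd) open for the whole camera session, whereas this sketch closes it right after sending.

#include "mm_camera_sock.h"

/* Hypothetical helper: create the per-camera socket, send one message
 * (optionally carrying a file descriptor for mapping), then close it. */
static int demo_sock_send(int cam_id, void *msg, uint32_t msg_size, int fd_to_share)
{
    int sock_fd = mm_camera_socket_create(cam_id, MM_CAMERA_SOCK_TYPE_UDP);
    if (sock_fd <= 0)
        return -1;

    int rc = mm_camera_socket_sendmsg(sock_fd, msg, msg_size, fd_to_share);

    mm_camera_socket_close(sock_fd);
    return rc;
}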
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
new file mode 100644
index 0000000..5417f25
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -0,0 +1,1654 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+#define SET_PARM_BIT32(parm, parm_arr) \
+    (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+    ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
+/* internal function declare */
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                          uint8_t reg_flag);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event);
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_channel_by_handler
+ *
+ * DESCRIPTION: utility function to get a channel object from its handle
+ *
+ * PARAMETERS :
+ *   @cam_obj: ptr to a camera object
+ *   @handler: channel handle
+ *
+ * RETURN     : ptr to a channel object.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_channel_t * mm_camera_util_get_channel_by_handler(
+                                    mm_camera_obj_t * cam_obj,
+                                    uint32_t handler)
+{
+    int i;
+    mm_channel_t *ch_obj = NULL;
+    for(i = 0; i < MM_CAMERA_CHANNEL_MAX; i++) {
+        if (handler == cam_obj->ch[i].my_hdl) {
+            ch_obj = &cam_obj->ch[i];
+            break;
+        }
+    }
+    return ch_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_chip_is_a_family
+ *
+ * DESCRIPTION: utility function to check if the host is an A-family chip
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : TRUE if A family.
+ *              FALSE otherwise.
+ *==========================================================================*/
+uint8_t mm_camera_util_chip_is_a_family(void)
+{
+    int id = 0;
+    FILE *fp;
+    if ((fp = fopen("/sys/devices/system/soc/soc0/id", "r")) != NULL) {
+        fscanf(fp, "%d", &id);
+        fclose(fp);
+    }
+    if (id == 126)
+        return FALSE;
+    else
+        return TRUE;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_dispatch_app_event
+ *
+ * DESCRIPTION: dispatch event to apps that register for event notify
+ *
+ * PARAMETERS :
+ *   @cmd_cb: ptr to a struct storing event info
+ *   @user_data: user data ptr (camera object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
+                                         void* user_data)
+{
+    mm_camera_cmd_thread_name("mm_cam_event");
+    int i;
+    mm_camera_event_t *event = &cmd_cb->u.evt;
+    mm_camera_obj_t * my_obj = (mm_camera_obj_t *)user_data;
+    if (NULL != my_obj) {
+        pthread_mutex_lock(&my_obj->cb_lock);
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(my_obj->evt.evt[i].evt_cb) {
+                my_obj->evt.evt[i].evt_cb(
+                    my_obj->my_hdl,
+                    event,
+                    my_obj->evt.evt[i].user_data);
+            }
+        }
+        pthread_mutex_unlock(&my_obj->cb_lock);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_event_notify
+ *
+ * DESCRIPTION: callback to handle event notify from kernel. This call will
+ *              dequeue event from kernel.
+ *
+ * PARAMETERS :
+ *   @user_data: user data ptr (camera object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_event_notify(void* user_data)
+{
+    struct v4l2_event ev;
+    struct msm_v4l2_event_data *msm_evt = NULL;
+    int rc;
+    mm_camera_event_t evt;
+    memset(&evt, 0, sizeof(mm_camera_event_t));
+
+    mm_camera_obj_t *my_obj = (mm_camera_obj_t*)user_data;
+    if (NULL != my_obj) {
+        /* read evt */
+        memset(&ev, 0, sizeof(ev));
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev);
+
+        if (rc >= 0 && ev.id == MSM_CAMERA_MSM_NOTIFY) {
+            msm_evt = (struct msm_v4l2_event_data *)ev.u.data;
+            switch (msm_evt->command) {
+            case CAM_EVENT_TYPE_MAP_UNMAP_DONE:
+                pthread_mutex_lock(&my_obj->evt_lock);
+                my_obj->evt_rcvd.server_event_type = msm_evt->command;
+                my_obj->evt_rcvd.status = msm_evt->status;
+                pthread_cond_signal(&my_obj->evt_cond);
+                pthread_mutex_unlock(&my_obj->evt_lock);
+                break;
+            case MSM_CAMERA_PRIV_SHUTDOWN:
+                {
+                    evt.server_event_type = CAM_EVENT_TYPE_DAEMON_DIED;
+                    mm_camera_enqueue_evt(my_obj, &evt);
+                }
+                break;
+            default:
+                break;
+            }
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_enqueue_evt
+ *
+ * DESCRIPTION: enqueue received event into event queue to be processed by
+ *              event thread.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @event    : event to be queued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t *node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
+        node->u.evt = *event;
+
+        /* enqueue to evt cmd thread */
+        cam_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
+        /* wake up evt cmd thread */
+        cam_sem_post(&(my_obj->evt_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_open
+ *
+ * DESCRIPTION: open a camera
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_open(mm_camera_obj_t *my_obj)
+{
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    int32_t rc = 0;
+    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+    unsigned int cam_idx = 0;
+
+    CDBG("%s:  begin\n", __func__);
+
+    snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+             mm_camera_util_get_dev_name(my_obj->my_hdl));
+    sscanf(dev_name, "/dev/video%u", &cam_idx);
+    CDBG_HIGH("%s: dev name = %s, cam_idx = %d", __func__, dev_name, cam_idx);
+
+    do{
+        n_try--;
+        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        CDBG("%s:  ctrl_fd = %d, errno == %d", __func__, my_obj->ctrl_fd, errno);
+        if((my_obj->ctrl_fd > 0) || (errno != EIO) || (n_try <= 0 )) {
+            CDBG_HIGH("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG_HIGH("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__, sleep_msec);
+        usleep(sleep_msec * 1000);
+    }while (n_try > 0);
+
+    if (my_obj->ctrl_fd <= 0) {
+        CDBG_ERROR("%s: cannot open control fd of '%s' (%s)\n",
+                 __func__, dev_name, strerror(errno));
+        rc = -1;
+        goto on_error;
+    }
+
+    /* open domain socket*/
+    n_try = MM_CAMERA_DEV_OPEN_TRIES;
+    do {
+        n_try--;
+        my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+        CDBG("%s:  ds_fd = %d, errno = %d", __func__, my_obj->ds_fd, errno);
+        if((my_obj->ds_fd > 0) || (n_try <= 0 )) {
+            CDBG("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__, sleep_msec);
+        usleep(sleep_msec * 1000);
+    } while (n_try > 0);
+
+    if (my_obj->ds_fd <= 0) {
+        CDBG_ERROR("%s: cannot open domain socket fd of '%s'(%s)\n",
+                 __func__, dev_name, strerror(errno));
+        rc = -1;
+        goto on_error;
+    }
+    pthread_mutex_init(&my_obj->msg_lock, NULL);
+
+    pthread_mutex_init(&my_obj->cb_lock, NULL);
+    pthread_mutex_init(&my_obj->evt_lock, NULL);
+    pthread_cond_init(&my_obj->evt_cond, NULL);
+
+    CDBG("%s : Launch evt Thread in Cam Open",__func__);
+    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
+                                mm_camera_dispatch_app_event,
+                                (void *)my_obj);
+
+    /* launch event poll thread;
+     * the evt fd is added to the poll thread when the user first registers for events */
+    CDBG("%s : Launch evt Poll Thread in Cam Open", __func__);
+    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
+                                 MM_CAMERA_POLL_TYPE_EVT);
+    mm_camera_evt_sub(my_obj, TRUE);
+
+    CDBG("%s:  end (rc = %d)\n", __func__, rc);
+    /* we do not need to unlock cam_lock here before return
+     * because for open, it's done within intf_lock */
+    return rc;
+
+on_error:
+    if (my_obj->ctrl_fd > 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = 0;
+    }
+    if (my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = 0;
+    }
+
+    /* we do not need to unlock cam_lock here before return
+     * because for open, it's done within intf_lock */
+    return rc;
+}
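+
+/* The open above retries on transient EIO with a bounded number of attempts;
+ * a minimal standalone sketch of the same idea (the snippet is illustrative
+ * only and not part of the build):
+ *
+ *   int fd = -1;
+ *   int tries = MM_CAMERA_DEV_OPEN_TRIES;
+ *   do {
+ *       fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ *       if ((fd >= 0) || (errno != EIO) || (--tries <= 0)) {
+ *           break;
+ *       }
+ *       usleep(MM_CAMERA_DEV_OPEN_RETRY_SLEEP * 1000);
+ *   } while (tries > 0);
+ */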
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_close
+ *
+ * DESCRIPTION: close a camera: unsubscribe from events, release the event
+ *              threads, and close the control fd and domain socket fd.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+    CDBG("%s : unsubscribe evt", __func__);
+    mm_camera_evt_sub(my_obj, FALSE);
+
+    CDBG("%s : Close evt Poll Thread in Cam Close",__func__);
+    mm_camera_poll_thread_release(&my_obj->evt_poll_thread);
+
+    CDBG("%s : Close evt cmd Thread in Cam Close",__func__);
+    mm_camera_cmd_thread_release(&my_obj->evt_thread);
+
+    if(my_obj->ctrl_fd > 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = 0;
+    }
+    if(my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = 0;
+    }
+    pthread_mutex_destroy(&my_obj->msg_lock);
+
+    pthread_mutex_destroy(&my_obj->cb_lock);
+    pthread_mutex_destroy(&my_obj->evt_lock);
+    pthread_cond_destroy(&my_obj->evt_cond);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_register_event_notify_internal
+ *
+ * DESCRIPTION: internal implementation for registering callback for event notify.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @evt_cb   : callback to be registered to handle event notify
+ *   @user_data: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify_internal(mm_camera_obj_t *my_obj,
+                                                 mm_camera_event_notify_t evt_cb,
+                                                 void * user_data)
+{
+    int i;
+    int rc = -1;
+    mm_camera_evt_obj_t *evt_array = NULL;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    evt_array = &my_obj->evt;
+    if(evt_cb) {
+        /* this is reg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == NULL) {
+                evt_array->evt[i].evt_cb = evt_cb;
+                evt_array->evt[i].user_data = user_data;
+                evt_array->reg_count++;
+                rc = 0;
+                break;
+            }
+        }
+    } else {
+        /* this is unreg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == user_data) {
+                evt_array->evt[i].evt_cb = NULL;
+                evt_array->evt[i].user_data = NULL;
+                evt_array->reg_count--;
+                rc = 0;
+                break;
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&my_obj->cb_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_register_event_notify
+ *
+ * DESCRIPTION: registering a callback for event notify.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a camera object
+ *   @evt_cb   : callback to be registered to handle event notify
+ *   @user_data: user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                        mm_camera_event_notify_t evt_cb,
+                                        void * user_data)
+{
+    int rc = -1;
+    rc = mm_camera_register_event_notify_internal(my_obj,
+                                                  evt_cb,
+                                                  user_data);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                       uint32_t ch_id,
+                       mm_camera_buf_def_t *buf)
+{
+    int rc = -1;
+    mm_channel_t * ch_obj = NULL;
+    ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    /* we always assume qbuf will be done before the channel/stream is fully
+     * stopped, because qbuf is done within the dataCB context.
+     * To avoid deadlock, we do not lock ch_lock for qbuf */
+    if (NULL != ch_obj) {
+        rc = mm_channel_qbuf(ch_obj, buf);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ *   @my_obj: camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_query_capability(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_capability cap;
+
+    /* get camera capabilities */
+    memset(&cap, 0, sizeof(cap));
+    rc = ioctl(my_obj->ctrl_fd, VIDIOC_QUERYCAP, &cap);
+    if (rc != 0) {
+        CDBG_ERROR("%s: cannot get camera capabilities, rc = %d\n", __func__, rc);
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+
+}
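+
+/* Note: the struct v4l2_capability result is queried but not inspected here;
+ * rc alone decides success. For reference, a plain V4L2 client would
+ * typically check the returned capability flags, e.g.:
+ *
+ *   if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
+ *       ... device advertises video capture ...
+ *   }
+ */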
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_parms(mm_camera_obj_t *my_obj,
+                            parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (parms !=  NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @parms        : ptr to a param struct to be retrieved from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_parms(mm_camera_obj_t *my_obj,
+                            parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (parms != NULL) {
+        rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd, CAM_PRIV_PARM, &value);
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we will always assume there will
+ *              be an auto_focus event following up.
+ *==========================================================================*/
+int32_t mm_camera_do_auto_focus(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_DO_AUTO_FOCUS, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_auto_focus(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_CANCEL_AUTO_FOCUS, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @do_af_flag   : flag indicating if AF is needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                   int32_t do_af_flag)
+{
+    int32_t rc = -1;
+    int32_t value = do_af_flag;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, CAM_PRIV_PREPARE_SNAPSHOT, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+             CAM_PRIV_START_ZSL_SNAPSHOT, &value);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl capture
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_zsl_snapshot(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+             CAM_PRIV_STOP_ZSL_SNAPSHOT, &value);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : uint32_t type of channel handle
+ *              0  -- invalid channel handle, meaning the op failed
+ *              >0 -- successfully added a channel with a valid handle
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj,
+                               mm_camera_channel_attr_t *attr,
+                               mm_camera_buf_notify_t channel_cb,
+                               void *userdata)
+{
+    mm_channel_t *ch_obj = NULL;
+    uint8_t ch_idx = 0;
+    uint32_t ch_hdl = 0;
+
+    for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
+        if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
+            ch_obj = &my_obj->ch[ch_idx];
+            break;
+        }
+    }
+
+    if (NULL != ch_obj) {
+        /* initialize channel obj */
+        memset(ch_obj, 0, sizeof(mm_channel_t));
+        ch_hdl = mm_camera_util_generate_handler(ch_idx);
+        ch_obj->my_hdl = ch_hdl;
+        ch_obj->state = MM_CHANNEL_STATE_STOPPED;
+        ch_obj->cam_obj = my_obj;
+        pthread_mutex_init(&ch_obj->ch_lock, NULL);
+        mm_channel_init(ch_obj, attr, channel_cb, userdata);
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return ch_hdl;
+}
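+
+/* Caller-side sketch for the handle contract above (illustrative only; real
+ * callers go through the mm_camera_interface layer, which is expected to
+ * hold cam_lock before calling in). attr/channel_cb/userdata may be NULL
+ * when no bundle data notify is needed, per the NOTE above:
+ *
+ *   uint32_t ch = mm_camera_add_channel(my_obj, NULL, NULL, NULL);
+ *   if (0 == ch) {
+ *       ... no free channel slot, handle the failure ...
+ *   }
+ */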
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DELETE,
+                               NULL,
+                               NULL);
+
+        pthread_mutex_destroy(&ch_obj->ch_lock);
+        memset(ch_obj, 0, sizeof(mm_channel_t));
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
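+
+/* Note: the lock hand-off above (take ch_lock, release cam_lock, then call
+ * into the channel FSM) is the pattern used here and by all the per-channel
+ * entry points that follow; in sketch form (evt/in_val/out_val are
+ * placeholders):
+ *
+ *   pthread_mutex_lock(&ch_obj->ch_lock);
+ *   pthread_mutex_unlock(&my_obj->cam_lock);
+ *   rc = mm_channel_fsm_fn(ch_obj, evt, in_val, out_val);
+ */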
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_get_bundle_info(mm_camera_obj_t *my_obj,
+                                  uint32_t ch_id,
+                                  cam_bundle_config_t *bundle_info)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_GET_BUNDLE_INFO,
+                               (void *)bundle_info,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id)
+{
+    uint32_t s_hdl = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        mm_channel_fsm_fn(ch_obj,
+                          MM_CHANNEL_EVT_ADD_STREAM,
+                          NULL,
+                          (void*)&s_hdl);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                             uint32_t ch_id,
+                             uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DEL_STREAM,
+                               (void*)stream_id,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id,
+                                uint32_t stream_id,
+                                mm_camera_stream_config_t *config)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+    mm_evt_paylod_config_stream_t payload;
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(mm_evt_paylod_config_stream_t));
+        payload.stream_id = stream_id;
+        payload.config = config;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CONFIG_STREAM,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_start_channel(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_START,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_stop_channel(mm_camera_obj_t *my_obj,
+                               uint32_t ch_id)
+{
+    int32_t rc = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_STOP,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ *              frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @num_buf_requested : number of matched frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+                                    uint32_t ch_id,
+                                    uint32_t num_buf_requested)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+                               (void*)num_buf_requested,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain number
+ *              of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+                               NULL,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_flush_super_buf_queue(mm_camera_obj_t *my_obj, uint32_t ch_id,
+                                                             uint32_t frame_idx)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE,
+                               (void *)frame_idx,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_config_channel_notify
+ *
+ * DESCRIPTION: configures the channel notification mode
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_config_channel_notify(mm_camera_obj_t *my_obj,
+                                        uint32_t ch_id,
+                                        mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = -1;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE,
+                               (void *)notify_mode,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_set_stream_parms(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t s_id,
+                                   cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_set_get_stream_parms_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = s_id;
+        payload.parms = parms;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_SET_STREAM_PARM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be retrieved from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+int32_t mm_camera_get_stream_parms(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t s_id,
+                                   cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_set_get_stream_parms_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = s_id;
+        payload.parms = parms;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_GET_STREAM_PARM,
+                               (void *)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_do_stream_action
+ *
+ * DESCRIPTION: request server to perform stream-based action. May be removed later
+ *              if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @actions      : ptr to an action struct buf to be performed by server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Actions to be performed by server are already
+ *              filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_camera_do_stream_action(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t stream_id,
+                                   void *actions)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_do_stream_action_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.actions = actions;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_DO_STREAM_ACTION,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_stream_buf(mm_camera_obj_t *my_obj,
+                                 uint32_t ch_id,
+                                 uint32_t stream_id,
+                                 uint8_t buf_type,
+                                 uint32_t buf_idx,
+                                 int32_t plane_idx,
+                                 int fd,
+                                 uint32_t size)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_map_stream_buf_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.buf_type = buf_type;
+        payload.buf_idx = buf_idx;
+        payload.plane_idx = plane_idx;
+        payload.fd = fd;
+        payload.size = size;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_MAP_STREAM_BUF,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_stream_buf(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t stream_id,
+                                   uint8_t buf_type,
+                                   uint32_t buf_idx,
+                                   int32_t plane_idx)
+{
+    int32_t rc = -1;
+    mm_evt_paylod_unmap_stream_buf_t payload;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(payload));
+        payload.stream_id = stream_id;
+        payload.buf_type = buf_type;
+        payload.buf_idx = buf_idx;
+        payload.plane_idx = plane_idx;
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_UNMAP_STREAM_BUF,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_evt_sub
+ *
+ * DESCRIPTION: subscribe/unsubscribe event notify from kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @reg_flag     : 1 -- subscribe ; 0 -- unsubscribe
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                          uint8_t reg_flag)
+{
+    int32_t rc = 0;
+    struct v4l2_event_subscription sub;
+
+    memset(&sub, 0, sizeof(sub));
+    sub.type = MSM_CAMERA_V4L2_EVENT_TYPE;
+    sub.id = MSM_CAMERA_MSM_NOTIFY;
+    if(FALSE == reg_flag) {
+        /* unsubscribe */
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+        if (rc < 0) {
+            CDBG_ERROR("%s: unsubscribe event rc = %d", __func__, rc);
+            return rc;
+        }
+        /* remove evt fd from the polling thread when unregistering the last event */
+        rc = mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread,
+                                               my_obj->my_hdl,
+                                               mm_camera_sync_call);
+    } else {
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+        if (rc < 0) {
+            CDBG_ERROR("%s: subscribe event rc = %d", __func__, rc);
+            return rc;
+        }
+        /* add evt fd to polling thread when subscribing to the first event */
+        rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
+                                               my_obj->my_hdl,
+                                               my_obj->ctrl_fd,
+                                               mm_camera_event_notify,
+                                               (void*)my_obj,
+                                               mm_camera_sync_call);
+    }
+    return rc;
+}
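+
+/* Once subscribed, events arrive on the same ctrl_fd and are dequeued in
+ * mm_camera_event_notify() near the top of this file; a minimal sketch of
+ * that dequeue step:
+ *
+ *   struct v4l2_event ev;
+ *   memset(&ev, 0, sizeof(ev));
+ *   if (ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev) >= 0) {
+ *       struct msm_v4l2_event_data *msm_evt =
+ *           (struct msm_v4l2_event_data *)ev.u.data;
+ *       ... dispatch on msm_evt->command ...
+ *   }
+ */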
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_wait_for_event
+ *
+ * DESCRIPTION: utility function to wait for certain events
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @evt_mask     : mask of events to wait for. Any event in the mask will
+ *                   end the wait
+ *   @status       : status of the event
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_camera_util_wait_for_event(mm_camera_obj_t *my_obj,
+                                   uint32_t evt_mask,
+                                   int32_t *status)
+{
+    pthread_mutex_lock(&my_obj->evt_lock);
+    while (!(my_obj->evt_rcvd.server_event_type & evt_mask)) {
+        pthread_cond_wait(&my_obj->evt_cond, &my_obj->evt_lock);
+    }
+    *status = my_obj->evt_rcvd.status;
+    /* reset local storage for the received event for the next event */
+    memset(&my_obj->evt_rcvd, 0, sizeof(mm_camera_event_t));
+    pthread_mutex_unlock(&my_obj->evt_lock);
+}
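+
+/* The matching signal side lives in mm_camera_event_notify() and follows the
+ * usual predicate-plus-condition-variable pattern; in sketch form, using the
+ * same evt_lock/evt_cond/evt_rcvd fields:
+ *
+ *   pthread_mutex_lock(&my_obj->evt_lock);
+ *   my_obj->evt_rcvd.server_event_type = msm_evt->command;
+ *   my_obj->evt_rcvd.status = msm_evt->status;
+ *   pthread_cond_signal(&my_obj->evt_cond);
+ *   pthread_mutex_unlock(&my_obj->evt_lock);
+ */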
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_sendmsg
+ *
+ * DESCRIPTION: utility function to send msg via domain socket
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @msg          : message to be sent
+ *   @buf_size     : size of the message to be sent
+ *   @sendfd       : >0 if a file descriptor needs to be passed across processes
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+                               void *msg,
+                               uint32_t buf_size,
+                               int sendfd)
+{
+    int32_t rc = -1;
+    int32_t status;
+
+    /* need to lock msg_lock, since sendmsg and waiting for the response are treated as one operation */
+    pthread_mutex_lock(&my_obj->msg_lock);
+    if(mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd) > 0) {
+        /* wait for event that mapping/unmapping is done */
+        mm_camera_util_wait_for_event(my_obj, CAM_EVENT_TYPE_MAP_UNMAP_DONE, &status);
+        if (MSM_CAMERA_STATUS_SUCCESS == status) {
+            rc = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->msg_lock);
+    return rc;
+}
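+
+/* The round trip above ties two paths in this file together: the packet goes
+ * out over the domain socket (ds_fd), and completion comes back
+ * asynchronously as a CAM_EVENT_TYPE_MAP_UNMAP_DONE V4L2 event on ctrl_fd,
+ * which mm_camera_event_notify() converts into a signal on evt_cond that
+ * mm_camera_util_wait_for_event() is blocked on. */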
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+                          uint8_t buf_type,
+                          int fd,
+                          uint32_t size)
+{
+    int32_t rc = 0;
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet.payload.buf_map.type = buf_type;
+    packet.payload.buf_map.fd = fd;
+    packet.payload.buf_map.size = size;
+    rc = mm_camera_util_sendmsg(my_obj,
+                                &packet,
+                                sizeof(cam_sock_packet_t),
+                                fd);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : camera object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                            uint8_t buf_type)
+{
+    int32_t rc = 0;
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+    packet.payload.buf_unmap.type = buf_type;
+    rc = mm_camera_util_sendmsg(my_obj,
+                                &packet,
+                                sizeof(cam_sock_packet_t),
+                                0);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_s_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for s_ctrl
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor for sending ioctl
+ *   @id      : control id
+ *   @value   : value of the ioctl to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_s_ctrl(int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = 0;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    if (value != NULL) {
+        control.value = *value;
+    }
+    rc = ioctl(fd, VIDIOC_S_CTRL, &control);
+
+    CDBG("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %d\n",
+         __func__, fd, id, (uint32_t)control.value, rc);
+    if (value != NULL) {
+        *value = control.value;
+    }
+    return (rc >= 0)? 0 : -1;
+}
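+
+/* Usage sketch, mirroring mm_camera_do_auto_focus() above:
+ *
+ *   int32_t value = 0;
+ *   int32_t rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ *                                      CAM_PRIV_DO_AUTO_FOCUS, &value);
+ *   ... rc is 0 on success, -1 on ioctl failure; on return, value holds
+ *       the control value echoed back by the driver ...
+ */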
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_g_ctrl
+ *
+ * DESCRIPTION: utility function to send v4l2 ioctl for g_ctrl
+ *
+ * PARAMETERS :
+ *   @fd      : file descriptor for sending ioctl
+ *   @id      : control id
+ *   @value   : value of the ioctl to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_util_g_ctrl(int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = 0;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    if (value != NULL) {
+        control.value = *value;
+    }
+    rc = ioctl(fd, VIDIOC_G_CTRL, &control);
+    CDBG("%s: fd=%d, G_CTRL, id=0x%x, rc = %d\n", __func__, fd, id, rc);
+    if (value != NULL) {
+        *value = control.value;
+    }
+    return (rc >= 0)? 0 : -1;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
new file mode 100644
index 0000000..9a6b3f5
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -0,0 +1,1927 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler);
+extern mm_channel_t * mm_camera_util_get_channel_by_handler(mm_camera_obj_t * cam_obj,
+                                                            uint32_t handler);
+
+/* internal function declare goes here */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                        mm_camera_buf_def_t *buf);
+int32_t mm_channel_init(mm_channel_t *my_obj,
+                        mm_camera_channel_attr_t *attr,
+                        mm_camera_buf_notify_t channel_cb,
+                        void *userdata);
+void mm_channel_release(mm_channel_t *my_obj);
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj);
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+                                   uint32_t stream_id);
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+                                 uint32_t stream_id,
+                                 mm_camera_stream_config_t *config);
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+                                   cam_bundle_config_t *bundle_info);
+int32_t mm_channel_start(mm_channel_t *my_obj);
+int32_t mm_channel_stop(mm_channel_t *my_obj);
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj,
+                                     uint32_t num_buf_requested);
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj);
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj,
+                                         uint32_t frame_idx);
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+                                      mm_camera_super_buf_notify_mode_t notify_mode);
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj, mm_channel_queue_t * queue);
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload);
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+                                    mm_evt_paylod_do_stream_action_t *payload);
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+                                  mm_evt_paylod_map_stream_buf_t *payload);
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+                                    mm_evt_paylod_unmap_stream_buf_t *payload);
+
+/* state machine function declare */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+
+/* channel super queue functions */
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_comp_and_enqueue(mm_channel_t *ch_obj,
+                                             mm_channel_queue_t * queue,
+                                             mm_camera_buf_info_t *buf);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t *my_obj,
+                                             mm_channel_queue_t *queue);
+int32_t mm_channel_superbuf_skip(mm_channel_t *my_obj,
+                                 mm_channel_queue_t *queue);
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_util_get_stream_by_handler
+ *
+ * DESCRIPTION: utility function to get a stream object from its handle
+ *
+ * PARAMETERS :
+ *   @cam_obj: ptr to a channel object
+ *   @handler: stream handle
+ *
+ * RETURN     : ptr to a stream object.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_stream_t * mm_channel_util_get_stream_by_handler(
+                                    mm_channel_t * ch_obj,
+                                    uint32_t handler)
+{
+    int i;
+    mm_stream_t *s_obj = NULL;
+    for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if ((MM_STREAM_STATE_NOTUSED != ch_obj->streams[i].state) &&
+            (handler == ch_obj->streams[i].my_hdl)) {
+            s_obj = &ch_obj->streams[i];
+            break;
+        }
+    }
+    return s_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_dispatch_super_buf
+ *
+ * DESCRIPTION: dispatch super buffer of bundle to registered user
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing matched super buf information
+ *   @userdata: user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_channel_dispatch_super_buf(mm_camera_cmdcb_t *cmd_cb,
+                                          void* user_data)
+{
+    mm_camera_cmd_thread_name("mm_cam_cb");
+    mm_channel_t * my_obj = (mm_channel_t *)user_data;
+
+    if (NULL == my_obj) {
+        return;
+    }
+
+    if (MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB != cmd_cb->cmd_type) {
+        CDBG_ERROR("%s: Wrong cmd_type (%d) for super buf dataCB",
+                   __func__, cmd_cb->cmd_type);
+        return;
+    }
+
+    if (my_obj->bundle.super_buf_notify_cb) {
+        my_obj->bundle.super_buf_notify_cb(&cmd_cb->u.superbuf, my_obj->bundle.user_data);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_process_stream_buf
+ *
+ * DESCRIPTION: handle an incoming buffer from a stream in a bundle. Matching
+ *              logic is performed on incoming stream frames. Depending on the
+ *              bundle attribute, matched frames are either stored in the
+ *              superbuf queue or sent to the upper layer through the
+ *              registered callback.
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing matched super buf information
+ *   @userdata: user data ptr
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_channel_process_stream_buf(mm_camera_cmdcb_t * cmd_cb,
+                                          void *user_data)
+{
+    mm_camera_cmd_thread_name("mm_cam_cmd");
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    mm_channel_queue_node_t *node = NULL;
+    mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+    if (NULL == ch_obj) {
+        return;
+    }
+
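+    /* Handle the incoming command by type:
+     *   MM_CAMERA_CMD_TYPE_DATA_CB       - match and enqueue the frame into the superbuf queue
+     *   MM_CAMERA_CMD_TYPE_REQ_DATA_CB   - set pending_cnt for burst mode and skip stale frames
+     *   MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY - update the superbuf queue notify mode
+     *   MM_CAMERA_CMD_TYPE_FLUSH_QUEUE   - flush superbufs up to the given frame idx and return */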
+    if (MM_CAMERA_CMD_TYPE_DATA_CB  == cmd_cb->cmd_type) {
+        /* comp_and_enqueue */
+        mm_channel_superbuf_comp_and_enqueue(
+                        ch_obj,
+                        &ch_obj->bundle.superbuf_queue,
+                        &cmd_cb->u.buf);
+    } else if (MM_CAMERA_CMD_TYPE_REQ_DATA_CB  == cmd_cb->cmd_type) {
+        /* skip frames if needed */
+        ch_obj->pending_cnt = cmd_cb->u.req_buf.num_buf_requested;
+        mm_channel_superbuf_skip(ch_obj, &ch_obj->bundle.superbuf_queue);
+    } else if (MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY == cmd_cb->cmd_type) {
+           ch_obj->bundle.superbuf_queue.attr.notify_mode = cmd_cb->u.notify_mode;
+    } else if (MM_CAMERA_CMD_TYPE_FLUSH_QUEUE  == cmd_cb->cmd_type) {
+        ch_obj->bundle.superbuf_queue.expected_frame_id = cmd_cb->u.frame_idx;
+        mm_channel_superbuf_flush(ch_obj, &ch_obj->bundle.superbuf_queue);
+        return;
+    }
+    notify_mode = ch_obj->bundle.superbuf_queue.attr.notify_mode;
+
+    /* bufdone for overflowed bufs */
+    mm_channel_superbuf_bufdone_overflow(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+    /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
+    while ( (ch_obj->pending_cnt > 0) ||
+            (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode) ) {
+
+        /* dequeue */
+        node = mm_channel_superbuf_dequeue(&ch_obj->bundle.superbuf_queue);
+        if (NULL != node) {
+            /* decrease pending_cnt */
+            CDBG("%s: Super Buffer received, Call client callback, pending_cnt=%d",
+                 __func__, ch_obj->pending_cnt);
+            if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+                ch_obj->pending_cnt--;
+            }
+
+            /* dispatch superbuf */
+            if (NULL != ch_obj->bundle.super_buf_notify_cb) {
+                uint8_t i;
+                mm_camera_cmdcb_t* cb_node = NULL;
+
+                CDBG("%s: Send superbuf to HAL, pending_cnt=%d",
+                     __func__, ch_obj->pending_cnt);
+
+                /* send cam_sem_post to wake up cb thread to dispatch super buffer */
+                cb_node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+                if (NULL != cb_node) {
+                    memset(cb_node, 0, sizeof(mm_camera_cmdcb_t));
+                    cb_node->cmd_type = MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB;
+                    cb_node->u.superbuf.num_bufs = node->num_of_bufs;
+                    for (i=0; i<node->num_of_bufs; i++) {
+                        cb_node->u.superbuf.bufs[i] = node->super_buf[i].buf;
+                    }
+                    cb_node->u.superbuf.camera_handle = ch_obj->cam_obj->my_hdl;
+                    cb_node->u.superbuf.ch_id = ch_obj->my_hdl;
+
+                    /* enqueue to cb thread */
+                    cam_queue_enq(&(ch_obj->cb_thread.cmd_queue), cb_node);
+
+                    /* wake up cb thread */
+                    cam_sem_post(&(ch_obj->cb_thread.cmd_sem));
+                } else {
+                    CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+                    /* buf done with the nonuse super buf */
+                    for (i=0; i<node->num_of_bufs; i++) {
+                        mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+                    }
+                }
+            } else {
+                /* buf done with the nonuse super buf */
+                uint8_t i;
+                for (i=0; i<node->num_of_bufs; i++) {
+                    mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+                }
+            }
+            free(node);
+        } else {
+            /* no superbuf avail, break the loop */
+            break;
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn
+ *
+ * DESCRIPTION: channel finite state machine entry function. Depends on channel
+ *              state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = -1;
+
+    CDBG("%s : E state = %d", __func__, my_obj->state);
+    switch (my_obj->state) {
+    case MM_CHANNEL_STATE_NOTUSED:
+        rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_STOPPED:
+        rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_ACTIVE:
+        rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
+        break;
+    case MM_CHANNEL_STATE_PAUSED:
+        rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
+        break;
+    default:
+        CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
+        break;
+    }
+
+    /* unlock ch_lock */
+    pthread_mutex_unlock(&my_obj->ch_lock);
+    CDBG("%s : X rc = %d", __func__, rc);
+    return rc;
+}
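+
+/* Calling-convention sketch (an assumption about the upper layer, which is not
+ * part of this file): mm_channel_fsm_fn() unlocks ch_lock on exit, so a caller
+ * is expected to take the lock first, e.g.
+ *
+ *   pthread_mutex_lock(&ch_obj->ch_lock);
+ *   rc = mm_channel_fsm_fn(ch_obj, MM_CHANNEL_EVT_START, NULL, NULL);
+ */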
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_notused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in NOT_USED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+                                  mm_channel_evt_type_t evt,
+                                  void * in_val,
+                                  void * out_val)
+{
+    int32_t rc = -1;
+
+    switch (evt) {
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_stopped
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in STOPPED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+                                  mm_channel_evt_type_t evt,
+                                  void * in_val,
+                                  void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s : E evt = %d", __func__, evt);
+    switch (evt) {
+    case MM_CHANNEL_EVT_ADD_STREAM:
+        {
+            uint32_t s_hdl = 0;
+            s_hdl = mm_channel_add_stream(my_obj);
+            *((uint32_t*)out_val) = s_hdl;
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_DEL_STREAM:
+        {
+            uint32_t s_id = (uint32_t)in_val;
+            rc = mm_channel_del_stream(my_obj, s_id);
+        }
+        break;
+    case MM_CHANNEL_EVT_START:
+        {
+            rc = mm_channel_start(my_obj);
+            /* streams started successfully from the stopped state,
+             * move to the active state */
+            if (0 == rc) {
+                my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_CONFIG_STREAM:
+        {
+            mm_evt_paylod_config_stream_t *payload =
+                (mm_evt_paylod_config_stream_t *)in_val;
+            rc = mm_channel_config_stream(my_obj,
+                                          payload->stream_id,
+                                          payload->config);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_BUNDLE_INFO:
+        {
+            cam_bundle_config_t *payload =
+                (cam_bundle_config_t *)in_val;
+            rc = mm_channel_get_bundle_info(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DELETE:
+        {
+            mm_channel_release(my_obj);
+            rc = 0;
+        }
+        break;
+    case MM_CHANNEL_EVT_SET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_set_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_get_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+        {
+            mm_evt_paylod_do_stream_action_t *payload =
+                (mm_evt_paylod_do_stream_action_t *)in_val;
+            rc = mm_channel_do_stream_action(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+        {
+            mm_evt_paylod_map_stream_buf_t *payload =
+                (mm_evt_paylod_map_stream_buf_t *)in_val;
+            rc = mm_channel_map_stream_buf(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+        {
+            mm_evt_paylod_unmap_stream_buf_t *payload =
+                (mm_evt_paylod_unmap_stream_buf_t *)in_val;
+            rc = mm_channel_unmap_stream_buf(my_obj, payload);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
+                   __func__, my_obj->state, evt);
+        break;
+    }
+    CDBG("%s : X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_active
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in ACTIVE state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+
+    CDBG("%s : E evt = %d", __func__, evt);
+    switch (evt) {
+    case MM_CHANNEL_EVT_STOP:
+        {
+            rc = mm_channel_stop(my_obj);
+            my_obj->state = MM_CHANNEL_STATE_STOPPED;
+        }
+        break;
+    case MM_CHANNEL_EVT_REQUEST_SUPER_BUF:
+        {
+            uint32_t num_buf_requested = (uint32_t)in_val;
+            rc = mm_channel_request_super_buf(my_obj, num_buf_requested);
+        }
+        break;
+    case MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF:
+        {
+            rc = mm_channel_cancel_super_buf_request(my_obj);
+        }
+        break;
+    case MM_CHANNEL_EVT_FLUSH_SUPER_BUF_QUEUE:
+        {
+            uint32_t frame_idx = (uint32_t)in_val;
+            rc = mm_channel_flush_super_buf_queue(my_obj, frame_idx);
+        }
+        break;
+    case MM_CHANNEL_EVT_CONFIG_NOTIFY_MODE:
+        {
+            mm_camera_super_buf_notify_mode_t notify_mode =
+                (mm_camera_super_buf_notify_mode_t)in_val;
+            rc = mm_channel_config_notify_mode(my_obj, notify_mode);
+        }
+        break;
+    case MM_CHANNEL_EVT_SET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_set_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_GET_STREAM_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_channel_get_stream_parm(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_DO_STREAM_ACTION:
+        {
+            mm_evt_paylod_do_stream_action_t *payload =
+                (mm_evt_paylod_do_stream_action_t *)in_val;
+            rc = mm_channel_do_stream_action(my_obj, payload);
+        }
+        break;
+    case MM_CHANNEL_EVT_MAP_STREAM_BUF:
+        {
+            mm_evt_paylod_map_stream_buf_t *payload =
+                (mm_evt_paylod_map_stream_buf_t *)in_val;
+            if (payload != NULL &&
+                payload->buf_type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) {
+                rc = mm_channel_map_stream_buf(my_obj, payload);
+            } else {
+                CDBG_ERROR("%s: cannot map regular stream buf in active state", __func__);
+            }
+        }
+        break;
+    case MM_CHANNEL_EVT_UNMAP_STREAM_BUF:
+        {
+            mm_evt_paylod_unmap_stream_buf_t *payload =
+                (mm_evt_paylod_unmap_stream_buf_t *)in_val;
+            if (payload != NULL &&
+                payload->buf_type == CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF) {
+                rc = mm_channel_unmap_stream_buf(my_obj, payload);
+            } else {
+                CDBG_ERROR("%s: cannot unmap regular stream buf in active state", __func__);
+            }
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+        break;
+    }
+    CDBG("%s : X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_fsm_fn_paused
+ *
+ * DESCRIPTION: channel finite state machine function to handle event
+ *              in PAUSED state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a channel object
+ *   @evt      : channel event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload. Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+                          mm_channel_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+
+    /* currently we are not supporting pause/resume channel */
+    CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+               __func__, my_obj->state, evt, in_val, out_val);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_init
+ *
+ * DESCRIPTION: initialize a channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object be to initialized
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in the channel for bundling.
+ *==========================================================================*/
+int32_t mm_channel_init(mm_channel_t *my_obj,
+                        mm_camera_channel_attr_t *attr,
+                        mm_camera_buf_notify_t channel_cb,
+                        void *userdata)
+{
+    int32_t rc = 0;
+
+    my_obj->bundle.super_buf_notify_cb = channel_cb;
+    my_obj->bundle.user_data = userdata;
+    if (NULL != attr) {
+        my_obj->bundle.superbuf_queue.attr = *attr;
+    }
+
+    CDBG("%s : Launch data poll thread in channel open", __func__);
+    mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
+                                 MM_CAMERA_POLL_TYPE_DATA);
+
+    /* change state to stopped state */
+    my_obj->state = MM_CHANNEL_STATE_STOPPED;
+    return rc;
+}
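+
+/* Minimal usage sketch (hypothetical caller, shown only to illustrate the
+ * event flow handled by the FSM functions above; ch, attr, channel_cb,
+ * userdata and cfg are assumed caller variables, cfg being a
+ * mm_evt_paylod_config_stream_t filled in by the caller, and ch_lock is taken
+ * before each mm_channel_fsm_fn() call as noted earlier):
+ *
+ *   uint32_t s_hdl = 0;
+ *   mm_channel_init(ch, &attr, channel_cb, userdata);
+ *   mm_channel_fsm_fn(ch, MM_CHANNEL_EVT_ADD_STREAM, NULL, &s_hdl);
+ *   mm_channel_fsm_fn(ch, MM_CHANNEL_EVT_CONFIG_STREAM, &cfg, NULL);
+ *   mm_channel_fsm_fn(ch, MM_CHANNEL_EVT_START, NULL, NULL);
+ */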
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_release
+ *
+ * DESCRIPTION: release channel resources. Channel state will move to NOTUSED
+ *              state after this call.
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_channel_release(mm_channel_t *my_obj)
+{
+    /* stop data poll thread */
+    mm_camera_poll_thread_release(&my_obj->poll_thread[0]);
+
+    /* change state to notused state */
+    my_obj->state = MM_CHANNEL_STATE_NOTUSED;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_add_stream
+ *
+ * DESCRIPTION: add a stream into the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    uint8_t idx = 0;
+    uint32_t s_hdl = 0;
+    mm_stream_t *stream_obj = NULL;
+
+    CDBG("%s : E", __func__);
+    /* check available stream */
+    for (idx = 0; idx < MAX_STREAM_NUM_IN_BUNDLE; idx++) {
+        if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+            stream_obj = &my_obj->streams[idx];
+            break;
+        }
+    }
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s: stream limit reached, no more streams can be added", __func__);
+        return s_hdl;
+    }
+
+    /* initialize stream object */
+    memset(stream_obj, 0, sizeof(mm_stream_t));
+    stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
+    stream_obj->ch_obj = my_obj;
+    pthread_mutex_init(&stream_obj->buf_lock, NULL);
+    pthread_mutex_init(&stream_obj->cb_lock, NULL);
+    stream_obj->state = MM_STREAM_STATE_INITED;
+
+    /* acquire stream */
+    rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+    if (0 == rc) {
+        s_hdl = stream_obj->my_hdl;
+    } else {
+        /* error during acquire, de-init */
+        pthread_mutex_destroy(&stream_obj->buf_lock);
+        pthread_mutex_destroy(&stream_obj->cb_lock);
+        memset(stream_obj, 0, sizeof(mm_stream_t));
+    }
+    CDBG("%s : stream handle = %d", __func__, s_hdl);
+    return s_hdl;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_del_stream
+ *
+ * DESCRIPTION: delete a stream from the channel by its handle
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : assume the stream is stopped before it can be deleted
+ *==========================================================================*/
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+                              uint32_t stream_id)
+{
+    int rc = -1;
+    mm_stream_t * stream_obj = NULL;
+    stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s :Invalid Stream Object for stream_id = %d",
+                   __func__, stream_id);
+        return rc;
+    }
+
+    rc = mm_stream_fsm_fn(stream_obj,
+                          MM_STREAM_EVT_RELEASE,
+                          NULL,
+                          NULL);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+                                   uint32_t stream_id,
+                                   mm_camera_stream_config_t *config)
+{
+    int rc = -1;
+    mm_stream_t * stream_obj = NULL;
+    CDBG("%s : E stream ID = %d", __func__, stream_id);
+    stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s :Invalid Stream Object for stream_id = %d", __func__, stream_id);
+        return rc;
+    }
+
+    /* set stream fmt */
+    rc = mm_stream_fsm_fn(stream_obj,
+                          MM_STREAM_EVT_SET_FMT,
+                          (void *)config,
+                          NULL);
+    CDBG("%s : X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel, which should include all
+ *              streams within this channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_get_bundle_info(mm_channel_t *my_obj,
+                                   cam_bundle_config_t *bundle_info)
+{
+    int i;
+    mm_stream_t *s_obj = NULL;
+    int32_t rc = 0;
+
+    memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+    bundle_info->bundle_id = my_obj->my_hdl;
+    bundle_info->num_of_streams = 0;
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                if (CAM_STREAM_TYPE_METADATA != s_obj->stream_info->stream_type) {
+                    bundle_info->stream_ids[bundle_info->num_of_streams++] =
+                                                        s_obj->server_stream_id;
+                }
+            } else {
+                CDBG_ERROR("%s: cannot find stream obj (%d) by handler (%d)",
+                           __func__, i, my_obj->streams[i].my_hdl);
+                rc = -1;
+                break;
+            }
+        }
+    }
+    if (rc != 0) {
+        /* error, reset to 0 */
+        memset(bundle_info, 0, sizeof(cam_bundle_config_t));
+    }
+    return rc;
+}
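+
+/* Note: the metadata stream is skipped when filling stream_ids above, so
+ * bundle_info reports the server stream ids of non-metadata streams only. */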
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_start
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_start(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    int i, j;
+    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+    uint8_t num_streams_to_start = 0;
+    mm_stream_t *s_obj = NULL;
+    int meta_stream_idx = 0;
+
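+    /* Overall flow: collect the streams bundled in this channel (the metadata
+     * stream, if present, is moved to the front so it starts first); when a
+     * bundle callback is registered, set up the superbuf queue plus the cb and
+     * cmd threads; then for each stream allocate and register buffers and
+     * start streaming, rolling back the streams started so far on failure. */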
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                /* remember meta data stream index */
+                if (s_obj->stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+                    meta_stream_idx = num_streams_to_start;
+                }
+                s_objs[num_streams_to_start++] = s_obj;
+            }
+        }
+    }
+
+    if (meta_stream_idx > 0 ) {
+        /* always start meta data stream first, so switch the stream object with the first one */
+        s_obj = s_objs[0];
+        s_objs[0] = s_objs[meta_stream_idx];
+        s_objs[meta_stream_idx] = s_obj;
+    }
+
+    if (NULL != my_obj->bundle.super_buf_notify_cb) {
+        /* need to send up cb, therefore launch thread */
+        /* init superbuf queue */
+        mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
+        my_obj->bundle.superbuf_queue.num_streams = num_streams_to_start;
+        my_obj->bundle.superbuf_queue.expected_frame_id = 0;
+
+        for (i = 0; i < num_streams_to_start; i++) {
+            /* set bundled flag to streams */
+            s_objs[i]->is_bundled = 1;
+            /* record the handles of the streams bundled in this superbuf queue */
+            my_obj->bundle.superbuf_queue.bundled_streams[i] = s_objs[i]->my_hdl;
+        }
+
+        /* launch cb thread for dispatching super buf through cb */
+        mm_camera_cmd_thread_launch(&my_obj->cb_thread,
+                                    mm_channel_dispatch_super_buf,
+                                    (void*)my_obj);
+
+        /* launch cmd thread for super buf dataCB */
+        mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+                                    mm_channel_process_stream_buf,
+                                    (void*)my_obj);
+
+        /* set flag to TRUE */
+        my_obj->bundle.is_active = TRUE;
+    }
+
+    for (i = 0; i < num_streams_to_start; i++) {
+        /* all streams within a channel should be started at the same time */
+        if (s_objs[i]->state == MM_STREAM_STATE_ACTIVE) {
+            CDBG_ERROR("%s: stream already started idx(%d)", __func__, i);
+            rc = -1;
+            break;
+        }
+
+        /* allocate buf */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_GET_BUF,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            CDBG_ERROR("%s: get buf failed at idx(%d)", __func__, i);
+            break;
+        }
+
+        /* reg buf */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_REG_BUF,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            CDBG_ERROR("%s: reg buf failed at idx(%d)", __func__, i);
+            break;
+        }
+
+        /* start stream */
+        rc = mm_stream_fsm_fn(s_objs[i],
+                              MM_STREAM_EVT_START,
+                              NULL,
+                              NULL);
+        if (0 != rc) {
+            CDBG_ERROR("%s: start stream failed at idx(%d)", __func__, i);
+            break;
+        }
+    }
+
+    /* error handling */
+    if (0 != rc) {
+        for (j=0; j<=i; j++) {
+            /* stop streams*/
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_STOP,
+                             NULL,
+                             NULL);
+
+            /* unreg buf */
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_UNREG_BUF,
+                             NULL,
+                             NULL);
+
+            /* put buf back */
+            mm_stream_fsm_fn(s_objs[j],
+                             MM_STREAM_EVT_PUT_BUF,
+                             NULL,
+                             NULL);
+        }
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_stop
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_stop(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    int i;
+    mm_stream_t *s_objs[MAX_STREAM_NUM_IN_BUNDLE] = {NULL};
+    uint8_t num_streams_to_stop = 0;
+    mm_stream_t *s_obj = NULL;
+    int meta_stream_idx = 0;
+
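+    /* Mirror of mm_channel_start(): stop the streams and unregister their
+     * buffers first (the metadata stream, if present, is moved to the end so
+     * it stops last), then tear down the bundle threads and superbuf queue,
+     * and finally release all stream buffers. */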
+    for (i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+        if (my_obj->streams[i].my_hdl > 0) {
+            s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                          my_obj->streams[i].my_hdl);
+            if (NULL != s_obj) {
+                /* remember meta data stream index */
+                if (s_obj->stream_info->stream_type == CAM_STREAM_TYPE_METADATA) {
+                    meta_stream_idx = num_streams_to_stop;
+                }
+                s_objs[num_streams_to_stop++] = s_obj;
+            }
+        }
+    }
+
+    if (meta_stream_idx < num_streams_to_stop - 1 ) {
+        /* always stop meta data stream last, so switch the stream object with the last one */
+        s_obj = s_objs[num_streams_to_stop - 1];
+        s_objs[num_streams_to_stop - 1] = s_objs[meta_stream_idx];
+        s_objs[meta_stream_idx] = s_obj;
+    }
+
+    for (i = 0; i < num_streams_to_stop; i++) {
+        /* stream off */
+        mm_stream_fsm_fn(s_objs[i],
+                         MM_STREAM_EVT_STOP,
+                         NULL,
+                         NULL);
+
+        /* unreg buf at kernel */
+        mm_stream_fsm_fn(s_objs[i],
+                         MM_STREAM_EVT_UNREG_BUF,
+                         NULL,
+                         NULL);
+    }
+
+    /* destroy super buf cmd thread */
+    if (TRUE == my_obj->bundle.is_active) {
+        /* first stop bundle thread */
+        mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+        mm_camera_cmd_thread_release(&my_obj->cb_thread);
+
+        /* deinit superbuf queue */
+        mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+        /* memset bundle info */
+        memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
+    }
+
+    /* since all streams are stopped, we are safe to
+     * release all buffers allocated in stream */
+    for (i = 0; i < num_streams_to_stop; i++) {
+        /* put buf back */
+        mm_stream_fsm_fn(s_objs[i],
+                         MM_STREAM_EVT_PUT_BUF,
+                         NULL,
+                         NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in a bundle, request a certain number of matched
+ *              frames from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @num_buf_requested : number of matched frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj, uint32_t num_buf_requested)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    /* set pending_cnt
+     * will trigger dispatching super frames if pending_cnt > 0 */
+    /* send cam_sem_post to wake up cmd thread to dispatch super buffer */
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+        node->u.req_buf.num_buf_requested = num_buf_requested;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
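+
+/* The request above is delivered asynchronously: a MM_CAMERA_CMD_TYPE_REQ_DATA_CB
+ * node is queued to the channel cmd thread, which updates pending_cnt in
+ * mm_channel_process_stream_buf(). mm_channel_flush_super_buf_queue() and
+ * mm_channel_config_notify_mode() below follow the same enqueue-and-post pattern. */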
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in a bundle, cancel the request for a certain
+ *              number of matched frames from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj)
+{
+    int32_t rc = 0;
+    /* reset pending_cnt */
+    rc = mm_channel_request_super_buf(my_obj, 0);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush superbuf queue
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @frame_idx : frame idx until which to flush all superbufs
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_flush_super_buf_queue(mm_channel_t *my_obj, uint32_t frame_idx)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_FLUSH_QUEUE;
+        node->u.frame_idx = frame_idx;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_config_notify_mode
+ *
+ * DESCRIPTION: configure notification mode
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @notify_mode : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_config_notify_mode(mm_channel_t *my_obj,
+                                      mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->u.notify_mode = notify_mode;
+        node->cmd_type = MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY;
+
+        /* enqueue to cmd thread */
+        cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+        /* wake up cmd thread */
+        cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+                        mm_camera_buf_def_t *buf)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);
+
+    if (NULL != s_obj) {
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_QBUF,
+                              (void *)buf,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_set_stream_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload containing the stream handle and the param
+ *                   struct to be set to the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_SET_PARM,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_get_stream_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload containing the stream handle and the param
+ *                   struct to be retrieved from the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be retrieved from the server are
+ *              already filled in by the upper layer caller. After this call,
+ *              the corresponding fields of the requested parameters will be
+ *              filled in by the server with detailed information.
+ *==========================================================================*/
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+                                   mm_evt_paylod_set_get_stream_parms_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_GET_PARM,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_do_stream_action
+ *
+ * DESCRIPTION: request the server to perform a stream-based action. May be
+ *              removed later if the functionality is included in mm_camera_set_parms
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload containing the stream handle and the action
+ *                   struct buf to be performed by the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Actions to be performed by server are already
+ *              filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_channel_do_stream_action(mm_channel_t *my_obj,
+                                   mm_evt_paylod_do_stream_action_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        rc = mm_stream_fsm_fn(s_obj,
+                              MM_STREAM_EVT_DO_ACTION,
+                              (void *)payload,
+                              NULL);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_map_stream_buf
+ *
+ * DESCRIPTION: map a stream buffer to the server via domain socket
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to payload for mapping
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_map_stream_buf(mm_channel_t *my_obj,
+                                  mm_evt_paylod_map_stream_buf_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        rc = mm_stream_map_buf(s_obj,
+                               payload->buf_type,
+                               payload->buf_idx,
+                               payload->plane_idx,
+                               payload->fd,
+                               payload->size);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_unmap_stream_buf
+ *
+ * DESCRIPTION: unmap a stream buffer from the server via domain socket
+ *
+ * PARAMETERS :
+ *   @my_obj       : channel object
+ *   @payload      : ptr to unmap payload
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_unmap_stream_buf(mm_channel_t *my_obj,
+                                    mm_evt_paylod_unmap_stream_buf_t *payload)
+{
+    int32_t rc = -1;
+    mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj,
+                                                               payload->stream_id);
+    if (NULL != s_obj) {
+        rc = mm_stream_unmap_buf(s_obj, payload->buf_type,
+                                 payload->buf_idx, payload->plane_idx);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_queue_init
+ *
+ * DESCRIPTION: initialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ *   @queue   : ptr to superbuf queue to be initialized
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue)
+{
+    return cam_queue_init(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_queue_deinit
+ *
+ * DESCRIPTION: deinitialize superbuf queue in the channel
+ *
+ * PARAMETERS :
+ *   @queue   : ptr to superbuf queue to be deinitialized
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue)
+{
+    return cam_queue_deinit(&queue->que);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_util_seq_comp_w_rollover
+ *
+ * DESCRIPTION: utility function to handle sequence number comparison with rollover
+ *
+ * PARAMETERS :
+ *   @v1      : first value to be compared
+ *   @v2      : second value to be compared
+ *
+ * RETURN     : int8_t type of comparison result
+ *              >0  -- v1 larger than v2
+ *              =0  -- v1 equal to v2
+ *              <0  -- v1 smaller than v2
+ *==========================================================================*/
+int8_t mm_channel_util_seq_comp_w_rollover(uint32_t v1,
+                                           uint32_t v2)
+{
+    int8_t ret = 0;
+
+    /* TODO: need to handle the case where v2 rolls over to 0 */
+    if (v1 > v2) {
+        ret = 1;
+    } else if (v1 < v2) {
+        ret = -1;
+    }
+
+    return ret;
+}
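+
+/* A possible rollover-aware variant for the TODO above (a sketch only, not
+ * used anywhere in this file): compare through the signed difference so a
+ * frame id that has wrapped past 0 still compares as newer, e.g.
+ *
+ *   int32_t diff = (int32_t)(v1 - v2);
+ *   return (diff > 0) ? 1 : ((diff < 0) ? -1 : 0);
+ */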
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_handle_metadata
+ *
+ * DESCRIPTION: Handle frame matching logic change due to metadata
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_handle_metadata(
+                        mm_channel_t* ch_obj,
+                        mm_channel_queue_t * queue,
+                        mm_camera_buf_info_t *buf_info)
+{
+    int rc = 0 ;
+    mm_stream_t* stream_obj = NULL;
+    stream_obj = mm_channel_util_get_stream_by_handler(ch_obj,
+                buf_info->stream_id);
+
+    if (NULL == stream_obj) {
+        CDBG_ERROR("%s: Invalid Stream Object for stream_id = %d",
+                   __func__, buf_info->stream_id);
+        rc = -1;
+        goto end;
+    }
+    if (NULL == stream_obj->stream_info) {
+        CDBG_ERROR("%s: NULL stream info for stream_id = %d",
+                    __func__, buf_info->stream_id);
+        rc = -1;
+        goto end;
+    }
+
+    if (CAM_STREAM_TYPE_METADATA == stream_obj->stream_info->stream_type) {
+        const cam_metadata_info_t *metadata;
+        metadata = (const cam_metadata_info_t *)buf_info->buf->buffer;
+
+        if (NULL == metadata) {
+            CDBG_ERROR("%s: NULL metadata buffer for metadata stream",
+                       __func__);
+            rc = -1;
+            goto end;
+        }
+
+        if (metadata->is_prep_snapshot_done_valid &&
+                metadata->is_good_frame_idx_range_valid) {
+            CDBG_ERROR("%s: prep_snapshot_done and good_idx_range shouldn't be valid at the same time", __func__);
+            rc = -1;
+            goto end;
+        }
+
+        if (metadata->is_prep_snapshot_done_valid &&
+            metadata->prep_snapshot_done_state == NEED_FUTURE_FRAME) {
+
+            /* Set expected frame id to a future frame idx, large enough to wait
+             * for good_frame_idx_range, and small enough to still capture an image */
+            const int max_future_frame_offset = 100;
+            queue->expected_frame_id += max_future_frame_offset;
+
+            mm_channel_superbuf_flush(ch_obj, queue);
+        } else if (metadata->is_good_frame_idx_range_valid) {
+            if (metadata->good_frame_idx_range.min_frame_idx >
+                queue->expected_frame_id) {
+                CDBG_HIGH("%s: min_frame_idx %d is greater than expected_frame_id %d",
+                    __func__, metadata->good_frame_idx_range.min_frame_idx,
+                    queue->expected_frame_id);
+            }
+            queue->expected_frame_id =
+                metadata->good_frame_idx_range.min_frame_idx;
+        }
+    }
+end:
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_comp_and_enqueue
+ *
+ * DESCRIPTION: implementation for matching logic for superbuf
+ *
+ * PARAMETERS :
+ *   @ch_obj  : channel object
+ *   @queue   : superbuf queue
+ *   @buf_info: new buffer from stream
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_comp_and_enqueue(
+                        mm_channel_t* ch_obj,
+                        mm_channel_queue_t *queue,
+                        mm_camera_buf_info_t *buf_info)
+{
+    cam_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_channel_queue_node_t* super_buf = NULL;
+    uint8_t buf_s_idx, i, found_super_buf, unmatched_bundles;
+    struct cam_list *last_buf, *insert_before_buf;
+
+    CDBG("%s: E", __func__);
+    for (buf_s_idx = 0; buf_s_idx < queue->num_streams; buf_s_idx++) {
+        if (buf_info->stream_id == queue->bundled_streams[buf_s_idx]) {
+            break;
+        }
+    }
+    if (buf_s_idx == queue->num_streams) {
+        CDBG_ERROR("%s: buf from stream (%d) not bundled", __func__, buf_info->stream_id);
+        return -1;
+    }
+
+    if (mm_channel_handle_metadata(ch_obj, queue, buf_info) < 0) {
+        return -1;
+    }
+
+    if (mm_channel_util_seq_comp_w_rollover(buf_info->frame_idx,
+                                            queue->expected_frame_id) < 0) {
+        /* incoming buf is older than expected buf id, will discard it */
+        mm_channel_qbuf(ch_obj, buf_info->buf);
+        return 0;
+    }
+
+    if (MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL != queue->attr.priority) {
+        /* TODO */
+        /* need to decide if we want to queue the frame based on focus or exposure
+         * if frame not to be queued, we need to qbuf it back */
+    }
+
+    /* comp */
+    pthread_mutex_lock(&queue->que.lock);
+    head = &queue->que.head.list;
+    /* start from the head of the queue, which may hold unmatched superbufs */
+    pos = head->next;
+
+    found_super_buf = 0;
+    unmatched_bundles = 0;
+    last_buf = NULL;
+    insert_before_buf = NULL;
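+    /* Walk the queue front to back: skip superbufs that are already matched,
+     * stop at an unmatched superbuf carrying the same frame idx, and otherwise
+     * track the oldest unmatched bundle older than the incoming frame (last_buf)
+     * and the first unmatched bundle newer than it (insert_before_buf) so a new
+     * superbuf can be inserted in order. */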
+    while (pos != head) {
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+        if (NULL != super_buf) {
+            if (super_buf->matched) {
+                /* find a matched super buf, move to next one */
+                pos = pos->next;
+                continue;
+            } else if ( buf_info->frame_idx == super_buf->frame_idx ) {
+                /* have an unmatched super buf that matches our frame idx,
+                 *  break the loop */
+                found_super_buf = 1;
+                break;
+            } else {
+                unmatched_bundles++;
+                if ( NULL == last_buf ) {
+                    if ( super_buf->frame_idx < buf_info->frame_idx ) {
+                        last_buf = pos;
+                    }
+                }
+                if ( NULL == insert_before_buf ) {
+                    if ( super_buf->frame_idx > buf_info->frame_idx ) {
+                        insert_before_buf = pos;
+                    }
+                }
+                pos = pos->next;
+            }
+        }
+    }
+
+    if ( found_super_buf ) {
+            super_buf->super_buf[buf_s_idx] = *buf_info;
+
+            /* check if superbuf is all matched */
+            super_buf->matched = 1;
+            for (i=0; i < super_buf->num_of_bufs; i++) {
+                if (super_buf->super_buf[i].frame_idx == 0) {
+                    super_buf->matched = 0;
+                    break;
+                }
+            }
+
+            if (super_buf->matched) {
+                queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+                queue->match_cnt++;
+                /* Any older unmatched buffers need to be released */
+                if ( last_buf ) {
+                    while ( last_buf != pos ) {
+                        node = member_of(last_buf, cam_node_t, list);
+                        super_buf = (mm_channel_queue_node_t*)node->data;
+                        if (NULL != super_buf) {
+                            for (i=0; i<super_buf->num_of_bufs; i++) {
+                                if (super_buf->super_buf[i].frame_idx != 0) {
+                                        mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                                }
+                            }
+                            queue->que.size--;
+                            last_buf = last_buf->next;
+                            cam_list_del_node(&node->list);
+                            free(node);
+                            free(super_buf);
+                        } else {
+                            CDBG_ERROR(" %s : Invalid superbuf in queue!", __func__);
+                            break;
+                        }
+                    }
+                }
+            }
+    } else {
+        if (  ( queue->attr.max_unmatched_frames < unmatched_bundles ) &&
+              ( NULL == last_buf ) ) {
+            /* incoming frame is older than the last bundled one */
+            mm_channel_qbuf(ch_obj, buf_info->buf);
+        } else {
+            if ( queue->attr.max_unmatched_frames < unmatched_bundles ) {
+                /* release the oldest bundled superbuf */
+                node = member_of(last_buf, cam_node_t, list);
+                super_buf = (mm_channel_queue_node_t*)node->data;
+                for (i=0; i<super_buf->num_of_bufs; i++) {
+                    if (super_buf->super_buf[i].frame_idx != 0) {
+                            mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+                    }
+                }
+                queue->que.size--;
+                node = member_of(last_buf, cam_node_t, list);
+                cam_list_del_node(&node->list);
+                free(node);
+                free(super_buf);
+            }
+            /* insert the new frame at the appropriate position. */
+
+            mm_channel_queue_node_t *new_buf = NULL;
+            cam_node_t* new_node = NULL;
+
+            new_buf = (mm_channel_queue_node_t*)malloc(sizeof(mm_channel_queue_node_t));
+            new_node = (cam_node_t*)malloc(sizeof(cam_node_t));
+            if (NULL != new_buf && NULL != new_node) {
+                memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+                memset(new_node, 0, sizeof(cam_node_t));
+                new_node->data = (void *)new_buf;
+                new_buf->num_of_bufs = queue->num_streams;
+                new_buf->super_buf[buf_s_idx] = *buf_info;
+                new_buf->frame_idx = buf_info->frame_idx;
+
+                /* enqueue */
+                if ( insert_before_buf ) {
+                    cam_list_insert_before_node(&new_node->list, insert_before_buf);
+                } else {
+                    cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+                }
+                queue->que.size++;
+
+                if(queue->num_streams == 1) {
+                    new_buf->matched = 1;
+
+                    queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+                    queue->match_cnt++;
+                }
+            } else {
+                /* No memory */
+                if (NULL != new_buf) {
+                    free(new_buf);
+                }
+                if (NULL != new_node) {
+                    free(new_node);
+                }
+                /* qbuf the new buf since we cannot enqueue */
+                mm_channel_qbuf(ch_obj, buf_info->buf);
+            }
+        }
+    }
+
+    pthread_mutex_unlock(&queue->que.lock);
+
+    CDBG("%s: X", __func__);
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue_internal
+ *
+ * DESCRIPTION: internal implementation for dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue   : superbuf queue
+ *   @matched_only : if dequeued buf should be matched
+ *
+ * RETURN     : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_internal(mm_channel_queue_t * queue,
+                                                              uint8_t matched_only)
+{
+    cam_node_t* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    head = &queue->que.head.list;
+    pos = head->next;
+    if (pos != head) {
+        /* get the first node */
+        node = member_of(pos, cam_node_t, list);
+        super_buf = (mm_channel_queue_node_t*)node->data;
+        if ( (NULL != super_buf) &&
+             (matched_only == TRUE) &&
+             (super_buf->matched == FALSE) ) {
+            /* caller requires a matched frame only, but this superbuf is not
+               matched; simply set the return ptr to NULL */
+            super_buf = NULL;
+        }
+        if (NULL != super_buf) {
+            /* remove from the queue */
+            cam_list_del_node(&node->list);
+            queue->que.size--;
+            if (super_buf->matched == TRUE) {
+                queue->match_cnt--;
+            }
+            free(node);
+        }
+    }
+
+    return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_dequeue
+ *
+ * DESCRIPTION: dequeue from the superbuf queue
+ *
+ * PARAMETERS :
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : ptr to a node from superbuf queue
+ *==========================================================================*/
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(mm_channel_queue_t * queue)
+{
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    pthread_mutex_lock(&queue->que.lock);
+    super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return super_buf;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_bufdone_overflow
+ *
+ * DESCRIPTION: keep the superbuf queue no larger than the watermark set by the
+ *              upper layer via the channel attributes
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+                                             mm_channel_queue_t * queue)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+    if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+        /* for continuous streaming mode, no overflow is needed */
+        return 0;
+    }
+
+    CDBG("%s: before match_cnt=%d, water_mark=%d",
+         __func__, queue->match_cnt, queue->attr.water_mark);
+    /* bufdone overflowed bufs */
+    pthread_mutex_lock(&queue->que.lock);
+    while (queue->match_cnt > queue->attr.water_mark) {
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+        if (NULL != super_buf) {
+            for (i=0; i<super_buf->num_of_bufs; i++) {
+                if (NULL != super_buf->super_buf[i].buf) {
+                    mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+                }
+            }
+            free(super_buf);
+        }
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+    CDBG("%s: after match_cnt=%d, water_mark=%d",
+         __func__, queue->match_cnt, queue->attr.water_mark);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_skip
+ *
+ * DESCRIPTION: depending on the look-back configuration in the channel
+ *              attributes, unwanted superbufs are removed from the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+                                 mm_channel_queue_t * queue)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+    if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+        /* for continuous streaming mode, no skip is needed */
+        return 0;
+    }
+
+    /* bufdone superbufs beyond the look-back window */
+    pthread_mutex_lock(&queue->que.lock);
+    while (queue->match_cnt > queue->attr.look_back) {
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, TRUE);
+        if (NULL != super_buf) {
+            for (i=0; i<super_buf->num_of_bufs; i++) {
+                if (NULL != super_buf->super_buf[i].buf) {
+                    mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+                }
+            }
+            free(super_buf);
+        }
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return rc;
+}
+
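+/* Illustrative note on the two trimming helpers above, assuming (as the loops
+ * suggest) that attr.water_mark and attr.look_back are counts of matched
+ * super-buffers to retain:
+ *
+ *     attr.water_mark = 2, attr.look_back = 1, queue holds M1..M5 (all matched)
+ *
+ *     mm_channel_superbuf_bufdone_overflow() -> M1, M2, M3 re-queued to the
+ *                                               kernel; M4, M5 kept
+ *     mm_channel_superbuf_skip()             -> M4 re-queued; M5 kept
+ *
+ * Both helpers are no-ops in MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS mode.
+ */
+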
+/*===========================================================================
+ * FUNCTION   : mm_channel_superbuf_flush
+ *
+ * DESCRIPTION: flush the superbuf queue.
+ *
+ * PARAMETERS :
+ *   @my_obj  : channel object
+ *   @queue   : superbuf queue
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_channel_superbuf_flush(mm_channel_t* my_obj,
+                                  mm_channel_queue_t * queue)
+{
+    int32_t rc = 0, i;
+    mm_channel_queue_node_t* super_buf = NULL;
+
+    /* bufdone bufs */
+    pthread_mutex_lock(&queue->que.lock);
+    super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE);
+    while (super_buf != NULL) {
+        for (i=0; i<super_buf->num_of_bufs; i++) {
+            if (NULL != super_buf->super_buf[i].buf) {
+                mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+            }
+        }
+        free(super_buf);
+        super_buf = mm_channel_superbuf_dequeue_internal(queue, FALSE);
+    }
+    pthread_mutex_unlock(&queue->que.lock);
+
+    return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
new file mode 100644
index 0000000..f043bfb
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -0,0 +1,1410 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/media.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera_sock.h"
+#include "mm_camera.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl = {0, {{0}}, {0}};
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_generate_handler
+ *
+ * DESCRIPTION: utility function to generate handler for camera/channel/stream
+ *
+ * PARAMETERS :
+ *   @index: index of the object to have handler
+ *
+ * RETURN     : uint32_t type of handle that uniquely identify the object
+ *==========================================================================*/
+uint32_t mm_camera_util_generate_handler(uint8_t index)
+{
+    uint32_t handler = 0;
+    pthread_mutex_lock(&g_handler_lock);
+    g_handler_history_count++;
+    if (0 == g_handler_history_count) {
+        g_handler_history_count++;
+    }
+    handler = g_handler_history_count;
+    handler = (handler<<8) | index;
+    pthread_mutex_unlock(&g_handler_lock);
+    return handler;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_index_by_handler
+ *
+ * DESCRIPTION: utility function to get index from handle
+ *
+ * PARAMETERS :
+ *   @handler: object handle
+ *
+ * RETURN     : uint8_t type of index derived from handle
+ *==========================================================================*/
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
+{
+    return (handler&0x000000ff);
+}
+
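+/* Handle layout used by the two helpers above: a 16-bit rolling history count
+ * (never 0) occupies the upper bits and the 8-bit object index the lowest
+ * byte. For example, the first handle generated for index 3 is
+ *
+ *     (1 << 8) | 3  ==  0x0103
+ *
+ * and mm_camera_util_get_index_by_handler(0x0103) returns 3 (0x0103 & 0xff).
+ */
+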
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_dev_name
+ *
+ * DESCRIPTION: utility function to get device name from camera handle
+ *
+ * PARAMETERS :
+ *   @cam_handle: camera handle
+ *
+ * RETURN     : char ptr to the device name stored in global variable
+ * NOTE       : caller should not free the char ptr
+ *==========================================================================*/
+const char *mm_camera_util_get_dev_name(uint32_t cam_handle)
+{
+    char *dev_name = NULL;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+    if(cam_idx < MM_CAMERA_MAX_NUM_SENSORS) {
+        dev_name = g_cam_ctrl.video_dev_name[cam_idx];
+    }
+    return dev_name;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_util_get_camera_by_handler
+ *
+ * DESCRIPTION: utility function to get camera object from camera handle
+ *
+ * PARAMETERS :
+ *   @cam_handle: camera handle
+ *
+ * RETURN     : ptr to the camera object stored in global variable
+ * NOTE       : caller should not free the camera object ptr
+ *==========================================================================*/
+mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handle)
+{
+    mm_camera_obj_t *cam_obj = NULL;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handle);
+
+    if (cam_idx < MM_CAMERA_MAX_NUM_SENSORS &&
+        (NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+        (cam_handle == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
+        cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+    }
+    return cam_obj;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_query_capability
+ *
+ * DESCRIPTION: query camera capability
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_query_capability(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s E: camera_handler = %d ", __func__, camera_handle);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_query_capability(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_set_parms
+ *
+ * DESCRIPTION: set parameters per camera
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_parms(uint32_t camera_handle,
+                                        parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_set_parms(my_obj, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_parms
+ *
+ * DESCRIPTION: get parameters per camera
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_parms(uint32_t camera_handle,
+                                        parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_parms(my_obj, parms);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_do_auto_focus
+ *
+ * DESCRIPTION: performing auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : if this call succeeds, we always assume an auto_focus
+ *              event will follow.
+ *==========================================================================*/
+static int32_t mm_camera_intf_do_auto_focus(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_do_auto_focus(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_cancel_auto_focus
+ *
+ * DESCRIPTION: cancel auto focus
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_auto_focus(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_cancel_auto_focus(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_prepare_snapshot
+ *
+ * DESCRIPTION: prepare hardware for snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @do_af_flag   : flag indicating if AF is needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_prepare_snapshot(uint32_t camera_handle,
+                                               int32_t do_af_flag)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_prepare_snapshot(my_obj, do_af_flag);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_start_zsl_snapshot
+ *
+ * DESCRIPTION: start zsl snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_zsl_snapshot(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_start_zsl_snapshot(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_stop_zsl_snapshot
+ *
+ * DESCRIPTION: stop zsl snapshot
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_zsl_snapshot(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_stop_zsl_snapshot(my_obj);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_close
+ *
+ * DESCRIPTION: close a camera by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_close(uint32_t camera_handle)
+{
+    int32_t rc = -1;
+    uint8_t cam_idx = camera_handle & 0x00ff;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s E: camera_handler = %d ", __func__, camera_handle);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if (my_obj){
+        my_obj->ref_count--;
+
+        if(my_obj->ref_count > 0) {
+            /* still have reference to obj, return here */
+            CDBG("%s: ref_count=%d\n", __func__, my_obj->ref_count);
+            pthread_mutex_unlock(&g_intf_lock);
+            rc = 0;
+        } else {
+            /* need to close camera here as there is no other reference;
+             * first clear g_cam_ctrl's reference to cam_obj */
+            g_cam_ctrl.cam_obj[cam_idx] = NULL;
+
+            pthread_mutex_lock(&my_obj->cam_lock);
+            pthread_mutex_unlock(&g_intf_lock);
+
+            rc = mm_camera_close(my_obj);
+
+            pthread_mutex_destroy(&my_obj->cam_lock);
+            free(my_obj);
+        }
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_add_channel
+ *
+ * DESCRIPTION: add a channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @attr         : bundle attribute of the channel if needed
+ *   @channel_cb   : callback function for bundle data notify
+ *   @userdata     : user data ptr
+ *
+ * RETURN     : uint32_t type of channel handle
+ *              0  -- invalid channel handle, meaning the op failed
+ *              >0 -- successfully added a channel with a valid handle
+ * NOTE       : if no bundle data notify is needed, meaning each stream in the
+ *              channel will have its own stream data notify callback, then
+ *              attr, channel_cb, and userdata can be NULL. In this case,
+ *              no matching logic will be performed in channel for the bundling.
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_channel(uint32_t camera_handle,
+                                           mm_camera_channel_attr_t *attr,
+                                           mm_camera_buf_notify_t channel_cb,
+                                           void *userdata)
+{
+    uint32_t ch_id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d", __func__, camera_handle);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        ch_id = mm_camera_add_channel(my_obj, attr, channel_cb, userdata);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X ch_id = %d", __func__, ch_id);
+    return ch_id;
+}
+
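+/* A minimal usage sketch for a bundled channel; cam is the vtbl returned by
+ * camera_open(), bundle_cb and user_ptr are placeholder names, and the
+ * attribute fields are assumed to match the queue->attr fields used by the
+ * superbuf matching code in mm_channel.c:
+ *
+ *     mm_camera_channel_attr_t attr;
+ *     memset(&attr, 0, sizeof(attr));
+ *     attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+ *     attr.water_mark = 2;              // matched super-bufs to retain
+ *     attr.look_back = 1;
+ *     attr.post_frame_skip = 0;
+ *     attr.max_unmatched_frames = 3;
+ *     uint32_t ch_id = cam->ops->add_channel(cam->camera_handle, &attr,
+ *                                            bundle_cb, user_ptr);
+ *     // ch_id == 0 means the add failed
+ */
+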
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_del_channel
+ *
+ * DESCRIPTION: delete a channel by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : all streams in the channel should be stopped already before
+ *              this channel can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_channel(uint32_t camera_handle,
+                                          uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E ch_id = %d", __func__, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_del_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_bundle_info
+ *
+ * DESCRIPTION: query bundle info of the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @bundle_info  : bundle info to be filled in
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_bundle_info(uint32_t camera_handle,
+                                              uint32_t ch_id,
+                                              cam_bundle_config_t *bundle_info)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E ch_id = %d", __func__, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_bundle_info(my_obj, ch_id, bundle_info);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X", __func__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_register_event_notify
+ *
+ * DESCRIPTION: register for event notify
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @evt_cb       : callback for event notify
+ *   @user_data    : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_register_event_notify(uint32_t camera_handle,
+                                                    mm_camera_event_notify_t evt_cb,
+                                                    void * user_data)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E ", __func__);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_register_event_notify(my_obj, evt_cb, user_data);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :E rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @buf          : buf ptr to be enqueued
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_qbuf(uint32_t camera_handle,
+                                    uint32_t ch_id,
+                                    mm_camera_buf_def_t *buf)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_qbuf(my_obj, ch_id, buf);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X evt_type = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_add_stream
+ *
+ * DESCRIPTION: add a stream into a channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : uint32_t type of stream handle
+ *              0  -- invalid stream handle, meaning the op failed
+ *              >0 -- successfully added a stream with a valid handle
+ *==========================================================================*/
+static uint32_t mm_camera_intf_add_stream(uint32_t camera_handle,
+                                          uint32_t ch_id)
+{
+    uint32_t stream_id = 0;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s : E handle = %d ch_id = %d",
+         __func__, camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        stream_id = mm_camera_add_stream(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X stream_id = %d", __func__, stream_id);
+    return stream_id;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_del_stream
+ *
+ * DESCRIPTION: delete a stream by its handle
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : stream should be stopped already before it can be deleted.
+ *==========================================================================*/
+static int32_t mm_camera_intf_del_stream(uint32_t camera_handle,
+                                         uint32_t ch_id,
+                                         uint32_t stream_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s : E handle = %d ch_id = %d stream_id = %d",
+         __func__, camera_handle, ch_id, stream_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_del_stream(my_obj, ch_id, stream_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_config_stream
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @stream_id    : stream handle
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_config_stream(uint32_t camera_handle,
+                                            uint32_t ch_id,
+                                            uint32_t stream_id,
+                                            mm_camera_stream_config_t *config)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E handle = %d, ch_id = %d,stream_id = %d",
+         __func__, camera_handle, ch_id, stream_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :mm_camera_intf_config_stream stream_id = %d",__func__,stream_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_config_stream(my_obj, ch_id, stream_id, config);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_start_channel
+ *
+ * DESCRIPTION: start a channel, which will start all streams in the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_start_channel(uint32_t camera_handle,
+                                            uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_start_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_stop_channel
+ *
+ * DESCRIPTION: stop a channel, which will stop all streams in the channel
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_stop_channel(uint32_t camera_handle,
+                                           uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_stop_channel(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_request_super_buf
+ *
+ * DESCRIPTION: for burst mode in bundle, request a certain number of matched
+ *              frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @num_buf_requested : number of matched frames needed
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_request_super_buf(uint32_t camera_handle,
+                                                uint32_t ch_id,
+                                                uint32_t num_buf_requested)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_request_super_buf(my_obj, ch_id, num_buf_requested);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_cancel_super_buf_request
+ *
+ * DESCRIPTION: for burst mode in bundle, cancel the request for a certain number
+ *              of matched frames from superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_cancel_super_buf_request(uint32_t camera_handle,
+                                                       uint32_t ch_id)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_flush_super_buf_queue
+ *
+ * DESCRIPTION: flush out all frames in the superbuf queue
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @frame_idx    : frame index
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_flush_super_buf_queue(uint32_t camera_handle,
+                                                    uint32_t ch_id, uint32_t frame_idx)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_flush_super_buf_queue(my_obj, ch_id, frame_idx);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_configure_notify_mode
+ *
+ * DESCRIPTION: Configures channel notification mode
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @notify_mode  : notification mode
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_configure_notify_mode(uint32_t camera_handle,
+                                                    uint32_t ch_id,
+                                                    mm_camera_super_buf_notify_mode_t notify_mode)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    CDBG("%s :E camera_handler = %d,ch_id = %d",
+         __func__, camera_handle, ch_id);
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_config_channel_notify(my_obj, ch_id, notify_mode);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_buf
+ *
+ * DESCRIPTION: mapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_buf(uint32_t camera_handle,
+                                      uint8_t buf_type,
+                                      int fd,
+                                      uint32_t size)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_buf(my_obj, buf_type, fd, size);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_unmap_buf
+ *
+ * DESCRIPTION: unmapping camera buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_CAPABILITY
+ *                   CAM_MAPPING_BUF_TYPE_SETPARM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_GETPARM_BUF
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_buf(uint32_t camera_handle,
+                                        uint8_t buf_type)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_unmap_buf(my_obj, buf_type);
+    } else {
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    return rc;
+}
+
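+/* Sketch of the ordering the NOTEs above assume for the parameter buffer;
+ * ion_fd, size and parm_buf are placeholders for memory the caller has
+ * already allocated and shared with the server:
+ *
+ *     cam->ops->map_buf(cam->camera_handle,
+ *                       CAM_MAPPING_BUF_TYPE_SETPARM_BUF, ion_fd, size);
+ *     // ... fill in the fields of the mapped parm_buffer_t (parm_buf) ...
+ *     cam->ops->set_parms(cam->camera_handle, parm_buf);
+ *     // ... at teardown ...
+ *     cam->ops->unmap_buf(cam->camera_handle, CAM_MAPPING_BUF_TYPE_SETPARM_BUF);
+ */
+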
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_set_stream_parms
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+static int32_t mm_camera_intf_set_stream_parms(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t s_id,
+                                               cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d,ch_id = %d,s_id = %d",
+         __func__, camera_handle, ch_id, s_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_set_stream_parms(my_obj, ch_id, s_id, parms);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_get_stream_parms
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @parms        : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Parameters to be get from server are already
+ *              filled in by upper layer caller. After this call, corresponding
+ *              fields of requested parameters will be filled in by server with
+ *              detailed information.
+ *==========================================================================*/
+static int32_t mm_camera_intf_get_stream_parms(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t s_id,
+                                               cam_stream_parm_buffer_t *parms)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d,ch_id = %d,s_id = %d",
+         __func__, camera_handle, ch_id, s_id);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_get_stream_parms(my_obj, ch_id, s_id, parms);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_map_stream_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_map_stream_buf(uint32_t camera_handle,
+                                             uint32_t ch_id,
+                                             uint32_t stream_id,
+                                             uint8_t buf_type,
+                                             uint32_t buf_idx,
+                                             int32_t plane_idx,
+                                             int fd,
+                                             uint32_t size)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+         __func__, camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_map_stream_buf(my_obj, ch_id, stream_id,
+                                      buf_type, buf_idx, plane_idx,
+                                      fd, size);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_intf_unmap_stream_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @camera_handle: camera handle
+ *   @ch_id        : channel handle
+ *   @s_id         : stream handle
+ *   @buf_type     : type of buffer to be unmapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @buf_idx      : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_intf_unmap_stream_buf(uint32_t camera_handle,
+                                               uint32_t ch_id,
+                                               uint32_t stream_id,
+                                               uint8_t buf_type,
+                                               uint32_t buf_idx,
+                                               int32_t plane_idx)
+{
+    int32_t rc = -1;
+    mm_camera_obj_t * my_obj = NULL;
+
+    pthread_mutex_lock(&g_intf_lock);
+    my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+    CDBG("%s :E camera_handle = %d, ch_id = %d, s_id = %d, buf_idx = %d, plane_idx = %d",
+         __func__, camera_handle, ch_id, stream_id, buf_idx, plane_idx);
+
+    if(my_obj) {
+        pthread_mutex_lock(&my_obj->cam_lock);
+        pthread_mutex_unlock(&g_intf_lock);
+        rc = mm_camera_unmap_stream_buf(my_obj, ch_id, stream_id,
+                                        buf_type, buf_idx, plane_idx);
+    }else{
+        pthread_mutex_unlock(&g_intf_lock);
+    }
+
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : get_num_of_cameras
+ *
+ * DESCRIPTION: get number of cameras
+ *
+ * PARAMETERS :
+ *
+ * RETURN     : number of cameras supported
+ *==========================================================================*/
+uint8_t get_num_of_cameras()
+{
+    int rc = 0;
+    int dev_fd = 0;
+    struct media_device_info mdev_info;
+    int num_media_devices = 0;
+    uint8_t num_cameras = 0;
+
+    CDBG("%s : E", __func__);
+    /* lock the mutex */
+    pthread_mutex_lock(&g_intf_lock);
+    while (1) {
+        char dev_name[32];
+        int num_entities;
+        snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+        dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (dev_fd <= 0) {
+            CDBG("Done discovering media devices\n");
+            break;
+        }
+        num_media_devices++;
+        memset(&mdev_info, 0, sizeof(mdev_info));
+        rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+        if (rc < 0) {
+            CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+            close(dev_fd);
+            dev_fd = 0;
+            num_cameras = 0;
+            break;
+        }
+
+        if(strncmp(mdev_info.model, MSM_CAMERA_NAME, sizeof(mdev_info.model)) != 0) {
+            close(dev_fd);
+            dev_fd = 0;
+            continue;
+        }
+
+        num_entities = 1;
+        while (1) {
+            struct media_entity_desc entity;
+            memset(&entity, 0, sizeof(entity));
+            entity.id = num_entities++;
+            rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+            if (rc < 0) {
+                CDBG("Done enumerating media entities\n");
+                rc = 0;
+                break;
+            }
+            if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
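+                /* remember the video device node name reported for this camera index */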
+                strncpy(g_cam_ctrl.video_dev_name[num_cameras],
+                     entity.name,
+                     sizeof(g_cam_ctrl.video_dev_name[num_cameras]));
+                break;
+            }
+        }
+
+        CDBG("%s: dev_info[id=%d,name='%s']\n",
+            __func__, num_cameras, g_cam_ctrl.video_dev_name[num_cameras]);
+
+        num_cameras++;
+        close(dev_fd);
+        dev_fd = 0;
+    }
+    g_cam_ctrl.num_cam = num_cameras;
+
+    /* unlock the mutex */
+    pthread_mutex_unlock(&g_intf_lock);
+    CDBG("%s: num_cameras=%d\n", __func__, g_cam_ctrl.num_cam);
+    return g_cam_ctrl.num_cam;
+}
+
+/* camera ops v-table */
+static mm_camera_ops_t mm_camera_ops = {
+    .query_capability = mm_camera_intf_query_capability,
+    .register_event_notify = mm_camera_intf_register_event_notify,
+    .close_camera = mm_camera_intf_close,
+    .set_parms = mm_camera_intf_set_parms,
+    .get_parms = mm_camera_intf_get_parms,
+    .do_auto_focus = mm_camera_intf_do_auto_focus,
+    .cancel_auto_focus = mm_camera_intf_cancel_auto_focus,
+    .prepare_snapshot = mm_camera_intf_prepare_snapshot,
+    .start_zsl_snapshot = mm_camera_intf_start_zsl_snapshot,
+    .stop_zsl_snapshot = mm_camera_intf_stop_zsl_snapshot,
+    .map_buf = mm_camera_intf_map_buf,
+    .unmap_buf = mm_camera_intf_unmap_buf,
+    .add_channel = mm_camera_intf_add_channel,
+    .delete_channel = mm_camera_intf_del_channel,
+    .get_bundle_info = mm_camera_intf_get_bundle_info,
+    .add_stream = mm_camera_intf_add_stream,
+    .delete_stream = mm_camera_intf_del_stream,
+    .config_stream = mm_camera_intf_config_stream,
+    .qbuf = mm_camera_intf_qbuf,
+    .map_stream_buf = mm_camera_intf_map_stream_buf,
+    .unmap_stream_buf = mm_camera_intf_unmap_stream_buf,
+    .set_stream_parms = mm_camera_intf_set_stream_parms,
+    .get_stream_parms = mm_camera_intf_get_stream_parms,
+    .start_channel = mm_camera_intf_start_channel,
+    .stop_channel = mm_camera_intf_stop_channel,
+    .request_super_buf = mm_camera_intf_request_super_buf,
+    .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
+    .flush_super_buf_queue = mm_camera_intf_flush_super_buf_queue,
+    .configure_notify_mode = mm_camera_intf_configure_notify_mode
+};
+
+/*===========================================================================
+ * FUNCTION   : camera_open
+ *
+ * DESCRIPTION: open a camera by camera index
+ *
+ * PARAMETERS :
+ *   @camera_idx : camera index; should be within the range of 0 to num_of_cameras - 1
+ *
+ * RETURN     : ptr to a virtual table containing camera handle and operation table.
+ *              NULL if failed.
+ *==========================================================================*/
+mm_camera_vtbl_t * camera_open(uint8_t camera_idx)
+{
+    int32_t rc = 0;
+    mm_camera_obj_t* cam_obj = NULL;
+
+    CDBG("%s: E camera_idx = %d\n", __func__, camera_idx);
+    if (camera_idx >= g_cam_ctrl.num_cam) {
+        CDBG_ERROR("%s: Invalid camera_idx (%d)", __func__, camera_idx);
+        return NULL;
+    }
+
+    pthread_mutex_lock(&g_intf_lock);
+    /* opened already */
+    if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
+        /* Add reference */
+        g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
+        pthread_mutex_unlock(&g_intf_lock);
+        CDBG("%s:  opened alreadyn", __func__);
+        return &g_cam_ctrl.cam_obj[camera_idx]->vtbl;
+    }
+
+    cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+    if(NULL == cam_obj) {
+        pthread_mutex_unlock(&g_intf_lock);
+        CDBG("%s:  no mem", __func__);
+        return NULL;
+    }
+
+    /* initialize camera obj */
+    memset(cam_obj, 0, sizeof(mm_camera_obj_t));
+    cam_obj->ref_count++;
+    cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
+    cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
+    cam_obj->vtbl.ops = &mm_camera_ops;
+    pthread_mutex_init(&cam_obj->cam_lock, NULL);
+
+    rc = mm_camera_open(cam_obj);
+    if(rc != 0) {
+        CDBG_ERROR("%s: mm_camera_open err = %d", __func__, rc);
+        pthread_mutex_destroy(&cam_obj->cam_lock);
+        g_cam_ctrl.cam_obj[camera_idx] = NULL;
+        free(cam_obj);
+        cam_obj = NULL;
+        pthread_mutex_unlock(&g_intf_lock);
+        return NULL;
+    } else {
+        CDBG("%s: Open succeeded\n", __func__);
+        g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
+        pthread_mutex_unlock(&g_intf_lock);
+        return &cam_obj->vtbl;
+    }
+}
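The open path above is reference counted: a second camera_open() on the same index only bumps ref_count and returns the vtable that already exists. As a minimal client-side sketch (not part of this patch; it assumes the public declarations in mm_camera_interface.h, including get_num_of_cameras() earlier in this file, and the ops signatures implied by the v-table above):

    #include "mm_camera_interface.h"

    static int open_first_camera(void)
    {
        mm_camera_vtbl_t *cam = NULL;

        if (get_num_of_cameras() <= 0) {        /* enumerates /dev/media* nodes */
            return -1;
        }
        cam = camera_open(0);                   /* handle + ops vtable, NULL on failure */
        if (NULL == cam) {
            return -1;
        }
        /* every further call goes through the ops table with the camera handle */
        cam->ops->query_capability(cam->camera_handle);
        cam->ops->close_camera(cam->camera_handle);
        return 0;
    }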
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
new file mode 100755
index 0000000..58953d1
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -0,0 +1,229 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/uio.h>
+#include <sys/un.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_create
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ *
+ * PARAMETERS :
+ *   @cam_id   : camera ID
+ *   @sock_type: socket type, TCP/UDP
+ *
+ * RETURN     : fd related to the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+    int socket_fd;
+    struct sockaddr_un sock_addr;
+    int sktype;
+    int rc;
+
+    switch (sock_type)
+    {
+      case MM_CAMERA_SOCK_TYPE_UDP:
+        sktype = SOCK_DGRAM;
+        break;
+      case MM_CAMERA_SOCK_TYPE_TCP:
+        sktype = SOCK_STREAM;
+        break;
+      default:
+        CDBG_ERROR("%s: unknown socket type =%d", __func__, sock_type);
+        return -1;
+    }
+    socket_fd = socket(AF_UNIX, sktype, 0);
+    if (socket_fd < 0) {
+        CDBG_ERROR("%s: error create socket fd =%d", __func__, socket_fd);
+        return socket_fd;
+    }
+
+    memset(&sock_addr, 0, sizeof(sock_addr));
+    sock_addr.sun_family = AF_UNIX;
+    snprintf(sock_addr.sun_path, UNIX_PATH_MAX, "/data/cam_socket%d", cam_id);
+    if((rc = connect(socket_fd, (struct sockaddr *) &sock_addr,
+      sizeof(sock_addr))) != 0) {
+      CDBG_ERROR("%s: connect to %s failed: %s", __func__,
+        sock_addr.sun_path, strerror(errno));
+      close(socket_fd);
+      socket_fd = -1;
+    }
+
+    CDBG("%s: socket_fd=%d %s", __func__, socket_fd, sock_addr.sun_path);
+    return socket_fd;
+}
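For illustration only (not part of this patch): the helper connects a client-side AF_UNIX socket to the per-camera path /data/cam_socketN that the camera daemon is expected to have bound. A typical-use sketch, assuming a running daemon:

    /* open a stream-type socket for camera 0, exchange packets, then close */
    int fd = mm_camera_socket_create(0, MM_CAMERA_SOCK_TYPE_TCP);
    if (fd >= 0) {
        /* ... mm_camera_socket_sendmsg()/recvmsg() exchanges go here ... */
        mm_camera_socket_close(fd);
    }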
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_close
+ *
+ * DESCRIPTION:  close domain socket by its fd
+ *   @fd      : file descriptor for the domain socket to be closed
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+    if (fd > 0) {
+      close(fd);
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_sendmsg
+ *
+ * DESCRIPTION:  send msg through domain socket
+ *   @fd      : socket fd
+ *   @msg     : pointer to msg to be sent over domain socket
+ *   @buf_size: size of the msg pointed to by @msg
+ *   @sendfd  : file descriptor to be sent along with the msg; pass <= 0 if none
+ *
+ * RETURN     : the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int sendfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr * cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+
+    if (msg == NULL) {
+      CDBG("%s: msg is NULL", __func__);
+      return -1;
+    }
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+    CDBG("%s: iov_len=%d", __func__, iov[0].iov_len);
+
+    msgh.msg_control = NULL;
+    msgh.msg_controllen = 0;
+
+    /* if sendfd is valid, we need to pass it through control msg */
+    if( sendfd > 0) {
+      msgh.msg_control = control;
+      msgh.msg_controllen = sizeof(control);
+      cmsghp = CMSG_FIRSTHDR(&msgh);
+      if (cmsghp != NULL) {
+        CDBG("%s: Got ctrl msg pointer", __func__);
+        cmsghp->cmsg_level = SOL_SOCKET;
+        cmsghp->cmsg_type = SCM_RIGHTS;
+        cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+        *((int *)CMSG_DATA(cmsghp)) = sendfd;
+        CDBG("%s: cmsg data=%d", __func__, *((int *) CMSG_DATA(cmsghp)));
+      } else {
+        CDBG("%s: ctrl msg NULL", __func__);
+        return -1;
+      }
+    }
+
+    return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_socket_recvmsg
+ *
+ * DESCRIPTION:  receive msg from domain socket.
+ *   @fd      : socket fd
+ *   @msg     : pointer to mm_camera_sock_msg_packet_t to hold incoming msg,
+ *              needs to be allocated by the caller
+ *   @buf_size: the size of the buf that holds incoming msg
+ *   @rcvdfd  : pointer to hold recvd file descriptor if not NULL.
+ *
+ * RETURN     : the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd)
+{
+    struct msghdr msgh;
+    struct iovec iov[1];
+    struct cmsghdr *cmsghp = NULL;
+    char control[CMSG_SPACE(sizeof(int))];
+    int rcvd_fd = -1;
+    int rcvd_len = 0;
+
+    if ( (msg == NULL) || (buf_size == 0) ) {
+      CDBG_ERROR(" %s: msg buf is NULL or buf_size is 0", __func__);
+      return -1;
+    }
+
+    memset(&msgh, 0, sizeof(msgh));
+    msgh.msg_name = NULL;
+    msgh.msg_namelen = 0;
+    msgh.msg_control = control;
+    msgh.msg_controllen = sizeof(control);
+
+    iov[0].iov_base = msg;
+    iov[0].iov_len = buf_size;
+    msgh.msg_iov = iov;
+    msgh.msg_iovlen = 1;
+
+    if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+      CDBG_ERROR(" %s: recvmsg failed", __func__);
+      return rcvd_len;
+    }
+
+    CDBG("%s:  msg_ctrl %p len %d", __func__, msgh.msg_control, msgh.msg_controllen);
+
+    if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+        (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+      if (cmsghp->cmsg_level == SOL_SOCKET &&
+        cmsghp->cmsg_type == SCM_RIGHTS) {
+        CDBG("%s:  CtrlMsg is valid", __func__);
+        rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+        CDBG("%s:  Receieved fd=%d", __func__, rcvd_fd);
+      } else {
+        CDBG_ERROR("%s:  Unexpected Control Msg. Line=%d", __func__, __LINE__);
+      }
+    }
+
+    if (rcvdfd) {
+      *rcvdfd = rcvd_fd;
+    }
+
+    return rcvd_len;
+}
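Taken together, the two helpers above implement standard SCM_RIGHTS descriptor passing over the AF_UNIX socket: the sender attaches a buffer fd in the ancillary data, and the receiver gets back a new descriptor referring to the same underlying buffer. A minimal round-trip sketch (demo_msg, sock_fd and buf_fd are illustrative placeholders, not the real packet format used by the HAL):

    #include "mm_camera_sock.h"

    struct demo_msg { int op; unsigned int size; };  /* stand-in for the real packet */

    static void demo_fd_passing(int sock_fd, int buf_fd)
    {
        /* sender: the buffer fd rides along in an SCM_RIGHTS control message */
        struct demo_msg out = { 1, 4096 };
        mm_camera_socket_sendmsg(sock_fd, &out, sizeof(out), buf_fd);

        /* receiver: the kernel installs a duplicated fd, returned via rcvd_fd */
        struct demo_msg in;
        int rcvd_fd = -1;
        mm_camera_socket_recvmsg(sock_fd, &in, sizeof(in), &rcvd_fd);
    }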
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
new file mode 100644
index 0000000..a71f914
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -0,0 +1,2567 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <time.h>
+#include <cam_semaphore.h>
+#ifdef VENUS_PRESENT
+#include <media/msm_media_info.h>
+#endif
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* internal function declarations */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj,
+                       mm_camera_buf_def_t *buf);
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj);
+int32_t mm_stream_set_fmt(mm_stream_t * my_obj);
+int32_t mm_stream_sync_info(mm_stream_t *my_obj);
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_request_buf(mm_stream_t * my_obj);
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_release(mm_stream_t *my_obj);
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *value);
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *value);
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+                            void *in_value);
+int32_t mm_stream_streamon(mm_stream_t *my_obj);
+int32_t mm_stream_streamoff(mm_stream_t *my_obj);
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+                                 mm_camera_buf_info_t* buf_info,
+                                 uint8_t num_planes);
+int32_t mm_stream_config(mm_stream_t *my_obj,
+                         mm_camera_stream_config_t *config);
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+                           mm_camera_buf_def_t *frame);
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj);
+
+/* state machine function declare */
+int32_t mm_stream_fsm_inited(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+int32_t mm_stream_fsm_acquired(mm_stream_t * my_obj,
+                               mm_stream_evt_type_t evt,
+                               void * in_val,
+                               void * out_val);
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val);
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val);
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt);
+
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_handle_rcvd_buf
+ *
+ * DESCRIPTION: function to handle newly received stream buffer
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *   @buf_info: ptr to struct storing buffer information
+ *   @has_cb  : flag indicating whether any buffer callback is registered
+ *
+ * RETURN     : none
+ *==========================================================================*/
+void mm_stream_handle_rcvd_buf(mm_stream_t *my_obj,
+                               mm_camera_buf_info_t *buf_info,
+                               uint8_t has_cb)
+{
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* enqueue to super buf thread */
+    if (my_obj->is_bundled) {
+        mm_camera_cmdcb_t* node = NULL;
+
+        /* send cam_sem_post to wake up channel cmd thread to enqueue to super buffer */
+        node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(mm_camera_cmdcb_t));
+            node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+            node->u.buf = *buf_info;
+
+            /* enqueue to cmd thread */
+            cam_queue_enq(&(my_obj->ch_obj->cmd_thread.cmd_queue), node);
+
+            /* wake up cmd thread */
+            cam_sem_post(&(my_obj->ch_obj->cmd_thread.cmd_sem));
+        } else {
+            CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        }
+    }
+
+    if(has_cb) {
+        mm_camera_cmdcb_t* node = NULL;
+
+        /* send cam_sem_post to wake up cmd thread to dispatch dataCB */
+        node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(mm_camera_cmdcb_t));
+            node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+            node->u.buf = *buf_info;
+
+            /* enqueue to cmd thread */
+            cam_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+            /* wake up cmd thread */
+            cam_sem_post(&(my_obj->cmd_thread.cmd_sem));
+        } else {
+            CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        }
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_data_notify
+ *
+ * DESCRIPTION: callback to handle data notify from kernel
+ *
+ * PARAMETERS :
+ *   @user_data : user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_data_notify(void* user_data)
+{
+    mm_stream_t *my_obj = (mm_stream_t*)user_data;
+    int32_t idx = -1, i, rc;
+    uint8_t has_cb = 0;
+    mm_camera_buf_info_t buf_info;
+
+    if (NULL == my_obj) {
+        return;
+    }
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    if (MM_STREAM_STATE_ACTIVE != my_obj->state) {
+        /* this CB should only be received in the active (stream-on) state;
+         * if not, return here */
+        CDBG_ERROR("%s: ERROR!! Wrong state (%d) to receive data notify!",
+                   __func__, my_obj->state);
+        return;
+    }
+
+    memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+    rc = mm_stream_read_msm_frame(my_obj, &buf_info, my_obj->frame_offset.num_planes);
+    if (rc != 0) {
+        return;
+    }
+    idx = buf_info.buf->buf_idx;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL != my_obj->buf_cb[i].cb) {
+            /* at least one CB is registered; a ref will be added for dispatch */
+            has_cb = 1;
+            break;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    /* update buffer location */
+    my_obj->buf_status[idx].in_kernel = 0;
+
+    /* update buf ref count */
+    if (my_obj->is_bundled) {
+        /* need to add into super buf since bundled, add ref count */
+        my_obj->buf_status[idx].buf_refcnt++;
+    }
+    my_obj->buf_status[idx].buf_refcnt += has_cb;
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    mm_stream_handle_rcvd_buf(my_obj, &buf_info, has_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_dispatch_app_data
+ *
+ * DESCRIPTION: dispatch stream buffer to registered users
+ *
+ * PARAMETERS :
+ *   @cmd_cb  : ptr storing stream buffer information
+ *   @userdata: user data ptr (stream object)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
+                                        void* user_data)
+{
+    int i;
+    mm_stream_t * my_obj = (mm_stream_t *)user_data;
+    mm_camera_buf_info_t* buf_info = NULL;
+    mm_camera_super_buf_t super_buf;
+    mm_camera_cmd_thread_name("mm_cam_stream");
+
+    if (NULL == my_obj) {
+        return;
+    }
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
+        CDBG_ERROR("%s: Wrong cmd_type (%d) for dataCB",
+                   __func__, cmd_cb->cmd_type);
+        return;
+    }
+
+    buf_info = &cmd_cb->u.buf;
+    memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+    super_buf.num_bufs = 1;
+    super_buf.bufs[0] = buf_info->buf;
+    super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+    super_buf.ch_id = my_obj->ch_obj->my_hdl;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL != my_obj->buf_cb[i].cb) {
+            if (my_obj->buf_cb[i].cb_count != 0) {
+                /* if <0, means infinite CB
+                 * if >0, means CB for a certain number of times
+                 * in both cases we need to call the CB */
+
+                /* increase buf ref cnt */
+                pthread_mutex_lock(&my_obj->buf_lock);
+                my_obj->buf_status[buf_info->buf->buf_idx].buf_refcnt++;
+                pthread_mutex_unlock(&my_obj->buf_lock);
+
+                /* callback */
+                my_obj->buf_cb[i].cb(&super_buf,
+                                     my_obj->buf_cb[i].user_data);
+            }
+
+            /* if >0, reduce count by 1 every time we called CB until reaches 0
+             * when count reach 0, reset the buf_cb to have no CB */
+            if (my_obj->buf_cb[i].cb_count > 0) {
+                my_obj->buf_cb[i].cb_count--;
+                if (0 == my_obj->buf_cb[i].cb_count) {
+                    my_obj->buf_cb[i].cb = NULL;
+                    my_obj->buf_cb[i].user_data = NULL;
+                }
+            }
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    /* do buf_done since we increased the refcnt by one when has_cb was set */
+    mm_stream_buf_done(my_obj, buf_info->buf);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_fn
+ *
+ * DESCRIPTION: stream finite state machine entry function. Depends on stream
+ *              state, incoming event will be handled differently.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+                         mm_stream_evt_type_t evt,
+                         void * in_val,
+                         void * out_val)
+{
+    int32_t rc = -1;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch (my_obj->state) {
+    case MM_STREAM_STATE_NOTUSED:
+        CDBG("%s: Not handling evt in unused state", __func__);
+        break;
+    case MM_STREAM_STATE_INITED:
+        rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_ACQUIRED:
+        rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_CFG:
+        rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_BUFFED:
+        rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_REG:
+        rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
+        break;
+    case MM_STREAM_STATE_ACTIVE:
+        rc = mm_stream_fsm_active(my_obj, evt, in_val, out_val);
+        break;
+    default:
+        CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
+        break;
+    }
+    CDBG("%s : X rc =%d",__func__,rc);
+    return rc;
+}
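The per-state handlers that follow move a stream through INITED -> ACQUIRED -> CFG -> BUFFED -> REG -> ACTIVE and back. As an illustrative sketch of the forward path only (normally driven by the channel code; stream and cfg are assumed to be set up by the caller, and error handling is omitted):

    static void stream_bringup_sketch(mm_stream_t *stream,
                                      mm_camera_stream_config_t *cfg)
    {
        mm_stream_fsm_fn(stream, MM_STREAM_EVT_ACQUIRE, NULL, NULL); /* INITED   -> ACQUIRED */
        mm_stream_fsm_fn(stream, MM_STREAM_EVT_SET_FMT, cfg,  NULL); /* ACQUIRED -> CFG      */
        mm_stream_fsm_fn(stream, MM_STREAM_EVT_GET_BUF, NULL, NULL); /* CFG      -> BUFFED   */
        mm_stream_fsm_fn(stream, MM_STREAM_EVT_REG_BUF, NULL, NULL); /* BUFFED   -> REG      */
        mm_stream_fsm_fn(stream, MM_STREAM_EVT_START,   NULL, NULL); /* REG      -> ACTIVE   */
        /* ... frames are delivered via mm_stream_data_notify() ... */
        mm_stream_fsm_fn(stream, MM_STREAM_EVT_STOP,    NULL, NULL); /* ACTIVE   -> REG      */
    }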
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_inited
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in INITED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_ACQUIRE:
+        if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
+            CDBG_ERROR("%s: NULL channel or camera obj\n", __func__);
+            rc = -1;
+            break;
+        }
+
+        snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+                 mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl));
+
+        my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if (my_obj->fd <= 0) {
+            CDBG_ERROR("%s: open dev returned %d\n", __func__, my_obj->fd);
+            rc = -1;
+            break;
+        }
+        CDBG("%s: open dev fd = %d\n", __func__, my_obj->fd);
+        rc = mm_stream_set_ext_mode(my_obj);
+        if (0 == rc) {
+            my_obj->state = MM_STREAM_STATE_ACQUIRED;
+        } else {
+            /* failed to set ext_mode,
+             * close the fd */
+            close(my_obj->fd);
+            my_obj->fd = 0;
+            break;
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_acquired
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACQUIRED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_acquired(mm_stream_t *my_obj,
+                               mm_stream_evt_type_t evt,
+                               void * in_val,
+                               void * out_val)
+{
+    int32_t rc = 0;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_SET_FMT:
+        {
+            mm_camera_stream_config_t *config =
+                (mm_camera_stream_config_t *)in_val;
+
+            rc = mm_stream_config(my_obj, config);
+
+            /* change state to configured */
+            my_obj->state = MM_STREAM_STATE_CFG;
+
+            break;
+        }
+    case MM_STREAM_EVT_RELEASE:
+        rc = mm_stream_release(my_obj);
+        /* change state to not used */
+        my_obj->state = MM_STREAM_STATE_NOTUSED;
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_cfg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in CONFIGURED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_SET_FMT:
+        {
+            mm_camera_stream_config_t *config =
+                (mm_camera_stream_config_t *)in_val;
+
+            rc = mm_stream_config(my_obj, config);
+
+            /* change state to configured */
+            my_obj->state = MM_STREAM_STATE_CFG;
+
+            break;
+        }
+    case MM_STREAM_EVT_RELEASE:
+        rc = mm_stream_release(my_obj);
+        my_obj->state = MM_STREAM_STATE_NOTUSED;
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_BUF:
+        rc = mm_stream_init_bufs(my_obj);
+        /* change state to buff allocated */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_BUFFED;
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_buffed
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in BUFFED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_PUT_BUF:
+        rc = mm_stream_deinit_bufs(my_obj);
+        /* change state to configured */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_CFG;
+        }
+        break;
+    case MM_STREAM_EVT_REG_BUF:
+        rc = mm_stream_reg_buf(my_obj);
+        /* change state to registered */
+        if(0 == rc) {
+            my_obj->state = MM_STREAM_STATE_REG;
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_reg
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in REGGED
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+                          mm_stream_evt_type_t evt,
+                          void * in_val,
+                          void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    switch(evt) {
+    case MM_STREAM_EVT_UNREG_BUF:
+        rc = mm_stream_unreg_buf(my_obj);
+
+        /* change state to buffed */
+        my_obj->state = MM_STREAM_STATE_BUFFED;
+        break;
+    case MM_STREAM_EVT_START:
+        {
+            uint8_t has_cb = 0;
+            uint8_t i;
+            /* launch cmd thread if CB is not null */
+            pthread_mutex_lock(&my_obj->cb_lock);
+            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+                if(NULL != my_obj->buf_cb[i].cb) {
+                    has_cb = 1;
+                    break;
+                }
+            }
+            pthread_mutex_unlock(&my_obj->cb_lock);
+
+            if (has_cb) {
+                mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+                                            mm_stream_dispatch_app_data,
+                                            (void *)my_obj);
+            }
+
+            my_obj->state = MM_STREAM_STATE_ACTIVE;
+            rc = mm_stream_streamon(my_obj);
+            if (0 != rc) {
+                /* failed stream on, need to release cmd thread if it's launched */
+                if (has_cb) {
+                    mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+                }
+                my_obj->state = MM_STREAM_STATE_REG;
+                break;
+            }
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_fsm_active
+ *
+ * DESCRIPTION: stream finite state machine function to handle event in ACTIVE
+ *              state.
+ *
+ * PARAMETERS :
+ *   @my_obj   : ptr to a stream object
+ *   @evt      : stream event to be processed
+ *   @in_val   : input event payload. Can be NULL if not needed.
+ *   @out_val  : output payload, Can be NULL if not needed.
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_fsm_active(mm_stream_t * my_obj,
+                             mm_stream_evt_type_t evt,
+                             void * in_val,
+                             void * out_val)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    switch(evt) {
+    case MM_STREAM_EVT_QBUF:
+        rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+        break;
+    case MM_STREAM_EVT_STOP:
+        {
+            uint8_t has_cb = 0;
+            uint8_t i;
+            rc = mm_stream_streamoff(my_obj);
+
+            pthread_mutex_lock(&my_obj->cb_lock);
+            for (i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+                if(NULL != my_obj->buf_cb[i].cb) {
+                    has_cb = 1;
+                    break;
+                }
+            }
+            pthread_mutex_unlock(&my_obj->cb_lock);
+
+            if (has_cb) {
+                mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+            }
+            my_obj->state = MM_STREAM_STATE_REG;
+        }
+        break;
+    case MM_STREAM_EVT_SET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_set_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_GET_PARM:
+        {
+            mm_evt_paylod_set_get_stream_parms_t *payload =
+                (mm_evt_paylod_set_get_stream_parms_t *)in_val;
+            rc = mm_stream_get_parm(my_obj, payload->parms);
+        }
+        break;
+    case MM_STREAM_EVT_DO_ACTION:
+        rc = mm_stream_do_action(my_obj, in_val);
+        break;
+    default:
+        CDBG_ERROR("%s: invalid state (%d) for evt (%d), in(%p), out(%p)",
+                   __func__, my_obj->state, evt, in_val, out_val);
+    }
+    CDBG("%s :X rc = %d", __func__, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_config
+ *
+ * DESCRIPTION: configure a stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @config       : stream configuration
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_config(mm_stream_t *my_obj,
+                         mm_camera_stream_config_t *config)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+    my_obj->stream_info = config->stream_info;
+    my_obj->buf_num = 0;
+    my_obj->mem_vtbl = config->mem_vtbl;
+    my_obj->padding_info = config->padding_info;
+    /* CB passed in through the interface is always placed at idx 0 of buf_cb */
+    my_obj->buf_cb[0].cb = config->stream_cb;
+    my_obj->buf_cb[0].user_data = config->userdata;
+    my_obj->buf_cb[0].cb_count = -1; /* infinite by default */
+
+    rc = mm_stream_sync_info(my_obj);
+    if (rc == 0) {
+        rc = mm_stream_set_fmt(my_obj);
+    }
+    return rc;
+}
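mm_stream_config() only copies what the caller provides and installs the interface callback at slot 0 with an infinite repeat count before syncing the stream info and format to the kernel. A hedged sketch of the configuration a caller (normally the channel layer) would hand in, using only the fields read above; stream and the right-hand-side names are placeholders, not real symbols from this patch:

    mm_camera_stream_config_t cfg;
    memset(&cfg, 0, sizeof(cfg));
    cfg.stream_info  = stream_info_buf;   /* shared stream info buffer, already mapped  */
    cfg.mem_vtbl     = mem_ops;           /* buffer alloc/invalidate callbacks          */
    cfg.padding_info = padding;           /* padding requirement from capabilities      */
    cfg.stream_cb    = my_frame_cb;       /* delivered by mm_stream_dispatch_app_data() */
    cfg.userdata     = my_cookie;
    mm_stream_fsm_fn(stream, MM_STREAM_EVT_SET_FMT, &cfg, NULL);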
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_release
+ *
+ * DESCRIPTION: release a stream resource
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_release(mm_stream_t *my_obj)
+{
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* close fd */
+    if(my_obj->fd > 0)
+    {
+        close(my_obj->fd);
+    }
+
+    /* destroy mutex */
+    pthread_mutex_destroy(&my_obj->buf_lock);
+    pthread_mutex_destroy(&my_obj->cb_lock);
+
+    /* reset stream obj */
+    memset(my_obj, 0, sizeof(mm_stream_t));
+
+    return 0;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_streamon
+ *
+ * DESCRIPTION: stream on a stream. sending v4l2 request to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamon(mm_stream_t *my_obj)
+{
+    int32_t rc;
+    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
+    if (rc < 0) {
+        CDBG_ERROR("%s: ioctl VIDIOC_STREAMON failed: rc=%d\n",
+                   __func__, rc);
+        /* remove fd from data poll thread in case of failure */
+        mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl, mm_camera_sync_call);
+    }
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_streamoff
+ *
+ * DESCRIPTION: stream off a stream. sending v4l2 request to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_streamoff(mm_stream_t *my_obj)
+{
+    int32_t rc;
+    enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* step1: remove fd from data poll thread */
+    rc = mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0],
+            my_obj->my_hdl, mm_camera_sync_call);
+    if (rc < 0) {
+        /* The error might be due to async update. In this case
+         * wait for all updates to complete before proceeding. */
+        rc = mm_camera_poll_thread_commit_updates(&my_obj->ch_obj->poll_thread[0]);
+        if (rc < 0) {
+            CDBG_ERROR("%s: Poll sync failed %d",
+                 __func__, rc);
+        }
+    }
+
+    /* step2: stream off */
+    rc = ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+    if (rc < 0) {
+        CDBG_ERROR("%s: STREAMOFF failed: %s\n",
+                __func__, strerror(errno));
+    }
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_read_msm_frame
+ *
+ * DESCRIPTION: dequeue a stream buffer from kernel queue
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_info     : ptr to a struct storing buffer information
+ *   @num_planes   : number of planes in the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+                                 mm_camera_buf_info_t* buf_info,
+                                 uint8_t num_planes)
+{
+    int32_t rc = 0;
+    struct v4l2_buffer vb;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    memset(&vb,  0,  sizeof(vb));
+    vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    vb.memory = V4L2_MEMORY_USERPTR;
+    vb.m.planes = &planes[0];
+    vb.length = num_planes;
+
+    rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
+    if (rc < 0) {
+        CDBG_ERROR("%s: VIDIOC_DQBUF ioctl call failed (rc=%d)\n",
+                   __func__, rc);
+    } else {
+        pthread_mutex_lock(&my_obj->buf_lock);
+        my_obj->queued_buffer_count--;
+        if(my_obj->queued_buffer_count == 0) {
+            CDBG_HIGH("%s: Stoping poll on stream %p type :%d", __func__, my_obj, my_obj->stream_info->stream_type);
+            mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl, mm_camera_async_call);
+            CDBG_HIGH("%s: Stopped poll on stream %p type :%d", __func__, my_obj, my_obj->stream_info->stream_type);
+        }
+        pthread_mutex_unlock(&my_obj->buf_lock);
+        int8_t idx = vb.index;
+        buf_info->buf = &my_obj->buf[idx];
+        buf_info->frame_idx = vb.sequence;
+        buf_info->stream_id = my_obj->my_hdl;
+
+        buf_info->buf->stream_id = my_obj->my_hdl;
+        buf_info->buf->buf_idx = idx;
+        buf_info->buf->frame_idx = vb.sequence;
+        buf_info->buf->ts.tv_sec  = vb.timestamp.tv_sec;
+        buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+        CDBG("%s: VIDIOC_DQBUF buf_index %d, frame_idx %d, stream type %d\n",
+             __func__, vb.index, buf_info->buf->frame_idx, my_obj->stream_info->stream_type);
+        if ( NULL != my_obj->mem_vtbl.clean_invalidate_buf ) {
+            rc = my_obj->mem_vtbl.clean_invalidate_buf(idx,
+                                                       my_obj->mem_vtbl.user_data);
+            if ( 0 > rc ) {
+                CDBG_ERROR("%s: Clean invalidate cache failed on buffer index: %d",
+                           __func__,
+                           idx);
+                return rc;
+            }
+        } else {
+            CDBG_ERROR(" %s : Clean invalidate cache op not supported\n", __func__);
+        }
+    }
+
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_parm
+ *
+ * DESCRIPTION: set parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a param struct to be set to server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of parameters to be set
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_set_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *in_value)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (in_value != NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_parm
+ *
+ * DESCRIPTION: get parameters per stream
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a param struct to be get from server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the parms struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of the parameters to be
+ *              retrieved are already filled in by the upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_get_parm(mm_stream_t *my_obj,
+                           cam_stream_parm_buffer_t *in_value)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (in_value != NULL) {
+        rc = mm_camera_util_g_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_do_action
+ *
+ * DESCRIPTION: request server to perform stream based actions
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @in_value     : ptr to a struct of actions to be performed by the server
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : Assume the action struct buf is already mapped to server via
+ *              domain socket. Corresponding fields of actions to be performed
+ *              are already filled in by upper layer caller.
+ *==========================================================================*/
+int32_t mm_stream_do_action(mm_stream_t *my_obj,
+                            void *in_value)
+{
+    int32_t rc = -1;
+    int32_t value = 0;
+    if (in_value != NULL) {
+        rc = mm_camera_util_s_ctrl(my_obj->fd, CAM_PRIV_STREAM_PARM, &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_ext_mode
+ *
+ * DESCRIPTION: set stream extended mode to server via v4l2 ioctl
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : The server will return a server stream id that uniquely
+ *              identifies this stream on the server side. Later per-stream
+ *              communication to the server should use this server stream id.
+ *==========================================================================*/
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_streamparm s_parm;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    memset(&s_parm, 0, sizeof(s_parm));
+    s_parm.type =  V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+    rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
+    CDBG("%s:stream fd=%d, rc=%d, extended_mode=%d\n",
+         __func__, my_obj->fd, rc, s_parm.parm.capture.extendedmode);
+    if (rc == 0) {
+        /* get server stream id */
+        my_obj->server_stream_id = s_parm.parm.capture.extendedmode;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_qbuf
+ *
+ * DESCRIPTION: enqueue buffer back to kernel queue for future use
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf          : ptr to a struct storing buffer information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
+{
+    int32_t rc = 0;
+    struct v4l2_buffer buffer;
+    struct v4l2_plane planes[VIDEO_MAX_PLANES];
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    memcpy(planes, buf->planes, sizeof(planes));
+    memset(&buffer, 0, sizeof(buffer));
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    buffer.memory = V4L2_MEMORY_USERPTR;
+    buffer.index = buf->buf_idx;
+    buffer.m.planes = &planes[0];
+    buffer.length = buf->num_planes;
+
+    CDBG("%s:plane 0: stream_hdl=%d,fd=%d,frame idx=%d,num_planes = %d, offset = %d, data_offset = %d\n", __func__,
+         buf->stream_id, buf->fd, buffer.index, buffer.length, buf->planes[0].reserved[0], buf->planes[0].data_offset);
+    CDBG("%s:plane 1: stream_hdl=%d,fd=%d,frame idx=%d,num_planes = %d, offset = %d, data_offset = %d\n", __func__,
+         buf->stream_id, buf->fd, buffer.index, buffer.length, buf->planes[1].reserved[0], buf->planes[1].data_offset);
+
+    if ( NULL != my_obj->mem_vtbl.invalidate_buf ) {
+        rc = my_obj->mem_vtbl.invalidate_buf(buffer.index,
+                                             my_obj->mem_vtbl.user_data);
+        if ( 0 > rc ) {
+            CDBG_ERROR("%s: Cache invalidate failed on buffer index: %d",
+                       __func__,
+                       buffer.index);
+            return rc;
+        }
+    } else {
+        CDBG_ERROR("%s: Cache invalidate op not added", __func__);
+    }
+
+    my_obj->queued_buffer_count++;
+    if(my_obj->queued_buffer_count == 1) {
+        /* Add fd to data poll thread */
+        CDBG_HIGH("%s: Starting poll on stream %p type :%d", __func__, my_obj,my_obj->stream_info->stream_type);
+        rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
+                my_obj->my_hdl,
+                my_obj->fd,
+                mm_stream_data_notify,
+                (void*)my_obj,
+                mm_camera_async_call);
+        CDBG_HIGH("%s: Started poll on stream %p type :%d", __func__, my_obj,my_obj->stream_info->stream_type);
+        if (rc < 0) {
+            CDBG_ERROR("%s: add poll fd error", __func__);
+            return rc;
+        }
+    }
+
+    rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+    CDBG("%s: qbuf idx:%d, rc:%d", __func__, buffer.index, rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_request_buf
+ *
+ * DESCRIPTION: This function lets the kernel know the number of buffers that
+ *              need to be registered, via v4l2 ioctl.
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_request_buf(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_requestbuffers bufreq;
+    uint8_t buf_num = my_obj->buf_num;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+        CDBG_ERROR("%s: buf num %d > max limit %d\n",
+                   __func__, buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+        return -1;
+    }
+
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count = buf_num;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+      CDBG_ERROR("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+           __func__, my_obj->fd, rc);
+    }
+    CDBG("%s :X rc = %d",__func__,rc);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_buf
+ *
+ * DESCRIPTION: mapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_type     : type of buffer to be mapped. could be following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @frame_idx    : index of buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index to plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_map_buf(mm_stream_t * my_obj,
+                          uint8_t buf_type,
+                          uint32_t frame_idx,
+                          int32_t plane_idx,
+                          int fd,
+                          uint32_t size)
+{
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        CDBG_ERROR("%s: NULL obj of stream/channel/camera", __func__);
+        return -1;
+    }
+
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_MAPPING;
+    packet.payload.buf_map.type = buf_type;
+    packet.payload.buf_map.fd = fd;
+    packet.payload.buf_map.size = size;
+    packet.payload.buf_map.stream_id = my_obj->server_stream_id;
+    packet.payload.buf_map.frame_idx = frame_idx;
+    packet.payload.buf_map.plane_idx = plane_idx;
+    return mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+                                  &packet,
+                                  sizeof(cam_sock_packet_t),
+                                  fd);
+}
+
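+/*
+ * The buffer fd is handed to mm_camera_util_sendmsg() along with the packet so
+ * that the socket helper can share it with the server process. How the fd
+ * actually crosses the process boundary (presumably as ancillary data on the
+ * domain socket) is left to the socket layer and is not shown in this file.
+ */
+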
+/*===========================================================================
+ * FUNCTION   : mm_stream_unmap_buf
+ *
+ * DESCRIPTION: unmapping stream buffer via domain socket to server
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @buf_type     : type of buffer to be unmapped. Can be one of the following values:
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_BUF
+ *                   CAM_MAPPING_BUF_TYPE_STREAM_INFO
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @frame_idx    : index of the buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unmap_buf(mm_stream_t * my_obj,
+                            uint8_t buf_type,
+                            uint32_t frame_idx,
+                            int32_t plane_idx)
+{
+    if (NULL == my_obj || NULL == my_obj->ch_obj || NULL == my_obj->ch_obj->cam_obj) {
+        CDBG_ERROR("%s: NULL obj of stream/channel/camera", __func__);
+        return -1;
+    }
+
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_MAPPING_TYPE_FD_UNMAPPING;
+    packet.payload.buf_unmap.type = buf_type;
+    packet.payload.buf_unmap.stream_id = my_obj->server_stream_id;
+    packet.payload.buf_unmap.frame_idx = frame_idx;
+    packet.payload.buf_unmap.plane_idx = plane_idx;
+    return mm_camera_util_sendmsg(my_obj->ch_obj->cam_obj,
+                                  &packet,
+                                  sizeof(cam_sock_packet_t),
+                                  0);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_map_buf_ops
+ *
+ * DESCRIPTION: ops entry for mapping a stream buffer to the server via domain socket.
+ *              This function is passed to the upper layer as part of the ops table,
+ *              to be used when allocating stream buffers and mapping them to the
+ *              server via the domain socket.
+ *
+ * PARAMETERS :
+ *   @frame_idx    : index of the buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes)
+ *   @fd           : file descriptor of the buffer
+ *   @size         : size of the buffer
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_map_buf_ops(uint32_t frame_idx,
+                                     int32_t plane_idx,
+                                     int fd,
+                                     uint32_t size,
+                                     void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_map_buf(my_obj,
+                             CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                             frame_idx, plane_idx, fd, size);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unmap_buf_ops
+ *
+ * DESCRIPTION: ops entry for unmapping a stream buffer from the server via domain socket.
+ *              This function is passed to the upper layer as part of the ops table,
+ *              to be used when allocating stream buffers and unmapping them from the
+ *              server via the domain socket.
+ *
+ * PARAMETERS :
+ *   @frame_idx    : index of the buffer within the stream buffers, only valid if
+ *                   buf_type is CAM_MAPPING_BUF_TYPE_STREAM_BUF or
+ *                   CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF
+ *   @plane_idx    : plane index. If all planes share the same fd,
+ *                   plane_idx = -1; otherwise, plane_idx is the
+ *                   index of the plane (0..num_of_planes)
+ *   @userdata     : user data ptr (stream object)
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_stream_unmap_buf_ops(uint32_t frame_idx,
+                                       int32_t plane_idx,
+                                       void *userdata)
+{
+    mm_stream_t *my_obj = (mm_stream_t *)userdata;
+    return mm_stream_unmap_buf(my_obj,
+                               CAM_MAPPING_BUF_TYPE_STREAM_BUF,
+                               frame_idx,
+                               plane_idx);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_init_bufs
+ *
+ * DESCRIPTION: initialize the stream buffers needed. This function requests the
+ *              buffers from the upper layer through the mem ops table passed
+ *              during the configuration stage.
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
+{
+    int32_t i, rc = 0;
+    uint8_t *reg_flags = NULL;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* deinit buf if it's not NULL*/
+    if (NULL != my_obj->buf) {
+        mm_stream_deinit_bufs(my_obj);
+    }
+
+    my_obj->map_ops.map_ops = mm_stream_map_buf_ops;
+    my_obj->map_ops.unmap_ops = mm_stream_unmap_buf_ops;
+    my_obj->map_ops.userdata = my_obj;
+
+    rc = my_obj->mem_vtbl.get_bufs(&my_obj->frame_offset,
+                                   &my_obj->buf_num,
+                                   &reg_flags,
+                                   &my_obj->buf,
+                                   &my_obj->map_ops,
+                                   my_obj->mem_vtbl.user_data);
+
+    if (0 != rc) {
+        CDBG_ERROR("%s: Error get buf, rc = %d\n", __func__, rc);
+        return rc;
+    }
+
+    my_obj->buf_status =
+        (mm_stream_buf_status_t *)malloc(sizeof(mm_stream_buf_status_t) * my_obj->buf_num);
+
+    if (NULL == my_obj->buf_status) {
+        CDBG_ERROR("%s: No memory for buf_status", __func__);
+        mm_stream_deinit_bufs(my_obj);
+        free(reg_flags);
+        return -1;
+    }
+
+    memset(my_obj->buf_status, 0, sizeof(mm_stream_buf_status_t) * my_obj->buf_num);
+    for (i = 0; i < my_obj->buf_num; i++) {
+        my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
+        my_obj->buf[i].stream_id = my_obj->my_hdl;
+        my_obj->buf[i].stream_type = my_obj->stream_info->stream_type;
+    }
+
+    free(reg_flags);
+    reg_flags = NULL;
+
+    return rc;
+}
+
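+/*
+ * A minimal sketch of what an upper-layer get_bufs implementation might look
+ * like, following the call made in mm_stream_init_bufs() above. The allocator
+ * (example_alloc_buf_fd), the buffer count, and the exact parameter type names
+ * are illustrative assumptions, not part of this interface; the sketch is kept
+ * under #if 0 so it is never compiled.
+ */
+#if 0
+static int32_t example_get_bufs(cam_frame_len_offset_t *offset,
+                                uint8_t *num_bufs,
+                                uint8_t **initial_reg_flag,
+                                mm_camera_buf_def_t **bufs,
+                                mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                void *user_data)
+{
+    uint8_t i, cnt = 4;                /* hypothetical number of buffers */
+    *num_bufs = cnt;
+    *bufs = (mm_camera_buf_def_t *)calloc(cnt, sizeof(mm_camera_buf_def_t));
+    *initial_reg_flag = (uint8_t *)calloc(cnt, sizeof(uint8_t));
+    if (NULL == *bufs || NULL == *initial_reg_flag) {
+        return -1;
+    }
+    for (i = 0; i < cnt; i++) {
+        /* hypothetical helper returning an ion/heap fd of frame_len bytes */
+        int fd = example_alloc_buf_fd(offset->frame_len);
+        (*bufs)[i].buf_idx = i;
+        (*bufs)[i].fd = fd;
+        (*bufs)[i].frame_len = offset->frame_len;
+        (*initial_reg_flag)[i] = 1;    /* queue into kernel at reg time */
+        /* map each buffer to the server through the ops table */
+        ops_tbl->map_ops(i, -1, fd, offset->frame_len, ops_tbl->userdata);
+    }
+    return 0;
+}
+#endif
+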
+/*===========================================================================
+ * FUNCTION   : mm_stream_deinit_bufs
+ *
+ * DESCRIPTION: return stream buffers to upper layer through the mem ops table
+ *              passed during configuration stage.
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    mm_camera_map_unmap_ops_tbl_t ops_tbl;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (NULL == my_obj->buf) {
+        CDBG("%s: Buf is NULL, no need to deinit", __func__);
+        return rc;
+    }
+
+    /* release bufs */
+    ops_tbl.map_ops = mm_stream_map_buf_ops;
+    ops_tbl.unmap_ops = mm_stream_unmap_buf_ops;
+    ops_tbl.userdata = my_obj;
+
+    rc = my_obj->mem_vtbl.put_bufs(&ops_tbl,
+                                   my_obj->mem_vtbl.user_data);
+
+    free(my_obj->buf);
+    my_obj->buf = NULL;
+    if (my_obj->buf_status != NULL) {
+        free(my_obj->buf_status);
+        my_obj->buf_status = NULL;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_reg_buf
+ *
+ * DESCRIPTION: register buffers with kernel by calling v4l2 ioctl QBUF for
+ *              each buffer in the stream
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
+{
+    int32_t rc = 0;
+    uint8_t i;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    rc = mm_stream_request_buf(my_obj);
+    if (rc != 0) {
+        return rc;
+    }
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    my_obj->queued_buffer_count = 0;
+    for(i = 0; i < my_obj->buf_num; i++){
+        /* check if need to qbuf initially */
+        if (my_obj->buf_status[i].initial_reg_flag) {
+            rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
+            if (rc != 0) {
+                CDBG_ERROR("%s: VIDIOC_QBUF rc = %d, errno is %s\n",
+                        __func__, rc, strerror(errno));
+                break;
+            }
+            my_obj->buf_status[i].buf_refcnt = 0;
+            my_obj->buf_status[i].in_kernel = 1;
+        } else {
+            /* the buf is held by upper layer, will not queue into kernel.
+             * add buf reference count */
+            my_obj->buf_status[i].buf_refcnt = 1;
+            my_obj->buf_status[i].in_kernel = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_unreg_buf
+ *
+ * DESCRIPTION: unregister all stream buffers from kernel
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj)
+{
+    struct v4l2_requestbuffers bufreq;
+    int32_t i, rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    /* unreg buf to kernel */
+    bufreq.count = 0;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        CDBG_ERROR("%s: fd=%d, VIDIOC_REQBUFS failed, rc=%d\n",
+              __func__, my_obj->fd, rc);
+    }
+
+    /* reset buf reference count */
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if (NULL != my_obj->buf_status) {
+        for(i = 0; i < my_obj->buf_num; i++){
+            my_obj->buf_status[i].buf_refcnt = 0;
+            my_obj->buf_status[i].in_kernel = 0;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_get_v4l2_fmt
+ *
+ * DESCRIPTION: translate camera image format into FOURCC code
+ *
+ * PARAMETERS :
+ *   @fmt     : camera image format
+ *
+ * RETURN     : FOURCC code for image format
+ *==========================================================================*/
+uint32_t mm_stream_get_v4l2_fmt(cam_format_t fmt)
+{
+    uint32_t val;
+    switch(fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+        val = V4L2_PIX_FMT_NV12;
+        break;
+    case CAM_FORMAT_YUV_420_NV21:
+        val = V4L2_PIX_FMT_NV21;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+        val= V4L2_PIX_FMT_SBGGR10;
+        break;
+    case CAM_FORMAT_YUV_422_NV61:
+        val= V4L2_PIX_FMT_NV61;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+        val= V4L2_PIX_FMT_YUYV;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+        val= V4L2_PIX_FMT_YVYU;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+        val= V4L2_PIX_FMT_UYVY;
+        break;
+    case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+        val= V4L2_PIX_FMT_VYUY;
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        val= V4L2_PIX_FMT_NV12;
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+        val= V4L2_PIX_FMT_NV16;
+        break;
+    default:
+        val = 0;
+        CDBG_ERROR("%s: Unknown fmt=%d", __func__, fmt);
+        break;
+    }
+    CDBG("%s: fmt=%d, val =%d", __func__, fmt, val);
+    return val;
+}
+
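+/*
+ * The FOURCC returned here is what mm_stream_set_fmt() further below packs into
+ * the msm_v4l2_format_data handed to VIDIOC_S_FMT.
+ */
+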
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_preview
+ *
+ * DESCRIPTION: calculate preview/postview frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_preview(cam_format_t fmt,
+                                      cam_dimension_t *dim,
+                                      cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0, scanline = 0;
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = stride * scanline;
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            stride * scanline;
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_NV21_ADRENO:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_32);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline,
+                        CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        stride = PAD_TO_SIZE(dim->width / 2, CAM_PAD_TO_32) * 2;
+        scanline = PAD_TO_SIZE(dim->height / 2, CAM_PAD_TO_32);
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            PAD_TO_SIZE(stride * scanline,
+                        CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_2);
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = stride * scanline;
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len =
+            stride * scanline;
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.mp[2].offset = 0;
+        buf_planes->plane_info.mp[2].len =
+            stride * scanline;
+        buf_planes->plane_info.mp[2].offset_x = 0;
+        buf_planes->plane_info.mp[2].offset_y = 0;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        scanline = dim->height;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len = stride * scanline;
+        buf_planes->plane_info.mp[0].offset_x = 0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        buf_planes->plane_info.mp[1].offset = 0;
+        buf_planes->plane_info.mp[1].len = stride * scanline;
+        buf_planes->plane_info.mp[1].offset_x = 0;
+        buf_planes->plane_info.mp[1].offset_y = 0;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format for preview %d",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
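+/*
+ * Worked example (illustrative): for a hypothetical 1920x1080 NV21 preview,
+ * stride = PAD_TO_SIZE(1920, CAM_PAD_TO_16) = 1920 and
+ * scanline = PAD_TO_SIZE(1080, CAM_PAD_TO_2) = 1080, so the Y plane is
+ * 1920 * 1080 = 2073600 bytes and the CbCr plane is 1920 * 540 = 1036800 bytes;
+ * assuming CAM_PAD_TO_4K pads to 4096 bytes, frame_len = 3112960 bytes.
+ */
+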
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_snapshot
+ *
+ * DESCRIPTION: calculate snapshot/postproc frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_snapshot(cam_format_t fmt,
+                                       cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    uint8_t isAFamily = mm_camera_util_chip_is_a_family();
+    int offset_x = 0, offset_y = 0;
+    int stride = 0, scanline = 0;
+
+    if (isAFamily) {
+        stride = dim->width;
+        scanline = PAD_TO_SIZE(dim->height, CAM_PAD_TO_16);
+        offset_x = 0;
+        offset_y = scanline - dim->height;
+        scanline += offset_y; /* double padding */
+    } else {
+        stride = PAD_TO_SIZE(dim->width,
+                             padding->width_padding);
+        scanline = PAD_TO_SIZE(dim->height,
+                               padding->height_padding);
+        offset_x = 0;
+        offset_y = 0;
+    }
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_420_NV12:
+    case CAM_FORMAT_YUV_420_NV21:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].len =
+            PAD_TO_SIZE(stride * scanline,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_420_YV12:
+        /* 3 planes: Y + Cr + Cb */
+        buf_planes->plane_info.num_planes = 3;
+
+        buf_planes->plane_info.mp[0].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        stride = PAD_TO_SIZE(stride / 2, CAM_PAD_TO_16);
+        scanline = scanline / 2;
+        buf_planes->plane_info.mp[1].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.mp[2].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[2].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.mp[2].offset_x = offset_x;
+        buf_planes->plane_info.mp[2].offset_y = offset_y;
+        buf_planes->plane_info.mp[2].stride = stride;
+        buf_planes->plane_info.mp[2].scanline = scanline;
+
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                        buf_planes->plane_info.mp[1].len +
+                        buf_planes->plane_info.mp[2].len,
+                        CAM_PAD_TO_4K);
+        break;
+    case CAM_FORMAT_YUV_422_NV16:
+    case CAM_FORMAT_YUV_422_NV61:
+        /* 2 planes: Y + CbCr */
+        buf_planes->plane_info.num_planes = 2;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[0].offset_x = offset_x;
+        buf_planes->plane_info.mp[0].offset_y = offset_y;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+
+        buf_planes->plane_info.mp[1].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset =
+            PAD_TO_SIZE(offset_x + stride * offset_y,
+                        padding->plane_padding);
+        buf_planes->plane_info.mp[1].offset_x = offset_x;
+        buf_planes->plane_info.mp[1].offset_y = offset_y;
+        buf_planes->plane_info.mp[1].stride = stride;
+        buf_planes->plane_info.mp[1].scanline = scanline;
+
+        buf_planes->plane_info.frame_len = PAD_TO_SIZE(
+            buf_planes->plane_info.mp[0].len + buf_planes->plane_info.mp[1].len,
+            CAM_PAD_TO_4K);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format for snapshot %d",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_raw
+ *
+ * DESCRIPTION: calculate raw frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @fmt     : image format
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_raw(cam_format_t fmt,
+                                  cam_dimension_t *dim,
+                                  cam_padding_info_t *padding,
+                                  cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    int stride = 0;
+    int scanline = dim->height;
+
+    switch (fmt) {
+    case CAM_FORMAT_YUV_RAW_8BIT_YUYV:
+    case CAM_FORMAT_YUV_RAW_8BIT_YVYU:
+    case CAM_FORMAT_YUV_RAW_8BIT_UYVY:
+    case CAM_FORMAT_YUV_RAW_8BIT_VYUY:
+    case CAM_FORMAT_JPEG_RAW_8BIT:
+    case CAM_FORMAT_META_RAW_8BIT:
+        /* 1 plane */
+        /* stride padded to 16 pixels; buffer sized at 2 bytes per pixel */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline * 2, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN8_8BPP_BGGR:
+        /* 1 plane */
+        /* Every 16 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_16);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_10BPP_BGGR:
+        /* Every 12 pixels occupy 16 bytes */
+        stride = (dim->width + 11)/12 * 16;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_QCOM_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_QCOM_12BPP_BGGR:
+        /* Every 10 pixels occupy 16 bytes */
+        stride = (dim->width + 9)/10 * 16;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_10BPP_BGGR:
+        /* Every 64 pixels occupy 80 bytes */
+        stride = PAD_TO_SIZE(
+                PAD_TO_SIZE(dim->width, CAM_PAD_TO_4)*5/4, CAM_PAD_TO_16);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_MIPI_12BPP_BGGR:
+        /* Every 32 pixels occupy 48 bytes */
+        stride = PAD_TO_SIZE(PAD_TO_SIZE(dim->width, 2)*3/2, CAM_PAD_TO_16);
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_8BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_10BPP_BGGR:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GBRG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_GRBG:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_RGGB:
+    case CAM_FORMAT_BAYER_IDEAL_RAW_PLAIN16_12BPP_BGGR:
+        /* Every 8 pixels occupy 16 bytes */
+        stride = PAD_TO_SIZE(dim->width, CAM_PAD_TO_8) * 2;
+        buf_planes->plane_info.num_planes = 1;
+        buf_planes->plane_info.mp[0].offset = 0;
+        buf_planes->plane_info.mp[0].len =
+            PAD_TO_SIZE(stride * scanline, padding->plane_padding);
+        buf_planes->plane_info.frame_len =
+            PAD_TO_SIZE(buf_planes->plane_info.mp[0].len, CAM_PAD_TO_4K);
+        buf_planes->plane_info.mp[0].offset_x =0;
+        buf_planes->plane_info.mp[0].offset_y = 0;
+        buf_planes->plane_info.mp[0].stride = stride;
+        buf_planes->plane_info.mp[0].scanline = scanline;
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid cam_format %d for raw stream",
+                   __func__, fmt);
+        rc = -1;
+        break;
+    }
+
+    return rc;
+}
+
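+/*
+ * Worked example (illustrative): for a hypothetical 4208-pixel-wide MIPI 10bpp
+ * raw stream ("every 64 pixels occupy 80 bytes", i.e. 5 bytes per 4 pixels),
+ * stride = PAD_TO_SIZE(PAD_TO_SIZE(4208, CAM_PAD_TO_4) * 5 / 4, CAM_PAD_TO_16)
+ *        = PAD_TO_SIZE(5260, CAM_PAD_TO_16) = 5264 bytes per line.
+ */
+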
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_video
+ *
+ * DESCRIPTION: calculate video frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @dim     : image dimension
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+#ifdef VENUS_PRESENT
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+                                    cam_stream_buf_plane_info_t *buf_planes)
+{
+    int stride = 0, scanline = 0;
+
+    // using Venus
+    stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, dim->width);
+    scanline = VENUS_Y_SCANLINES(COLOR_FMT_NV12, dim->height);
+
+    buf_planes->plane_info.frame_len =
+        VENUS_BUFFER_SIZE(COLOR_FMT_NV12, dim->width, dim->height);
+    buf_planes->plane_info.num_planes = 2;
+    buf_planes->plane_info.mp[0].len = stride * scanline;
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = stride;
+    buf_planes->plane_info.mp[0].scanline = scanline;
+    stride = VENUS_UV_STRIDE(COLOR_FMT_NV12, dim->width);
+    scanline = VENUS_UV_SCANLINES(COLOR_FMT_NV12, dim->height);
+    buf_planes->plane_info.mp[1].len =
+        buf_planes->plane_info.frame_len - buf_planes->plane_info.mp[0].len;
+    buf_planes->plane_info.mp[1].offset = 0;
+    buf_planes->plane_info.mp[1].offset_x =0;
+    buf_planes->plane_info.mp[1].offset_y = 0;
+    buf_planes->plane_info.mp[1].stride = stride;
+    buf_planes->plane_info.mp[1].scanline = scanline;
+
+    return 0;
+}
+#else
+int32_t mm_stream_calc_offset_video(cam_dimension_t *dim,
+                                    cam_stream_buf_plane_info_t *buf_planes)
+{
+    int stride = 0, scanline = 0;
+
+    buf_planes->plane_info.num_planes = 2;
+
+    stride = dim->width;
+    scanline = dim->height;
+    buf_planes->plane_info.mp[0].len =
+        PAD_TO_SIZE(stride * scanline, CAM_PAD_TO_2K);
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = stride;
+    buf_planes->plane_info.mp[0].scanline = scanline;
+
+    stride = dim->width;
+    scanline = dim->height / 2;
+    buf_planes->plane_info.mp[1].len =
+        PAD_TO_SIZE(stride * scanline, CAM_PAD_TO_2K);
+    buf_planes->plane_info.mp[1].offset = 0;
+    buf_planes->plane_info.mp[1].offset_x =0;
+    buf_planes->plane_info.mp[1].offset_y = 0;
+    buf_planes->plane_info.mp[1].stride = stride;
+    buf_planes->plane_info.mp[1].scanline = scanline;
+
+    buf_planes->plane_info.frame_len =
+        PAD_TO_SIZE(buf_planes->plane_info.mp[0].len +
+                    buf_planes->plane_info.mp[1].len,
+                    CAM_PAD_TO_4K);
+
+    return 0;
+}
+#endif
+
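+/*
+ * The VENUS_Y_STRIDE / VENUS_UV_STRIDE / VENUS_*_SCANLINES / VENUS_BUFFER_SIZE
+ * macros used above are expected to come from the msm_media_info.h media header
+ * when VENUS_PRESENT is defined; that header is an assumption here, not shown in
+ * this file.
+ */
+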
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_metadata
+ *
+ * DESCRIPTION: calculate metadata frame offset based on format and
+ *              padding information
+ *
+ * PARAMETERS :
+ *   @dim     : image dimension
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_metadata(cam_dimension_t *dim,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    buf_planes->plane_info.num_planes = 1;
+    buf_planes->plane_info.mp[0].offset = 0;
+    buf_planes->plane_info.mp[0].len =
+        PAD_TO_SIZE(dim->width * dim->height, padding->plane_padding);
+    buf_planes->plane_info.frame_len =
+        buf_planes->plane_info.mp[0].len;
+
+    buf_planes->plane_info.mp[0].offset_x =0;
+    buf_planes->plane_info.mp[0].offset_y = 0;
+    buf_planes->plane_info.mp[0].stride = dim->width;
+    buf_planes->plane_info.mp[0].scanline = dim->height;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset_postproc
+ *
+ * DESCRIPTION: calculate postprocess frame offset
+ *
+ * PARAMETERS :
+ *   @stream_info: ptr to stream info
+ *   @padding : padding information
+ *   @buf_planes : [out] buffer plane information
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset_postproc(cam_stream_info_t *stream_info,
+                                       cam_padding_info_t *padding,
+                                       cam_stream_buf_plane_info_t *buf_planes)
+{
+    int32_t rc = 0;
+    cam_stream_type_t type = CAM_STREAM_TYPE_DEFAULT;
+    if (stream_info->reprocess_config.pp_type == CAM_OFFLINE_REPROCESS_TYPE) {
+        type = stream_info->reprocess_config.offline.input_stream_type;
+        if (CAM_STREAM_TYPE_DEFAULT == type) {
+            if (buf_planes->plane_info.frame_len == 0) {
+                // take offset from input source
+                *buf_planes = stream_info->reprocess_config.offline.input_buf_planes;
+                return rc;
+            }
+        } else {
+            type = stream_info->reprocess_config.offline.input_stream_type;
+        }
+    } else {
+        type = stream_info->reprocess_config.online.input_stream_type;
+    }
+
+    switch (type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_CALLBACK:
+    case CAM_STREAM_TYPE_POSTVIEW:
+        rc = mm_stream_calc_offset_preview(stream_info->fmt,
+                                           &stream_info->dim,
+                                           buf_planes);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+        rc = mm_stream_calc_offset_snapshot(stream_info->fmt,
+                                            &stream_info->dim,
+                                            padding,
+                                            buf_planes);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(&stream_info->dim,
+                                         buf_planes);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(stream_info->fmt,
+                                       &stream_info->dim,
+                                       padding,
+                                       buf_planes);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&stream_info->dim,
+                                            padding,
+                                            buf_planes);
+        break;
+    default:
+        CDBG_ERROR("%s: not supported for stream type %d",
+                   __func__, type);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_calc_offset
+ *
+ * DESCRIPTION: calculate frame offset based on format and padding information
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_calc_offset(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+
+    cam_dimension_t dim = my_obj->stream_info->dim;
+    if (my_obj->stream_info->pp_config.feature_mask & CAM_QCOM_FEATURE_ROTATION) {
+        if (my_obj->stream_info->pp_config.rotation == ROTATE_90 ||
+            my_obj->stream_info->pp_config.rotation == ROTATE_270) {
+            // rotated by 90 or 270, need to switch width and height
+            dim.width = my_obj->stream_info->dim.height;
+            dim.height = my_obj->stream_info->dim.width;
+        }
+    }
+
+    switch (my_obj->stream_info->stream_type) {
+    case CAM_STREAM_TYPE_PREVIEW:
+    case CAM_STREAM_TYPE_CALLBACK:
+    case CAM_STREAM_TYPE_POSTVIEW:
+        rc = mm_stream_calc_offset_preview(my_obj->stream_info->fmt,
+                                           &dim,
+                                           &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_SNAPSHOT:
+    case CAM_STREAM_TYPE_NON_ZSL_SNAPSHOT:
+        rc = mm_stream_calc_offset_snapshot(my_obj->stream_info->fmt,
+                                            &dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_OFFLINE_PROC:
+        rc = mm_stream_calc_offset_postproc(my_obj->stream_info,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_VIDEO:
+        rc = mm_stream_calc_offset_video(&dim,
+                                         &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_RAW:
+        rc = mm_stream_calc_offset_raw(my_obj->stream_info->fmt,
+                                       &dim,
+                                       &my_obj->padding_info,
+                                       &my_obj->stream_info->buf_planes);
+        break;
+    case CAM_STREAM_TYPE_METADATA:
+        rc = mm_stream_calc_offset_metadata(&dim,
+                                            &my_obj->padding_info,
+                                            &my_obj->stream_info->buf_planes);
+        break;
+    default:
+        CDBG_ERROR("%s: not supported for stream type %d",
+                   __func__, my_obj->stream_info->stream_type);
+        rc = -1;
+        break;
+    }
+
+    my_obj->frame_offset = my_obj->stream_info->buf_planes.plane_info;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_sync_info
+ *
+ * DESCRIPTION: synchronize stream information with server
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ * NOTE       : assumes the stream info buffer is mapped to the server and filled
+ *              in with stream information by the upper layer. This call lets the
+ *              server synchronize the stream information with the HAL. If the
+ *              server finds any fields that need to be changed according to the
+ *              hardware configuration, it modifies the corresponding fields so
+ *              that the HAL knows about them.
+ *==========================================================================*/
+int32_t mm_stream_sync_info(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    int32_t value = 0;
+    my_obj->stream_info->stream_svr_id = my_obj->server_stream_id;
+    rc = mm_stream_calc_offset(my_obj);
+
+    if (rc == 0) {
+        rc = mm_camera_util_s_ctrl(my_obj->fd,
+                                   CAM_PRIV_STREAM_INFO_SYNC,
+                                   &value);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_set_fmt
+ *
+ * DESCRIPTION: set stream format to kernel via v4l2 ioctl
+ *
+ * PARAMETERS :
+ *   @my_obj  : stream object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
+{
+    int32_t rc = 0;
+    struct v4l2_format fmt;
+    struct msm_v4l2_format_data msm_fmt;
+    int i;
+
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    if (my_obj->stream_info->dim.width == 0 ||
+        my_obj->stream_info->dim.height == 0) {
+        CDBG_ERROR("%s:invalid input[w=%d,h=%d,fmt=%d]\n",
+                   __func__,
+                   my_obj->stream_info->dim.width,
+                   my_obj->stream_info->dim.height,
+                   my_obj->stream_info->fmt);
+        return -1;
+    }
+
+    memset(&fmt, 0, sizeof(fmt));
+    memset(&msm_fmt, 0, sizeof(msm_fmt));
+    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    msm_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    msm_fmt.width = my_obj->stream_info->dim.width;
+    msm_fmt.height = my_obj->stream_info->dim.height;
+    msm_fmt.pixelformat = mm_stream_get_v4l2_fmt(my_obj->stream_info->fmt);
+    msm_fmt.num_planes = my_obj->frame_offset.num_planes;
+    for (i = 0; i < msm_fmt.num_planes; i++) {
+        msm_fmt.plane_sizes[i] = my_obj->frame_offset.mp[i].len;
+    }
+
+    memcpy(fmt.fmt.raw_data, &msm_fmt, sizeof(msm_fmt));
+    rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
+    return rc;
+}
+
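+/*
+ * The format is memcpy'd into fmt.fmt.raw_data rather than filled into the
+ * standard v4l2_pix_format_mplane fields, presumably because the msm camera
+ * driver parses its private msm_v4l2_format_data blob from that raw payload.
+ */
+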
+/*===========================================================================
+ * FUNCTION   : mm_stream_buf_done
+ *
+ * DESCRIPTION: enqueue buffer back to kernel
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @frame        : frame to be enqueued back to kernel
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+                           mm_camera_buf_def_t *frame)
+{
+    int32_t rc = 0;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    pthread_mutex_lock(&my_obj->buf_lock);
+    if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
+        CDBG("%s: Error Trying to free second time?(idx=%d) count=%d\n",
+                   __func__, frame->buf_idx,
+                   my_obj->buf_status[frame->buf_idx].buf_refcnt);
+        rc = -1;
+    }else{
+        my_obj->buf_status[frame->buf_idx].buf_refcnt--;
+        if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
+            CDBG("<DEBUG> : Buf done for buffer:%d, stream:%d", frame->buf_idx, frame->stream_type);
+            rc = mm_stream_qbuf(my_obj, frame);
+            if(rc < 0) {
+                CDBG_ERROR("%s: mm_camera_stream_qbuf(idx=%d) errno=%d, %s\n",
+                           __func__, frame->buf_idx, errno, strerror(errno));
+            } else {
+                my_obj->buf_status[frame->buf_idx].in_kernel = 1;
+            }
+        }else{
+            CDBG("<DEBUG> : Still ref count pending count :%d",
+                 my_obj->buf_status[frame->buf_idx].buf_refcnt);
+            CDBG("<DEBUG> : for buffer:%p:%d",
+                 my_obj, frame->buf_idx);
+        }
+    }
+    pthread_mutex_unlock(&my_obj->buf_lock);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_stream_reg_buf_cb
+ *
+ * DESCRIPTION: Allow another stream to register a data callback on this stream.
+ *
+ * PARAMETERS :
+ *   @my_obj       : stream object
+ *   @val          : ptr to info about the callback to be registered
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+                             mm_stream_data_cb_t *val)
+{
+    int32_t rc = -1;
+    uint8_t i;
+    CDBG("%s: E, my_handle = 0x%x, fd = %d, state = %d",
+         __func__, my_obj->my_hdl, my_obj->fd, my_obj->state);
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    for (i=0 ;i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+        if(NULL == my_obj->buf_cb[i].cb) {
+            my_obj->buf_cb[i] = *val;
+            rc = 0;
+            break;
+        }
+    }
+    pthread_mutex_unlock(&my_obj->cb_lock);
+
+    return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
new file mode 100644
index 0000000..9f0dfe7
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -0,0 +1,673 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <cam_semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+typedef enum {
+    /* poll entries updated */
+    MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED,
+    /* poll entries updated */
+    MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC,
+    /* commit updates */
+    MM_CAMERA_PIPE_CMD_COMMIT,
+    /* exit */
+    MM_CAMERA_PIPE_CMD_EXIT,
+    /* max count */
+    MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+    MM_CAMERA_POLL_TASK_STATE_STOPPED,
+    MM_CAMERA_POLL_TASK_STATE_POLL,     /* poll thread is in polling state. */
+    MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+    uint8_t cmd;
+    mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig_async
+ *
+ * DESCRIPTION: Asynchronous call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ *   @poll_cb      : ptr to poll thread object
+ *   @cmd          : command to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig_async(mm_camera_poll_thread_t *poll_cb,
+                                  uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+    int len;
+
+    CDBG("%s: E cmd = %d", __func__,cmd);
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the status to false */
+    poll_cb->status = FALSE;
+    /* send cmd to worker */
+
+    len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if(len < 1) {
+        CDBG_ERROR("%s: len = %d, errno = %d", __func__, len, errno);
+        /* Avoid waiting for the signal */
+        pthread_mutex_unlock(&poll_cb->mutex);
+        return 0;
+    }
+    CDBG("%s: begin IN mutex write done, len = %d", __func__, len);
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: X", __func__);
+    return 0;
+}
+
+
+
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig
+ *
+ * DESCRIPTION: Synchronized call to send a command through the pipe.
+ *
+ * PARAMETERS :
+ *   @poll_cb      : ptr to poll thread object
+ *   @cmd          : command to be sent
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+                                  uint32_t cmd)
+{
+    /* send through pipe */
+    /* get the mutex */
+    mm_camera_sig_evt_t cmd_evt;
+    int len;
+
+    CDBG("%s: E cmd = %d", __func__,cmd);
+    memset(&cmd_evt, 0, sizeof(cmd_evt));
+    cmd_evt.cmd = cmd;
+    pthread_mutex_lock(&poll_cb->mutex);
+    /* reset the status to false */
+    poll_cb->status = FALSE;
+    /* send cmd to worker */
+
+    len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+    if(len < 1) {
+        CDBG_ERROR("%s: len = %d, errno = %d", __func__, len, errno);
+        /* Avoid waiting for the signal */
+        pthread_mutex_unlock(&poll_cb->mutex);
+        return 0;
+    }
+    CDBG("%s: begin IN mutex write done, len = %d", __func__, len);
+    /* wait till worker task gives positive signal */
+    if (FALSE == poll_cb->status) {
+        CDBG("%s: wait", __func__);
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    /* done */
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: X", __func__);
+    return 0;
+}
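Editor's note: the synchronous signalling above is a pipe-plus-condition-variable handshake: the caller writes a command into the pipe and blocks on the condvar until the worker, which poll()s the read end, acknowledges. A minimal standalone sketch of that pattern follows; all names (sig_demo_t, sig_send, sig_ack) are illustrative and not part of this patch.

    /* Sketch of the pipe + condvar handshake used by mm_camera_poll_sig. */
    #include <pthread.h>
    #include <unistd.h>

    typedef struct {
        int             pfds[2];   /* pfds[1]: caller writes, pfds[0]: worker polls */
        pthread_mutex_t mutex;
        pthread_cond_t  cond;
        int             acked;     /* set by the worker once the cmd is processed */
    } sig_demo_t;

    /* caller side: post one command byte and wait for the worker's ack */
    static int sig_send(sig_demo_t *s, unsigned char cmd)
    {
        pthread_mutex_lock(&s->mutex);
        s->acked = 0;
        if (write(s->pfds[1], &cmd, 1) != 1) {
            pthread_mutex_unlock(&s->mutex);
            return -1;
        }
        while (!s->acked)                     /* loop guards against spurious wakeups */
            pthread_cond_wait(&s->cond, &s->mutex);
        pthread_mutex_unlock(&s->mutex);
        return 0;
    }

    /* worker side: called after the command read from pfds[0] has been handled */
    static void sig_ack(sig_demo_t *s)
    {
        pthread_mutex_lock(&s->mutex);
        s->acked = 1;
        pthread_cond_signal(&s->cond);
        pthread_mutex_unlock(&s->mutex);
    }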
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_sig_done
+ *
+ * DESCRIPTION: signal that the posted command has been processed
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = TRUE;
+    pthread_cond_signal(&poll_cb->cond_v);
+    CDBG("%s: done, in mutex", __func__);
+    pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_set_state
+ *
+ * DESCRIPTION: set a polling state
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *   @state   : polling state (stopped/polling)
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+                                     mm_camera_poll_task_state_type_t state)
+{
+    poll_cb->state = state;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_proc_pipe
+ *
+ * DESCRIPTION: polling thread routine to process pipe
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+    ssize_t read_len;
+    int i;
+    mm_camera_sig_evt_t cmd_evt;
+    read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
+    CDBG("%s: read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
+         __func__, poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
+    switch (cmd_evt.cmd) {
+    case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
+    case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC:
+        /* we always have index 0 for pipe read */
+        poll_cb->num_fds = 0;
+        poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
+        poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+        poll_cb->num_fds++;
+
+        if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type) {
+            if (poll_cb->poll_entries[0].fd > 0) {
+                /* fd is valid, we update poll_fds */
+                poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
+                poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+                poll_cb->num_fds++;
+            }
+        } else if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+            for(i = 0; i < MAX_STREAM_NUM_IN_BUNDLE; i++) {
+                if(poll_cb->poll_entries[i].fd > 0) {
+                    /* fd is valid, we update poll_fds to this fd */
+                    poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
+                    poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+                    poll_cb->num_fds++;
+                } else {
+                    /* fd is invalid, so set the entry to -1 to prevent polling.
+                     * Per the poll() spec, entries with fd=-1 are ignored; if that
+                     * were not the case we would have to skip these invalid fds
+                     * when updating this array.
+                     * We keep the fd=-1 placeholders so that the cb associated with
+                     * an fd can be looked up directly via index-1 once data arrives
+                     * (index 0 is reserved for the pipe read, hence the -1). */
+                    poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
+                    poll_cb->poll_fds[poll_cb->num_fds].events = 0;
+                    poll_cb->num_fds++;
+                }
+            }
+        }
+        if (cmd_evt.cmd != MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC)
+            mm_camera_poll_sig_done(poll_cb);
+        break;
+
+    case MM_CAMERA_PIPE_CMD_COMMIT:
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    case MM_CAMERA_PIPE_CMD_EXIT:
+    default:
+        mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
+        mm_camera_poll_sig_done(poll_cb);
+        break;
+    }
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_fn
+ *
+ * DESCRIPTION: polling thread routine
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+    int rc = 0, i;
+
+    CDBG("%s: poll type = %d, num_fd = %d poll_cb = %p\n",
+         __func__, poll_cb->poll_type, poll_cb->num_fds,poll_cb);
+    do {
+         for(i = 0; i < poll_cb->num_fds; i++) {
+            poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+         }
+
+         rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
+         if(rc > 0) {
+            if ((poll_cb->poll_fds[0].revents & POLLIN) &&
+                (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
+                /* if we have data on pipe, we only process pipe in this iteration */
+                CDBG("%s: cmd received on pipe\n", __func__);
+                mm_camera_poll_proc_pipe(poll_cb);
+            } else {
+                for(i=1; i<poll_cb->num_fds; i++) {
+                    /* Checking for ctrl events */
+                    if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+                        (poll_cb->poll_fds[i].revents & POLLPRI)) {
+                        CDBG("%s: mm_camera_evt_notify\n", __func__);
+                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+                        }
+                    }
+
+                    if ((MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) &&
+                        (poll_cb->poll_fds[i].revents & POLLIN) &&
+                        (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
+                        CDBG("%s: mm_stream_data_notify\n", __func__);
+                        if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+                            poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+                        }
+                    }
+                }
+            }
+        } else {
+            /* on error, sleep 10 us (hard-coded) and then continue */
+            usleep(10);
+            continue;
+        }
+    } while (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL);
+    return NULL;
+}
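Editor's note: the fd = -1 convention relied on above is standard poll() behaviour: negative descriptors are skipped and their revents come back as 0, so placeholder slots cost nothing. A tiny standalone illustration (not part of the patch):

    #include <poll.h>
    #include <stdio.h>
    #include <unistd.h>

    int main(void)
    {
        struct pollfd fds[2];

        fds[0].fd = STDIN_FILENO;        /* real descriptor */
        fds[0].events = POLLIN;
        fds[1].fd = -1;                  /* placeholder slot: poll() ignores it */
        fds[1].events = POLLIN;

        int rc = poll(fds, 2, 0);        /* non-blocking check */
        printf("rc=%d revents[1]=%d\n", rc, fds[1].revents);  /* revents[1] is 0 */
        return 0;
    }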
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread
+ *
+ * DESCRIPTION: polling thread entry function
+ *
+ * PARAMETERS :
+ *   @data    : ptr to poll thread object
+ *
+ * RETURN     : none
+ *==========================================================================*/
+static void *mm_camera_poll_thread(void *data)
+{
+    prctl(PR_SET_NAME, (unsigned long)"mm_cam_poll_th", 0, 0, 0);
+    mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
+
+    /* add pipe read fd into poll first */
+    poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
+
+    mm_camera_poll_sig_done(poll_cb);
+    mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
+    return mm_camera_poll_fn(poll_cb);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_notify_entries_updated
+ *
+ * DESCRIPTION: notify the polling thread that entries for polling fd have
+ *              been updated
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_notify_entries_updated(mm_camera_poll_thread_t * poll_cb)
+{
+    /* send poll entries updated signal to poll thread */
+    return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_commit_updates
+ *
+ * DESCRIPTION: sync with all previously pending async updates
+ *
+ * PARAMETERS :
+ *   @poll_cb : ptr to poll thread object
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_commit_updates(mm_camera_poll_thread_t * poll_cb)
+{
+    return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_COMMIT);
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_add_poll_fd
+ *
+ * DESCRIPTION: add a new fd into polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @handler   : stream handle if channel data polling thread,
+ *                0 if event polling thread
+ *   @fd        : file descriptor need to be added into polling thread
+ *   @notify_cb : callback function to handle if any notify from fd
+ *   @userdata  : user data ptr
+ *   @call_type : whether this is a synchronous or asynchronous call
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_add_poll_fd(mm_camera_poll_thread_t * poll_cb,
+                                          uint32_t handler,
+                                          int32_t fd,
+                                          mm_camera_poll_notify_t notify_cb,
+                                          void* userdata,
+                                          mm_camera_call_type_t call_type)
+{
+    int32_t rc = -1;
+    uint8_t idx = 0;
+
+    if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+        /* get stream idx from handler if CH type */
+        idx = mm_camera_util_get_index_by_handler(handler);
+    } else {
+        /* for EVT type, only idx=0 is valid */
+        idx = 0;
+    }
+
+    if (MAX_STREAM_NUM_IN_BUNDLE > idx) {
+        poll_cb->poll_entries[idx].fd = fd;
+        poll_cb->poll_entries[idx].handler = handler;
+        poll_cb->poll_entries[idx].notify_cb = notify_cb;
+        poll_cb->poll_entries[idx].user_data = userdata;
+        /* send poll entries updated signal to poll thread */
+        if (call_type == mm_camera_sync_call) {
+            rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+        } else {
+            rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC);
+        }
+    } else {
+        CDBG_ERROR("%s: invalid handler %d (%d)",
+                   __func__, handler, idx);
+    }
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : mm_camera_poll_thread_del_poll_fd
+ *
+ * DESCRIPTION: delete a fd from polling thread
+ *
+ * PARAMETERS :
+ *   @poll_cb   : ptr to poll thread object
+ *   @handler   : stream handle if channel data polling thread,
+ *                0 if event polling thread
+ *
+ * RETURN     : int32_t type of status
+ *              0  -- success
+ *              -1 -- failure
+ *==========================================================================*/
+int32_t mm_camera_poll_thread_del_poll_fd(mm_camera_poll_thread_t * poll_cb,
+                                          uint32_t handler,
+                                          mm_camera_call_type_t call_type)
+{
+    int32_t rc = -1;
+    uint8_t idx = 0;
+
+    if (MM_CAMERA_POLL_TYPE_DATA == poll_cb->poll_type) {
+        /* get stream idx from handler if CH type */
+        idx = mm_camera_util_get_index_by_handler(handler);
+    } else {
+        /* for EVT type, only idx=0 is valid */
+        idx = 0;
+    }
+
+    if ((MAX_STREAM_NUM_IN_BUNDLE > idx) &&
+        (handler == poll_cb->poll_entries[idx].handler)) {
+        /* reset poll entry */
+        poll_cb->poll_entries[idx].fd = -1; /* set fd to invalid */
+        poll_cb->poll_entries[idx].handler = 0;
+        poll_cb->poll_entries[idx].notify_cb = NULL;
+
+        /* send poll entries updated signal to poll thread */
+        if (call_type == mm_camera_sync_call) {
+            rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+        } else {
+            rc = mm_camera_poll_sig_async(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED_ASYNC);
+        }
+    } else {
+        CDBG_ERROR("%s: invalid handler %d (%d)",
+                   __func__, handler, idx);
+        return -1;
+    }
+
+    return rc;
+}
+
+static pthread_mutex_t constr_destr_lock = PTHREAD_MUTEX_INITIALIZER;
+
+int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
+                                     mm_camera_poll_thread_type_t poll_type)
+{
+    int32_t rc = 0;
+
+    pthread_mutex_lock(&constr_destr_lock);
+
+    poll_cb->poll_type = poll_type;
+
+    poll_cb->pfds[0] = 0;
+    poll_cb->pfds[1] = 0;
+    rc = pipe(poll_cb->pfds);
+    if(rc < 0) {
+        CDBG_ERROR("%s: pipe open rc=%d\n", __func__, rc);
+        pthread_mutex_unlock(&constr_destr_lock);
+        return -1;
+    }
+
+    poll_cb->timeoutms = -1;  /* wait indefinitely */
+
+    CDBG("%s: poll_type = %d, read fd = %d, write fd = %d timeout = %d",
+        __func__, poll_cb->poll_type,
+        poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
+
+    pthread_mutex_init(&poll_cb->mutex, NULL);
+    pthread_cond_init(&poll_cb->cond_v, NULL);
+
+    /* launch the thread */
+    pthread_mutex_lock(&poll_cb->mutex);
+    poll_cb->status = 0;
+    pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+    if(!poll_cb->status) {
+        pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+    }
+    pthread_mutex_unlock(&poll_cb->mutex);
+    CDBG("%s: End",__func__);
+    pthread_mutex_unlock(&constr_destr_lock);
+    return rc;
+}
+
+int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb)
+{
+    int32_t rc = 0;
+
+    pthread_mutex_lock(&constr_destr_lock);
+
+    if(MM_CAMERA_POLL_TASK_STATE_STOPPED == poll_cb->state) {
+        CDBG_ERROR("%s: err, poll thread is not running.\n", __func__);
+        goto done;
+    }
+
+    /* send exit signal to poll thread */
+    mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+    /* wait until poll thread exits */
+    if (pthread_join(poll_cb->pid, NULL) != 0) {
+        CDBG_ERROR("%s: pthread dead already\n", __func__);
+    }
+
+    /* close pipe */
+    if(poll_cb->pfds[0]) {
+        close(poll_cb->pfds[0]);
+    }
+    if(poll_cb->pfds[1]) {
+        close(poll_cb->pfds[1]);
+    }
+
+    pthread_mutex_destroy(&poll_cb->mutex);
+    pthread_cond_destroy(&poll_cb->cond_v);
+    memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+done:
+    pthread_mutex_unlock(&constr_destr_lock);
+    return rc;
+}
+
+static void *mm_camera_cmd_thread(void *data)
+{
+    int running = 1;
+    int ret;
+    mm_camera_cmd_thread_t *cmd_thread =
+                (mm_camera_cmd_thread_t *)data;
+    mm_camera_cmdcb_t* node = NULL;
+
+    do {
+        do {
+            ret = cam_sem_wait(&cmd_thread->cmd_sem);
+            if (ret != 0 && errno != EINVAL) {
+                CDBG_ERROR("%s: cam_sem_wait error (%s)",
+                           __func__, strerror(errno));
+                return NULL;
+            }
+        } while (ret != 0);
+
+        /* we got notified about new cmd avail in cmd queue */
+        node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+        while (node != NULL) {
+            switch (node->cmd_type) {
+            case MM_CAMERA_CMD_TYPE_EVT_CB:
+            case MM_CAMERA_CMD_TYPE_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_SUPER_BUF_DATA_CB:
+            case MM_CAMERA_CMD_TYPE_CONFIG_NOTIFY:
+            case MM_CAMERA_CMD_TYPE_FLUSH_QUEUE:
+                if (NULL != cmd_thread->cb) {
+                    cmd_thread->cb(node, cmd_thread->user_data);
+                }
+                break;
+            case MM_CAMERA_CMD_TYPE_EXIT:
+            default:
+                running = 0;
+                break;
+            }
+            free(node);
+            node = (mm_camera_cmdcb_t*)cam_queue_deq(&cmd_thread->cmd_queue);
+        } /* (node != NULL) */
+    } while (running);
+    return NULL;
+}
+
+int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
+                                    mm_camera_cmd_cb_t cb,
+                                    void* user_data)
+{
+    int32_t rc = 0;
+
+    cam_sem_init(&cmd_thread->cmd_sem, 0);
+    cam_queue_init(&cmd_thread->cmd_queue);
+    cmd_thread->cb = cb;
+    cmd_thread->user_data = user_data;
+
+    /* launch the thread */
+    pthread_create(&cmd_thread->cmd_pid,
+                   NULL,
+                   mm_camera_cmd_thread,
+                   (void *)cmd_thread);
+    return rc;
+}
+
+int32_t mm_camera_cmd_thread_name(const char* name)
+{
+    int32_t rc = 0;
+    /* name the thread */
+    prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+    return rc;
+}
+
+
+int32_t mm_camera_cmd_thread_stop(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t* node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL == node) {
+        CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_camera_cmdcb_t));
+    node->cmd_type = MM_CAMERA_CMD_TYPE_EXIT;
+
+    cam_queue_enq(&cmd_thread->cmd_queue, node);
+    cam_sem_post(&cmd_thread->cmd_sem);
+
+    /* wait until cmd thread exits */
+    if (pthread_join(cmd_thread->cmd_pid, NULL) != 0) {
+        CDBG("%s: pthread dead already\n", __func__);
+    }
+    return rc;
+}
+
+int32_t mm_camera_cmd_thread_destroy(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = 0;
+    cam_queue_deinit(&cmd_thread->cmd_queue);
+    cam_sem_destroy(&cmd_thread->cmd_sem);
+    memset(cmd_thread, 0, sizeof(mm_camera_cmd_thread_t));
+    return rc;
+}
+
+int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread)
+{
+    int32_t rc = 0;
+    rc = mm_camera_cmd_thread_stop(cmd_thread);
+    if (0 == rc) {
+        rc = mm_camera_cmd_thread_destroy(cmd_thread);
+    }
+    return rc;
+}
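Editor's note: the command thread above is a semaphore-gated queue worker: producers enqueue a node and post the semaphore, the worker drains the queue and frees each node, and an EXIT node stops it. The hedged usage sketch below assumes the struct fields and callback signature implied by the worker loop (node pointer plus user_data) and the same headers as this file; demo_cmd_cb and demo_run_cmd_thread are hypothetical names.

    /* Hypothetical usage of the cmd-thread helpers defined above. */
    static void demo_cmd_cb(mm_camera_cmdcb_t *node, void *user_data)
    {
        (void)user_data;
        CDBG("%s: got cmd_type %d", __func__, node->cmd_type);
    }

    static int demo_run_cmd_thread(void)
    {
        mm_camera_cmd_thread_t th;
        mm_camera_cmdcb_t *node;

        memset(&th, 0, sizeof(th));
        if (mm_camera_cmd_thread_launch(&th, demo_cmd_cb, NULL) != 0)
            return -1;

        /* producer side: enqueue one work item, then wake the worker */
        node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
        if (node != NULL) {
            memset(node, 0, sizeof(*node));
            node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
            cam_queue_enq(&th.cmd_queue, node);   /* worker frees the node */
            cam_sem_post(&th.cmd_sem);
        }

        /* posts MM_CAMERA_CMD_TYPE_EXIT, joins the thread, tears down queue/sem */
        return mm_camera_cmd_thread_release(&th);
    }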
diff --git a/camera/QCamera2/stack/mm-camera-test/Android.mk b/camera/QCamera2/stack/mm-camera-test/Android.mk
new file mode 100644
index 0000000..a3e99db
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/Android.mk
@@ -0,0 +1,76 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH:=$(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS:= \
+        -DAMSS_VERSION=$(AMSS_VERSION) \
+        $(mmcamera_debug_defines) \
+        $(mmcamera_debug_cflags) \
+        $(USE_SERVER_TREE)
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_
+
+LOCAL_SRC_FILES:= \
+        src/mm_qcamera_app.c \
+        src/mm_qcamera_unit_test.c \
+        src/mm_qcamera_video.c \
+        src/mm_qcamera_preview.c \
+        src/mm_qcamera_snapshot.c \
+        src/mm_qcamera_rdi.c
+#        src/mm_qcamera_dual_test.c \
+
+LOCAL_C_INCLUDES:=$(LOCAL_PATH)/inc
+LOCAL_C_INCLUDES+= \
+        frameworks/native/include/media/openmax \
+        $(LOCAL_PATH)/../common \
+        $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+        $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_CP_MM_HEAP_ID
+ifeq ($(call is-board-platform,msm8974),true)
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(call is-board-platform,msm8226),true)
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_MM_HEAP
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=9
+else ifeq ($(call is-board-platform,msm8960),true)
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else ifeq ($(call is-chipset-prefix-in-board-platform,msm8660),true)
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP # Don't Care
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID # EBI
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+else
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP # Don't Care
+        LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+        LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+        LOCAL_CFLAGS += -DNUM_RECORDING_BUFFERS=5
+endif
+LOCAL_CFLAGS += -Wall -Werror
+
+LOCAL_SHARED_LIBRARIES:= \
+         libcutils liblog libdl
+
+LOCAL_MODULE:= mm-qcamera-app
+
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_EXECUTABLE)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h
new file mode 100644
index 0000000..bf85667
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_app.h
@@ -0,0 +1,261 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_APP_H__
+#define __MM_QCAMERA_APP_H__
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+
+#include "mm_camera_interface.h"
+#include "mm_jpeg_interface.h"
+
+#define MM_QCAMERA_APP_INTERATION 1
+
+#define MM_APP_MAX_DUMP_FRAME_NUM 1000
+
+#define PREVIEW_BUF_NUM 7
+#define VIDEO_BUF_NUM 7
+#define ISP_PIX_BUF_NUM 9
+#define STATS_BUF_NUM 4
+#define RDI_BUF_NUM 8
+
+#define DEFAULT_PREVIEW_FORMAT    CAM_FORMAT_YUV_420_NV21
+#define DEFAULT_PREVIEW_WIDTH     800
+#define DEFAULT_PREVIEW_HEIGHT    480
+#define DEFAULT_PREVIEW_PADDING   CAM_PAD_TO_WORD
+#define DEFAULT_VIDEO_FORMAT      CAM_FORMAT_YUV_420_NV12
+#define DEFAULT_VIDEO_WIDTH       800
+#define DEFAULT_VIDEO_HEIGHT      480
+#define DEFAULT_VIDEO_PADDING     CAM_PAD_TO_2K
+#define DEFAULT_SNAPSHOT_FORMAT   CAM_FORMAT_YUV_420_NV21
+#define DEFAULT_SNAPSHOT_WIDTH    1280
+#define DEFAULT_SNAPSHOT_HEIGHT   960
+#define DEFAULT_SNAPSHOT_PADDING  CAM_PAD_TO_WORD
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+typedef enum {
+    MM_CAMERA_OK,
+    MM_CAMERA_E_GENERAL,
+    MM_CAMERA_E_NO_MEMORY,
+    MM_CAMERA_E_NOT_SUPPORTED,
+    MM_CAMERA_E_INVALID_INPUT,
+    MM_CAMERA_E_INVALID_OPERATION, /* 5 */
+    MM_CAMERA_E_ENCODE,
+    MM_CAMERA_E_BUFFER_REG,
+    MM_CAMERA_E_PMEM_ALLOC,
+    MM_CAMERA_E_CAPTURE_FAILED,
+    MM_CAMERA_E_CAPTURE_TIMEOUT, /* 10 */
+} mm_camera_status_type_t;
+
+typedef enum {
+    MM_CHANNEL_TYPE_ZSL,      /* preview, and snapshot main */
+    MM_CHANNEL_TYPE_CAPTURE,  /* snapshot main, and postview */
+    MM_CHANNEL_TYPE_PREVIEW,  /* preview only */
+    MM_CHANNEL_TYPE_SNAPSHOT, /* snapshot main only */
+    MM_CHANNEL_TYPE_VIDEO,    /* video only */
+    MM_CHANNEL_TYPE_RDI,      /* rdi only */
+    MM_CHANNEL_TYPE_MAX
+} mm_camera_channel_type_t;
+
+typedef struct {
+    int                     fd;
+    int                     main_ion_fd;
+    struct ion_handle *     handle;
+    uint32_t                size;
+    void *                  data;
+} mm_camera_app_meminfo_t;
+
+typedef struct {
+    mm_camera_buf_def_t buf;
+    mm_camera_app_meminfo_t mem_info;
+} mm_camera_app_buf_t;
+
+typedef struct {
+    uint32_t s_id;
+    mm_camera_stream_config_t s_config;
+    cam_frame_len_offset_t offset;
+    uint8_t num_of_bufs;
+    mm_camera_app_buf_t s_bufs[MM_CAMERA_MAX_NUM_FRAMES];
+    mm_camera_app_buf_t s_info_buf;
+} mm_camera_stream_t;
+
+typedef struct {
+    uint32_t ch_id;
+    uint8_t num_streams;
+    mm_camera_stream_t streams[MAX_STREAM_NUM_IN_BUNDLE];
+} mm_camera_channel_t;
+
+typedef struct {
+    mm_camera_vtbl_t *cam;
+    uint8_t num_channels;
+    mm_camera_channel_t channels[MM_CHANNEL_TYPE_MAX];
+    mm_jpeg_ops_t jpeg_ops;
+    uint32_t jpeg_hdl;
+    mm_camera_app_buf_t cap_buf;
+    mm_camera_app_buf_t parm_buf;
+
+    uint32_t current_jpeg_sess_id;
+    mm_camera_super_buf_t* current_job_frames;
+    uint32_t current_job_id;
+    mm_camera_app_buf_t jpeg_buf;
+} mm_camera_test_obj_t;
+
+typedef struct {
+  void *ptr;
+  void* ptr_jpeg;
+
+  uint8_t (*get_num_of_cameras) ();
+  mm_camera_vtbl_t *(*mm_camera_open) (uint8_t camera_idx);
+  uint32_t (*jpeg_open) (mm_jpeg_ops_t *ops);
+} hal_interface_lib_t;
+
+typedef struct {
+    uint8_t num_cameras;
+    hal_interface_lib_t hal_lib;
+} mm_camera_app_t;
+
+typedef int (*mm_app_test_t) (mm_camera_app_t *cam_apps);
+typedef struct {
+    mm_app_test_t f;
+    int r;
+} mm_app_tc_t;
+
+extern int mm_app_unit_test_entry(mm_camera_app_t *cam_app);
+extern int mm_app_dual_test_entry(mm_camera_app_t *cam_app);
+extern void mm_app_dump_frame(mm_camera_buf_def_t *frame,
+                              char *name,
+                              char *ext,
+                              int frame_idx);
+extern void mm_app_dump_jpeg_frame(const void * data,
+                                   uint32_t size,
+                                   char* name,
+                                   char* ext,
+                                   int index);
+extern int mm_camera_app_timedwait(uint8_t seconds);
+extern int mm_camera_app_wait();
+extern void mm_camera_app_done();
+extern int mm_app_alloc_bufs(mm_camera_app_buf_t* app_bufs,
+                             cam_frame_len_offset_t *frame_offset_info,
+                             uint8_t num_bufs,
+                             uint8_t is_streambuf);
+extern int mm_app_release_bufs(uint8_t num_bufs,
+                               mm_camera_app_buf_t* app_bufs);
+extern int mm_app_stream_initbuf(cam_frame_len_offset_t *frame_offset_info,
+                                 uint8_t *num_bufs,
+                                 uint8_t **initial_reg_flag,
+                                 mm_camera_buf_def_t **bufs,
+                                 mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                 void *user_data);
+extern int32_t mm_app_stream_deinitbuf(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                       void *user_data);
+extern int mm_app_cache_ops(mm_camera_app_meminfo_t *mem_info,
+                            unsigned int cmd);
+extern int32_t mm_app_stream_clean_invalidate_buf(int index, void *user_data);
+extern int32_t mm_app_stream_invalidate_buf(int index, void *user_data);
+extern int mm_app_open(mm_camera_app_t *cam_app,
+                       uint8_t cam_id,
+                       mm_camera_test_obj_t *test_obj);
+extern int mm_app_close(mm_camera_test_obj_t *test_obj);
+extern mm_camera_channel_t * mm_app_add_channel(
+                                         mm_camera_test_obj_t *test_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_channel_attr_t *attr,
+                                         mm_camera_buf_notify_t channel_cb,
+                                         void *userdata);
+extern int mm_app_del_channel(mm_camera_test_obj_t *test_obj,
+                              mm_camera_channel_t *channel);
+extern mm_camera_stream_t * mm_app_add_stream(mm_camera_test_obj_t *test_obj,
+                                              mm_camera_channel_t *channel);
+extern int mm_app_del_stream(mm_camera_test_obj_t *test_obj,
+                             mm_camera_channel_t *channel,
+                             mm_camera_stream_t *stream);
+extern int mm_app_config_stream(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel,
+                                mm_camera_stream_t *stream,
+                                mm_camera_stream_config_t *config);
+extern int mm_app_start_channel(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel);
+extern int mm_app_stop_channel(mm_camera_test_obj_t *test_obj,
+                               mm_camera_channel_t *channel);
+extern mm_camera_channel_t *mm_app_get_channel_by_type(
+                                    mm_camera_test_obj_t *test_obj,
+                                    mm_camera_channel_type_t ch_type);
+
+extern int mm_app_start_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_preview_zsl(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_preview_zsl(mm_camera_test_obj_t *test_obj);
+extern mm_camera_channel_t * mm_app_add_preview_channel(
+                                mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_and_del_channel(mm_camera_test_obj_t *test_obj,
+                                       mm_camera_channel_t *channel);
+extern mm_camera_channel_t * mm_app_add_snapshot_channel(
+                                               mm_camera_test_obj_t *test_obj);
+extern mm_camera_stream_t * mm_app_add_snapshot_stream(
+                                                mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst);
+extern int mm_app_start_record_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_record_preview(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_record(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_record(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_live_snapshot(mm_camera_test_obj_t *test_obj);
+extern int mm_app_stop_live_snapshot(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_capture(mm_camera_test_obj_t *test_obj,
+                                uint8_t num_snapshots);
+extern int mm_app_stop_capture(mm_camera_test_obj_t *test_obj);
+extern int mm_app_start_rdi(mm_camera_test_obj_t *test_obj, uint8_t num_burst);
+extern int mm_app_stop_rdi(mm_camera_test_obj_t *test_obj);
+
+#endif /* __MM_QCAMERA_APP_H__ */
+
+
+
+
+
+
+
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h
new file mode 100755
index 0000000..bb5ab81
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/inc/mm_qcamera_dbg.h
@@ -0,0 +1,71 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_QCAMERA_DBG_H__
+#define __MM_QCAMERA_DBG_H__
+
+//#define LOG_DEBUG 1
+
+#ifndef LOG_DEBUG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera-test"
+    #include <utils/Log.h>
+  #else
+    #include <stdio.h>
+    #define ALOGE CDBG
+  #endif
+  #undef CDBG
+  #define CDBG(fmt, args...) do{}while(0)
+  #define CDBG_ERROR(fmt, args...) ALOGE(fmt, ##args)
+#else
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-camera-test"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+  #else
+    #include <stdio.h>
+    #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+    #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+  #endif
+#endif
+
+#ifdef _ANDROID_
+  #define CDBG_HIGH(fmt, args...)  ALOGE(fmt, ##args)
+  #define CDBG_ERROR(fmt, args...)  ALOGE(fmt, ##args)
+#else
+  #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_QCAMERA_DBG_H__ */
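Editor's note: in practice these macros mean CDBG() compiles away unless LOG_DEBUG is defined, while CDBG_ERROR()/CDBG_HIGH() always emit. A hedged usage sketch (demo_open_node is a hypothetical helper, not part of the patch):

    #include <fcntl.h>
    #include "mm_qcamera_dbg.h"

    static int demo_open_node(const char *path)
    {
        int fd;

        CDBG("%s: opening %s", __func__, path);   /* dropped unless LOG_DEBUG is set */
        fd = open(path, O_RDONLY);
        if (fd < 0) {
            CDBG_ERROR("%s: open(%s) failed", __func__, path);   /* always emitted */
        }
        return fd;
    }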
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c
new file mode 100644
index 0000000..80398e4
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_app.c
@@ -0,0 +1,801 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <dlfcn.h>
+#include <linux/msm_ion.h>
+#include <sys/mman.h>
+
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+static pthread_mutex_t app_mutex = PTHREAD_MUTEX_INITIALIZER;
+static int thread_status = 0;
+static pthread_cond_t app_cond_v = PTHREAD_COND_INITIALIZER;
+
+#define MM_QCAMERA_APP_NANOSEC_SCALE 1000000000
+
+int mm_camera_app_timedwait(uint8_t seconds)
+{
+    int rc = 0;
+    pthread_mutex_lock(&app_mutex);
+    if(FALSE == thread_status) {
+        struct timespec tw;
+        memset(&tw, 0, sizeof tw);
+        /* pthread_cond_timedwait() expects an absolute deadline; keep tv_nsec < 1s */
+        tw.tv_sec = time(NULL) + seconds;
+        tw.tv_nsec = 0;
+
+        rc = pthread_cond_timedwait(&app_cond_v, &app_mutex, &tw);
+        thread_status = FALSE;
+    }
+    pthread_mutex_unlock(&app_mutex);
+    return rc;
+}
+
+int mm_camera_app_wait()
+{
+    int rc = 0;
+    pthread_mutex_lock(&app_mutex);
+    if(FALSE == thread_status){
+        pthread_cond_wait(&app_cond_v, &app_mutex);
+        thread_status = FALSE;
+    }
+    pthread_mutex_unlock(&app_mutex);
+    return rc;
+}
+
+void mm_camera_app_done()
+{
+  pthread_mutex_lock(&app_mutex);
+  thread_status = TRUE;
+  pthread_cond_signal(&app_cond_v);
+  pthread_mutex_unlock(&app_mutex);
+}
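Editor's note: pthread_cond_timedwait() takes an absolute deadline, by default measured on CLOCK_REALTIME, so the timeout has to be built as now + delay with tv_nsec kept below one second. A minimal standalone sketch of that computation (illustrative name; the caller must hold the mutex):

    #include <pthread.h>
    #include <time.h>

    /* Wait on cond/mutex for at most 'seconds' seconds.
     * Returns 0 if signalled, ETIMEDOUT on timeout. Mutex must be held. */
    static int wait_with_timeout(pthread_cond_t *cond, pthread_mutex_t *mutex,
                                 unsigned int seconds)
    {
        struct timespec deadline;

        clock_gettime(CLOCK_REALTIME, &deadline);   /* default condvar clock */
        deadline.tv_sec += seconds;                 /* tv_nsec stays < 1e9 */

        return pthread_cond_timedwait(cond, mutex, &deadline);
    }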
+
+int mm_app_load_hal(mm_camera_app_t *my_cam_app)
+{
+    memset(&my_cam_app->hal_lib, 0, sizeof(hal_interface_lib_t));
+    my_cam_app->hal_lib.ptr = dlopen("libmmcamera_interface.so", RTLD_NOW);
+    my_cam_app->hal_lib.ptr_jpeg = dlopen("libmmjpeg_interface.so", RTLD_NOW);
+    if (!my_cam_app->hal_lib.ptr || !my_cam_app->hal_lib.ptr_jpeg) {
+        CDBG_ERROR("%s Error opening HAL library %s\n", __func__, dlerror());
+        return -MM_CAMERA_E_GENERAL;
+    }
+    *(void **)&(my_cam_app->hal_lib.get_num_of_cameras) =
+        dlsym(my_cam_app->hal_lib.ptr, "get_num_of_cameras");
+    *(void **)&(my_cam_app->hal_lib.mm_camera_open) =
+        dlsym(my_cam_app->hal_lib.ptr, "camera_open");
+    *(void **)&(my_cam_app->hal_lib.jpeg_open) =
+        dlsym(my_cam_app->hal_lib.ptr_jpeg, "jpeg_open");
+
+    if (my_cam_app->hal_lib.get_num_of_cameras == NULL ||
+        my_cam_app->hal_lib.mm_camera_open == NULL ||
+        my_cam_app->hal_lib.jpeg_open == NULL) {
+        CDBG_ERROR("%s Error loading HAL sym %s\n", __func__, dlerror());
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    my_cam_app->num_cameras = my_cam_app->hal_lib.get_num_of_cameras();
+    CDBG("%s: num_cameras = %d\n", __func__, my_cam_app->num_cameras);
+
+    return MM_CAMERA_OK;
+}
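Editor's note: the `*(void **)&fn = dlsym(...)` casts above are the usual way to assign the object pointer returned by dlsym() to a function pointer without tripping strict ISO C rules, and the documented way to distinguish "symbol is NULL" from "lookup failed" is dlerror(). A standalone sketch of the idiom ("demo_symbol" and the helper name are hypothetical):

    #include <dlfcn.h>
    #include <stdio.h>

    typedef int (*demo_fn_t)(void);

    static demo_fn_t load_demo_symbol(void *handle)
    {
        demo_fn_t fn = NULL;
        const char *err;

        dlerror();                                     /* clear any stale error */
        *(void **)&fn = dlsym(handle, "demo_symbol");  /* hypothetical symbol */
        err = dlerror();
        if (err != NULL) {
            fprintf(stderr, "dlsym failed: %s\n", err);
            return NULL;
        }
        return fn;
    }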
+
+int mm_app_allocate_ion_memory(mm_camera_app_buf_t *buf, int ion_type)
+{
+    int rc = MM_CAMERA_OK;
+    struct ion_handle_data handle_data;
+    struct ion_allocation_data alloc;
+    struct ion_fd_data ion_info_fd;
+    int main_ion_fd = 0;
+    void *data = NULL;
+
+    main_ion_fd = open("/dev/ion", O_RDONLY);
+    if (main_ion_fd <= 0) {
+        CDBG_ERROR("Ion dev open failed %s\n", strerror(errno));
+        goto ION_OPEN_FAILED;
+    }
+
+    memset(&alloc, 0, sizeof(alloc));
+    alloc.len = buf->mem_info.size;
+    /* to make it page size aligned */
+    alloc.len = (alloc.len + 4095) & (~4095);
+    alloc.align = 4096;
+    alloc.flags = ION_FLAG_CACHED;
+    alloc.heap_id_mask = ion_type;
+    rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &alloc);
+    if (rc < 0) {
+        CDBG_ERROR("ION allocation failed\n");
+        goto ION_ALLOC_FAILED;
+    }
+
+    memset(&ion_info_fd, 0, sizeof(ion_info_fd));
+    ion_info_fd.handle = alloc.handle;
+    rc = ioctl(main_ion_fd, ION_IOC_SHARE, &ion_info_fd);
+    if (rc < 0) {
+        CDBG_ERROR("ION map failed %s\n", strerror(errno));
+        goto ION_MAP_FAILED;
+    }
+
+    data = mmap(NULL,
+                alloc.len,
+                PROT_READ  | PROT_WRITE,
+                MAP_SHARED,
+                ion_info_fd.fd,
+                0);
+
+    if (data == MAP_FAILED) {
+        CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+        goto ION_MAP_FAILED;
+    }
+    buf->mem_info.main_ion_fd = main_ion_fd;
+    buf->mem_info.fd = ion_info_fd.fd;
+    buf->mem_info.handle = ion_info_fd.handle;
+    buf->mem_info.size = alloc.len;
+    buf->mem_info.data = data;
+    return MM_CAMERA_OK;
+
+ION_MAP_FAILED:
+    memset(&handle_data, 0, sizeof(handle_data));
+    handle_data.handle = ion_info_fd.handle;
+    ioctl(main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+    close(main_ion_fd);
+ION_OPEN_FAILED:
+    return -MM_CAMERA_E_GENERAL;
+}
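Editor's note: the `(alloc.len + 4095) & (~4095)` expression above rounds the requested length up to the next 4 KiB page boundary, e.g. 10000 bytes become 12288 while an already aligned 8192 stays 8192. The same trick generalised to any power-of-two alignment (illustrative helper, not part of the patch):

    #include <stddef.h>

    /* Round 'len' up to a multiple of 'align'; 'align' must be a power of two. */
    static inline size_t round_up_pow2(size_t len, size_t align)
    {
        return (len + align - 1) & ~(align - 1);
    }
    /* round_up_pow2(10000, 4096) == 12288, round_up_pow2(8192, 4096) == 8192 */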
+
+int mm_app_deallocate_ion_memory(mm_camera_app_buf_t *buf)
+{
+  struct ion_handle_data handle_data;
+  int rc = 0;
+
+  rc = munmap(buf->mem_info.data, buf->mem_info.size);
+
+  if (buf->mem_info.fd > 0) {
+      close(buf->mem_info.fd);
+      buf->mem_info.fd = 0;
+  }
+
+  if (buf->mem_info.main_ion_fd > 0) {
+      memset(&handle_data, 0, sizeof(handle_data));
+      handle_data.handle = buf->mem_info.handle;
+      ioctl(buf->mem_info.main_ion_fd, ION_IOC_FREE, &handle_data);
+      close(buf->mem_info.main_ion_fd);
+      buf->mem_info.main_ion_fd = 0;
+  }
+  return rc;
+}
+
+/* cmd = ION_IOC_CLEAN_CACHES, ION_IOC_INV_CACHES, ION_IOC_CLEAN_INV_CACHES */
+int mm_app_cache_ops(mm_camera_app_meminfo_t *mem_info,
+                     unsigned int cmd)
+{
+    struct ion_flush_data cache_inv_data;
+    struct ion_custom_data custom_data;
+    int ret = MM_CAMERA_OK;
+
+#ifdef USE_ION
+    if (NULL == mem_info) {
+        CDBG_ERROR("%s: mem_info is NULL, return here", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    memset(&cache_inv_data, 0, sizeof(cache_inv_data));
+    memset(&custom_data, 0, sizeof(custom_data));
+    cache_inv_data.vaddr = mem_info->data;
+    cache_inv_data.fd = mem_info->fd;
+    cache_inv_data.handle = mem_info->handle;
+    cache_inv_data.length = mem_info->size;
+    custom_data.cmd = cmd;
+    custom_data.arg = (unsigned long)&cache_inv_data;
+
+    CDBG("addr = %p, fd = %d, handle = %p length = %d, ION Fd = %d",
+         cache_inv_data.vaddr, cache_inv_data.fd,
+         cache_inv_data.handle, cache_inv_data.length,
+         mem_info->main_ion_fd);
+    if(mem_info->main_ion_fd > 0) {
+        if(ioctl(mem_info->main_ion_fd, ION_IOC_CUSTOM, &custom_data) < 0) {
+            ALOGE("%s: Cache Invalidate failed\n", __func__);
+            ret = -MM_CAMERA_E_GENERAL;
+        }
+    }
+#endif
+
+    return ret;
+}
+
+void mm_app_dump_frame(mm_camera_buf_def_t *frame,
+                       char *name,
+                       char *ext,
+                       int frame_idx)
+{
+    char file_name[64];
+    int file_fd;
+    int i;
+    if ( frame != NULL) {
+        snprintf(file_name, sizeof(file_name), "/data/%s_%d.%s", name, frame_idx, ext);
+        file_fd = open(file_name, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            CDBG_ERROR("%s: cannot open file %s \n", __func__, file_name);
+        } else {
+            for (i = 0; i < frame->num_planes; i++) {
+                write(file_fd,
+                      (uint8_t *)frame->buffer + frame->planes[i].data_offset,
+                      frame->planes[i].length);
+            }
+
+            close(file_fd);
+            CDBG("dump %s", file_name);
+        }
+    }
+}
+
+void mm_app_dump_jpeg_frame(const void * data, uint32_t size, char* name, char* ext, int index)
+{
+    char buf[32];
+    int file_fd;
+    if ( data != NULL) {
+        snprintf(buf, sizeof(buf), "/data/%s_%d.%s", name, index, ext);
+        CDBG("%s: %s size =%d, jobId=%d", __func__, buf, size, index);
+        file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            CDBG_ERROR("%s: cannot open file %s \n", __func__, buf);
+            return;
+        }
+        write(file_fd, data, size);
+        close(file_fd);
+    }
+}
+
+int mm_app_alloc_bufs(mm_camera_app_buf_t* app_bufs,
+                      cam_frame_len_offset_t *frame_offset_info,
+                      uint8_t num_bufs,
+                      uint8_t is_streambuf)
+{
+    int i, j;
+    int ion_type = 0x1 << CAMERA_ION_FALLBACK_HEAP_ID;
+
+    if (is_streambuf) {
+        ion_type |= 0x1 << CAMERA_ION_HEAP_ID;
+    }
+
+    for (i = 0; i < num_bufs ; i++) {
+        app_bufs[i].mem_info.size = frame_offset_info->frame_len;
+        mm_app_allocate_ion_memory(&app_bufs[i], ion_type);
+
+        app_bufs[i].buf.buf_idx = i;
+        app_bufs[i].buf.num_planes = frame_offset_info->num_planes;
+        app_bufs[i].buf.fd = app_bufs[i].mem_info.fd;
+        app_bufs[i].buf.frame_len = app_bufs[i].mem_info.size;
+        app_bufs[i].buf.buffer = app_bufs[i].mem_info.data;
+        app_bufs[i].buf.mem_info = (void *)&app_bufs[i].mem_info;
+
+        /* Plane 0 needs to be set separately. Set other planes
+         * in a loop. */
+        app_bufs[i].buf.planes[0].length = frame_offset_info->mp[0].len;
+        app_bufs[i].buf.planes[0].m.userptr = app_bufs[i].buf.fd;
+        app_bufs[i].buf.planes[0].data_offset = frame_offset_info->mp[0].offset;
+        app_bufs[i].buf.planes[0].reserved[0] = 0;
+        for (j = 1; j < frame_offset_info->num_planes; j++) {
+            app_bufs[i].buf.planes[j].length = frame_offset_info->mp[j].len;
+            app_bufs[i].buf.planes[j].m.userptr = app_bufs[i].buf.fd;
+            app_bufs[i].buf.planes[j].data_offset = frame_offset_info->mp[j].offset;
+            app_bufs[i].buf.planes[j].reserved[0] =
+                app_bufs[i].buf.planes[j-1].reserved[0] +
+                app_bufs[i].buf.planes[j-1].length;
+        }
+    }
+    CDBG("%s: X", __func__);
+    return MM_CAMERA_OK;
+}
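Editor's note: as a concrete check of the plane bookkeeping above, the comment block below walks through the default preview size, assuming an unpadded NV21 layout (real offsets may include stride/scanline padding reported by frame_offset_info):

    /* Worked example for an unpadded 800x480 NV21 frame (two planes):
     *   plane 0 (Y)    : length = 800 * 480     = 384000, reserved[0] = 0
     *   plane 1 (CbCr) : length = 800 * 480 / 2 = 192000, reserved[0] = 0 + 384000
     * so frame_len = 576000 and each plane starts where the previous one ends,
     * which is exactly what the reserved[0] accumulation in the loop computes. */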
+
+int mm_app_release_bufs(uint8_t num_bufs,
+                        mm_camera_app_buf_t* app_bufs)
+{
+    int i, rc = MM_CAMERA_OK;
+
+    CDBG("%s: E", __func__);
+
+    for (i = 0; i < num_bufs; i++) {
+        rc = mm_app_deallocate_ion_memory(&app_bufs[i]);
+    }
+    memset(app_bufs, 0, num_bufs * sizeof(mm_camera_app_buf_t));
+    CDBG("%s: X", __func__);
+    return rc;
+}
+
+int mm_app_stream_initbuf(cam_frame_len_offset_t *frame_offset_info,
+                          uint8_t *num_bufs,
+                          uint8_t **initial_reg_flag,
+                          mm_camera_buf_def_t **bufs,
+                          mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                          void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    mm_camera_buf_def_t *pBufs = NULL;
+    uint8_t *reg_flags = NULL;
+    int i, rc;
+
+    stream->offset = *frame_offset_info;
+    CDBG("%s: alloc buf for stream_id %d, len=%d",
+         __func__, stream->s_id, frame_offset_info->frame_len);
+
+    pBufs = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t) * stream->num_of_bufs);
+    reg_flags = (uint8_t *)malloc(sizeof(uint8_t) * stream->num_of_bufs);
+    if (pBufs == NULL || reg_flags == NULL) {
+        CDBG_ERROR("%s: No mem for bufs", __func__);
+        if (pBufs != NULL) {
+            free(pBufs);
+        }
+        if (reg_flags != NULL) {
+            free(reg_flags);
+        }
+        return -1;
+    }
+
+    rc = mm_app_alloc_bufs(&stream->s_bufs[0],
+                           frame_offset_info,
+                           stream->num_of_bufs,
+                           1);
+
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: mm_stream_alloc_bufs err = %d", __func__, rc);
+        free(pBufs);
+        free(reg_flags);
+        return rc;
+    }
+
+    for (i = 0; i < stream->num_of_bufs; i++) {
+        /* mapping stream bufs first */
+        pBufs[i] = stream->s_bufs[i].buf;
+        reg_flags[i] = 1;
+        rc = ops_tbl->map_ops(pBufs[i].buf_idx,
+                              -1,
+                              pBufs[i].fd,
+                              pBufs[i].frame_len,
+                              ops_tbl->userdata);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s: mapping buf[%d] err = %d", __func__, i, rc);
+            break;
+        }
+    }
+
+    if (rc != MM_CAMERA_OK) {
+        int j;
+        for (j = 0; j < i; j++) {
+            ops_tbl->unmap_ops(pBufs[j].buf_idx, -1, ops_tbl->userdata);
+        }
+        mm_app_release_bufs(stream->num_of_bufs, &stream->s_bufs[0]);
+        free(pBufs);
+        free(reg_flags);
+        return rc;
+    }
+
+    *num_bufs = stream->num_of_bufs;
+    *bufs = pBufs;
+    *initial_reg_flag = reg_flags;
+
+    CDBG("%s: X",__func__);
+    return rc;
+}
+
+int32_t mm_app_stream_deinitbuf(mm_camera_map_unmap_ops_tbl_t *ops_tbl,
+                                void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    int i;
+
+    for (i = 0; i < stream->num_of_bufs ; i++) {
+        /* mapping stream bufs first */
+        ops_tbl->unmap_ops(stream->s_bufs[i].buf.buf_idx, -1, ops_tbl->userdata);
+    }
+
+    mm_app_release_bufs(stream->num_of_bufs, &stream->s_bufs[0]);
+
+    CDBG("%s: X",__func__);
+    return 0;
+}
+
+int32_t mm_app_stream_clean_invalidate_buf(int index, void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    return mm_app_cache_ops(&stream->s_bufs[index].mem_info,
+      ION_IOC_CLEAN_INV_CACHES);
+}
+
+int32_t mm_app_stream_invalidate_buf(int index, void *user_data)
+{
+    mm_camera_stream_t *stream = (mm_camera_stream_t *)user_data;
+    return mm_app_cache_ops(&stream->s_bufs[index].mem_info, ION_IOC_INV_CACHES);
+}
+
+static void notify_evt_cb(uint32_t camera_handle,
+                          mm_camera_event_t *evt,
+                          void *user_data)
+{
+    mm_camera_test_obj_t *test_obj =
+        (mm_camera_test_obj_t *)user_data;
+    if (test_obj == NULL || test_obj->cam->camera_handle != camera_handle) {
+        CDBG_ERROR("%s: Not a valid test obj", __func__);
+        return;
+    }
+
+    CDBG("%s:E evt = %d", __func__, evt->server_event_type);
+    switch (evt->server_event_type) {
+       case CAM_EVENT_TYPE_AUTO_FOCUS_DONE:
+           CDBG("%s: rcvd auto focus done evt", __func__);
+           break;
+       case CAM_EVENT_TYPE_ZOOM_DONE:
+           CDBG("%s: rcvd zoom done evt", __func__);
+           break;
+       default:
+           break;
+    }
+
+    CDBG("%s:X", __func__);
+}
+
+int mm_app_open(mm_camera_app_t *cam_app,
+                uint8_t cam_id,
+                mm_camera_test_obj_t *test_obj)
+{
+    int32_t rc;
+    cam_frame_len_offset_t offset_info;
+
+    CDBG("%s:BEGIN\n", __func__);
+
+    test_obj->cam = cam_app->hal_lib.mm_camera_open(cam_id);
+    if(test_obj->cam == NULL) {
+        CDBG_ERROR("%s:dev open error\n", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    CDBG("Open Camera id = %d handle = %d", cam_id, test_obj->cam->camera_handle);
+
+    /* alloc ion mem for capability buf */
+    memset(&offset_info, 0, sizeof(offset_info));
+    offset_info.frame_len = sizeof(cam_capability_t);
+    rc = mm_app_alloc_bufs(&test_obj->cap_buf,
+                           &offset_info,
+                           1,
+                           0);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:alloc buf for capability error\n", __func__);
+        goto error_after_cam_open;
+    }
+
+    /* mapping capability buf */
+    rc = test_obj->cam->ops->map_buf(test_obj->cam->camera_handle,
+                                     CAM_MAPPING_BUF_TYPE_CAPABILITY,
+                                     test_obj->cap_buf.mem_info.fd,
+                                     test_obj->cap_buf.mem_info.size);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:map for capability error\n", __func__);
+        goto error_after_cap_buf_alloc;
+    }
+
+    /* alloc ion mem for getparm buf */
+    memset(&offset_info, 0, sizeof(offset_info));
+    offset_info.frame_len = sizeof(parm_buffer_t);
+    rc = mm_app_alloc_bufs(&test_obj->parm_buf,
+                           &offset_info,
+                           1,
+                           0);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:alloc buf for getparm_buf error\n", __func__);
+        goto error_after_cap_buf_map;
+    }
+
+    /* mapping getparm buf */
+    rc = test_obj->cam->ops->map_buf(test_obj->cam->camera_handle,
+                                     CAM_MAPPING_BUF_TYPE_PARM_BUF,
+                                     test_obj->parm_buf.mem_info.fd,
+                                     test_obj->parm_buf.mem_info.size);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:map getparm_buf error\n", __func__);
+        goto error_after_getparm_buf_alloc;
+    }
+
+    rc = test_obj->cam->ops->register_event_notify(test_obj->cam->camera_handle,
+                                                   notify_evt_cb,
+                                                   test_obj);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: failed register_event_notify", __func__);
+        rc = -MM_CAMERA_E_GENERAL;
+        goto error_after_getparm_buf_map;
+    }
+
+    rc = test_obj->cam->ops->query_capability(test_obj->cam->camera_handle);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: failed query_capability", __func__);
+        rc = -MM_CAMERA_E_GENERAL;
+        goto error_after_getparm_buf_map;
+    }
+
+    memset(&test_obj->jpeg_ops, 0, sizeof(mm_jpeg_ops_t));
+    test_obj->jpeg_hdl = cam_app->hal_lib.jpeg_open(&test_obj->jpeg_ops);
+    if (test_obj->jpeg_hdl == 0) {
+        CDBG_ERROR("%s: jpeg lib open err", __func__);
+        rc = -MM_CAMERA_E_GENERAL;
+        goto error_after_getparm_buf_map;
+    }
+
+    return rc;
+
+error_after_getparm_buf_map:
+    test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                  CAM_MAPPING_BUF_TYPE_PARM_BUF);
+error_after_getparm_buf_alloc:
+    mm_app_release_bufs(1, &test_obj->parm_buf);
+error_after_cap_buf_map:
+    test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                  CAM_MAPPING_BUF_TYPE_CAPABILITY);
+error_after_cap_buf_alloc:
+    mm_app_release_bufs(1, &test_obj->cap_buf);
+error_after_cam_open:
+    test_obj->cam->ops->close_camera(test_obj->cam->camera_handle);
+    test_obj->cam = NULL;
+    return rc;
+}
+
+int mm_app_close(mm_camera_test_obj_t *test_obj)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    if (test_obj == NULL || test_obj->cam == NULL) {
+        CDBG_ERROR("%s: cam not opened", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    /* unmap capability buf */
+    rc = test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                       CAM_MAPPING_BUF_TYPE_CAPABILITY);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: unmap capability buf failed, rc=%d", __func__, rc);
+    }
+
+    /* unmap parm buf */
+    rc = test_obj->cam->ops->unmap_buf(test_obj->cam->camera_handle,
+                                       CAM_MAPPING_BUF_TYPE_PARM_BUF);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: unmap setparm buf failed, rc=%d", __func__, rc);
+    }
+
+    rc = test_obj->cam->ops->close_camera(test_obj->cam->camera_handle);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: close camera failed, rc=%d", __func__, rc);
+    }
+    test_obj->cam = NULL;
+
+    /* close jpeg client */
+    if (test_obj->jpeg_hdl && test_obj->jpeg_ops.close) {
+        rc = test_obj->jpeg_ops.close(test_obj->jpeg_hdl);
+        test_obj->jpeg_hdl = 0;
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s: close jpeg failed, rc=%d", __func__, rc);
+        }
+    }
+
+    /* dealloc capability buf */
+    rc = mm_app_release_bufs(1, &test_obj->cap_buf);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: release capability buf failed, rc=%d", __func__, rc);
+    }
+
+    /* dealloc parm buf */
+    rc = mm_app_release_bufs(1, &test_obj->parm_buf);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: release setparm buf failed, rc=%d", __func__, rc);
+    }
+
+    return MM_CAMERA_OK;
+}
+
+mm_camera_channel_t * mm_app_add_channel(mm_camera_test_obj_t *test_obj,
+                                         mm_camera_channel_type_t ch_type,
+                                         mm_camera_channel_attr_t *attr,
+                                         mm_camera_buf_notify_t channel_cb,
+                                         void *userdata)
+{
+    uint32_t ch_id = 0;
+    mm_camera_channel_t *channel = NULL;
+
+    ch_id = test_obj->cam->ops->add_channel(test_obj->cam->camera_handle,
+                                            attr,
+                                            channel_cb,
+                                            userdata);
+    if (ch_id == 0) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return NULL;
+    }
+    channel = &test_obj->channels[ch_type];
+    channel->ch_id = ch_id;
+    return channel;
+}
+
+int mm_app_del_channel(mm_camera_test_obj_t *test_obj,
+                       mm_camera_channel_t *channel)
+{
+    test_obj->cam->ops->delete_channel(test_obj->cam->camera_handle,
+                                       channel->ch_id);
+    memset(channel, 0, sizeof(mm_camera_channel_t));
+    return MM_CAMERA_OK;
+}
+
+mm_camera_stream_t * mm_app_add_stream(mm_camera_test_obj_t *test_obj,
+                                       mm_camera_channel_t *channel)
+{
+    mm_camera_stream_t *stream = NULL;
+    int rc = MM_CAMERA_OK;
+    cam_frame_len_offset_t offset_info;
+
+    stream = &(channel->streams[channel->num_streams++]);
+    stream->s_id = test_obj->cam->ops->add_stream(test_obj->cam->camera_handle,
+                                                  channel->ch_id);
+    if (stream->s_id == 0) {
+        CDBG_ERROR("%s: add stream failed", __func__);
+        return NULL;
+    }
+
+    /* alloc ion mem for stream_info buf */
+    memset(&offset_info, 0, sizeof(offset_info));
+    offset_info.frame_len = sizeof(cam_stream_info_t);
+    rc = mm_app_alloc_bufs(&stream->s_info_buf,
+                           &offset_info,
+                           1,
+                           0);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:alloc buf for stream_info error\n", __func__);
+        test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+                                          channel->ch_id,
+                                          stream->s_id);
+        stream->s_id = 0;
+        return NULL;
+    }
+
+    /* mapping streaminfo buf */
+    rc = test_obj->cam->ops->map_stream_buf(test_obj->cam->camera_handle,
+                                            channel->ch_id,
+                                            stream->s_id,
+                                            CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                                            0,
+                                            -1,
+                                            stream->s_info_buf.mem_info.fd,
+                                            stream->s_info_buf.mem_info.size);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:map setparm_buf error\n", __func__);
+        mm_app_deallocate_ion_memory(&stream->s_info_buf);
+        test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+                                          channel->ch_id,
+                                          stream->s_id);
+        stream->s_id = 0;
+        return NULL;
+    }
+
+    return stream;
+}
+
+int mm_app_del_stream(mm_camera_test_obj_t *test_obj,
+                      mm_camera_channel_t *channel,
+                      mm_camera_stream_t *stream)
+{
+    test_obj->cam->ops->unmap_stream_buf(test_obj->cam->camera_handle,
+                                         channel->ch_id,
+                                         stream->s_id,
+                                         CAM_MAPPING_BUF_TYPE_STREAM_INFO,
+                                         0,
+                                         -1);
+    mm_app_deallocate_ion_memory(&stream->s_info_buf);
+    test_obj->cam->ops->delete_stream(test_obj->cam->camera_handle,
+                                      channel->ch_id,
+                                      stream->s_id);
+    memset(stream, 0, sizeof(mm_camera_stream_t));
+    return MM_CAMERA_OK;
+}
+
+mm_camera_channel_t *mm_app_get_channel_by_type(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_type_t ch_type)
+{
+    return &test_obj->channels[ch_type];
+}
+
+int mm_app_config_stream(mm_camera_test_obj_t *test_obj,
+                         mm_camera_channel_t *channel,
+                         mm_camera_stream_t *stream,
+                         mm_camera_stream_config_t *config)
+{
+    return test_obj->cam->ops->config_stream(test_obj->cam->camera_handle,
+                                             channel->ch_id,
+                                             stream->s_id,
+                                             config);
+}
+
+int mm_app_start_channel(mm_camera_test_obj_t *test_obj,
+                         mm_camera_channel_t *channel)
+{
+    return test_obj->cam->ops->start_channel(test_obj->cam->camera_handle,
+                                             channel->ch_id);
+}
+
+int mm_app_stop_channel(mm_camera_test_obj_t *test_obj,
+                        mm_camera_channel_t *channel)
+{
+    return test_obj->cam->ops->stop_channel(test_obj->cam->camera_handle,
+                                            channel->ch_id);
+}
+
+int main(int argc, char **argv)
+{
+    int c;
+    int rc;
+    int run_tc = 0;
+    int run_dual_tc = 0;
+    mm_camera_app_t my_cam_app;
+
+    CDBG("\nCamera Test Application\n");
+
+    while ((c = getopt(argc, argv, "tdh")) != -1) {
+        switch (c) {
+           case 't':
+               run_tc = 1;
+               break;
+           case 'd':
+               run_dual_tc = 1;
+               break;
+           case 'h':
+           default:
+               printf("usage: %s [-t] [-d] \n", argv[0]);
+               printf("-t:   Unit test        \n");
+               printf("-d:   Dual camera test \n");
+               return 0;
+        }
+    }
+
+    memset(&my_cam_app, 0, sizeof(mm_camera_app_t));
+    if (mm_app_load_hal(&my_cam_app) != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: mm_app_load_hal err\n", __func__);
+        return -1;
+    }
+
+    if(run_tc) {
+        printf("\tRunning unit test engine only\n");
+        rc = mm_app_unit_test_entry(&my_cam_app);
+        printf("\tUnit test engine. EXIT(%d)!!!\n", rc);
+        return rc;
+    }
+#if 0
+    if(run_dual_tc) {
+        printf("\tRunning Dual camera test engine only\n");
+        rc = mm_app_dual_test_entry(&my_cam_app);
+        printf("\t Dual camera engine. EXIT(%d)!!!\n", rc);
+        exit(rc);
+    }
+#endif
+    /* Clean up and exit. */
+    CDBG("Exiting test app\n");
+    return 0;
+}
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c
new file mode 100755
index 0000000..6a3515c
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_dual_test.c
@@ -0,0 +1,1936 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_qcamera_unit_test.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 4
+#define MM_QCAM_APP_TEST_NUM 128
+
+#define MM_QCAMERA_APP_WAIT_TIME 1000000000
+
+extern int system_dimension_set(int cam_id);
+extern int stopPreview(int cam_id);
+extern int takePicture_yuv(int cam_id);
+extern int takePicture_rdi(int cam_id);
+extern int startRdi(int cam_id);
+extern int stopRdi(int cam_id);
+extern int startStats(int cam_id);
+extern int stopStats(int cam_id);
+
+
+/*
+* 1. open back
+* 2. open front
+* 3. start back
+* 4. start front
+* 5. stop back
+* 6. stop front
+* 7. close back
+* 8. close front
+* 9. take picture
+* a. start recording
+* b. stop recording
+* c. take picture rdi
+*/
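+/* Each dual test case name encodes the sequence of the operations listed
+ * above; e.g. test case 12436857 = open back (1), open front (2),
+ * start front (4), start back (3), stop front (6), close front (8),
+ * stop back (5), close back (7). */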
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+static int num_test_cases = 0;
+struct test_case_params {
+  uint16_t launch;     /* number of open/close iterations */
+  uint16_t preview;    /* preview (or RDI) start/stop iterations per launch */
+  uint16_t recording;  /* recording start/stop iterations per preview */
+  uint16_t snapshot;   /* snapshots taken per preview */
+};
+
+/*  Test case 12436857 :*/
+
+int mm_app_dtc_0(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 0...\n");
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG_ERROR("%s: stopRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+
+        CDBG_ERROR("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        sleep(1);
+        CDBG_ERROR("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 12436587 :*/
+
+int mm_app_dtc_1(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 1...\n");
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG_ERROR("%s: stopRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 12436578 :*/
+
+int mm_app_dtc_2(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 2...\n");
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG_ERROR("%s: stopRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 241395768 : the 1-3-5-7 block is performed three times,
+* and within each iteration operation 9 (take picture) is performed thrice */
+
+int mm_app_dtc_3(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview and snapshot on back Camera and RDI on Front camera 3...\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: startRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        usleep(10*1000);
+
+        for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++) {
+          CDBG_ERROR("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                  CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                  CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          CDBG_ERROR("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                 CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                  goto end;
+          }
+
+          for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+              CDBG_ERROR("DUAL take picture for back \n");
+              if ( MM_CAMERA_OK != (rc = takePicture_yuv(back_camera))) {
+                  CDBG_ERROR("%s: TakePicture() err=%d\n", __func__, rc);
+                  break;
+              }
+              mm_camera_app_wait();
+
+          }
+          usleep(10*1000);
+          CDBG_ERROR("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                  CDBG_ERROR("%s: stopPreview() backcamera err=%d\n", __func__, rc);
+                  goto end;
+          }
+          usleep(10*1000);
+
+          CDBG_ERROR("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                  CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                  rc = -1;
+                  goto end;
+          }
+          usleep(20*1000);
+        }
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG("%s: stopRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 2413ab5768 : the 1-3-5-7 block is performed three times,
+* and within each iteration operations a/b (start/stop recording) are performed thrice */
+
+int mm_app_dtc_4(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 4...\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: startRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        usleep(20*1000);
+
+        for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++){
+          CDBG_ERROR("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                 CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                 CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          CDBG_ERROR("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                 goto end;
+          }
+          usleep(30*1000);
+
+          for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+             CDBG_ERROR("DUAL start camera record for back \n");
+             if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+                 CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+                 break;
+             }
+
+             mm_camera_app_wait();
+             usleep(15*1000);
+             CDBG_ERROR("DUAL stop camera record for back \n");
+             if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+                 CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+                 break;
+             }
+          }
+          usleep(10*1000);
+
+          CDBG_ERROR("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                 CDBG_ERROR("%s: stopPreview() backcamera err=%d\n", __func__, rc);
+                 goto end;
+          }
+          usleep(10*1000);
+
+          CDBG_ERROR("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                 CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                 rc = -1;
+                 goto end;
+          }
+          usleep(20*1000);
+        }
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG("%s: stopRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 24135768 : 1357 * 3, This is performed three times*/
+
+int mm_app_dtc_5(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 5...\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: startRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        for (k = 0; k < 4 ; k++) {
+          CDBG_ERROR("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                  CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                  CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                  rc = -1;
+                  goto end;
+          }
+
+          CDBG_ERROR("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                 CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                  goto end;
+          }
+          mm_camera_app_wait();
+          sleep(1);
+
+          CDBG_ERROR("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                  CDBG_ERROR("%s: stopPreview() backcamera err=%d\n", __func__, rc);
+                  goto end;
+          }
+          usleep(10*1000);
+
+          CDBG_ERROR("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                  CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                  rc = -1;
+                  goto end;
+          }
+          sleep(1);
+        }
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG("%s: stopRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 13246857 : 2468 * 3, This is performed three times*/
+
+int mm_app_dtc_6(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 6...\n");
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        for (k = 0; k < 4 ; k++) {
+        CDBG_ERROR("DUAL open front camera %d \n",k);
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL stop camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+                CDBG_ERROR("%s: stopRdi() front camera err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+
+        CDBG_ERROR("DUAL close front camera\n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        sleep(1);
+        }
+        CDBG_ERROR("DUAL stop camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close back camera \n");
+        if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*Multi Threaded Test Cases*/
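+/* Each multi-threaded case below spawns back_thread and front_thread
+ * concurrently; the per-thread test_case_params select how many
+ * launch/preview/recording/snapshot iterations each thread runs. */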
+static void *front_thread(void *data)
+{
+        int front_camera = 1;
+        int rc = MM_CAMERA_OK;
+        int i,j,k,m;
+        struct test_case_params params
+          = *((struct test_case_params *)data);
+        for (i = 0; i < params.launch; i++) {
+          CDBG_ERROR("DUAL open front camera %d\n",i);
+          if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+          }
+
+          if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+            CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+          }
+
+          for (j = 0; j < params.preview; j++) {
+            CDBG_ERROR("DUAL start camera Rdi for front %d ,%d \n",i,j);
+            if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+              CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+              goto end;
+            }
+            mm_camera_app_wait();
+            usleep(20*1000);
+            for (k = 0; k < params.snapshot; k++) {
+              CDBG_ERROR("DUAL take picture for front %d,%d,%d \n",i,j,k);
+              if ( MM_CAMERA_OK != (rc = takePicture_rdi(front_camera))) {
+                CDBG_ERROR("%s: TakePicture() err=%d\n", __func__, rc);
+                goto end;
+              }
+              mm_camera_app_wait();
+              usleep(30*1000);
+            }
+            CDBG_ERROR("DUAL stop camera Rdi for front %d,%d\n",i,j);
+            if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+              CDBG_ERROR("%s: stopRdi() front camera err=%d\n", __func__, rc);
+              goto end;
+            }
+            usleep(10*1000);
+          }
+
+          CDBG_ERROR("DUAL close front camera %d\n",i);
+          if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+          }
+        }
+end:
+        CDBG_ERROR("DUAL front thread close %d",rc);
+        return NULL;
+}
+
+static void *back_thread(void *data)
+{
+        int rc = MM_CAMERA_OK;
+        int back_camera = 0;
+        int i,j,k,m;
+        struct test_case_params params
+          = *((struct test_case_params *)data);
+        for (i = 0; i < params.launch; i++) {
+          CDBG_ERROR("DUAL open back camera %d\n",i);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+          }
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+            CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+          }
+
+          for (j = 0; j < params.preview; j++) {
+            CDBG_ERROR("DUAL start camera Preview for back %d, %d\n",i,j);
+            if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+              CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+              goto end;
+            }
+            mm_camera_app_wait();
+            usleep(20*1000);
+            for (k = 0; k < params.snapshot; k++) {
+              CDBG_ERROR("DUAL take picture for back %d, %d, %d\n",i,j,k);
+              if ( MM_CAMERA_OK != (rc = takePicture_yuv(back_camera))) {
+                CDBG_ERROR("%s: TakePicture() err=%d\n", __func__, rc);
+                goto end;
+              }
+              mm_camera_app_wait();
+              usleep(30*1000);
+            }
+
+            for (m = 0; m < params.recording; m++) {
+              CDBG_ERROR("DUAL start record for back %d, %d, %d\n",i,j,m);
+              if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+                CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+                break;
+              }
+
+              mm_camera_app_wait();
+              usleep(10*1000);
+              CDBG_ERROR("DUAL stop camera record for back \n");
+              if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+                CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+                break;
+              }
+              usleep(10*1000);
+            }
+            CDBG_ERROR("DUAL stop camera Preview for back %d, %d\n",i,j);
+            if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+              CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+              goto end;
+            }
+            usleep(10*1000);
+          }
+
+          CDBG_ERROR("DUAL close back camera %d\n",i);
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+          }
+        }
+end:
+        CDBG_ERROR("DUAL back thread close %d",rc);
+        return NULL;
+}
+
+/*  Test case m13572468 : Open & start in two concurrent pthreads */
+int mm_app_dtc_7(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params params;
+        memset(&params, 0, sizeof(struct test_case_params));
+        params.launch = 5;
+        params.preview = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 7...\n");
+
+        CDBG_ERROR("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &params);
+        CDBG_ERROR("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &params);
+        sleep(1);
+        CDBG_ERROR("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        CDBG_ERROR("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case m139572468 : Open & start in two concurrent pthreads */
+int mm_app_dtc_8(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.snapshot= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 8...\n");
+
+        CDBG_ERROR("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        CDBG_ERROR("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        CDBG_ERROR("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        CDBG_ERROR("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0)
+          printf("\nPassed\n");
+        else
+          printf("\nFailed\n");
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case m1395724c68 : Open & start in two concurrent pthreads */
+int mm_app_dtc_9(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.snapshot= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        fparams.snapshot = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 9...\n");
+
+        CDBG_ERROR("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        CDBG_ERROR("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        CDBG_ERROR("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        CDBG_ERROR("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case m13ab572468 : Open & start in two concurrent pthreads */
+int mm_app_dtc_10(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.recording= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 10...\n");
+
+        CDBG_ERROR("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        CDBG_ERROR("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        CDBG_ERROR("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        CDBG_ERROR("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        CDBG_ERROR("DUAL end \n");
+end:
+        if(rc == 0) {
+          printf("\nPassed\n");
+        }else{
+          printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case m13ab5724c68 : Open & start in two concurrent pthreads */
+int mm_app_dtc_11(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 5;
+        bparams.preview = 5;
+        bparams.recording= 5;
+        fparams.launch = 5;
+        fparams.preview = 5;
+        fparams.snapshot = 5;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 11...\n");
+
+        CDBG_ERROR("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        CDBG_ERROR("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        CDBG_ERROR("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        CDBG_ERROR("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case m1728 : Open & start in two concurrent pthreads */
+int mm_app_dtc_12(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int result = 0;
+
+        pthread_t back_thread_id, front_thread_id;
+        struct test_case_params bparams, fparams;
+        memset(&bparams, 0, sizeof(struct test_case_params));
+        memset(&fparams, 0, sizeof(struct test_case_params));
+        bparams.launch = 15;
+        fparams.launch = 15;
+        printf("\n Verifying Preview on back Camera and RDI on Front camera 12...\n");
+
+        CDBG_ERROR("start back DUAL ");
+        rc = pthread_create(&back_thread_id, NULL, back_thread, &bparams);
+        CDBG_ERROR("start front DUAL ");
+        rc = pthread_create(&front_thread_id, NULL, front_thread, &fparams);
+        sleep(1);
+        CDBG_ERROR("stop back DUAL ");
+        rc = pthread_join(back_thread_id, NULL);
+        CDBG_ERROR("stop front DUAL ");
+        rc = pthread_join(front_thread_id, NULL);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/*  Test case 2413(ab)5768
+ *  Test the dual camera usecase. We startPreview on front camera,
+ *  but backend will allocate RDI buffers and start front camera in
+ *  RDI streaming mode. It then diverts RDI frames, converts them into YUV 420
+ *  through C2D and generates preview data in the buffers allocated here.
+ *  Back camera will use the pixel interface as usual.
+ */
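+/* Decoded sequence: open front (2), start front (4), then for each
+ * iteration open back (1), start back (3), record start/stop (a/b)
+ * repeatedly, stop back (5), close back (7); finally stop front (6)
+ * and close front (8). */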
+
+int mm_app_dtc_13(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j,k;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n 13. Verifying Preview + Recording on back Camera and Preview (through RDI) on Front camera\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Preview for front \n");
+        if( MM_CAMERA_OK != (rc = startPreview(front_camera))) {
+               CDBG_ERROR("%s: front camera startPreview() err=%d\n", __func__, rc);
+               goto end;
+        }
+        mm_camera_app_wait();
+        usleep(20*1000);
+
+        for (k = 0; k < MM_QCAMERA_APP_INTERATION ; k++){
+          CDBG_ERROR("DUAL open back camera %d \n",k);
+          if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                 CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                 CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                 rc = -1;
+                 goto end;
+          }
+
+          CDBG_ERROR("DUAL start camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                 goto end;
+          }
+          usleep(30*1000);
+
+          for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+             CDBG_ERROR("DUAL start camera record for back Iteration %d \n", j);
+             if ( MM_CAMERA_OK != (rc = startRecording(back_camera))) {
+                 CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+                 break;
+             }
+
+             mm_camera_app_wait();
+             usleep(10*1000*1000);
+             CDBG_ERROR("DUAL stop camera record for back Iteration %d\n", j);
+             if ( MM_CAMERA_OK != (rc = stopRecording(back_camera))) {
+                 CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+                 break;
+             }
+          }
+          usleep(10*1000);
+
+          CDBG_ERROR("DUAL stop camera Preview for back \n");
+          if( MM_CAMERA_OK != (rc = stopPreview(back_camera))) {
+                 CDBG_ERROR("%s: stopPreview() backcamera err=%d\n", __func__, rc);
+                 goto end;
+          }
+          usleep(10*1000);
+
+          CDBG_ERROR("DUAL close back camera\n");
+          if( mm_app_close(back_camera) != MM_CAMERA_OK) {
+                 CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                 rc = -1;
+                 goto end;
+          }
+          usleep(20*1000);
+        }
+        CDBG_ERROR("DUAL stop camera Preview for Rdi \n");
+        if( MM_CAMERA_OK != (rc = stopPreview(front_camera))) {
+                CDBG_ERROR("%s: stopPreview() frontcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        usleep(10*1000);
+        CDBG_ERROR("DUAL close front camera \n");
+        if( mm_app_close(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/* The six reference test cases below only exercise the open path for dual camera */
+int mm_app_dtc_1243(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera Preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+int mm_app_dtc_2134(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera Preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+               CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera Rdi for back \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+int mm_app_dtc_2143(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+int mm_app_dtc_2413(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera rdi for front \n");
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera preview for back \n");
+
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+int mm_app_dtc_1234(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        CDBG_ERROR("DUAL open back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL open front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        CDBG_ERROR("DUAL start camera preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+
+        CDBG_ERROR("DUAL start camera rdi for front \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+int mm_app_dtc_1324(mm_camera_app_t *cam_apps)
+{
+        int rc = MM_CAMERA_OK;
+        int i,j;
+        int result = 0;
+        int front_camera = 1;
+        int back_camera = 0;
+
+        printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+        CDBG_ERROR("DUAL start back camera \n");
+        if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL start camera preview for back \n");
+        if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+                CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+                goto end;
+        }
+        //mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL start front camera \n");
+        if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+
+        if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+                CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+                rc = -1;
+                goto end;
+        }
+        CDBG_ERROR("DUAL start rdi preview \n");
+
+        if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+                CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+                goto end;
+        }
+        mm_camera_app_wait();
+        sleep(1);
+        CDBG_ERROR("DUAL end \n");
+
+end:
+        if(rc == 0) {
+                printf("\nPassed\n");
+        }else{
+                printf("\nFailed\n");
+        }
+        CDBG("%s:END, rc = %d\n", __func__, rc);
+        return rc;
+}
+
+/* Single camera test cases */
+int mm_app_dtc_s_0(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+    int front_camera = 1;
+    int back_camera = 0;
+
+    printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+    if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+    if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+        CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+        CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+        goto end;
+    }
+
+    mm_camera_app_wait();
+    if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+    if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+        CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+        CDBG_ERROR("%s: front camera startRdi() err=%d\n", __func__, rc);
+        goto end;
+    }
+    mm_camera_app_wait();
+
+    if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+        CDBG_ERROR("%s: front camera stopRdi() err=%d\n", __func__, rc);
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+        CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+        goto end;
+    }
+
+    if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_dtc_s_1(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+
+    printf("\n Verifying Snapshot on front and back camera...\n");
+    for(i = 0; i < cam_apps->num_cameras; i++) {
+        if( mm_app_open(i) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+            CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+
+        if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+            CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+            break;
+        }
+        for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+            if( MM_CAMERA_OK != (rc = takePicture_yuv(my_cam_app.cam_open))) {
+                CDBG_ERROR("%s: TakePicture() err=%d\n", __func__, rc);
+                break;
+            }
+            /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+                CDBG_ERROR("%s: Snapshot/Preview Callback not received in time or qbuf Faile\n", __func__);
+                break;
+            }*/
+            mm_camera_app_wait();
+            result++;
+        }
+        if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+            CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+            break;
+        }
+        if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        if(result != MM_QCAMERA_APP_INTERATION) {
+            printf("%s: Snapshot Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+            rc = -1;
+            break;
+        }
+
+        result = 0;
+    }
+end:
+    if(rc == 0) {
+        printf("\t***Passed***\n");
+    }else{
+        printf("\t***Failed***\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_dtc_s_2(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+
+    printf("\n Verifying Video on front and back camera...\n");
+    for(i = 0; i < cam_apps->num_cameras; i++) {
+        if( mm_app_open(i) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+            CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+
+        if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+            CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+            break;
+        }
+        for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+            if( MM_CAMERA_OK != (rc = startRecording(my_cam_app.cam_open))) {
+                CDBG_ERROR("%s: startRecording() err=%d\n", __func__, rc);
+                break;
+            }
+
+            /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+            CDBG_ERROR("%s: Video Callback not received in time\n", __func__);
+            break;
+            }*/
+            mm_camera_app_wait();
+            if( MM_CAMERA_OK != (rc = stopRecording(my_cam_app.cam_open))) {
+                CDBG_ERROR("%s: stopRecording() err=%d\n", __func__, rc);
+                break;
+            }
+            result++;
+        }
+        if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+            CDBG("%s: stopPreview() err=%d\n", __func__, rc);
+            break;
+        }
+        if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        if(result != MM_QCAMERA_APP_INTERATION) {
+            printf("%s: Video Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+            rc = -1;
+            break;
+        }
+
+        result = 0;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_dtc_s_3(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+
+    printf("\n Verifying RDI Stream on front and back camera...\n");
+    if(cam_apps->num_cameras == 0) {
+        CDBG_ERROR("%s:Query Failed: Num of cameras = %d\n",__func__, cam_apps->num_cameras);
+        rc = -1;
+        goto end;
+    }
+    for(i = 0; i < cam_apps->num_cameras; i++) {
+        if( mm_app_open(i) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+            CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+            if( MM_CAMERA_OK != (rc = startRdi(my_cam_app.cam_open))) {
+                CDBG_ERROR("%s: startRdi() err=%d\n", __func__, rc);
+                break;
+            }
+
+            /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+            CDBG_ERROR("%s: Video Callback not received in time\n", __func__);
+            break;
+            }*/
+            mm_camera_app_wait();
+            if( MM_CAMERA_OK != (rc = stopRdi(my_cam_app.cam_open))) {
+                CDBG_ERROR("%s: stopRdi() err=%d\n", __func__, rc);
+                break;
+            }
+            result++;
+        }
+        if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+            rc = -1;
+            goto end;
+        }
+        if(result != MM_QCAMERA_APP_INTERATION) {
+            printf("%s: Video Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+            rc = -1;
+            break;
+        }
+
+        result = 0;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+/* Stats test case */
+int mm_app_dtc_s_5(mm_camera_app_t *cam_apps)
+{
+    int rc = MM_CAMERA_OK;
+    int i,j;
+    int result = 0;
+    int front_camera = 1;
+    int back_camera = 0;
+
+    printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+    if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+    if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+        CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+
+    if( MM_CAMERA_OK != (rc = startStats(back_camera))) {
+        CDBG_ERROR("%s: back camera startStats() err=%d\n", __func__, rc);
+        goto end;
+    }
+
+    mm_camera_app_wait();
+
+    if( MM_CAMERA_OK != (rc = stopStats(my_cam_app.cam_open))) {
+        CDBG("%s: stopStats() err=%d\n", __func__, rc);
+        goto end;
+    }
+
+    if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+        CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+        rc = -1;
+        goto end;
+    }
+end:
+    if(rc == 0) {
+        printf("\nPassed\n");
+    }else{
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
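+/* Populate the global mm_app_tc table with the dual-camera test cases and return how many were registered. */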
+int mm_app_gen_dual_test_cases()
+{
+    int tc = 0;
+    memset(mm_app_tc, 0, sizeof(mm_app_tc));
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_0;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_1;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_2;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_3;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_4;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_5;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_6;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_7;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_8;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_9;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_10;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_11;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_12;
+    if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_13;
+
+    return tc;
+}
+
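+/* Run every registered dual-camera test case in order; abort on the first failure and report the totals. */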
+int mm_app_dual_test_entry(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, tc = 0;
+    int cam_id = 0;
+
+    tc = mm_app_gen_dual_test_cases();
+    CDBG("Running %d test cases\n",tc);
+    for(i = 0; i < tc; i++) {
+        mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+        if(mm_app_tc[i].r != MM_CAMERA_OK) {
+            printf("%s: test case %d error = %d, abort unit testing engine!!!!\n",
+                    __func__, i, mm_app_tc[i].r);
+            rc = mm_app_tc[i].r;
+            goto end;
+        }
+    }
+end:
+    printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+    return rc;
+}
+
+
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
new file mode 100644
index 0000000..6afa31b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_preview.c
@@ -0,0 +1,415 @@
+/*
+Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
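+/* Preview stream callback: dump the received frame to file, then return the buffer to the camera and invalidate its cache. */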
+static void mm_app_preview_notify_cb(mm_camera_super_buf_t *bufs,
+                                     void *user_data)
+{
+    char file_name[64];
+    mm_camera_buf_def_t *frame = bufs->bufs[0];
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+    CDBG("%s: BEGIN - length=%d, frame idx = %d\n",
+         __func__, frame->frame_len, frame->frame_idx);
+    snprintf(file_name, sizeof(file_name), "P_C%d", pme->cam->camera_handle);
+    mm_app_dump_frame(frame, file_name, "yuv", frame->frame_idx);
+
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            frame)) {
+        CDBG_ERROR("%s: Failed in Preview Qbuf\n", __func__);
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+    CDBG("%s: END\n", __func__);
+}
+
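+/* ZSL channel callback: locate the preview and snapshot streams in the super-buffer, dump both frames, then requeue them. */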
+static void mm_app_zsl_notify_cb(mm_camera_super_buf_t *bufs,
+                                 void *user_data)
+{
+    int i = 0;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *p_stream = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_buf_def_t *p_frame = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+
+    CDBG("%s: BEGIN\n", __func__);
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        CDBG_ERROR("%s: Wrong channel id (%d)", __func__, bufs->ch_id);
+        return;
+    }
+
+    /* find preview stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_PREVIEW) {
+            p_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == p_stream) {
+        CDBG_ERROR("%s: cannot find preview stream", __func__);
+        return;
+    }
+
+    /* find snapshot stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+            m_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == m_stream) {
+        CDBG_ERROR("%s: cannot find snapshot stream", __func__);
+        return;
+    }
+
+    /* find preview frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+            p_frame = bufs->bufs[i];
+            break;
+        }
+    }
+
+    /* find snapshot frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+            m_frame = bufs->bufs[i];
+            break;
+        }
+    }
+
+    if (!m_frame || !p_frame) {
+        CDBG_ERROR("%s: cannot find preview/snapshot frame", __func__);
+        return;
+    }
+
+    mm_app_dump_frame(p_frame, "zsl_preview", "yuv", p_frame->frame_idx);
+    mm_app_dump_frame(m_frame, "zsl_main", "yuv", m_frame->frame_idx);
+
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            p_frame)) {
+        CDBG_ERROR("%s: Failed in preview Qbuf\n", __func__);
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)p_frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            m_frame)) {
+        CDBG_ERROR("%s: Failed in main Qbuf\n", __func__);
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+    CDBG("%s: END\n", __func__);
+}
+
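+/* Add and configure a continuous preview stream (DEFAULT_PREVIEW_FORMAT at the default preview dimensions) on the given channel. */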
+mm_camera_stream_t * mm_app_add_preview_stream(mm_camera_test_obj_t *test_obj,
+                                               mm_camera_channel_t *channel,
+                                               mm_camera_buf_notify_t stream_cb,
+                                               void *userdata,
+                                               uint8_t num_bufs)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_PREVIEW;
+    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_PREVIEW_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_PREVIEW_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:config preview stream err=%d\n", __func__, rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
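+/* Add and configure a snapshot stream; num_burst == 0 selects continuous streaming, otherwise burst mode with the given count. */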
+mm_camera_stream_t * mm_app_add_snapshot_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_SNAPSHOT;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = DEFAULT_SNAPSHOT_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:config preview stream err=%d\n", __func__, rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+mm_camera_channel_t * mm_app_add_preview_channel(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_PREVIEW,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return NULL;
+    }
+
+    stream = mm_app_add_preview_stream(test_obj,
+                                       channel,
+                                       mm_app_preview_notify_cb,
+                                       (void *)test_obj,
+                                       PREVIEW_BUF_NUM);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        mm_app_del_channel(test_obj, channel);
+        return NULL;
+    }
+
+    return channel;
+}
+
+int mm_app_stop_and_del_channel(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    uint8_t i;
+
+    rc = mm_app_stop_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop Preview failed rc=%d\n", __func__, rc);
+    }
+
+    for (i = 0; i < channel->num_streams; i++) {
+        stream = &channel->streams[i];
+        rc = mm_app_del_stream(test_obj, channel, stream);
+        if (MM_CAMERA_OK != rc) {
+            CDBG_ERROR("%s:del stream(%d) failed rc=%d\n", __func__, i, rc);
+        }
+    }
+
+    rc = mm_app_del_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:delete channel failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_start_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+    uint8_t i;
+
+    channel =  mm_app_add_preview_channel(test_obj);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start preview failed rc=%d\n", __func__, rc);
+        for (i = 0; i < channel->num_streams; i++) {
+            stream = &channel->streams[i];
+            mm_app_del_stream(test_obj, channel, stream);
+        }
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+
+    mm_camera_channel_t *channel =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_PREVIEW);
+
+    rc = mm_app_stop_and_del_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop Preview failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_start_preview_zsl(mm_camera_test_obj_t *test_obj)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *s_preview = NULL;
+    mm_camera_stream_t *s_main = NULL;
+    mm_camera_channel_attr_t attr;
+
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+    attr.look_back = 2;
+    attr.post_frame_skip = 0;
+    attr.water_mark = 2;
+    attr.max_unmatched_frames = 3;
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_ZSL,
+                                 &attr,
+                                 mm_app_zsl_notify_cb,
+                                 test_obj);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_preview = mm_app_add_preview_stream(test_obj,
+                                          channel,
+                                          mm_app_preview_notify_cb,
+                                          (void *)test_obj,
+                                          PREVIEW_BUF_NUM);
+    if (NULL == s_preview) {
+        CDBG_ERROR("%s: add preview stream failed\n", __func__);
+        mm_app_del_channel(test_obj, channel);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_main = mm_app_add_snapshot_stream(test_obj,
+                                        channel,
+                                        NULL,
+                                        NULL,
+                                        PREVIEW_BUF_NUM,
+                                        0);
+    if (NULL == s_main) {
+        CDBG_ERROR("%s: add main snapshot stream failed\n", __func__);
+        mm_app_del_stream(test_obj, channel, s_preview);
+        mm_app_del_channel(test_obj, channel);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start zsl failed rc=%d\n", __func__, rc);
+        mm_app_del_stream(test_obj, channel, s_preview);
+        mm_app_del_stream(test_obj, channel, s_main);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_preview_zsl(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+
+    mm_camera_channel_t *channel =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_ZSL);
+
+    rc = mm_app_stop_and_del_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop Preview failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c
new file mode 100644
index 0000000..dae204e
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_rdi.c
@@ -0,0 +1,303 @@
+/*
+Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+static uint32_t rdi_len = 0;
+
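+/* Write rdi_len bytes of each plane of a raw frame to /data/<name>_<frame_idx>.<ext>. */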
+static void mm_app_rdi_dump_frame(mm_camera_buf_def_t *frame,
+                                  char *name,
+                                  char *ext,
+                                  int frame_idx)
+{
+    char file_name[64];
+    int file_fd;
+    int i;
+    if (frame != NULL) {
+        snprintf(file_name, sizeof(file_name), "/data/%s_%03d.%s", name, frame_idx, ext);
+        file_fd = open(file_name, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) {
+            CDBG_ERROR("%s: cannot open file %s \n", __func__, file_name);
+        } else {
+            for (i = 0; i < frame->num_planes; i++) {
+                write(file_fd,
+                      (uint8_t *)frame->buffer + frame->planes[i].data_offset,
+                      rdi_len);
+            }
+
+            close(file_fd);
+            CDBG("dump %s", file_name);
+        }
+    }
+}
+
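+/* RDI stream callback: dump the raw frame, then requeue the buffer and invalidate its cache. */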
+static void mm_app_rdi_notify_cb(mm_camera_super_buf_t *bufs,
+                                 void *user_data)
+{
+    char file_name[64];
+    mm_camera_buf_def_t *frame = bufs->bufs[0];
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+    CDBG("%s: BEGIN - length=%d, frame idx = %d stream_id=%d\n",
+         __func__, frame->frame_len, frame->frame_idx, frame->stream_id);
+    snprintf(file_name, sizeof(file_name), "RDI_dump_%d", pme->cam->camera_handle);
+    mm_app_rdi_dump_frame(frame, file_name, "raw", frame->frame_idx);
+
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            frame)) {
+        CDBG_ERROR("%s: Failed in RDI Qbuf\n", __func__);
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+    CDBG("%s: END\n", __func__);
+}
+
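+/* Add an RDI (RAW) stream: pick a supported Bayer MIPI RAW format from the capabilities and use the sensor's reported raw dimensions. */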
+mm_camera_stream_t * mm_app_add_rdi_stream(mm_camera_test_obj_t *test_obj,
+                                               mm_camera_channel_t *channel,
+                                               mm_camera_buf_notify_t stream_cb,
+                                               void *userdata,
+                                               uint8_t num_bufs,
+                                               uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    int i;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+    cam_format_t fmt = CAM_FORMAT_MAX;
+    cam_stream_buf_plane_info_t *buf_planes;
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        return NULL;
+    }
+
+    CDBG_ERROR("%s: raw_dim w:%d height:%d\n", __func__, cam_cap->raw_dim[0].width, cam_cap->raw_dim[0].height);
+    for (i = 0;i < cam_cap->supported_raw_fmt_cnt;i++) {
+        CDBG_ERROR("%s: supported_raw_fmts[%d]=%d\n", __func__, i, cam_cap->supported_raw_fmts[i]);
+        if (CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG <= cam_cap->supported_raw_fmts[i] &&
+            CAM_FORMAT_BAYER_MIPI_RAW_12BPP_BGGR >= cam_cap->supported_raw_fmts[i])
+        {
+            fmt = cam_cap->supported_raw_fmts[i];
+            CDBG_ERROR("%s: fmt=%d\n", __func__, fmt);
+        }
+    }
+
+    if (CAM_FORMAT_MAX == fmt) {
+        CDBG_ERROR("%s: rdi format not supported\n", __func__);
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_RAW;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = fmt;
+    stream->s_config.stream_info->dim.width = cam_cap->raw_dim[0].width;
+    stream->s_config.stream_info->dim.height = cam_cap->raw_dim[0].height;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:config rdi stream err=%d\n", __func__, rc);
+        return NULL;
+    }
+
+    buf_planes = &stream->s_config.stream_info->buf_planes;
+    rdi_len = buf_planes->plane_info.mp[0].len;
+    CDBG("%s: plane_info %dx%d len:%d frame_len:%d\n", __func__,
+        buf_planes->plane_info.mp[0].stride, buf_planes->plane_info.mp[0].scanline,
+        buf_planes->plane_info.mp[0].len, buf_planes->plane_info.frame_len);
+
+    return stream;
+}
+
+mm_camera_stream_t * mm_app_add_rdi_snapshot_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_SNAPSHOT;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = DEFAULT_SNAPSHOT_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_SNAPSHOT_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_SNAPSHOT_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:config rdi stream err=%d\n", __func__, rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+mm_camera_channel_t * mm_app_add_rdi_channel(mm_camera_test_obj_t *test_obj, uint8_t num_burst)
+{
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_RDI,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return NULL;
+    }
+
+    stream = mm_app_add_rdi_stream(test_obj,
+                                       channel,
+                                       mm_app_rdi_notify_cb,
+                                       (void *)test_obj,
+                                       RDI_BUF_NUM,
+                                       num_burst);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        mm_app_del_channel(test_obj, channel);
+        return NULL;
+    }
+
+    CDBG("%s: channel=%d stream=%d\n", __func__, channel->ch_id, stream->s_id);
+    return channel;
+}
+
+int mm_app_stop_and_del_rdi_channel(mm_camera_test_obj_t *test_obj,
+                                mm_camera_channel_t *channel)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    uint8_t i;
+
+    rc = mm_app_stop_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop RDI failed rc=%d\n", __func__, rc);
+    }
+
+    for (i = 0; i < channel->num_streams; i++) {
+        stream = &channel->streams[i];
+        rc = mm_app_del_stream(test_obj, channel, stream);
+        if (MM_CAMERA_OK != rc) {
+            CDBG_ERROR("%s:del stream(%d) failed rc=%d\n", __func__, i, rc);
+        }
+    }
+
+    rc = mm_app_del_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:delete channel failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_start_rdi(mm_camera_test_obj_t *test_obj, uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+
+    channel = mm_app_add_rdi_channel(test_obj, num_burst);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start rdi failed rc=%d\n", __func__, rc);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_rdi(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+
+    mm_camera_channel_t *channel =
+        mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_RDI);
+
+    rc = mm_app_stop_and_del_rdi_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop RDI failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c
new file mode 100644
index 0000000..ba35f5a
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_snapshot.c
@@ -0,0 +1,416 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+/* This callback is received once the complete JPEG encoding is done */
+static void jpeg_encode_cb(jpeg_job_status_t status,
+                           uint32_t client_hdl,
+                           uint32_t jobId,
+                           mm_jpeg_output_t *p_buf,
+                           void *userData)
+{
+    int i = 0;
+    mm_camera_test_obj_t *pme = NULL;
+    CDBG("%s: BEGIN\n", __func__);
+
+    pme = (mm_camera_test_obj_t *)userData;
+    if (pme->jpeg_hdl != client_hdl ||
+        jobId != pme->current_job_id ||
+        !pme->current_job_frames) {
+        CDBG_ERROR("%s: NULL current job frames or not matching job ID (%d, %d)",
+                   __func__, jobId, pme->current_job_id);
+        return;
+    }
+
+    /* dump jpeg img */
+    CDBG_ERROR("%s: job %d, status=%d", __func__, jobId, status);
+    if (status == JPEG_JOB_STATUS_DONE && p_buf != NULL) {
+        mm_app_dump_jpeg_frame(p_buf->buf_vaddr, p_buf->buf_filled_len, "jpeg_dump", "jpg", jobId);
+    }
+
+    /* buf done current encoding frames */
+    pme->current_job_id = 0;
+    for (i = 0; i < pme->current_job_frames->num_bufs; i++) {
+        if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->current_job_frames->camera_handle,
+                                                pme->current_job_frames->ch_id,
+                                                pme->current_job_frames->bufs[i])) {
+            CDBG_ERROR("%s: Failed in Qbuf\n", __func__);
+        }
+    }
+    free(pme->current_job_frames);
+    pme->current_job_frames = NULL;
+
+    /* signal snapshot is done */
+    mm_camera_app_done();
+}
+
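+/* Start a JPEG encode job for the received super-buffer; the frames are kept in current_job_frames until the encode callback requeues them. */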
+int encodeData(mm_camera_test_obj_t *test_obj, mm_camera_super_buf_t* recvd_frame,
+               mm_camera_stream_t *m_stream)
+{
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    int rc = -MM_CAMERA_E_GENERAL;
+    mm_jpeg_job_t job;
+
+    /* remember current frames being encoded */
+    test_obj->current_job_frames =
+        (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
+    if (!test_obj->current_job_frames) {
+        CDBG_ERROR("%s: No memory for current_job_frames", __func__);
+        return rc;
+    }
+    *(test_obj->current_job_frames) = *recvd_frame;
+
+    memset(&job, 0, sizeof(job));
+    job.job_type = JPEG_JOB_TYPE_ENCODE;
+    job.encode_job.session_id = test_obj->current_jpeg_sess_id;
+
+    job.encode_job.rotation = 0;
+    if (cam_cap->position == CAM_POSITION_BACK) {
+        /* back camera, rotate 90 */
+        job.encode_job.rotation = 90;
+    }
+
+    /* fill in main src img encode param */
+    job.encode_job.main_dim.src_dim = m_stream->s_config.stream_info->dim;
+    job.encode_job.main_dim.dst_dim = m_stream->s_config.stream_info->dim;
+    job.encode_job.src_index = 0;
+
+    job.encode_job.thumb_dim.src_dim = m_stream->s_config.stream_info->dim;
+    job.encode_job.thumb_dim.dst_dim.width = DEFAULT_PREVIEW_WIDTH;
+    job.encode_job.thumb_dim.dst_dim.height = DEFAULT_PREVIEW_HEIGHT;
+
+    /* fill in sink img param */
+    job.encode_job.dst_index = 0;
+
+    rc = test_obj->jpeg_ops.start_job(&job, &test_obj->current_job_id);
+    if ( 0 != rc ) {
+        free(test_obj->current_job_frames);
+        test_obj->current_job_frames = NULL;
+    }
+
+    return rc;
+}
+
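+/* Create a JPEG encode session with one source buffer (the main snapshot frame) and one destination buffer, thumbnail enabled, quality 85. */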
+int createEncodingSession(mm_camera_test_obj_t *test_obj,
+                          mm_camera_stream_t *m_stream,
+                          mm_camera_buf_def_t *m_frame)
+{
+    mm_jpeg_encode_params_t encode_param;
+
+    memset(&encode_param, 0, sizeof(mm_jpeg_encode_params_t));
+    encode_param.jpeg_cb = jpeg_encode_cb;
+    encode_param.userdata = (void*)test_obj;
+    encode_param.encode_thumbnail = 1;
+    encode_param.quality = 85;
+    encode_param.color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+
+    /* fill in main src img encode param */
+    encode_param.num_src_bufs = 1;
+    encode_param.src_main_buf[0].index = 0;
+    encode_param.src_main_buf[0].buf_size = m_frame->frame_len;
+    encode_param.src_main_buf[0].buf_vaddr = (uint8_t *)m_frame->buffer;
+    encode_param.src_main_buf[0].fd = m_frame->fd;
+    encode_param.src_main_buf[0].format = MM_JPEG_FMT_YUV;
+    encode_param.src_main_buf[0].offset = m_stream->offset;
+
+    /* fill in sink img param */
+    encode_param.num_dst_bufs = 1;
+    encode_param.dest_buf[0].index = 0;
+    encode_param.dest_buf[0].buf_size = test_obj->jpeg_buf.buf.frame_len;
+    encode_param.dest_buf[0].buf_vaddr = (uint8_t *)test_obj->jpeg_buf.buf.buffer;
+    encode_param.dest_buf[0].fd = test_obj->jpeg_buf.buf.fd;
+    encode_param.dest_buf[0].format = MM_JPEG_FMT_YUV;
+
+    return test_obj->jpeg_ops.create_session(test_obj->jpeg_hdl,
+                                             &encode_param,
+                                             &test_obj->current_jpeg_sess_id);
+}
+
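+/* Snapshot channel callback: dump the main (and optional postview) frame, create an encode session and start the JPEG job; on error the buffers are requeued immediately. */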
+static void mm_app_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+                                      void *user_data)
+{
+
+    int rc;
+    int i = 0;
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *p_stream = NULL;
+    mm_camera_stream_t *m_stream = NULL;
+    mm_camera_buf_def_t *p_frame = NULL;
+    mm_camera_buf_def_t *m_frame = NULL;
+
+    CDBG("%s: BEGIN\n", __func__);
+
+    /* find channel */
+    for (i = 0; i < MM_CHANNEL_TYPE_MAX; i++) {
+        if (pme->channels[i].ch_id == bufs->ch_id) {
+            channel = &pme->channels[i];
+            break;
+        }
+    }
+    if (NULL == channel) {
+        CDBG_ERROR("%s: Wrong channel id (%d)", __func__, bufs->ch_id);
+        rc = -1;
+        goto error;
+    }
+
+    /* find snapshot stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_SNAPSHOT) {
+            m_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL == m_stream) {
+        CDBG_ERROR("%s: cannot find snapshot stream", __func__);
+        rc = -1;
+        goto error;
+    }
+
+    /* find snapshot frame */
+    for (i = 0; i < bufs->num_bufs; i++) {
+        if (bufs->bufs[i]->stream_id == m_stream->s_id) {
+            m_frame = bufs->bufs[i];
+            break;
+        }
+    }
+    if (NULL == m_frame) {
+        CDBG_ERROR("%s: main frame is NULL", __func__);
+        rc = -1;
+        goto error;
+    }
+
+    mm_app_dump_frame(m_frame, "main", "yuv", m_frame->frame_idx);
+
+    /* find postview stream */
+    for (i = 0; i < channel->num_streams; i++) {
+        if (channel->streams[i].s_config.stream_info->stream_type == CAM_STREAM_TYPE_POSTVIEW) {
+            p_stream = &channel->streams[i];
+            break;
+        }
+    }
+    if (NULL != p_stream) {
+        /* find preview frame */
+        for (i = 0; i < bufs->num_bufs; i++) {
+            if (bufs->bufs[i]->stream_id == p_stream->s_id) {
+                p_frame = bufs->bufs[i];
+                break;
+            }
+        }
+        if (NULL != p_frame) {
+            mm_app_dump_frame(p_frame, "postview", "yuv", p_frame->frame_idx);
+        }
+    }
+
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)m_frame->mem_info,
+                     ION_IOC_CLEAN_INV_CACHES);
+
+    /* create a new jpeg encoding session */
+    rc = createEncodingSession(pme, m_stream, m_frame);
+    if (0 != rc) {
+        CDBG_ERROR("%s: error creating jpeg session", __func__);
+        goto error;
+    }
+
+    /* start jpeg encoding job */
+    rc = encodeData(pme, bufs, m_stream);
+
+error:
+    /* return (buf done) the received frames in the error case */
+    if ( 0 != rc ) {
+        for (i=0; i<bufs->num_bufs; i++) {
+            if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                                    bufs->ch_id,
+                                                    bufs->bufs[i])) {
+                CDBG_ERROR("%s: Failed in Qbuf\n", __func__);
+            }
+            mm_app_cache_ops((mm_camera_app_meminfo_t *)bufs->bufs[i]->mem_info,
+                             ION_IOC_INV_CACHES);
+        }
+    }
+
+    CDBG("%s: END\n", __func__);
+}
+
+mm_camera_channel_t * mm_app_add_snapshot_channel(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_SNAPSHOT,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return NULL;
+    }
+
+    stream = mm_app_add_snapshot_stream(test_obj,
+                                        channel,
+                                        mm_app_snapshot_notify_cb,
+                                        (void *)test_obj,
+                                        1,
+                                        1);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add snapshot stream failed\n", __func__);
+        mm_app_del_channel(test_obj, channel);
+        return NULL;
+    }
+
+    return channel;
+}
+
+mm_camera_stream_t * mm_app_add_postview_stream(mm_camera_test_obj_t *test_obj,
+                                                mm_camera_channel_t *channel,
+                                                mm_camera_buf_notify_t stream_cb,
+                                                void *userdata,
+                                                uint8_t num_bufs,
+                                                uint8_t num_burst)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_POSTVIEW;
+    if (num_burst == 0) {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    } else {
+        stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_BURST;
+        stream->s_config.stream_info->num_of_burst = num_burst;
+    }
+    stream->s_config.stream_info->fmt = DEFAULT_PREVIEW_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_PREVIEW_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_PREVIEW_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:config preview stream err=%d\n", __func__, rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+int mm_app_start_capture(mm_camera_test_obj_t *test_obj,
+                         uint8_t num_snapshots)
+{
+    int32_t rc = MM_CAMERA_OK;
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *s_main = NULL;
+    mm_camera_stream_t *s_postview = NULL;
+    mm_camera_channel_attr_t attr;
+
+    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
+    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
+    attr.max_unmatched_frames = 3;
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_CAPTURE,
+                                 &attr,
+                                 mm_app_snapshot_notify_cb,
+                                 test_obj);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_postview = mm_app_add_postview_stream(test_obj,
+                                            channel,
+                                            NULL,
+                                            NULL,
+                                            num_snapshots,
+                                            num_snapshots);
+    if (NULL == s_postview) {
+        CDBG_ERROR("%s: add preview stream failed\n", __func__);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    s_main = mm_app_add_snapshot_stream(test_obj,
+                                        channel,
+                                        NULL,
+                                        NULL,
+                                        num_snapshots,
+                                        num_snapshots);
+    if (NULL == s_main) {
+        CDBG_ERROR("%s: add main snapshot stream failed\n", __func__);
+        mm_app_del_stream(test_obj, channel, s_postview);
+        mm_app_del_channel(test_obj, channel);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, channel);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start zsl failed rc=%d\n", __func__, rc);
+        mm_app_del_stream(test_obj, channel, s_postview);
+        mm_app_del_stream(test_obj, channel, s_main);
+        mm_app_del_channel(test_obj, channel);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_capture(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *ch = NULL;
+
+    ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_CAPTURE);
+
+    rc = mm_app_stop_channel(test_obj, ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:stop recording failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c
new file mode 100644
index 0000000..a59e1db
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_unit_test.c
@@ -0,0 +1,641 @@
+/* Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 4
+#define MM_QCAMERA_APP_UTEST_OUTER_LOOP 10
+#define MM_QCAMERA_APP_UTEST_INNER_LOOP 4
+#define MM_QCAM_APP_TEST_NUM 128
+
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+
+int mm_app_tc_open_close(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying open/close cameras...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+        sleep(1);
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_start_stop_preview(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop preview...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_start_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_stop_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc |= mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_start_stop_zsl(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop preview...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_preview_zsl(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_start_preview_zsl() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_preview_zsl(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_stop_preview_zsl() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_start_stop_video_preview(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop video preview...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_record_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_record_preview(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_start_stop_video_record(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop recording...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        rc = mm_app_start_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_record(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_start_record() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+
+            sleep(1);
+
+            rc = mm_app_stop_record(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_stop_record() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:start/stop record cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_stop_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_start_stop_live_snapshot(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying start/stop live snapshot...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        rc = mm_app_start_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_start_record_preview() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        rc = mm_app_start_record(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_start_record() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_live_snapshot(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_start_live_snapshot() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+
+            /* wait until the jpeg encoding is done */
+            mm_camera_app_wait();
+
+            rc = mm_app_stop_live_snapshot(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s:mm_app_stop_live_snapshot() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:start/stop live snapshot cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_stop_record(&test_obj);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_stop_record(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_stop_record() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_stop_record_preview(&test_obj);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        sleep(1);
+
+        rc = mm_app_stop_record_preview(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_stop_record_preview() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            mm_app_close(&test_obj);
+            break;
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_capture_regular(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+    uint8_t num_snapshot = 1;
+    uint8_t num_rcvd_snapshot = 0;
+
+    printf("\n Verifying capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_capture(&test_obj, num_snapshot);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_start_capture() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            num_rcvd_snapshot = 0;
+            while (num_rcvd_snapshot < num_snapshot) {
+                mm_camera_app_wait();
+                num_rcvd_snapshot++;
+            }
+            rc = mm_app_stop_capture(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_stop_capture() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_capture_burst(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+    uint8_t num_snapshot = 3;
+    uint8_t num_rcvd_snapshot = 0;
+
+    printf("\n Verifying capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_capture(&test_obj, num_snapshot);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_start_capture() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            num_rcvd_snapshot = 0;
+            while (num_rcvd_snapshot < num_snapshot) {
+                mm_camera_app_wait();
+                num_rcvd_snapshot++;
+            }
+            rc = mm_app_stop_capture(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_stop_capture() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc = mm_app_close(&test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_rdi_burst(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying rdi burst (3) capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_rdi(&test_obj, 3);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_start_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_rdi(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_stop_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc2 = mm_app_close(&test_obj);
+        if (rc2 != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc2);
+            if (rc == MM_CAMERA_OK) {
+                rc = rc2;
+            }
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_tc_rdi_cont(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK, rc2 = MM_CAMERA_OK;
+    int i, j;
+    mm_camera_test_obj_t test_obj;
+
+    printf("\n Verifying rdi continuous capture...\n");
+    for (i = 0; i < cam_app->num_cameras; i++) {
+        memset(&test_obj, 0, sizeof(mm_camera_test_obj_t));
+        rc = mm_app_open(cam_app, i, &test_obj);
+        if (rc != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_open() cam_idx=%d, err=%d\n",
+                       __func__, i, rc);
+            break;
+        }
+
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_INNER_LOOP; j++) {
+            rc = mm_app_start_rdi(&test_obj, 0);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_start_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+            sleep(1);
+            rc = mm_app_stop_rdi(&test_obj);
+            if (rc != MM_CAMERA_OK) {
+                CDBG_ERROR("%s: mm_app_stop_preview() cam_idx=%d, err=%d\n",
+                           __func__, i, rc);
+                break;
+            }
+        }
+
+        rc2 = mm_app_close(&test_obj);
+        if (rc2 != MM_CAMERA_OK) {
+            CDBG_ERROR("%s:mm_app_close() cam_idx=%d, err=%d\n",
+                       __func__, i, rc2);
+            if (rc == MM_CAMERA_OK) {
+                rc = rc2;
+            }
+            break;
+        }
+    }
+    if (rc == MM_CAMERA_OK) {
+        printf("\nPassed\n");
+    } else {
+        printf("\nFailed\n");
+    }
+    CDBG("%s:END, rc = %d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_gen_test_cases()
+{
+    int tc = 0;
+    memset(mm_app_tc, 0, sizeof(mm_app_tc));
+    if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_open_close;
+    if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_preview;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_zsl;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_video_preview;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_video_record;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_start_stop_live_snapshot;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_regular;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_capture_burst;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_rdi_cont;
+    //if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_rdi_burst;
+
+    return tc;
+}
+
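The table-driven harness makes adding cases straightforward: each mm_app_tc entry holds a test function pointer (.f) and a result slot (.r), and mm_app_unit_test_entry() below simply walks the table. A hedged sketch of registering one more case, where mm_app_tc_my_new_case is a hypothetical function following the same int (*)(mm_camera_app_t *) contract:

    /* Hypothetical addition, shown for illustration only. */
    int mm_app_tc_my_new_case(mm_camera_app_t *cam_app);

    /* inside mm_app_gen_test_cases(), before the final "return tc;" */
    if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_my_new_case;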
+int mm_app_unit_test_entry(mm_camera_app_t *cam_app)
+{
+    int rc = MM_CAMERA_OK;
+    int i, j, tc = 0;
+
+    tc = mm_app_gen_test_cases();
+    CDBG("Running %d test cases\n",tc);
+    for (i = 0; i < tc; i++) {
+        for (j = 0; j < MM_QCAMERA_APP_UTEST_OUTER_LOOP; j++) {
+            mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+            if (mm_app_tc[i].r != MM_CAMERA_OK) {
+                printf("%s: test case %d (iteration %d) error = %d, abort unit testing engine!!!!\n",
+                       __func__, i, j, mm_app_tc[i].r);
+                rc = mm_app_tc[i].r;
+                goto end;
+            }
+        }
+    }
+end:
+    printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+    return rc;
+}
+
+
+
+
diff --git a/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c
new file mode 100644
index 0000000..136c8a8
--- /dev/null
+++ b/camera/QCamera2/stack/mm-camera-test/src/mm_qcamera_video.c
@@ -0,0 +1,256 @@
+/*
+Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include "mm_qcamera_dbg.h"
+#include "mm_qcamera_app.h"
+
+static void mm_app_video_notify_cb(mm_camera_super_buf_t *bufs,
+                                   void *user_data)
+{
+    char file_name[64];
+    mm_camera_buf_def_t *frame = bufs->bufs[0];
+    mm_camera_test_obj_t *pme = (mm_camera_test_obj_t *)user_data;
+
+    CDBG("%s: BEGIN - length=%d, frame idx = %d\n",
+         __func__, frame->frame_len, frame->frame_idx);
+    snprintf(file_name, sizeof(file_name), "V_C%d", pme->cam->camera_handle);
+    mm_app_dump_frame(frame, file_name, "yuv", frame->frame_idx);
+
+    if (MM_CAMERA_OK != pme->cam->ops->qbuf(bufs->camera_handle,
+                                            bufs->ch_id,
+                                            frame)) {
+        CDBG_ERROR("%s: Failed in Preview Qbuf\n", __func__);
+    }
+    mm_app_cache_ops((mm_camera_app_meminfo_t *)frame->mem_info,
+                     ION_IOC_INV_CACHES);
+
+    CDBG("%s: END\n", __func__);
+}
+
+mm_camera_stream_t * mm_app_add_video_stream(mm_camera_test_obj_t *test_obj,
+                                             mm_camera_channel_t *channel,
+                                             mm_camera_buf_notify_t stream_cb,
+                                             void *userdata,
+                                             uint8_t num_bufs)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_stream_t *stream = NULL;
+    cam_capability_t *cam_cap = (cam_capability_t *)(test_obj->cap_buf.buf.buffer);
+
+    stream = mm_app_add_stream(test_obj, channel);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add stream failed\n", __func__);
+        return NULL;
+    }
+
+    stream->s_config.mem_vtbl.get_bufs = mm_app_stream_initbuf;
+    stream->s_config.mem_vtbl.put_bufs = mm_app_stream_deinitbuf;
+    stream->s_config.mem_vtbl.clean_invalidate_buf =
+      mm_app_stream_clean_invalidate_buf;
+    stream->s_config.mem_vtbl.invalidate_buf = mm_app_stream_invalidate_buf;
+    stream->s_config.mem_vtbl.user_data = (void *)stream;
+    stream->s_config.stream_cb = stream_cb;
+    stream->s_config.userdata = userdata;
+    stream->num_of_bufs = num_bufs;
+
+    stream->s_config.stream_info = (cam_stream_info_t *)stream->s_info_buf.buf.buffer;
+    memset(stream->s_config.stream_info, 0, sizeof(cam_stream_info_t));
+    stream->s_config.stream_info->stream_type = CAM_STREAM_TYPE_VIDEO;
+    stream->s_config.stream_info->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
+    stream->s_config.stream_info->fmt = DEFAULT_VIDEO_FORMAT;
+    stream->s_config.stream_info->dim.width = DEFAULT_VIDEO_WIDTH;
+    stream->s_config.stream_info->dim.height = DEFAULT_VIDEO_HEIGHT;
+    stream->s_config.padding_info = cam_cap->padding_info;
+
+    rc = mm_app_config_stream(test_obj, channel, stream, &stream->s_config);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:config preview stream err=%d\n", __func__, rc);
+        return NULL;
+    }
+
+    return stream;
+}
+
+mm_camera_channel_t * mm_app_add_video_channel(mm_camera_test_obj_t *test_obj)
+{
+    mm_camera_channel_t *channel = NULL;
+    mm_camera_stream_t *stream = NULL;
+
+    channel = mm_app_add_channel(test_obj,
+                                 MM_CHANNEL_TYPE_VIDEO,
+                                 NULL,
+                                 NULL,
+                                 NULL);
+    if (NULL == channel) {
+        CDBG_ERROR("%s: add channel failed", __func__);
+        return NULL;
+    }
+
+    stream = mm_app_add_video_stream(test_obj,
+                                     channel,
+                                     mm_app_video_notify_cb,
+                                     (void *)test_obj,
+                                     1);
+    if (NULL == stream) {
+        CDBG_ERROR("%s: add video stream failed\n", __func__);
+        mm_app_del_channel(test_obj, channel);
+        return NULL;
+    }
+
+    return channel;
+}
+
+int mm_app_start_record_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *p_ch = NULL;
+    mm_camera_channel_t *v_ch = NULL;
+    mm_camera_channel_t *s_ch = NULL;
+
+    p_ch = mm_app_add_preview_channel(test_obj);
+    if (NULL == p_ch) {
+        CDBG_ERROR("%s: add preview channel failed", __func__);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    v_ch = mm_app_add_video_channel(test_obj);
+    if (NULL == v_ch) {
+        CDBG_ERROR("%s: add video channel failed", __func__);
+        mm_app_del_channel(test_obj, p_ch);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    s_ch = mm_app_add_snapshot_channel(test_obj);
+    if (NULL == s_ch) {
+        CDBG_ERROR("%s: add snapshot channel failed", __func__);
+        mm_app_del_channel(test_obj, p_ch);
+        mm_app_del_channel(test_obj, v_ch);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    rc = mm_app_start_channel(test_obj, p_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start preview failed rc=%d\n", __func__, rc);
+        mm_app_del_channel(test_obj, p_ch);
+        mm_app_del_channel(test_obj, v_ch);
+        mm_app_del_channel(test_obj, s_ch);
+        return rc;
+    }
+
+    return rc;
+}
+
+int mm_app_stop_record_preview(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *p_ch = NULL;
+    mm_camera_channel_t *v_ch = NULL;
+    mm_camera_channel_t *s_ch = NULL;
+
+    p_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_PREVIEW);
+    v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+    s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+    rc = mm_app_stop_and_del_channel(test_obj, p_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop Preview failed rc=%d\n", __func__, rc);
+    }
+
+    rc = mm_app_stop_and_del_channel(test_obj, v_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop Preview failed rc=%d\n", __func__, rc);
+    }
+
+    rc = mm_app_stop_and_del_channel(test_obj, s_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:Stop Preview failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_start_record(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *v_ch = NULL;
+
+    v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+
+    rc = mm_app_start_channel(test_obj, v_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start recording failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_stop_record(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *v_ch = NULL;
+
+    v_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_VIDEO);
+
+    rc = mm_app_stop_channel(test_obj, v_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:stop recording failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_start_live_snapshot(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *s_ch = NULL;
+
+    s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+    rc = mm_app_start_channel(test_obj, s_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:start recording failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
+
+int mm_app_stop_live_snapshot(mm_camera_test_obj_t *test_obj)
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_channel_t *s_ch = NULL;
+
+    s_ch = mm_app_get_channel_by_type(test_obj, MM_CHANNEL_TYPE_SNAPSHOT);
+
+    rc = mm_app_stop_channel(test_obj, s_ch);
+    if (MM_CAMERA_OK != rc) {
+        CDBG_ERROR("%s:stop recording failed rc=%d\n", __func__, rc);
+    }
+
+    return rc;
+}
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/Android.mk b/camera/QCamera2/stack/mm-jpeg-interface/Android.mk
new file mode 100644
index 0000000..ede4148
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/Android.mk
@@ -0,0 +1,32 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS+= -D_ANDROID_
+
+LOCAL_C_INCLUDES += \
+    frameworks/native/include/media/openmax \
+    $(LOCAL_PATH)/inc \
+    $(LOCAL_PATH)/../common \
+    $(LOCAL_PATH)/../../../ \
+    $(LOCAL_PATH)/../../../mm-image-codec/qexif \
+    $(LOCAL_PATH)/../../../mm-image-codec/qomx_core
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+    LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_SRC_FILES := \
+    src/mm_jpeg_queue.c \
+    src/mm_jpeg_exif.c \
+    src/mm_jpeg.c \
+    src/mm_jpeg_interface.c
+
+LOCAL_MODULE           := libmmjpeg_interface
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libqomx_core
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
new file mode 100644
index 0000000..55ac502
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg.h
@@ -0,0 +1,206 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef MM_JPEG_H_
+#define MM_JPEG_H_
+
+#include <cam_semaphore.h>
+#include "mm_jpeg_interface.h"
+#include "cam_list.h"
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "QOMX_JpegExtensions.h"
+
+#define MM_JPEG_MAX_THREADS 30
+#define MM_JPEG_CIRQ_SIZE 30
+#define MM_JPEG_MAX_SESSION 10
+#define MAX_EXIF_TABLE_ENTRIES 50
+#define ASPECT_TOLERANCE 0.001
+
+typedef struct {
+  struct cam_list list;
+  void* data;
+} mm_jpeg_q_node_t;
+
+typedef struct {
+  mm_jpeg_q_node_t head; /* dummy head */
+  uint32_t size;
+  pthread_mutex_t lock;
+} mm_jpeg_queue_t;
+
+typedef enum {
+  MM_JPEG_CMD_TYPE_JOB,          /* job cmd */
+  MM_JPEG_CMD_TYPE_EXIT,         /* EXIT cmd for exiting jobMgr thread */
+  MM_JPEG_CMD_TYPE_MAX
+} mm_jpeg_cmd_type_t;
+
+typedef struct {
+  union {
+    int i_data[MM_JPEG_CIRQ_SIZE];
+    void *p_data[MM_JPEG_CIRQ_SIZE];
+  };
+  int front;
+  int rear;
+  int count;
+  pthread_mutex_t lock;
+} mm_jpeg_cirq_t;
+
+typedef struct {
+  uint32_t client_hdl;           /* client handle */
+  uint32_t jobId;                /* job ID */
+  uint32_t sessionId;            /* session ID */
+  mm_jpeg_encode_params_t params; /* encode params */
+  mm_jpeg_encode_job_t encode_job;             /* job description */
+  pthread_t encode_pid;          /* encode thread handle */
+
+  void *jpeg_obj;                /* ptr to mm_jpeg_obj */
+  jpeg_job_status_t job_status;  /* job status */
+
+  int state_change_pending;      /* flag to indicate if state change is pending */
+  OMX_ERRORTYPE error_flag;      /* variable to indicate error during encoding */
+  OMX_BOOL abort_flag;      /* variable to indicate abort during encoding */
+
+  /* OMX related */
+  OMX_HANDLETYPE omx_handle;                      /* handle to omx engine */
+  OMX_CALLBACKTYPE omx_callbacks;                 /* callbacks to omx engine */
+
+  /* buffer headers */
+  OMX_BUFFERHEADERTYPE *p_in_omx_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_in_omx_thumb_buf[MM_JPEG_MAX_BUF];
+  OMX_BUFFERHEADERTYPE *p_out_omx_buf[MM_JPEG_MAX_BUF];
+
+  OMX_PARAM_PORTDEFINITIONTYPE inputPort;
+  OMX_PARAM_PORTDEFINITIONTYPE outputPort;
+  OMX_PARAM_PORTDEFINITIONTYPE inputTmbPort;
+
+  /* event locks */
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+
+  QEXIF_INFO_DATA exif_info_local[MAX_EXIF_TABLE_ENTRIES];  //all exif tags for JPEG encoder
+  int exif_count_local;
+
+  mm_jpeg_cirq_t cb_q;
+  int32_t ebd_count;
+  int32_t fbd_count;
+
+  /* this flag represents whether the job is active */
+  OMX_BOOL active;
+
+  /* this flag indicates if the configuration is complete */
+  OMX_BOOL config;
+
+  /* job history count to generate unique id */
+  int job_hist;
+
+  OMX_BOOL encoding;
+} mm_jpeg_job_session_t;
+
+typedef struct {
+  mm_jpeg_encode_job_t encode_job;
+  uint32_t job_id;
+  uint32_t client_handle;
+} mm_jpeg_encode_job_info_t;
+
+typedef struct {
+  mm_jpeg_cmd_type_t type;
+  union {
+    mm_jpeg_encode_job_info_t enc_info;
+  };
+} mm_jpeg_job_q_node_t;
+
+typedef struct {
+  uint8_t is_used;                /* flag: whether this entry is a valid client */
+  uint32_t client_handle;         /* client handle */
+  mm_jpeg_job_session_t session[MM_JPEG_MAX_SESSION];
+  pthread_mutex_t lock;           /* job lock */
+} mm_jpeg_client_t;
+
+typedef struct {
+  pthread_t pid;                  /* job cmd thread ID */
+  cam_semaphore_t job_sem;        /* semaphore for job cmd thread */
+  mm_jpeg_queue_t job_queue;      /* queue for job to do */
+} mm_jpeg_job_cmd_thread_t;
+
+#define MAX_JPEG_CLIENT_NUM 8
+typedef struct mm_jpeg_obj_t {
+  /* ClientMgr */
+  int num_clients;                                /* num of clients */
+  mm_jpeg_client_t clnt_mgr[MAX_JPEG_CLIENT_NUM]; /* client manager */
+
+  /* JobMgr */
+  pthread_mutex_t job_lock;                       /* job lock */
+  mm_jpeg_job_cmd_thread_t job_mgr;               /* job mgr thread including todo_q*/
+  mm_jpeg_queue_t ongoing_job_q;                  /* queue for ongoing jobs */
+} mm_jpeg_obj;
+
+extern int32_t mm_jpeg_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj);
+extern uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t* job,
+  uint32_t* jobId);
+extern int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId);
+extern int32_t mm_jpeg_close(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl);
+extern int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_encode_params_t *p_params,
+  uint32_t* p_session_id);
+extern int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj,
+  uint32_t session_id);
+extern int32_t mm_jpeg_destroy_job(mm_jpeg_job_session_t *p_session);
+
+/* utility functions declared in mm-camera-interface2.c
+ * that need to be used by mm-camera and below */
+uint32_t mm_jpeg_util_generate_handler(uint8_t index);
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler);
+
+/* basic queue functions */
+extern int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, void* node);
+extern void* mm_jpeg_queue_deq(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue);
+extern uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue);
+extern void* mm_jpeg_queue_peek(mm_jpeg_queue_t* queue);
+extern int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+  exif_tag_type_t type, uint32_t count, void *data);
+extern int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data);
+extern int process_meta_data_v1(cam_metadata_info_t *p_meta,
+  QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_cam_exif_params);
+extern int process_meta_data_v3(metadata_buffer_t *p_meta,
+  QOMX_EXIF_INFO *exif_info, mm_jpeg_exif_params_t *p_cam3a_params);
+
+#endif /* MM_JPEG_H_ */
+
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
new file mode 100644
index 0000000..b0ef9c7
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
@@ -0,0 +1,68 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_DBG_H__
+#define __MM_JPEG_DBG_H__
+
+#ifndef LOG_DEBUG
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-jpeg-intf"
+    #include <utils/Log.h>
+  #else
+    #include <stdio.h>
+    #define ALOGE CDBG
+  #endif
+  #undef CDBG
+  #define CDBG(fmt, args...) do{}while(0)
+#else
+  #ifdef _ANDROID_
+    #undef LOG_NIDEBUG
+    #undef LOG_TAG
+    #define LOG_NIDEBUG 0
+    #define LOG_TAG "mm-jpeg-intf"
+    #include <utils/Log.h>
+    #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+  #else
+    #include <stdio.h>
+    #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+    #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+  #endif
+#endif
+
+#ifdef _ANDROID_
+  #define CDBG_HIGH(fmt, args...)  ALOGI(fmt, ##args)
+  #define CDBG_ERROR(fmt, args...)  ALOGE(fmt, ##args)
+#else
+  #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+  #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_JPEG_DBG_H__ */
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
new file mode 100644
index 0000000..5b82bdb
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg.c
@@ -0,0 +1,2565 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/prctl.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <math.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+/* define the max number of concurrent jpeg jobs supported by the OMX engine.
+ * Currently, only one at a time */
+#define NUM_MAX_JPEG_CNCURRENT_JOBS 1
+
+#define JOB_ID_MAGICVAL 0x1
+#define JOB_HIST_MAX 10000
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  int rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    CDBG_HIGH("%s:%d] written size %d", __func__, __LINE__, len); \
+    fclose(fp); \
+  } else { \
+    CDBG_ERROR("%s:%d] open %s failed", __func__, __LINE__, filename); \
+  } \
+})
+
+/** DUMP_TO_FILE2:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file if the memory is non-contiguous
+ **/
+#define DUMP_TO_FILE2(filename, p_addr1, len1, p_addr2, len2) ({ \
+  int rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr1, 1, len1, fp); \
+    rc = fwrite(p_addr2, 1, len2, fp); \
+    CDBG_HIGH("%s:%d] written %d %d", __func__, __LINE__, len1, len2); \
+    fclose(fp); \
+  } else { \
+    CDBG_ERROR("%s:%d] open %s failed", __func__, __LINE__, filename); \
+  } \
+})
+
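A hedged usage sketch of the two dump helpers above; the buffer pointers and lengths are placeholders, and the /data/local/tmp path is only an example of a location typically writable on test builds:

    /* Illustrative only: dump one contiguous buffer, then a split (e.g. Y + CbCr) buffer. */
    uint8_t *jpeg_addr = NULL;  size_t jpeg_len = 0;        /* placeholders */
    uint8_t *y_addr = NULL, *cbcr_addr = NULL;
    size_t y_len = 0, cbcr_len = 0;
    DUMP_TO_FILE("/data/local/tmp/out.jpg", jpeg_addr, jpeg_len);
    DUMP_TO_FILE2("/data/local/tmp/out.yuv", y_addr, y_len, cbcr_addr, cbcr_len);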
+/** MM_JPEG_CHK_ABORT:
+ *  @p: client pointer
+ *  @ret: return value
+ *  @label: label to jump to
+ *
+ *  check for a pending abort request and jump to the given label
+ **/
+#define MM_JPEG_CHK_ABORT(p, ret, label) ({ \
+  if (OMX_TRUE == p->abort_flag) { \
+    CDBG_ERROR("%s:%d] jpeg abort", __func__, __LINE__); \
+    ret = OMX_ErrorNone; \
+    goto label; \
+  } \
+})
+
+#define GET_CLIENT_IDX(x) ((x) & 0xff)
+#define GET_SESSION_IDX(x) (((x) >> 8) & 0xff)
+#define GET_JOB_IDX(x) (((x) >> 16) & 0xff)
+
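The extraction macros above imply a byte-packed id layout: client index in bits 0-7, session index in bits 8-15, job index in bits 16-23. The packing itself happens elsewhere, so the layout is inferred here; a small worked example under that assumption:

    /* Inferred layout, for illustration only. */
    uint32_t job_id = ((uint32_t)3 << 16) | ((uint32_t)2 << 8) | (uint32_t)1;
    /* GET_CLIENT_IDX(job_id)  == 1 */
    /* GET_SESSION_IDX(job_id) == 2 */
    /* GET_JOB_IDX(job_id)     == 3 */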
+/** GCD:
+ *  @x: pointer to the first number (modified)
+ *  @y: pointer to the second number (modified)
+ *  @gcd: pointer that receives the gcd of x and y
+ *
+ *  Find the GCD of 2 numbers
+ **/
+#define GCD( x, y, gcd) ({ \
+  int tmp; \
+  while (*y != 0) { \
+    tmp = *x % *y; \
+    *x = *y; \
+    *y = tmp; \
+  } \
+  *gcd = *x; \
+})
+
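Note that GCD takes pointers and overwrites both inputs while it runs, so callers should pass copies. A short worked example, e.g. reducing a frame size to its aspect ratio:

    /* Illustrative only: GCD clobbers *x and *y. */
    int w = 1920, h = 1080, g = 0;
    GCD(&w, &h, &g);   /* g == 120, so 1920x1080 reduces to 16:9 */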
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+    OMX_PTR pAppData,
+    OMX_EVENTTYPE eEvent,
+    OMX_U32 nData1,
+    OMX_U32 nData2,
+    OMX_PTR pEventData);
+
+/** cirq_reset:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Resets the circular queue
+ *
+ **/
+static inline void cirq_reset(mm_jpeg_cirq_t *q)
+{
+  q->front = 0;
+  q->rear = 0;
+  q->count = 0;
+  pthread_mutex_init(&q->lock, NULL);
+}
+
+/** cirq_empty:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       true if the queue is empty
+ *
+ *  Description:
+ *       check if the circular queue is empty
+ *
+ **/
+#define cirq_empty(q) (q->count == 0)
+
+/** cirq_full:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *
+ *  Return:
+ *       true if the queue is full
+ *
+ *  Description:
+ *       check if the circular queue is full
+ *
+ **/
+#define cirq_full(q) (q->count == MM_JPEG_CIRQ_SIZE)
+
+/** cirq_enqueue:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *    @data: data to be inserted
+ *
+ *  Return:
+ *       0 on success, -1 if the queue is full
+ *
+ *  Description:
+ *       enqueue an element into circular queue
+ *
+ **/
+#define cirq_enqueue(q, type, data) ({ \
+  int rc = 0; \
+  pthread_mutex_lock(&q->lock); \
+  if (cirq_full(q)) { \
+    rc = -1; \
+  } else { \
+    q->type[q->rear] = data; \
+    q->rear = (q->rear + 1) % MM_JPEG_CIRQ_SIZE; \
+    q->count++; \
+  } \
+  pthread_mutex_unlock(&q->lock); \
+  rc; \
+})
+
+/** cirq_dequeue:
+ *
+ *  Arguments:
+ *    @q: circular queue
+ *    @data: data to be popped
+ *
+ *  Return:
+ *       0 on success, -1 if the queue is empty
+ *
+ *  Description:
+ *       dequeue an element from the circular queue
+ *
+ **/
+#define cirq_dequeue(q, type, data) ({ \
+  int rc = 0; \
+  pthread_mutex_lock(&q->lock); \
+  if (cirq_empty(q)) { \
+    rc = -1; \
+  } else { \
+    data = q->type[q->front]; \
+    q->front = (q->front + 1) % MM_JPEG_CIRQ_SIZE; \
+    q->count--; \
+  } \
+  pthread_mutex_unlock(&q->lock); \
+  rc; \
+})
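+
+/* Illustrative use (not part of the original source; the "evt" member name is
+ * hypothetical -- the real array members live in mm_jpeg_cirq_t):
+ *   mm_jpeg_cirq_t cb_q;
+ *   cirq_reset(&cb_q);
+ *   rc = cirq_enqueue((&cb_q), evt, new_evt);   // -1 when full
+ *   rc = cirq_dequeue((&cb_q), evt, new_evt);   // -1 when empty
+ * The second argument names the array member of the queue that stores the
+ * payload, so the same macros can serve queues of different element types. */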
+
+/**
+ *
+ * special queue functions for job queue
+ **/
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+  mm_jpeg_queue_t* queue, uint32_t client_hdl);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+  mm_jpeg_queue_t* queue, uint32_t job_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+  mm_jpeg_queue_t* queue, uint32_t session_id);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+  mm_jpeg_queue_t* queue, uint32_t job_id);
+
+/** mm_jpeg_transition_func_t:
+ *
+ * Intermediate function for transition change
+ **/
+typedef OMX_ERRORTYPE (*mm_jpeg_transition_func_t)(void *);
+
+
+/** mm_jpeg_queue_func_t:
+ *
+ * Intermediate function for queue operation
+ **/
+typedef void (*mm_jpeg_queue_func_t)(void *);
+
+/** mm_jpeg_session_send_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Send the buffers to OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_send_buffers(void *data)
+{
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_BUFFER_INFO lbuffer_info;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  memset(&lbuffer_info, 0x0, sizeof(QOMX_BUFFER_INFO));
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    lbuffer_info.fd = p_params->src_main_buf[i].fd;
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_in_omx_buf[i]), 0,
+      &lbuffer_info, p_params->src_main_buf[i].buf_size,
+      p_params->src_main_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_tmb_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    lbuffer_info.fd = p_params->src_thumb_buf[i].fd;
+    ret = OMX_UseBuffer(p_session->omx_handle,
+        &(p_session->p_in_omx_thumb_buf[i]), 2,
+        &lbuffer_info, p_params->src_thumb_buf[i].buf_size,
+        p_params->src_thumb_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    CDBG("%s:%d] Dest buffer %d", __func__, __LINE__, i);
+    ret = OMX_UseBuffer(p_session->omx_handle, &(p_session->p_out_omx_buf[i]),
+      1, NULL, p_params->dest_buf[i].buf_size,
+      p_params->dest_buf[i].buf_vaddr);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      return ret;
+    }
+  }
+  CDBG("%s:%d]", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpeg_session_free_buffers:
+ *
+ *  Arguments:
+ *    @data: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Free the buffers from OMX layer
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_free_buffers(void *data)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint32_t i = 0;
+  mm_jpeg_job_session_t* p_session = (mm_jpeg_job_session_t *)data;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  for (i = 0; i < p_params->num_src_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 0, p_session->p_in_omx_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_tmb_bufs; i++) {
+    CDBG("%s:%d] Source buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 2, p_session->p_in_omx_thumb_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      return ret;
+    }
+  }
+
+  for (i = 0; i < p_params->num_dst_bufs; i++) {
+    CDBG("%s:%d] Dest buffer %d", __func__, __LINE__, i);
+    ret = OMX_FreeBuffer(p_session->omx_handle, 1, p_session->p_out_omx_buf[i]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      return ret;
+    }
+  }
+  CDBG("%s:%d]", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpeg_session_change_state:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *    @new_state: new state to be transitioned to
+ *    @p_exec: transition function
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       This method is used for state transition
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_change_state(mm_jpeg_job_session_t* p_session,
+  OMX_STATETYPE new_state,
+  mm_jpeg_transition_func_t p_exec)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_STATETYPE current_state;
+  CDBG("%s:%d] new_state %d p_exec %p", __func__, __LINE__,
+    new_state, p_exec);
+
+  pthread_mutex_lock(&p_session->lock);
+
+  ret = OMX_GetState(p_session->omx_handle, &current_state);
+
+  if (ret) {
+    pthread_mutex_unlock(&p_session->lock);
+    return ret;
+  }
+
+  if (current_state == new_state) {
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  p_session->state_change_pending = OMX_TRUE;
+  ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+    new_state, NULL);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorIncorrectStateTransition;
+  }
+  CDBG("%s:%d] ", __func__, __LINE__);
+  if (OMX_ErrorNone != p_session->error_flag) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, p_session->error_flag);
+    pthread_mutex_unlock(&p_session->lock);
+    return p_session->error_flag;
+  }
+  if (p_exec) {
+    ret = p_exec(p_session);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+      pthread_mutex_unlock(&p_session->lock);
+      return ret;
+    }
+  }
+  CDBG("%s:%d] ", __func__, __LINE__);
+  if (p_session->state_change_pending) {
+    CDBG("%s:%d] before wait", __func__, __LINE__);
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    CDBG("%s:%d] after wait", __func__, __LINE__);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+  return ret;
+}
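+
+/* State-change handshake (summary of mm_jpeg_session_change_state above):
+ * the caller marks state_change_pending, issues
+ * OMX_SendCommand(OMX_CommandStateSet, ...), optionally runs p_exec (e.g.
+ * mm_jpeg_session_send_buffers) while still holding the session lock, and
+ * then waits on p_session->cond while state_change_pending is set; the OMX
+ * event callback is expected to signal the condition once the transition
+ * completes. */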
+
+/** mm_jpeg_session_create:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error types
+ *
+ *  Description:
+ *       Create a jpeg encode session
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_create(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  mm_jpeg_cirq_t *p_cirq = NULL;
+
+  pthread_mutex_init(&p_session->lock, NULL);
+  pthread_cond_init(&p_session->cond, NULL);
+  cirq_reset(&p_session->cb_q);
+  p_session->state_change_pending = OMX_FALSE;
+  p_session->abort_flag = OMX_FALSE;
+  p_session->error_flag = OMX_ErrorNone;
+  p_session->ebd_count = 0;
+  p_session->fbd_count = 0;
+  p_session->encode_pid = -1;
+  p_session->config = OMX_FALSE;
+  p_session->exif_count_local = 0;
+
+  p_session->omx_callbacks.EmptyBufferDone = mm_jpeg_ebd;
+  p_session->omx_callbacks.FillBufferDone = mm_jpeg_fbd;
+  p_session->omx_callbacks.EventHandler = mm_jpeg_event_handler;
+  rc = OMX_GetHandle(&p_session->omx_handle,
+    "OMX.qcom.image.jpeg.encoder",
+    (void *)p_session,
+    &p_session->omx_callbacks);
+
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s:%d] OMX_GetHandle failed (%d)", __func__, __LINE__, rc);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_session_destroy:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Destroy a jpeg encode session
+ *
+ **/
+void mm_jpeg_session_destroy(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+  if (NULL == p_session->omx_handle) {
+    CDBG_ERROR("%s:%d] invalid handle", __func__, __LINE__);
+    return;
+  }
+
+  rc = mm_jpeg_session_change_state(p_session, OMX_StateIdle, NULL);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  rc = mm_jpeg_session_change_state(p_session, OMX_StateLoaded,
+    mm_jpeg_session_free_buffers);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  rc = OMX_FreeHandle(p_session->omx_handle);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] OMX_FreeHandle failed (%d)", __func__, __LINE__, rc);
+  }
+  p_session->omx_handle = NULL;
+
+  pthread_mutex_destroy(&p_session->lock);
+  pthread_cond_destroy(&p_session->cond);
+  CDBG("%s:%d] X", __func__, __LINE__);
+}
+
+/** mm_jpeg_session_config_main_buffer_offset:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the buffer offsets
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_buffer_offset(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int32_t i = 0;
+  OMX_INDEXTYPE buffer_index;
+  QOMX_YUV_FRAME_INFO frame_info;
+  int32_t totalSize = 0;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[p_jobparams->src_index];
+
+  memset(&frame_info, 0x0, sizeof(QOMX_YUV_FRAME_INFO));
+
+  frame_info.cbcrStartOffset[0] = p_src_buf->offset.mp[0].len;
+  frame_info.cbcrStartOffset[1] = p_src_buf->offset.mp[1].len;
+  frame_info.yOffset = p_src_buf->offset.mp[0].offset;
+  frame_info.cbcrOffset[0] = p_src_buf->offset.mp[1].offset;
+  frame_info.cbcrOffset[1] = p_src_buf->offset.mp[2].offset;
+  totalSize = p_src_buf->buf_size;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME, &buffer_index);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  CDBG_HIGH("%s:%d] yOffset = %d, cbcrOffset = (%d %d), totalSize = %d,"
+    "cbcrStartOffset = (%d %d)", __func__, __LINE__,
+    (int)frame_info.yOffset,
+    (int)frame_info.cbcrOffset[0],
+    (int)frame_info.cbcrOffset[1],
+    totalSize,
+    (int)frame_info.cbcrStartOffset[0],
+    (int)frame_info.cbcrStartOffset[1]);
+
+  rc = OMX_SetParameter(p_session->omx_handle, buffer_index, &frame_info);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return rc;
+}
+
+/** mm_jpeg_encoding_mode:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the serial or parallel encoding
+ *       mode
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_encoding_mode(
+  mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int32_t i = 0;
+  OMX_INDEXTYPE indextype;
+  QOMX_ENCODING_MODE encoding_mode;
+  int32_t totalSize = 0;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  rc = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_ENCODING_MODE_NAME, &indextype);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+
+  CDBG_HIGH("%s:%d] OMX_Serial_Encoding = %d, OMX_Parallel_Encoding = %d ", __func__, __LINE__,
+    (int)OMX_Serial_Encoding,
+    (int)OMX_Parallel_Encoding);
+
+  encoding_mode = OMX_Serial_Encoding;
+  rc = OMX_SetParameter(p_session->omx_handle, indextype, &encoding_mode);
+  if (rc != OMX_ErrorNone) {
+    CDBG_ERROR("%s:%d] Failed", __func__, __LINE__);
+    return rc;
+  }
+  return rc;
+}
+
+/** map_jpeg_format:
+ *
+ *  Arguments:
+ *    @color_fmt: color format
+ *
+ *  Return:
+ *       OMX color format
+ *
+ *  Description:
+ *       Map mmjpeg color format to OMX color format
+ *
+ **/
+int map_jpeg_format(mm_jpeg_color_format color_fmt)
+{
+  switch (color_fmt) {
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+    return (int)OMX_COLOR_FormatYUV420SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+    return (int)OMX_COLOR_FormatYUV422SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+    return (int)OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar;
+  default:
+    CDBG_ERROR("%s:%d] invalid format %d", __func__, __LINE__, color_fmt);
+    return (int)OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar;
+  }
+}
+
+/** mm_jpeg_session_config_ports:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure OMX ports
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_ports(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  mm_jpeg_buf_t *p_src_buf =
+    &p_params->src_main_buf[p_jobparams->src_index];
+
+  p_session->inputPort.nPortIndex = 0;
+  p_session->outputPort.nPortIndex = 1;
+  p_session->inputTmbPort.nPortIndex = 2;
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputTmbPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  ret = OMX_GetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  p_session->inputPort.format.image.nFrameWidth =
+    p_jobparams->main_dim.src_dim.width;
+  p_session->inputPort.format.image.nFrameHeight =
+    p_jobparams->main_dim.src_dim.height;
+  p_session->inputPort.format.image.nStride =
+    p_src_buf->offset.mp[0].stride;
+  p_session->inputPort.format.image.nSliceHeight =
+    p_src_buf->offset.mp[0].scanline;
+  p_session->inputPort.format.image.eColorFormat =
+    map_jpeg_format(p_params->color_format);
+  p_session->inputPort.nBufferSize =
+    p_params->src_main_buf[p_jobparams->src_index].buf_size;
+  p_session->inputPort.nBufferCountActual = p_params->num_src_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->inputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  if (p_session->params.encode_thumbnail) {
+    mm_jpeg_buf_t *p_tmb_buf =
+      &p_params->src_thumb_buf[p_jobparams->thumb_index];
+    p_session->inputTmbPort.format.image.nFrameWidth =
+      p_jobparams->thumb_dim.src_dim.width;
+    p_session->inputTmbPort.format.image.nFrameHeight =
+      p_jobparams->thumb_dim.src_dim.height;
+    p_session->inputTmbPort.format.image.nStride =
+      p_tmb_buf->offset.mp[0].stride;
+    p_session->inputTmbPort.format.image.nSliceHeight =
+      p_tmb_buf->offset.mp[0].scanline;
+    p_session->inputTmbPort.format.image.eColorFormat =
+      map_jpeg_format(p_params->color_format);
+    p_session->inputTmbPort.nBufferSize =
+      p_params->src_thumb_buf[p_jobparams->thumb_index].buf_size;
+    p_session->inputTmbPort.nBufferCountActual = p_params->num_tmb_bufs;
+    ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+      &p_session->inputTmbPort);
+
+    if (ret) {
+      CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+      return ret;
+    }
+
+    // Enable thumbnail port
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortEnable,
+        p_session->inputTmbPort.nPortIndex, NULL);
+
+    if (ret) {
+      CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+      return ret;
+    }
+  } else {
+    // Disable thumbnail port
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandPortDisable,
+        p_session->inputTmbPort.nPortIndex, NULL);
+
+    if (ret) {
+      CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+      return ret;
+    }
+  }
+
+  p_session->outputPort.nBufferSize =
+    p_params->dest_buf[p_jobparams->dst_index].buf_size;
+  p_session->outputPort.nBufferCountActual = p_params->num_dst_bufs;
+  ret = OMX_SetParameter(p_session->omx_handle, OMX_IndexParamPortDefinition,
+    &p_session->outputPort);
+  if (ret) {
+    CDBG_ERROR("%s:%d] failed", __func__, __LINE__);
+    return ret;
+  }
+
+  return ret;
+}
+
+/** mm_jpeg_get_thumbnail_crop
+ *
+ *  Arguments:
+ *    @p_thumb_dim: thumbnail dimension
+ *    @p_main_dim: main image dimension
+ *    @crop_width : flag indicating if width needs to be cropped
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *    If the main image and thumbnail aspect ratios are different,
+ *    re-calculate the thumbnail crop info to prevent distortion
+ *
+ */
+OMX_ERRORTYPE mm_jpeg_get_thumbnail_crop(mm_jpeg_dim_t *p_thumb_dim,
+  mm_jpeg_dim_t *p_main_dim, uint8_t crop_width) {
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  int divisor;
+  int cropped_width = 0, cropped_height = 0;
+
+  if(crop_width) {
+   //Keep height constant
+   cropped_height = p_thumb_dim->src_dim.height;
+   cropped_width =  floor((cropped_height * p_thumb_dim->dst_dim.width) /
+      p_thumb_dim->dst_dim.height);
+  } else {
+    //Keep width constant
+    cropped_width =  p_thumb_dim->src_dim.width;
+    cropped_height = floor((cropped_width * p_thumb_dim->dst_dim.height) /
+      p_thumb_dim->dst_dim.width);
+  }
+  p_thumb_dim->crop.left = floor(p_thumb_dim->src_dim.width - cropped_width)/2;
+  p_thumb_dim->crop.top = floor(p_thumb_dim->src_dim.height - cropped_height)/2;
+  p_thumb_dim->crop.width = cropped_width;
+  p_thumb_dim->crop.height = cropped_height;
+
+  CDBG_HIGH("%s %d New thumbnail crop: left %d, top %d, crop width %d, crop height %d",
+    __func__, __LINE__, p_thumb_dim->crop.left, p_thumb_dim->crop.top,
+    p_thumb_dim->crop.width, p_thumb_dim->crop.height);
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_thumbnail:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the thumbnail image
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_thumbnail(mm_jpeg_job_session_t* p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  QOMX_THUMBNAIL_INFO thumbnail_info;
+  OMX_INDEXTYPE thumb_indextype;
+  OMX_BOOL encode_thumbnail = OMX_FALSE;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_dim_t *p_thumb_dim = &p_jobparams->thumb_dim;
+  mm_jpeg_dim_t *p_main_dim = &p_jobparams->main_dim;
+  QOMX_YUV_FRAME_INFO *p_frame_info = &thumbnail_info.tmbOffset;
+  mm_jpeg_buf_t *p_tmb_buf = &p_params->src_thumb_buf[p_jobparams->thumb_index];
+
+  CDBG_HIGH("%s:%d] encode_thumbnail %d", __func__, __LINE__,
+    p_params->encode_thumbnail);
+  if (OMX_FALSE == p_params->encode_thumbnail) {
+    return ret;
+  }
+
+  if ((p_thumb_dim->dst_dim.width == 0) || (p_thumb_dim->dst_dim.height == 0)) {
+    CDBG_ERROR("%s:%d] Error invalid output dim for thumbnail",
+      __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+
+  if ((p_thumb_dim->src_dim.width == 0) || (p_thumb_dim->src_dim.height == 0)) {
+    CDBG_ERROR("%s:%d] Error invalid input dim for thumbnail",
+      __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+
+  if ((p_thumb_dim->crop.width == 0) || (p_thumb_dim->crop.height == 0)) {
+    p_thumb_dim->crop.width = p_thumb_dim->src_dim.width;
+    p_thumb_dim->crop.height = p_thumb_dim->src_dim.height;
+  }
+
+  /* check crop boundary */
+  if ((p_thumb_dim->crop.width + p_thumb_dim->crop.left > p_thumb_dim->src_dim.width) ||
+    (p_thumb_dim->crop.height + p_thumb_dim->crop.top > p_thumb_dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] invalid crop boundary (%d, %d) offset (%d, %d) out of (%d, %d)",
+      __func__, __LINE__,
+      p_thumb_dim->crop.width,
+      p_thumb_dim->crop.height,
+      p_thumb_dim->crop.left,
+      p_thumb_dim->crop.top,
+      p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height);
+    return OMX_ErrorBadParameter;
+  }
+
+  memset(&thumbnail_info, 0x0, sizeof(QOMX_THUMBNAIL_INFO));
+  ret = OMX_GetExtensionIndex(p_session->omx_handle,
+    QOMX_IMAGE_EXT_THUMBNAIL_NAME,
+    &thumb_indextype);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, ret);
+    return ret;
+  }
+
+  /* fill thumbnail info */
+  thumbnail_info.scaling_enabled = 1;
+  thumbnail_info.input_width = p_thumb_dim->src_dim.width;
+  thumbnail_info.input_height = p_thumb_dim->src_dim.height;
+  thumbnail_info.quality = p_params->thumb_quality;
+
+  if ((p_main_dim->src_dim.width < p_thumb_dim->src_dim.width) ||
+    (p_main_dim->src_dim.height < p_thumb_dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] Improper thumbnail dim %dx%d resetting to %dx%d",
+      __func__, __LINE__,
+      p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height,
+      p_main_dim->src_dim.width,
+      p_main_dim->src_dim.height);
+    thumbnail_info.input_width = p_main_dim->src_dim.width;
+    thumbnail_info.input_height = p_main_dim->src_dim.height;
+    if ((thumbnail_info.crop_info.nWidth > thumbnail_info.input_width)
+      || (thumbnail_info.crop_info.nHeight > thumbnail_info.input_height)) {
+      thumbnail_info.crop_info.nLeft = 0;
+      thumbnail_info.crop_info.nTop = 0;
+      thumbnail_info.crop_info.nWidth = thumbnail_info.input_width;
+      thumbnail_info.crop_info.nHeight = thumbnail_info.input_height;
+    }
+  }
+
+  //If the main image and thumbnail aspect ratio are different, reset the
+  // thumbnail crop info to avoid distortion
+  double main_aspect_ratio = (double)p_main_dim->dst_dim.width /
+    (double)p_main_dim->dst_dim.height;
+  double thumb_aspect_ratio = (double)p_thumb_dim->dst_dim.width /
+    (double)p_thumb_dim->dst_dim.height;
+  if ((thumb_aspect_ratio - main_aspect_ratio) > ASPECT_TOLERANCE) {
+    mm_jpeg_get_thumbnail_crop(p_thumb_dim, p_main_dim, 0);
+  } else if((main_aspect_ratio - thumb_aspect_ratio) > ASPECT_TOLERANCE){
+    mm_jpeg_get_thumbnail_crop(p_thumb_dim, p_main_dim, 1);
+  }
+
+  //Fill thumbnail crop info
+  thumbnail_info.crop_info.nWidth = p_thumb_dim->crop.width;
+  thumbnail_info.crop_info.nHeight = p_thumb_dim->crop.height;
+  thumbnail_info.crop_info.nLeft = p_thumb_dim->crop.left;
+  thumbnail_info.crop_info.nTop = p_thumb_dim->crop.top;
+
+  //If main image cropping/scaling is enabled, thumb FOV should be within
+  //main image FOV
+  if ((p_main_dim->crop.width != p_main_dim->src_dim.width) ||
+    (p_main_dim->crop.height != p_main_dim->src_dim.height)) {
+     if ((p_thumb_dim->crop.left < p_main_dim->crop.left) ||
+       ((p_thumb_dim->crop.left + p_thumb_dim->crop.width) >
+       (p_main_dim->crop.left + p_main_dim->crop.width)) ||
+       (p_thumb_dim->crop.top < p_main_dim->crop.top) ||
+       ((p_thumb_dim->crop.top + p_thumb_dim->crop.height) >
+       (p_main_dim->crop.top + p_main_dim->crop.height))) {
+       //Reset the FOV for the thumbnail
+        CDBG_HIGH("%s:%d] Resetting the thumbnail FOV wrt main image",
+          __func__, __LINE__);
+       thumbnail_info.crop_info.nLeft = p_main_dim->crop.left;
+       thumbnail_info.crop_info.nTop = p_main_dim->crop.top;
+       if ((p_thumb_dim->crop.width > p_main_dim->crop.width) ||
+         (p_thumb_dim->crop.height > p_main_dim->crop.height)) {
+         thumbnail_info.crop_info.nWidth = p_main_dim->crop.width;
+         thumbnail_info.crop_info.nHeight = p_main_dim->crop.height;
+       }
+    }
+  }
+
+  if ((p_thumb_dim->dst_dim.width > p_thumb_dim->src_dim.width)
+    || (p_thumb_dim->dst_dim.height > p_thumb_dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] Incorrect thumbnail dim %dx%d resetting to %dx%d",
+      __func__, __LINE__,
+      p_thumb_dim->dst_dim.width,
+      p_thumb_dim->dst_dim.height,
+      p_thumb_dim->src_dim.width,
+      p_thumb_dim->src_dim.height);
+    thumbnail_info.output_width = p_thumb_dim->src_dim.width;
+    thumbnail_info.output_height = p_thumb_dim->src_dim.height;
+  } else {
+    thumbnail_info.output_width = p_thumb_dim->dst_dim.width;
+    thumbnail_info.output_height = p_thumb_dim->dst_dim.height;
+  }
+
+  memset(p_frame_info, 0x0, sizeof(*p_frame_info));
+
+  p_frame_info->cbcrStartOffset[0] = p_tmb_buf->offset.mp[0].len;
+  p_frame_info->cbcrStartOffset[1] = p_tmb_buf->offset.mp[1].len;
+  p_frame_info->yOffset = p_tmb_buf->offset.mp[0].offset;
+  p_frame_info->cbcrOffset[0] = p_tmb_buf->offset.mp[1].offset;
+  p_frame_info->cbcrOffset[1] = p_tmb_buf->offset.mp[2].offset;
+
+  ret = OMX_SetConfig(p_session->omx_handle, thumb_indextype,
+    &thumbnail_info);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_main_crop:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image crop
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main_crop(mm_jpeg_job_session_t *p_session)
+{
+  OMX_CONFIG_RECTTYPE rect_type_in, rect_type_out;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_dim_t *dim = &p_jobparams->main_dim;
+
+  if ((dim->crop.width == 0) || (dim->crop.height == 0)) {
+    dim->crop.width = dim->src_dim.width;
+    dim->crop.height = dim->src_dim.height;
+  }
+  /* error check first */
+  if ((dim->crop.width + dim->crop.left > dim->src_dim.width) ||
+    (dim->crop.height + dim->crop.top > dim->src_dim.height)) {
+    CDBG_ERROR("%s:%d] invalid crop boundary (%d, %d) out of (%d, %d)",
+      __func__, __LINE__,
+      dim->crop.width + dim->crop.left,
+      dim->crop.height + dim->crop.top,
+      dim->src_dim.width,
+      dim->src_dim.height);
+    return OMX_ErrorBadParameter;
+  }
+
+  memset(&rect_type_in, 0, sizeof(rect_type_in));
+  memset(&rect_type_out, 0, sizeof(rect_type_out));
+  rect_type_in.nPortIndex = 0;
+  rect_type_out.nPortIndex = 0;
+
+  if ((dim->src_dim.width != dim->crop.width) ||
+    (dim->src_dim.height != dim->crop.height) ||
+    (dim->src_dim.width != dim->dst_dim.width) ||
+    (dim->src_dim.height != dim->dst_dim.height)) {
+    /* Scaler information */
+    rect_type_in.nWidth = CEILING2(dim->crop.width);
+    rect_type_in.nHeight = CEILING2(dim->crop.height);
+    rect_type_in.nLeft = dim->crop.left;
+    rect_type_in.nTop = dim->crop.top;
+
+    if (dim->dst_dim.width && dim->dst_dim.height) {
+      rect_type_out.nWidth = dim->dst_dim.width;
+      rect_type_out.nHeight = dim->dst_dim.height;
+    }
+  }
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonInputCrop,
+    &rect_type_in);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+
+  CDBG("%s:%d] OMX_IndexConfigCommonInputCrop w = %d, h = %d, l = %d, t = %d,"
+    " port_idx = %d", __func__, __LINE__,
+    (int)rect_type_in.nWidth, (int)rect_type_in.nHeight,
+    (int)rect_type_in.nLeft, (int)rect_type_in.nTop,
+    (int)rect_type_in.nPortIndex);
+
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonOutputCrop,
+    &rect_type_out);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return ret;
+  }
+  CDBG("%s:%d] OMX_IndexConfigCommonOutputCrop w = %d, h = %d,"
+    " port_idx = %d", __func__, __LINE__,
+    (int)rect_type_out.nWidth, (int)rect_type_out.nHeight,
+    (int)rect_type_out.nPortIndex);
+
+  return ret;
+}
+
+/** mm_jpeg_session_config_main:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure main image
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_main(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  /* config port */
+  CDBG("%s:%d] config port", __func__, __LINE__);
+  rc = mm_jpeg_session_config_ports(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config port failed", __func__);
+    return rc;
+  }
+
+  /* config buffer offset */
+  CDBG("%s:%d] config main buf offset", __func__, __LINE__);
+  rc = mm_jpeg_session_config_main_buffer_offset(p_session);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s: config buffer offset failed", __func__);
+    return rc;
+  }
+
+  /* set the encoding mode */
+  mm_jpeg_encoding_mode(p_session);
+
+  return rc;
+}
+
+/** mm_jpeg_session_config_common:
+ *
+ *  Arguments:
+ *    @p_session: job session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure common parameters
+ *
+ **/
+OMX_ERRORTYPE mm_jpeg_session_config_common(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int i;
+  OMX_INDEXTYPE exif_idx;
+  OMX_CONFIG_ROTATIONTYPE rotate;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  QOMX_EXIF_INFO exif_info;
+
+  /* set rotation */
+  memset(&rotate, 0, sizeof(rotate));
+  rotate.nPortIndex = 1;
+  rotate.nRotation = p_jobparams->rotation;
+  rc = OMX_SetConfig(p_session->omx_handle, OMX_IndexConfigCommonRotate,
+    &rotate);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+    return rc;
+  }
+  CDBG("%s:%d] Set rotation to %d at port_idx = %d", __func__, __LINE__,
+    (int)p_jobparams->rotation, (int)rotate.nPortIndex);
+
+  /* Set Exif data */
+  memset(&p_session->exif_info_local[0], 0, sizeof(p_session->exif_info_local));
+
+  /* set exif tags */
+  rc = OMX_GetExtensionIndex(p_session->omx_handle, QOMX_IMAGE_EXT_EXIF_NAME,
+    &exif_idx);
+  if (OMX_ErrorNone != rc) {
+    CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+    return rc;
+  }
+  CDBG_HIGH("%s:%d] Num of exif entries passed from HAL: %d", __func__, __LINE__,
+      (int)p_jobparams->exif_info.numOfEntries);
+  if (p_jobparams->exif_info.numOfEntries > 0) {
+    rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+      &p_jobparams->exif_info);
+    if (OMX_ErrorNone != rc) {
+      CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+      return rc;
+    }
+  }
+  /* parse additional exif data from the metadata if present */
+  if ((NULL != p_jobparams->p_metadata_v3) ||
+    (NULL != p_jobparams->p_metadata_v1)) {
+    exif_info.numOfEntries = 0;
+    exif_info.exif_data = &p_session->exif_info_local[0];
+
+    if (NULL != p_jobparams->p_metadata_v3) {
+      process_meta_data_v3(p_jobparams->p_metadata_v3,
+          &exif_info, &p_jobparams->cam_exif_params);
+    } else {
+      process_meta_data_v1(p_jobparams->p_metadata_v1,
+        &exif_info, &p_jobparams->cam_exif_params);
+    }
+    /* After Parse metadata */
+    p_session->exif_count_local = exif_info.numOfEntries;
+
+    if (exif_info.numOfEntries > 0) {
+      /* set exif tags */
+      CDBG("%s:%d] exif tags from metadata count %d", __func__, __LINE__,
+        (int)exif_info.numOfEntries);
+      rc = OMX_SetConfig(p_session->omx_handle, exif_idx,
+        &exif_info);
+      if (OMX_ErrorNone != rc) {
+        CDBG_ERROR("%s:%d] Error %d", __func__, __LINE__, rc);
+        return rc;
+      }
+    }
+  } else {
+    CDBG_ERROR("%s:%d] Metadata is null", __func__, __LINE__);
+  }
+
+  return rc;
+}
+
+/** mm_jpeg_session_abort:
+ *
+ *  Arguments:
+ *    @p_session: jpeg session
+ *
+ *  Return:
+ *       OMX_BOOL
+ *
+ *  Description:
+ *       Abort ongoing job
+ *
+ **/
+OMX_BOOL mm_jpeg_session_abort(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+  pthread_mutex_lock(&p_session->lock);
+  if (OMX_TRUE == p_session->abort_flag) {
+    pthread_mutex_unlock(&p_session->lock);
+    CDBG("%s:%d] **** ALREADY ABORTED", __func__, __LINE__);
+    return 0;
+  }
+  p_session->abort_flag = OMX_TRUE;
+  if (OMX_TRUE == p_session->encoding) {
+    p_session->state_change_pending = OMX_TRUE;
+
+    CDBG("%s:%d] **** ABORTING", __func__, __LINE__);
+
+    ret = OMX_SendCommand(p_session->omx_handle, OMX_CommandStateSet,
+      OMX_StateIdle, NULL);
+
+    if (ret != OMX_ErrorNone) {
+      CDBG("%s:%d] OMX_SendCommand returned error %d", __func__, __LINE__, ret);
+      pthread_mutex_unlock(&p_session->lock);
+      return 1;
+    }
+    ret = mm_jpeg_destroy_job(p_session);
+    if (ret != 0) {
+      CDBG("%s:%d] Destroy job returned error %d", __func__, __LINE__, ret);
+    }
+
+    CDBG("%s:%d] before wait", __func__, __LINE__);
+    pthread_cond_wait(&p_session->cond, &p_session->lock);
+    CDBG("%s:%d] after wait", __func__, __LINE__);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d] X", __func__, __LINE__);
+  return 0;
+}
+
+/** mm_jpeg_get_new_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_idx: client index
+ *    @pp_session: output pointer to the new session
+ *
+ *  Return:
+ *       session index, or -1 if no session is available
+ *
+ *  Description:
+ *       Get a free session slot for the given client and mark it active
+ *
+ **/
+inline int mm_jpeg_get_new_session_idx(mm_jpeg_obj *my_obj, int client_idx,
+  mm_jpeg_job_session_t **pp_session)
+{
+  int i = 0;
+  int index = -1;
+  for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+    pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+    if (!my_obj->clnt_mgr[client_idx].session[i].active) {
+      *pp_session = &my_obj->clnt_mgr[client_idx].session[i];
+      my_obj->clnt_mgr[client_idx].session[i].active = OMX_TRUE;
+      index = i;
+      pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+      break;
+    }
+    pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+  }
+  return index;
+}
+
+/** mm_jpeg_remove_session_idx:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Mark the session referenced by the job id as inactive
+ *
+ **/
+inline void mm_jpeg_remove_session_idx(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+  int client_idx =  GET_CLIENT_IDX(job_id);
+  int session_idx= GET_SESSION_IDX(job_id);
+  CDBG("%s:%d] client_idx %d session_idx %d", __func__, __LINE__,
+    client_idx, session_idx);
+  pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+  my_obj->clnt_mgr[client_idx].session[session_idx].active = OMX_FALSE;
+  pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+}
+
+/** mm_jpeg_get_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       session pointer, or NULL if the job id is invalid
+ *
+ *  Description:
+ *       Get the session that corresponds to the given job id
+ *
+ **/
+inline mm_jpeg_job_session_t *mm_jpeg_get_session(mm_jpeg_obj *my_obj, uint32_t job_id)
+{
+  mm_jpeg_job_session_t *p_session = NULL;
+  int client_idx =  GET_CLIENT_IDX(job_id);
+  int session_idx= GET_SESSION_IDX(job_id);
+
+  CDBG("%s:%d] client_idx %d session_idx %d", __func__, __LINE__,
+    client_idx, session_idx);
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    CDBG_ERROR("%s:%d] invalid job id %x", __func__, __LINE__,
+      job_id);
+    return NULL;
+  }
+  pthread_mutex_lock(&my_obj->clnt_mgr[client_idx].lock);
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  pthread_mutex_unlock(&my_obj->clnt_mgr[client_idx].lock);
+  return p_session;
+}
+
+/** mm_jpeg_configure_job_params:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the job specific params
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_configure_job_params(
+  mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  OMX_IMAGE_PARAM_QFACTORTYPE q_factor;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+
+  /* common config */
+  ret = mm_jpeg_session_config_common(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config common failed", __func__, __LINE__);
+  }
+
+  /* config Main Image crop */
+  CDBG("%s:%d] config main crop", __func__, __LINE__);
+  ret = mm_jpeg_session_config_main_crop(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s: config crop failed", __func__);
+    return ret;
+  }
+
+  /* set quality */
+  memset(&q_factor, 0, sizeof(q_factor));
+  q_factor.nPortIndex = 0;
+  q_factor.nQFactor = p_params->quality;
+  ret = OMX_SetConfig(p_session->omx_handle, OMX_IndexParamQFactor, &q_factor);
+  CDBG("%s:%d] config QFactor: %d", __func__, __LINE__, (int)q_factor.nQFactor);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] Error setting Q factor %d", __func__, __LINE__, ret);
+    return ret;
+  }
+
+  /* config thumbnail */
+  ret = mm_jpeg_session_config_thumbnail(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config thumbnail img failed", __func__, __LINE__);
+    return ret;
+  }
+
+  return ret;
+}
+
+/** mm_jpeg_session_configure:
+ *
+ *  Arguments:
+ *    @data: encode session
+ *
+ *  Return:
+ *       OMX error values
+ *
+ *  Description:
+ *       Configure the session
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_configure(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+
+  CDBG("%s:%d] E ", __func__, __LINE__);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+  /* config main img */
+  ret = mm_jpeg_session_config_main(p_session);
+  if (OMX_ErrorNone != ret) {
+    CDBG_ERROR("%s:%d] config main img failed", __func__, __LINE__);
+    goto error;
+  }
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateIdle,
+    mm_jpeg_session_send_buffers);
+  if (ret) {
+    CDBG_ERROR("%s:%d] change state to idle failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+  ret = mm_jpeg_session_change_state(p_session, OMX_StateExecuting,
+    NULL);
+  if (ret) {
+    CDBG_ERROR("%s:%d] change state to executing failed %d",
+      __func__, __LINE__, ret);
+    goto error;
+  }
+
+error:
+  CDBG("%s:%d] X ret %d", __func__, __LINE__, ret);
+  return ret;
+}
+
+/** mm_jpeg_job_done:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       none
+ *
+ *  Description:
+ *       Release the current job and wake up the job manager thread
+ *
+ **/
+static inline void mm_jpeg_job_done(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  /*Destroy job related params*/
+  mm_jpeg_destroy_job(p_session);
+
+  /*remove the job*/
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q,
+    p_session->jobId);
+  if (node) {
+    free(node);
+  }
+  p_session->encoding = OMX_FALSE;
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+}
+
+/** mm_jpeg_session_encode:
+ *
+ *  Arguments:
+ *    @p_session: encode session
+ *
+ *  Return:
+ *       OMX_ERRORTYPE
+ *
+ *  Description:
+ *       Start the encoding
+ *
+ **/
+static OMX_ERRORTYPE mm_jpeg_session_encode(mm_jpeg_job_session_t *p_session)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_encode_params_t *p_params = &p_session->params;
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  int dest_idx = 0;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj *)p_session->jpeg_obj;
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->abort_flag = OMX_FALSE;
+  p_session->encoding = OMX_FALSE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  if (OMX_FALSE == p_session->config) {
+    ret = mm_jpeg_session_configure(p_session);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      goto error;
+    }
+    p_session->config = OMX_TRUE;
+  }
+
+  ret = mm_jpeg_configure_job_params(p_session);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+  pthread_mutex_lock(&p_session->lock);
+  p_session->encoding = OMX_TRUE;
+  pthread_mutex_unlock(&p_session->lock);
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+#ifdef MM_JPEG_DUMP_INPUT
+  DUMP_TO_FILE("/data/mm_jpeg_int.yuv",
+    p_session->p_in_omx_buf[p_jobparams->src_index]->pBuffer,
+    (int)p_session->p_in_omx_buf[p_jobparams->src_index]->nAllocLen);
+#endif
+
+  ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+    p_session->p_in_omx_buf[p_jobparams->src_index]);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+
+  if (p_session->params.encode_thumbnail) {
+    ret = OMX_EmptyThisBuffer(p_session->omx_handle,
+        p_session->p_in_omx_thumb_buf[p_jobparams->thumb_index]);
+    if (ret) {
+      CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+      goto error;
+    }
+  }
+
+  ret = OMX_FillThisBuffer(p_session->omx_handle,
+    p_session->p_out_omx_buf[p_jobparams->dst_index]);
+  if (ret) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    goto error;
+  }
+
+  MM_JPEG_CHK_ABORT(p_session, ret, error);
+
+error:
+
+  CDBG("%s:%d] X ", __func__, __LINE__);
+  return ret;
+}
+
+/** mm_jpeg_process_encoding_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg client
+ *    @job_node: job node
+ *
+ *  Return:
+ *       0 for success -1 otherwise
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_process_encoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_job_q_node_t *node = NULL;
+
+  /* check if valid session */
+  p_session = mm_jpeg_get_session(my_obj, job_node->enc_info.job_id);
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid job id %x", __func__, __LINE__,
+      job_node->enc_info.job_id);
+    return -1;
+  }
+
+  /* send encode cmd to OMX, queue job into ongoing queue */
+  rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, job_node);
+  if (rc) {
+    CDBG_ERROR("%s:%d] jpeg enqueue failed %d",
+      __func__, __LINE__, rc);
+    goto error;
+  }
+
+  p_session->encode_job = job_node->enc_info.encode_job;
+  p_session->jobId = job_node->enc_info.job_id;
+  ret = mm_jpeg_session_encode(p_session);
+  if (ret) {
+    CDBG_ERROR("%s:%d] encode session failed", __func__, __LINE__);
+    goto error;
+  }
+
+  CDBG("%s:%d] Success X ", __func__, __LINE__);
+  return rc;
+
+error:
+
+  if ((OMX_ErrorNone != ret) &&
+    (NULL != p_session->params.jpeg_cb)) {
+    p_session->job_status = JPEG_JOB_STATUS_ERROR;
+    CDBG("%s:%d] send jpeg error callback %d", __func__, __LINE__,
+      p_session->job_status);
+    p_session->params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      NULL,
+      p_session->params.userdata);
+  }
+
+  /*remove the job*/
+  mm_jpeg_job_done(p_session);
+  CDBG("%s:%d] Error X ", __func__, __LINE__);
+
+  return rc;
+}
+
+/** mm_jpeg_jobmgr_thread:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       job manager thread main function
+ *
+ **/
+static void *mm_jpeg_jobmgr_thread(void *data)
+{
+  int rc = 0;
+  int running = 1;
+  uint32_t num_ongoing_jobs = 0;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj*)data;
+  mm_jpeg_job_cmd_thread_t *cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node = NULL;
+  prctl(PR_SET_NAME, (unsigned long)"mm_jpeg_thread", 0, 0, 0);
+
+  do {
+    do {
+      rc = cam_sem_wait(&cmd_thread->job_sem);
+      if (rc != 0 && errno != EINVAL) {
+        CDBG_ERROR("%s: cam_sem_wait error (%s)",
+          __func__, strerror(errno));
+        return NULL;
+      }
+    } while (rc != 0);
+
+    /* check ongoing q size */
+    num_ongoing_jobs = mm_jpeg_queue_get_size(&my_obj->ongoing_job_q);
+    if (num_ongoing_jobs >= NUM_MAX_JPEG_CNCURRENT_JOBS) {
+      CDBG("%s:%d] ongoing jobs already reached max %d", __func__,
+        __LINE__, num_ongoing_jobs);
+      continue;
+    }
+
+    pthread_mutex_lock(&my_obj->job_lock);
+    /* can go ahead with new work */
+    node = (mm_jpeg_job_q_node_t*)mm_jpeg_queue_deq(&cmd_thread->job_queue);
+    if (node != NULL) {
+      switch (node->type) {
+      case MM_JPEG_CMD_TYPE_JOB:
+        rc = mm_jpeg_process_encoding_job(my_obj, node);
+        break;
+      case MM_JPEG_CMD_TYPE_EXIT:
+      default:
+        /* free node */
+        free(node);
+        /* set running flag to false */
+        running = 0;
+        break;
+      }
+    }
+    pthread_mutex_unlock(&my_obj->job_lock);
+
+  } while (running);
+  return NULL;
+}
+
+/** mm_jpeg_jobmgr_thread_launch:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       launches the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t *job_mgr = &my_obj->job_mgr;
+
+  cam_sem_init(&job_mgr->job_sem, 0);
+  mm_jpeg_queue_init(&job_mgr->job_queue);
+
+  /* launch the thread */
+  pthread_create(&job_mgr->pid,
+    NULL,
+    mm_jpeg_jobmgr_thread,
+    (void *)my_obj);
+  return rc;
+}
+
+/** mm_jpeg_jobmgr_thread_release:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Releases the job manager thread
+ *
+ **/
+int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t * cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node =
+    (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    return -1;
+  }
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->type = MM_JPEG_CMD_TYPE_EXIT;
+
+  mm_jpeg_queue_enq(&cmd_thread->job_queue, node);
+  cam_sem_post(&cmd_thread->job_sem);
+
+  /* wait until cmd thread exits */
+  if (pthread_join(cmd_thread->pid, NULL) != 0) {
+    CDBG("%s: pthread dead already", __func__);
+  }
+  mm_jpeg_queue_deinit(&cmd_thread->job_queue);
+
+  cam_sem_destroy(&cmd_thread->job_sem);
+  memset(cmd_thread, 0, sizeof(mm_jpeg_job_cmd_thread_t));
+  return rc;
+}
+
+/** mm_jpeg_init:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Initializes the jpeg client
+ *
+ **/
+int32_t mm_jpeg_init(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* init locks */
+  pthread_mutex_init(&my_obj->job_lock, NULL);
+
+  /* init ongoing job queue */
+  rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+
+  /* init job semaphore and launch jobmgr thread */
+  CDBG("%s:%d] Launch jobmgr thread rc %d", __func__, __LINE__, rc);
+  rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+    return -1;
+  }
+
+  /* load OMX */
+  if (OMX_ErrorNone != OMX_Init()) {
+    /* roll back in error case */
+    CDBG_ERROR("%s:%d] OMX_Init failed", __func__, __LINE__);
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    rc = -1;
+  }
+
+  return rc;
+}
+
+/** mm_jpeg_deinit:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Deinits the jpeg client
+ *
+ **/
+int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* release jobmgr thread */
+  rc = mm_jpeg_jobmgr_thread_release(my_obj);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  /* unload OMX engine */
+  OMX_Deinit();
+
+  /* deinit ongoing job and cb queue */
+  rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+  if (0 != rc) {
+    CDBG_ERROR("%s:%d] Error", __func__, __LINE__);
+  }
+
+  /* destroy locks */
+  pthread_mutex_destroy(&my_obj->job_lock);
+
+  return rc;
+}
+
+/** mm_jpeg_new_client:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Create new jpeg client
+ *
+ **/
+uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj)
+{
+  uint32_t client_hdl = 0;
+  uint8_t idx;
+  int i = 0;
+
+  if (my_obj->num_clients >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: num of clients reached limit", __func__);
+    return client_hdl;
+  }
+
+  for (idx = 0; idx < MAX_JPEG_CLIENT_NUM; idx++) {
+    if (0 == my_obj->clnt_mgr[idx].is_used) {
+      break;
+    }
+  }
+
+  if (idx < MAX_JPEG_CLIENT_NUM) {
+    /* client session avail */
+    /* generate client handler by index */
+    client_hdl = mm_jpeg_util_generate_handler(idx);
+
+    /* update client session */
+    my_obj->clnt_mgr[idx].is_used = 1;
+    my_obj->clnt_mgr[idx].client_handle = client_hdl;
+
+    pthread_mutex_init(&my_obj->clnt_mgr[idx].lock, NULL);
+    for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+      memset(&my_obj->clnt_mgr[idx].session[i], 0x0, sizeof(mm_jpeg_job_session_t));
+    }
+
+    /* increase client count */
+    my_obj->num_clients++;
+  }
+
+  return client_hdl;
+}
+
+/** mm_jpeg_start_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @job: pointer to encode job
+ *    @job_id: pointer that receives the job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Start the encoding job
+ *
+ **/
+int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_t *job,
+  uint32_t *job_id)
+{
+  int32_t rc = -1;
+  uint8_t session_idx = 0;
+  uint8_t client_idx = 0;
+  mm_jpeg_job_q_node_t* node = NULL;
+  mm_jpeg_job_session_t *p_session = NULL;
+  mm_jpeg_encode_job_t *p_jobparams  = &job->encode_job;
+
+  *job_id = 0;
+
+  /* check if valid session */
+  session_idx = GET_SESSION_IDX(p_jobparams->session_id);
+  client_idx = GET_CLIENT_IDX(p_jobparams->session_id);
+  CDBG("%s:%d] session_idx %d client idx %d", __func__, __LINE__,
+    session_idx, client_idx);
+
+  if ((session_idx >= MM_JPEG_MAX_SESSION) ||
+    (client_idx >= MAX_JPEG_CLIENT_NUM)) {
+    CDBG_ERROR("%s:%d] invalid session id %x", __func__, __LINE__,
+      job->encode_job.session_id);
+    return rc;
+  }
+
+  p_session = &my_obj->clnt_mgr[client_idx].session[session_idx];
+  if (OMX_FALSE == p_session->active) {
+    CDBG_ERROR("%s:%d] session not active %x", __func__, __LINE__,
+      job->encode_job.session_id);
+    return rc;
+  }
+
+  if ((p_jobparams->src_index >= p_session->params.num_src_bufs) ||
+    (p_jobparams->dst_index >= p_session->params.num_dst_bufs)) {
+    CDBG_ERROR("%s:%d] invalid buffer indices", __func__, __LINE__);
+    return rc;
+  }
+
+  /* enqueue new job into todo job queue */
+  node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    return -1;
+  }
+
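+  /* Compose the job id: keep the session id bits (magic value, session and
+   * client index) and fold a rolling job counter into the field that
+   * GET_JOB_IDX() reads back. */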
+  *job_id = job->encode_job.session_id |
+    ((p_session->job_hist++ % JOB_HIST_MAX) << 16);
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->enc_info.encode_job = job->encode_job;
+  node->enc_info.job_id = *job_id;
+  node->enc_info.client_handle = p_session->client_hdl;
+  node->type = MM_JPEG_CMD_TYPE_JOB;
+
+  rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, node);
+  if (0 == rc) {
+    cam_sem_post(&my_obj->job_mgr.job_sem);
+  }
+
+  return rc;
+}
+
+/** mm_jpeg_abort_job:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Abort the encoding session
+ *
+ **/
+int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+  uint32_t jobId)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  OMX_BOOL ret = OMX_FALSE;
+  mm_jpeg_job_session_t *p_session = NULL;
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    free(node);
+    goto abort_done;
+  }
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+  if (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    p_session = mm_jpeg_get_session(my_obj, node->enc_info.job_id);
+    if (p_session) {
+      mm_jpeg_session_abort(p_session);
+    } else {
+      CDBG_ERROR("%s:%d] Invalid job id 0x%x", __func__, __LINE__,
+        node->enc_info.job_id);
+    }
+    free(node);
+    goto abort_done;
+  }
+
+abort_done:
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  return rc;
+}
+
+/** mm_jpeg_create_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *    @p_params: pointer to encode params
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Create the encoding session
+ *
+ **/
+int32_t mm_jpeg_create_session(mm_jpeg_obj *my_obj,
+  uint32_t client_hdl,
+  mm_jpeg_encode_params_t *p_params,
+  uint32_t* p_session_id)
+{
+  int32_t rc = 0;
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  uint8_t clnt_idx = 0;
+  int session_idx = -1;
+  mm_jpeg_job_session_t *p_session = NULL;
+  *p_session_id = 0;
+
+  /* validate the parameters */
+  if ((p_params->num_src_bufs > MM_JPEG_MAX_BUF)
+    || (p_params->num_dst_bufs > MM_JPEG_MAX_BUF)) {
+    CDBG_ERROR("%s:%d] invalid num buffers", __func__, __LINE__);
+    return -1;
+  }
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return -1;
+  }
+
+  session_idx = mm_jpeg_get_new_session_idx(my_obj, clnt_idx, &p_session);
+  if (session_idx < 0) {
+    CDBG_ERROR("%s:%d] invalid session id (%d)", __func__, __LINE__, session_idx);
+    return -1;
+  }
+
+  ret = mm_jpeg_session_create(p_session);
+  if (OMX_ErrorNone != ret) {
+    p_session->active = OMX_FALSE;
+    CDBG_ERROR("%s:%d] jpeg session create failed", __func__, __LINE__);
+    return ret;
+  }
+
+  *p_session_id = (JOB_ID_MAGICVAL << 24) | (session_idx << 8) | clnt_idx;
+
+  /*copy the params*/
+  p_session->params = *p_params;
+  p_session->client_hdl = client_hdl;
+  p_session->sessionId = *p_session_id;
+  p_session->jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+  CDBG("%s:%d] session id %x", __func__, __LINE__, *p_session_id);
+
+  return ret;
+}
+
+/** mm_jpeg_destroy_job:
+ *
+ *  Arguments:
+ *    @p_session: Session obj
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the job related parameters
+ *
+ **/
+int32_t mm_jpeg_destroy_job(mm_jpeg_job_session_t *p_session)
+{
+  mm_jpeg_encode_job_t *p_jobparams = &p_session->encode_job;
+  int i = 0, rc = 0;
+
+  CDBG_HIGH("%s:%d] Exif entry count %d %d", __func__, __LINE__,
+    (int)p_jobparams->exif_info.numOfEntries,
+    (int)p_session->exif_count_local);
+  for (i = 0; i < p_session->exif_count_local; i++) {
+    rc = releaseExifEntry(&p_session->exif_info_local[i]);
+    if (rc) {
+      CDBG_ERROR("%s:%d] Exif release failed (%d)", __func__, __LINE__, rc);
+    }
+  }
+  p_session->exif_count_local = 0;
+
+  return rc;
+}
+
+/** mm_jpeg_destroy_session:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: session index
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session
+ *
+ **/
+int32_t mm_jpeg_destroy_session(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = 0;
+  uint8_t clnt_idx = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  OMX_BOOL ret = OMX_FALSE;
+  uint32_t session_id = 0;
+
+  /* validate the session before dereferencing it */
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid session", __func__, __LINE__);
+    return rc;
+  }
+  session_id = p_session->sessionId;
+
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in todo queue */
+  CDBG("%s:%d] abort todo jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  CDBG("%s:%d] abort ongoing jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpeg_session_destroy(p_session);
+  mm_jpeg_remove_session_idx(my_obj, session_id);
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  cam_sem_post(&my_obj->job_mgr.job_sem);
+  CDBG("%s:%d] X", __func__, __LINE__);
+
+  return rc;
+}
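+
+/* Teardown order used above: drain queued (todo) jobs for the session, drain
+ * jobs already handed to OMX (ongoing queue), abort whatever is currently
+ * encoding, destroy the OMX session, release the session slot, and finally
+ * kick the job manager so it can pick up work for other sessions. */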
+
+/** mm_jpeg_destroy_session_unlocked:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @p_session: session object
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session without taking the job lock
+ *       (the caller must already hold my_obj->job_lock)
+ *
+ **/
+int32_t mm_jpeg_destroy_session_unlocked(mm_jpeg_obj *my_obj,
+  mm_jpeg_job_session_t *p_session)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  OMX_BOOL ret = OMX_FALSE;
+  uint32_t session_id = 0;
+
+  /* validate the session before dereferencing it */
+  if (NULL == p_session) {
+    CDBG_ERROR("%s:%d] invalid session", __func__, __LINE__);
+    return rc;
+  }
+  session_id = p_session->sessionId;
+
+  /* abort job if in todo queue */
+  CDBG("%s:%d] abort todo jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->job_mgr.job_queue, session_id);
+  }
+
+  /* abort job if in ongoing queue */
+  CDBG("%s:%d] abort ongoing jobs", __func__, __LINE__);
+  node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  while (NULL != node) {
+    free(node);
+    node = mm_jpeg_queue_remove_job_by_session_id(&my_obj->ongoing_job_q, session_id);
+  }
+
+  /* abort the current session */
+  mm_jpeg_session_abort(p_session);
+  mm_jpeg_remove_session_idx(my_obj, session_id);
+  rc = 0;
+
+  return rc;
+}
+
+/** mm_jpeg_destroy_session_by_id:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Destroy the encoding session identified by its session id
+ *
+ **/
+int32_t mm_jpeg_destroy_session_by_id(mm_jpeg_obj *my_obj, uint32_t session_id)
+{
+  mm_jpeg_job_session_t *p_session = mm_jpeg_get_session(my_obj, session_id);
+
+  return mm_jpeg_destroy_session(my_obj, p_session);
+}
+
+/** mm_jpeg_close:
+ *
+ *  Arguments:
+ *    @my_obj: jpeg object
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 for success else failure
+ *
+ *  Description:
+ *       Close the jpeg client
+ *
+ **/
+int32_t mm_jpeg_close(mm_jpeg_obj *my_obj, uint32_t client_hdl)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  mm_jpeg_job_q_node_t *node = NULL;
+  OMX_BOOL ret = OMX_FALSE;
+  int i = 0;
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return rc;
+  }
+
+  CDBG("%s:%d] E", __func__, __LINE__);
+
+  /* abort all jobs from the client */
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  for (i = 0; i < MM_JPEG_MAX_SESSION; i++) {
+    if (OMX_TRUE == my_obj->clnt_mgr[clnt_idx].session[i].active)
+      mm_jpeg_destroy_session_unlocked(my_obj,
+        &my_obj->clnt_mgr[clnt_idx].session[i]);
+  }
+
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  pthread_mutex_unlock(&my_obj->job_lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  /* invalidate client session */
+  pthread_mutex_destroy(&my_obj->clnt_mgr[clnt_idx].lock);
+  memset(&my_obj->clnt_mgr[clnt_idx], 0, sizeof(mm_jpeg_client_t));
+
+  rc = 0;
+  CDBG("%s:%d] X", __func__, __LINE__);
+  return rc;
+}
+
+OMX_ERRORTYPE mm_jpeg_ebd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  CDBG("%s:%d] count %d ", __func__, __LINE__, p_session->ebd_count);
+  pthread_mutex_lock(&p_session->lock);
+  p_session->ebd_count++;
+  pthread_mutex_unlock(&p_session->lock);
+  return ret;
+}
+
+OMX_ERRORTYPE mm_jpeg_fbd(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_BUFFERHEADERTYPE *pBuffer)
+{
+  OMX_ERRORTYPE ret = OMX_ErrorNone;
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+  uint32_t i = 0;
+  int rc = 0;
+  mm_jpeg_output_t output_buf;
+
+  CDBG("%s:%d] count %d ", __func__, __LINE__, p_session->fbd_count);
+
+  if (OMX_TRUE == p_session->abort_flag) {
+    pthread_cond_signal(&p_session->cond);
+    return ret;
+  }
+
+  pthread_mutex_lock(&p_session->lock);
+  p_session->fbd_count++;
+  if (NULL != p_session->params.jpeg_cb) {
+    p_session->job_status = JPEG_JOB_STATUS_DONE;
+    output_buf.buf_filled_len = (uint32_t)pBuffer->nFilledLen;
+    output_buf.buf_vaddr = pBuffer->pBuffer;
+    output_buf.fd = 0;
+    CDBG("%s:%d] send jpeg callback %d", __func__, __LINE__,
+      p_session->job_status);
+    p_session->params.jpeg_cb(p_session->job_status,
+      p_session->client_hdl,
+      p_session->jobId,
+      &output_buf,
+      p_session->params.userdata);
+
+    /* remove from ready queue */
+    mm_jpeg_job_done(p_session);
+  }
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d] ", __func__, __LINE__);
+
+  return ret;
+}
+
+OMX_ERRORTYPE mm_jpeg_event_handler(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_EVENTTYPE eEvent,
+  OMX_U32 nData1,
+  OMX_U32 nData2,
+  OMX_PTR pEventData)
+{
+  mm_jpeg_job_session_t *p_session = (mm_jpeg_job_session_t *) pAppData;
+
+  CDBG("%s:%d] %d %d %d", __func__, __LINE__, eEvent, (int)nData1,
+    (int)nData2);
+
+  pthread_mutex_lock(&p_session->lock);
+
+  if (OMX_TRUE == p_session->abort_flag) {
+    pthread_cond_signal(&p_session->cond);
+    pthread_mutex_unlock(&p_session->lock);
+    return OMX_ErrorNone;
+  }
+
+  if (eEvent == OMX_EventError) {
+    p_session->error_flag = OMX_ErrorHardware;
+    if (p_session->encoding == OMX_TRUE) {
+      CDBG("%s:%d] Error during encoding", __func__, __LINE__);
+
+      /* send jpeg callback */
+      if (NULL != p_session->params.jpeg_cb) {
+        p_session->job_status = JPEG_JOB_STATUS_ERROR;
+        CDBG("%s:%d] send jpeg error callback %d", __func__, __LINE__,
+          p_session->job_status);
+        p_session->params.jpeg_cb(p_session->job_status,
+          p_session->client_hdl,
+          p_session->jobId,
+          NULL,
+          p_session->params.userdata);
+      }
+
+      /* remove from ready queue */
+      mm_jpeg_job_done(p_session);
+    }
+    pthread_cond_signal(&p_session->cond);
+  } else if (eEvent == OMX_EventCmdComplete) {
+    if (p_session->state_change_pending == OMX_TRUE) {
+      p_session->state_change_pending = OMX_FALSE;
+      pthread_cond_signal(&p_session->cond);
+    }
+  }
+
+  pthread_mutex_unlock(&p_session->lock);
+  CDBG("%s:%d]", __func__, __LINE__);
+  return OMX_ErrorNone;
+}
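+
+/* Note on the OMX callbacks above: mm_jpeg_ebd, mm_jpeg_fbd and
+ * mm_jpeg_event_handler run in the OMX component's callback context.  An
+ * OMX_EventError during encoding is reported to the client through
+ * params.jpeg_cb with JPEG_JOB_STATUS_ERROR and the job is retired via
+ * mm_jpeg_job_done(); OMX_EventCmdComplete only wakes a thread waiting on a
+ * pending state transition. */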
+
+/* remove the first job from the queue with matching client handle */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(
+  mm_jpeg_queue_t* queue, uint32_t client_hdl)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->enc_info.client_handle == client_hdl)) {
+      CDBG_HIGH("%s:%d] found matching client handle", __func__, __LINE__);
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      CDBG_HIGH("%s: queue size = %d", __func__, queue->size);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove the first job from the queue with matching session id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_session_id(
+  mm_jpeg_queue_t* queue, uint32_t session_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->enc_info.encode_job.session_id == session_id)) {
+      CDBG_HIGH("%s:%d] found matching session id", __func__, __LINE__);
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      CDBG_HIGH("%s: queue size = %d", __func__, queue->size);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove job from the queue with matching job id */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(
+  mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->enc_info.job_id == job_id)) {
+      CDBG_HIGH("%s:%d] found matching job id", __func__, __LINE__);
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove job from the queue with matching job id;
+ * unlocked variant -- the caller must already hold queue->lock */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_unlk(
+  mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->enc_info.job_id == job_id)) {
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      free(node);
+      break;
+    }
+    pos = pos->next;
+  }
+
+  return job_node;
+}
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
new file mode 100644
index 0000000..85224fe
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -0,0 +1,560 @@
+/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+#include <errno.h>
+#include <math.h>
+
+
+#define LOWER(a)               ((a) & 0xFFFF)
+#define UPPER(a)               (((a)>>16) & 0xFFFF)
+#define CHANGE_ENDIAN_16(a)  ((0x00FF & ((a)>>8)) | (0xFF00 & ((a)<<8)))
+#define ROUND(a) (((a) >= 0) ? (long)((a) + 0.5) : (long)((a) - 0.5))
+
+
+/** addExifEntry:
+ *
+ *  Arguments:
+ *   @exif_info : Exif info struct
+ *   @p_session: job session
+ *   @tagid   : exif tag ID
+ *   @type    : data type
+ *   @count   : number of data elements of the given type
+ *   @data    : input data ptr
+ *
+ *  Return     : int32_t type of status
+ *               0  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       Function to add an entry to exif data
+ *
+ **/
+int32_t addExifEntry(QOMX_EXIF_INFO *p_exif_info, exif_tag_id_t tagid,
+  exif_tag_type_t type, uint32_t count, void *data)
+{
+    int32_t rc = 0;
+    int32_t numOfEntries = p_exif_info->numOfEntries;
+    QEXIF_INFO_DATA *p_info_data = p_exif_info->exif_data;
+    if(numOfEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return -1;
+    }
+
+    p_info_data[numOfEntries].tag_id = tagid;
+    p_info_data[numOfEntries].tag_entry.type = type;
+    p_info_data[numOfEntries].tag_entry.count = count;
+    p_info_data[numOfEntries].tag_entry.copy = 1;
+    switch (type) {
+    case EXIF_BYTE: {
+      if (count > 1) {
+        uint8_t *values = (uint8_t *)malloc(count);
+        if (values == NULL) {
+          ALOGE("%s: No memory for byte array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count);
+          p_info_data[numOfEntries].tag_entry.data._bytes = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._byte = *(uint8_t *)data;
+      }
+    }
+    break;
+    case EXIF_ASCII: {
+      char *str = NULL;
+      str = (char *)malloc(count + 1);
+      if (str == NULL) {
+        ALOGE("%s: No memory for ascii string", __func__);
+        rc = -1;
+      } else {
+        memset(str, 0, count + 1);
+        memcpy(str, data, count);
+        p_info_data[numOfEntries].tag_entry.data._ascii = str;
+      }
+    }
+    break;
+    case EXIF_SHORT: {
+      if (count > 1) {
+        uint16_t *values = (uint16_t *)malloc(count * sizeof(uint16_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for short array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(uint16_t));
+          p_info_data[numOfEntries].tag_entry.data._shorts = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._short = *(uint16_t *)data;
+      }
+    }
+    break;
+    case EXIF_LONG: {
+      if (count > 1) {
+        uint32_t *values = (uint32_t *)malloc(count * sizeof(uint32_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for long array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(uint32_t));
+          p_info_data[numOfEntries].tag_entry.data._longs = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._long = *(uint32_t *)data;
+      }
+    }
+    break;
+    case EXIF_RATIONAL: {
+      if (count > 1) {
+        rat_t *values = (rat_t *)malloc(count * sizeof(rat_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for rational array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(rat_t));
+          p_info_data[numOfEntries].tag_entry.data._rats = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._rat = *(rat_t *)data;
+      }
+    }
+    break;
+    case EXIF_UNDEFINED: {
+      uint8_t *values = (uint8_t *)malloc(count);
+      if (values == NULL) {
+        ALOGE("%s: No memory for undefined array", __func__);
+        rc = -1;
+      } else {
+        memcpy(values, data, count);
+        p_info_data[numOfEntries].tag_entry.data._undefined = values;
+      }
+    }
+    break;
+    case EXIF_SLONG: {
+      if (count > 1) {
+        int32_t *values = (int32_t *)malloc(count * sizeof(int32_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for signed long array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(int32_t));
+          p_info_data[numOfEntries].tag_entry.data._slongs = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._slong = *(int32_t *)data;
+      }
+    }
+    break;
+    case EXIF_SRATIONAL: {
+      if (count > 1) {
+        srat_t *values = (srat_t *)malloc(count * sizeof(srat_t));
+        if (values == NULL) {
+          ALOGE("%s: No memory for signed rational array", __func__);
+          rc = -1;
+        } else {
+          memcpy(values, data, count * sizeof(srat_t));
+          p_info_data[numOfEntries].tag_entry.data._srats = values;
+        }
+      } else {
+        p_info_data[numOfEntries].tag_entry.data._srat = *(srat_t *)data;
+      }
+    }
+    break;
+    }
+
+    // Increase number of entries
+    p_exif_info->numOfEntries++;
+    return rc;
+}
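+
+/* Illustrative usage (mirrors what process_sensor_data() does below):
+ *
+ *   rat_t fnum;
+ *   fnum.num = 280;    // f/2.8, scaled by 100
+ *   fnum.denom = 100;
+ *   addExifEntry(exif_info, EXIFTAGID_F_NUMBER, EXIF_RATIONAL, 1, &fnum);
+ *
+ * For count > 1 the entry deep-copies the caller's data into freshly
+ * malloc'd storage, which releaseExifEntry() later frees; single values are
+ * stored by value. */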
+
+/** releaseExifEntry
+ *
+ *  Arguments:
+ *   @p_exif_data : Exif info struct
+ *
+ *  Return     : int32_t type of status
+ *               0  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       Function to release an entry from exif data
+ *
+ **/
+int32_t releaseExifEntry(QEXIF_INFO_DATA *p_exif_data)
+{
+  switch (p_exif_data->tag_entry.type) {
+  case EXIF_BYTE: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._bytes != NULL) {
+      free(p_exif_data->tag_entry.data._bytes);
+      p_exif_data->tag_entry.data._bytes = NULL;
+    }
+  }
+  break;
+  case EXIF_ASCII: {
+    if (p_exif_data->tag_entry.data._ascii != NULL) {
+      free(p_exif_data->tag_entry.data._ascii);
+      p_exif_data->tag_entry.data._ascii = NULL;
+    }
+  }
+  break;
+  case EXIF_SHORT: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._shorts != NULL) {
+      free(p_exif_data->tag_entry.data._shorts);
+      p_exif_data->tag_entry.data._shorts = NULL;
+    }
+  }
+  break;
+  case EXIF_LONG: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._longs != NULL) {
+      free(p_exif_data->tag_entry.data._longs);
+      p_exif_data->tag_entry.data._longs = NULL;
+    }
+  }
+  break;
+  case EXIF_RATIONAL: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._rats != NULL) {
+      free(p_exif_data->tag_entry.data._rats);
+      p_exif_data->tag_entry.data._rats = NULL;
+    }
+  }
+  break;
+  case EXIF_UNDEFINED: {
+    if (p_exif_data->tag_entry.data._undefined != NULL) {
+      free(p_exif_data->tag_entry.data._undefined);
+      p_exif_data->tag_entry.data._undefined = NULL;
+    }
+  }
+  break;
+  case EXIF_SLONG: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._slongs != NULL) {
+      free(p_exif_data->tag_entry.data._slongs);
+      p_exif_data->tag_entry.data._slongs = NULL;
+    }
+  }
+  break;
+  case EXIF_SRATIONAL: {
+    if (p_exif_data->tag_entry.count > 1 &&
+      p_exif_data->tag_entry.data._srats != NULL) {
+      free(p_exif_data->tag_entry.data._srats);
+      p_exif_data->tag_entry.data._srats = NULL;
+    }
+  }
+  break;
+  } /*end of switch*/
+  return 0;
+}
+/** process_sensor_data:
+ *
+ *  Arguments:
+ *   @p_sensor_params : ptr to sensor data
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       process sensor data
+ *
+ *  Notes: this needs to be filled for the metadata
+ **/
+int process_sensor_data(cam_sensor_params_t *p_sensor_params,
+  QOMX_EXIF_INFO *exif_info)
+{
+  int rc = 0;
+  rat_t val_rat;
+
+  if (NULL == p_sensor_params) {
+    ALOGE("%s %d: Sensor params are null", __func__, __LINE__);
+    return 0;
+  }
+
+  ALOGD("%s:%d] From metadata aperture = %f ", __func__, __LINE__,
+    p_sensor_params->aperture_value );
+
+  if (p_sensor_params->aperture_value >= 1.0) {
+    double apex_value;
+    apex_value = (double)2.0 * log(p_sensor_params->aperture_value) / log(2.0);
+    val_rat.num = (uint32_t)(apex_value * 100);
+    val_rat.denom = 100;
+    rc = addExifEntry(exif_info, EXIFTAGID_APERTURE, EXIF_RATIONAL, 1, &val_rat);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+    }
+
+    val_rat.num = (uint32_t)(p_sensor_params->aperture_value * 100);
+    val_rat.denom = 100;
+    rc = addExifEntry(exif_info, EXIFTAGID_F_NUMBER, EXIF_RATIONAL, 1, &val_rat);
+    if (rc) {
+      ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+    }
+  }
+
+  /*Flash*/
+  short val_short;
+  if (p_sensor_params->flash_state == CAM_FLASH_STATE_FIRED) {
+    val_short = 1;
+  } else {
+    val_short = 0;
+  }
+  //val_short =  (p_sensor_params->flash_mode << 3) | val_short;
+  ALOGI("%s: Flash value %d flash mode %d flash state %d", __func__, val_short,
+    p_sensor_params->flash_mode, p_sensor_params->flash_state);
+  rc = addExifEntry(exif_info, EXIFTAGID_FLASH, EXIF_SHORT, 1, &val_short);
+  if (rc) {
+    ALOGE("%s %d]: Error adding flash exif entry", __func__, __LINE__);
+  }
+  return rc;
+}
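+
+/* The aperture entry above stores the APEX aperture value Av = 2*log2(N),
+ * where N is the f-number from aperture_value; e.g. f/2.0 gives Av = 2.0,
+ * written as the rational 200/100.  The raw f-number itself is written to
+ * EXIFTAGID_F_NUMBER. */
+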
+/** process_3a_data:
+ *
+ *  Arguments:
+ *   @p_3a_params : ptr to 3a data
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       process 3a data
+ *
+ *  Notes: this needs to be filled for the metadata
+ **/
+int process_3a_data(cam_3a_params_t *p_3a_params, QOMX_EXIF_INFO *exif_info)
+{
+  int rc = 0;
+  srat_t val_srat;
+  rat_t val_rat;
+  double shutter_speed_value;
+
+  if (NULL == p_3a_params) {
+    ALOGE("%s %d: 3A params are null", __func__, __LINE__);
+    return 0;
+  }
+
+  ALOGD("%s:%d] exp_time %f, iso_value %d, wb_mode %d", __func__, __LINE__,
+    p_3a_params->exp_time, p_3a_params->iso_value, p_3a_params->wb_mode);
+
+  /*Exposure time*/
+  if (p_3a_params->exp_time == 0) {
+      val_rat.num = 0;
+      val_rat.denom = 0;
+  } else {
+      val_rat.num = 1;
+      val_rat.denom = ROUND(1.0/p_3a_params->exp_time);
+  }
+  ALOGD("%s: numer %d denom %d", __func__, val_rat.num, val_rat.denom );
+
+  rc = addExifEntry(exif_info, EXIFTAGID_EXPOSURE_TIME, EXIF_RATIONAL,
+    (sizeof(val_rat)/(8)), &val_rat);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding Exif Entry Exposure time",
+      __func__, __LINE__);
+  }
+
+  /* Shutter Speed*/
+  if (p_3a_params->exp_time > 0) {
+    shutter_speed_value = log10(1/p_3a_params->exp_time)/log10(2);
+    val_srat.num = shutter_speed_value * 1000;
+    val_srat.denom = 1000;
+  } else {
+    val_srat.num = 0;
+    val_srat.denom = 0;
+  }
+  rc = addExifEntry(exif_info, EXIFTAGID_SHUTTER_SPEED, EXIF_SRATIONAL,
+    (sizeof(val_srat)/(8)), &val_srat);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+  }
+
+  /*ISO*/
+  short val_short;
+  val_short = p_3a_params->iso_value;
+  rc = addExifEntry(exif_info, EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT,
+    sizeof(val_short)/2, &val_short);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+  }
+
+  /*WB mode*/
+  if (p_3a_params->wb_mode == CAM_WB_MODE_AUTO)
+    val_short = 0;
+  else
+    val_short = 1;
+  rc = addExifEntry(exif_info, EXIFTAGID_WHITE_BALANCE, EXIF_SHORT,
+    sizeof(val_short)/2, &val_short);
+  if (rc) {
+    ALOGE("%s:%d]: Error adding Exif Entry", __func__, __LINE__);
+  }
+
+  return rc;
+}
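+
+/* Shutter speed above is the APEX time value Tv = log2(1/t), computed as
+ * log10(1/t)/log10(2) and stored as a signed rational scaled by 1000; e.g.
+ * a 1/125 s exposure gives Tv = log2(125), roughly 6.97, i.e. about
+ * 6965/1000. */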
+
+/** process_meta_data_v1:
+ *
+ *  Arguments:
+ *   @p_meta : ptr to metadata
+ *   @exif_info: Exif info struct
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       Extract exif data (3A and sensor params) from the v1 metadata
+ *
+ **/
+int process_meta_data_v1(cam_metadata_info_t *p_meta, QOMX_EXIF_INFO *exif_info,
+  mm_jpeg_exif_params_t *p_cam_exif_params)
+{
+  int rc = 0;
+
+  if (!p_meta) {
+    ALOGE("%s %d:Meta data is NULL", __func__, __LINE__);
+    return 0;
+  }
+  cam_3a_params_t *p_3a_params = p_meta->is_3a_params_valid ?
+    &p_meta->cam_3a_params : NULL;
+
+  if (NULL != p_3a_params) {
+    rc = process_3a_data(p_3a_params, exif_info);
+    if (rc) {
+      ALOGE("%s %d: Failed to extract 3a params", __func__, __LINE__);
+    }
+  }
+  cam_sensor_params_t *p_sensor_params = p_meta->is_sensor_params_valid ?
+    &p_meta->sensor_params : NULL;
+
+  if (NULL != p_sensor_params) {
+    rc = process_sensor_data(p_sensor_params, exif_info);
+    if (rc) {
+      ALOGE("%s %d: Failed to extract sensor params", __func__, __LINE__);
+    }
+  }
+  return rc;
+}
+
+/** process_meta_data_v3:
+ *
+ *  Arguments:
+ *   @p_meta : ptr to metadata
+ *   @exif_info: Exif info struct
+ *
+ *  Return     : int32_t type of status
+ *               NO_ERROR  -- success
+ *               non-zero failure code
+ *
+ *  Description:
+ *       Extract exif data from the metadata
+ **/
+int process_meta_data_v3(metadata_buffer_t *p_meta, QOMX_EXIF_INFO *exif_info,
+  mm_jpeg_exif_params_t *p_cam_exif_params)
+{
+  int rc = 0;
+  cam_sensor_params_t p_sensor_params;
+  cam_3a_params_t p_3a_params;
+
+  if (!p_meta) {
+    ALOGE("%s %d:Meta data is NULL", __func__, __LINE__);
+    return 0;
+  }
+
+  /* Process 3a data */
+  int32_t *iso =
+    (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, p_meta);
+
+  int64_t *sensor_exposure_time =
+    (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, p_meta);
+
+  cam_wb_mode_type *wb_mode =
+    (cam_wb_mode_type *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, p_meta);
+
+  memset(&p_3a_params,  0,  sizeof(cam_3a_params_t));
+  if (NULL != iso) {
+    p_3a_params.iso_value= *iso;
+  } else {
+    ALOGE("%s: Cannot extract Iso value", __func__);
+  }
+
+  if (NULL != sensor_exposure_time) {
+    p_3a_params.exp_time = (double)(*sensor_exposure_time / 1000000000.0);
+  } else {
+    ALOGE("%s: Cannot extract Exp time value", __func__);
+  }
+
+  if (NULL != wb_mode) {
+    p_3a_params.wb_mode = *wb_mode;
+  } else {
+    ALOGE("%s: Cannot extract white balance mode", __func__);
+  }
+
+  rc = process_3a_data(&p_3a_params, exif_info);
+  if (rc) {
+    ALOGE("%s %d: Failed to add 3a exif params", __func__, __LINE__);
+  }
+
+  /* Process sensor data */
+  float *aperture = (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, p_meta);
+  uint8_t *flash_mode = (uint8_t *) POINTER_OF(CAM_INTF_META_FLASH_MODE, p_meta);
+  uint8_t *flash_state =
+    (uint8_t *) POINTER_OF(CAM_INTF_META_FLASH_STATE, p_meta);
+
+  memset(&p_sensor_params, 0, sizeof(cam_sensor_params_t));
+
+  if (NULL != aperture) {
+     p_sensor_params.aperture_value = *aperture;
+  } else {
+    ALOGE("%s: Cannot extract Aperture value", __func__);
+  }
+
+  if (NULL != flash_mode) {
+     p_sensor_params.flash_mode = *flash_mode;
+  } else {
+    ALOGE("%s: Cannot extract flash mode value", __func__);
+  }
+
+  if (NULL != flash_state) {
+    p_sensor_params.flash_state = *flash_state;
+  } else {
+    ALOGE("%s: Cannot extract flash state value", __func__);
+  }
+
+  rc = process_sensor_data(&p_sensor_params, exif_info);
+  if (rc) {
+      ALOGE("%s %d: Failed to extract sensor params", __func__, __LINE__);
+  }
+
+  return rc;
+}
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
new file mode 100644
index 0000000..4ffaeba
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
@@ -0,0 +1,344 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+static mm_jpeg_obj* g_jpeg_obj = NULL;
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+/** mm_jpeg_util_generate_handler:
+ *
+ *  Arguments:
+ *    @index: client index
+ *
+ *  Return:
+ *       handle value
+ *
+ *  Description:
+ *       utility function to generate handler
+ *
+ **/
+uint32_t mm_jpeg_util_generate_handler(uint8_t index)
+{
+  uint32_t handler = 0;
+  pthread_mutex_lock(&g_handler_lock);
+  g_handler_history_count++;
+  if (0 == g_handler_history_count) {
+    g_handler_history_count++;
+  }
+  handler = g_handler_history_count;
+  handler = (handler<<8) | index;
+  pthread_mutex_unlock(&g_handler_lock);
+  return handler;
+}
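+
+/* Handle layout: the upper bits carry a monotonically increasing, never-zero
+ * counter (so a stale handle from an earlier open is unlikely to collide)
+ * and the low 8 bits carry the client index, which is what
+ * mm_jpeg_util_get_index_by_handler() extracts below. */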
+
+/** mm_jpeg_util_get_index_by_handler:
+ *
+ *  Arguments:
+ *    @handler: handle value
+ *
+ *  Return:
+ *       client index
+ *
+ *  Description:
+ *       get client index
+ *
+ **/
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler)
+{
+  return (handler & 0x000000ff);
+}
+
+/** mm_jpeg_intf_start_job:
+ *
+ *  Arguments:
+ *    @job: jpeg job object
+ *    @job_id: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       start the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_start_job(mm_jpeg_job_t* job, uint32_t* job_id)
+{
+  int32_t rc = -1;
+
+  if (NULL == job ||
+    NULL == job_id) {
+    CDBG_ERROR("%s:%d] invalid parameters for job or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+  rc = mm_jpeg_start_job(g_jpeg_obj, job, job_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_create_session:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *    @p_params: encode parameters
+ *    @p_session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Create new jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_create_session(uint32_t client_hdl,
+    mm_jpeg_encode_params_t *p_params,
+    uint32_t *p_session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl || NULL == p_params || NULL == p_session_id) {
+    CDBG_ERROR("%s:%d] invalid client_hdl or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_create_session(g_jpeg_obj, client_hdl, p_params, p_session_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_destroy_session:
+ *
+ *  Arguments:
+ *    @session_id: session id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Destroy jpeg session
+ *
+ **/
+static int32_t mm_jpeg_intf_destroy_session(uint32_t session_id)
+{
+  int32_t rc = -1;
+
+  if (0 == session_id) {
+    CDBG_ERROR("%s:%d] invalid client_hdl or jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_destroy_session_by_id(g_jpeg_obj, session_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_abort_job:
+ *
+ *  Arguments:
+ *    @jobId: job id
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Abort the jpeg job
+ *
+ **/
+static int32_t mm_jpeg_intf_abort_job(uint32_t job_id)
+{
+  int32_t rc = -1;
+
+  if (0 == job_id) {
+    CDBG_ERROR("%s:%d] invalid jobId", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_abort_job(g_jpeg_obj, job_id);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** mm_jpeg_intf_close:
+ *
+ *  Arguments:
+ *    @client_hdl: client handle
+ *
+ *  Return:
+ *       0 success, failure otherwise
+ *
+ *  Description:
+ *       Close the jpeg client
+ *
+ **/
+static int32_t mm_jpeg_intf_close(uint32_t client_hdl)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl) {
+    CDBG_ERROR("%s:%d] invalid client_hdl", __func__, __LINE__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s:%d] mm_jpeg is not opened yet", __func__, __LINE__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_close(g_jpeg_obj, client_hdl);
+  g_jpeg_obj->num_clients--;
+  if(0 == rc) {
+    if (0 == g_jpeg_obj->num_clients) {
+      /* No client, close jpeg internally */
+      rc = mm_jpeg_deinit(g_jpeg_obj);
+      free(g_jpeg_obj);
+      g_jpeg_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/** jpeg_open:
+ *
+ *  Arguments:
+ *    @ops: ops table pointer
+ *
+ *  Return:
+ *       0 failure, success otherwise
+ *
+ *  Description:
+ *       Open a jpeg client
+ *
+ **/
+uint32_t jpeg_open(mm_jpeg_ops_t *ops)
+{
+  int32_t rc = 0;
+  uint32_t clnt_hdl = 0;
+  mm_jpeg_obj* jpeg_obj = NULL;
+
+  pthread_mutex_lock(&g_intf_lock);
+  /* first time open */
+  if(NULL == g_jpeg_obj) {
+    jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+    if(NULL == jpeg_obj) {
+      CDBG_ERROR("%s:%d] no mem", __func__, __LINE__);
+      pthread_mutex_unlock(&g_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* initialize jpeg obj */
+    memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+    rc = mm_jpeg_init(jpeg_obj);
+    if(0 != rc) {
+      CDBG_ERROR("%s:%d] mm_jpeg_init err = %d", __func__, __LINE__, rc);
+      free(jpeg_obj);
+      pthread_mutex_unlock(&g_intf_lock);
+      return clnt_hdl;
+    }
+
+    /* remember in global variable */
+    g_jpeg_obj = jpeg_obj;
+  }
+
+  /* open new client */
+  clnt_hdl = mm_jpeg_new_client(g_jpeg_obj);
+  if (clnt_hdl > 0) {
+    /* valid client */
+    if (NULL != ops) {
+      /* fill in ops tbl if ptr not NULL */
+      ops->start_job = mm_jpeg_intf_start_job;
+      ops->abort_job = mm_jpeg_intf_abort_job;
+      ops->create_session = mm_jpeg_intf_create_session;
+      ops->destroy_session = mm_jpeg_intf_destroy_session;
+      ops->close = mm_jpeg_intf_close;
+    }
+  } else {
+    /* failed new client */
+    CDBG_ERROR("%s:%d] mm_jpeg_new_client failed", __func__, __LINE__);
+
+    if (0 == g_jpeg_obj->num_clients) {
+      /* no client, close jpeg */
+      mm_jpeg_deinit(g_jpeg_obj);
+      free(g_jpeg_obj);
+      g_jpeg_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_intf_lock);
+  return clnt_hdl;
+}
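+
+/* Illustrative client flow through the ops table filled in by jpeg_open()
+ * (variable names here are placeholders; see mm_jpeg_test.c for a complete
+ * example):
+ *
+ *   mm_jpeg_ops_t ops;
+ *   uint32_t hdl = jpeg_open(&ops);              // 0 means failure
+ *   ops.create_session(hdl, &params, &session_id);
+ *   job.encode_job.session_id = session_id;
+ *   ops.start_job(&job, &job_id);                // result delivered in jpeg_cb
+ *   ops.destroy_session(session_id);
+ *   ops.close(hdl);
+ */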
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
new file mode 100644
index 0000000..b6d87aa
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
@@ -0,0 +1,155 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <pthread.h>
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue)
+{
+    pthread_mutex_init(&queue->lock, NULL);
+    cam_list_init(&queue->head.list);
+    queue->size = 0;
+    return 0;
+}
+
+int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, void* data)
+{
+    mm_jpeg_q_node_t* node =
+        (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+    if (NULL == node) {
+        CDBG_ERROR("%s: No memory for mm_jpeg_q_node_t", __func__);
+        return -1;
+    }
+
+    memset(node, 0, sizeof(mm_jpeg_q_node_t));
+    node->data = data;
+
+    pthread_mutex_lock(&queue->lock);
+    cam_list_add_tail_node(&node->list, &queue->head.list);
+    queue->size++;
+    pthread_mutex_unlock(&queue->lock);
+
+    return 0;
+
+}
+
+void* mm_jpeg_queue_deq(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_node_t* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+        cam_list_del_node(&node->list);
+        queue->size--;
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
+
+uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue)
+{
+    uint32_t size = 0;
+
+    pthread_mutex_lock(&queue->lock);
+    size = queue->size;
+    pthread_mutex_unlock(&queue->lock);
+
+    return size;
+
+}
+
+int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_queue_flush(queue);
+    pthread_mutex_destroy(&queue->lock);
+    return 0;
+}
+
+int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_node_t* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+        /* advance before the node is unlinked and freed */
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        queue->size--;
+
+        /* for now we only assume there is no ptr inside data
+         * so we free data directly */
+        if (NULL != node->data) {
+            free(node->data);
+        }
+        free(node);
+    }
+    queue->size = 0;
+    pthread_mutex_unlock(&queue->lock);
+    return 0;
+}
+
+void* mm_jpeg_queue_peek(mm_jpeg_queue_t* queue)
+{
+    mm_jpeg_q_node_t* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&queue->lock);
+    head = &queue->head.list;
+    pos = head->next;
+    if (pos != head) {
+        node = member_of(pos, mm_jpeg_q_node_t, list);
+    }
+    pthread_mutex_unlock(&queue->lock);
+
+    if (NULL != node) {
+        data = node->data;
+    }
+    return data;
+}
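+
+/* Ownership note: mm_jpeg_queue_deq() detaches a node and returns its payload
+ * to the caller, who is then responsible for freeing it;
+ * mm_jpeg_queue_peek() returns the payload without detaching it; and
+ * mm_jpeg_queue_flush() frees the remaining payloads itself, so it is only
+ * safe for payloads that own no nested allocations. */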
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk b/camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk
new file mode 100644
index 0000000..03dde01
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/Android.mk
@@ -0,0 +1,37 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+MM_JPEG_TEST_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_PATH := $(MM_JPEG_TEST_PATH)
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_CFLAGS := -DCAMERA_ION_HEAP_ID=ION_IOMMU_HEAP_ID
+LOCAL_CFLAGS += -Werror -Wno-unused-parameter
+LOCAL_CFLAGS += -D_ANDROID_
+LOCAL_CFLAGS += -include mm_jpeg_dbg.h
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+OMX_HEADER_DIR := frameworks/native/include/media/openmax
+OMX_CORE_DIR := device/lge/hammerhead/camera/mm-image-codec
+
+LOCAL_C_INCLUDES := $(MM_JPEG_TEST_PATH)
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../inc
+LOCAL_C_INCLUDES += $(MM_JPEG_TEST_PATH)/../../common
+LOCAL_C_INCLUDES += $(OMX_HEADER_DIR)
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qexif
+LOCAL_C_INCLUDES += $(OMX_CORE_DIR)/qomx_core
+
+
+LOCAL_SRC_FILES := mm_jpeg_ionbuf.c
+LOCAL_SRC_FILES += mm_jpeg_test.c
+
+LOCAL_MODULE           := mm-jpeg-interface-test
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := liblog libcutils libdl libmmjpeg_interface
+
+include $(BUILD_EXECUTABLE)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_ionbuf.c b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_ionbuf.c
new file mode 100644
index 0000000..1f52bfd
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_ionbuf.c
@@ -0,0 +1,132 @@
+/* Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include <sys/ioctl.h>
+#include <linux/msm_ion.h>
+#include "mm_jpeg_ionbuf.h"
+
+/** buffer_allocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     buffer address
+ *
+ *  Description:
+ *      allocates ION buffer
+ *
+ **/
+void *buffer_allocate(buffer_test_t *p_buffer)
+{
+  void *l_buffer = NULL;
+
+  int lrc = 0;
+  struct ion_handle_data lhandle_data;
+
+  p_buffer->alloc.len = p_buffer->size;
+  p_buffer->alloc.align = 4096;
+  p_buffer->alloc.flags = ION_FLAG_CACHED;
+  p_buffer->alloc.heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
+
+  p_buffer->ion_fd = open("/dev/ion", O_RDONLY);
+  if (p_buffer->ion_fd < 0) {
+    CDBG_ERROR("%s :Ion open failed", __func__);
+    goto ION_ALLOC_FAILED;
+  }
+
+  /* Make it page size aligned */
+  p_buffer->alloc.len = (p_buffer->alloc.len + 4095) & (~4095);
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_ALLOC, &p_buffer->alloc);
+  if (lrc < 0) {
+    CDBG_ERROR("%s :ION allocation failed len %d", __func__,
+      p_buffer->alloc.len);
+    goto ION_ALLOC_FAILED;
+  }
+
+  p_buffer->ion_info_fd.handle = p_buffer->alloc.handle;
+  lrc = ioctl(p_buffer->ion_fd, ION_IOC_SHARE,
+    &p_buffer->ion_info_fd);
+  if (lrc < 0) {
+    CDBG_ERROR("%s :ION map failed %s", __func__, strerror(errno));
+    goto ION_MAP_FAILED;
+  }
+
+  p_buffer->p_pmem_fd = p_buffer->ion_info_fd.fd;
+
+  l_buffer = mmap(NULL, p_buffer->alloc.len, PROT_READ  | PROT_WRITE,
+    MAP_SHARED,p_buffer->p_pmem_fd, 0);
+
+  if (l_buffer == MAP_FAILED) {
+    CDBG_ERROR("%s :ION_MMAP_FAILED: %s (%d)", __func__,
+      strerror(errno), errno);
+    goto ION_MAP_FAILED;
+  }
+
+  return l_buffer;
+
+ION_MAP_FAILED:
+  lhandle_data.handle = p_buffer->ion_info_fd.handle;
+  ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+  return NULL;
+ION_ALLOC_FAILED:
+  return NULL;
+
+}
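+
+/* buffer_allocate() follows the legacy ION flow used on this target:
+ * ION_IOC_ALLOC to get a handle from the IOMMU heap, ION_IOC_SHARE to turn
+ * the handle into a shareable fd, then mmap() of that fd for a CPU mapping.
+ * That fd is what the test passes in the jpeg src/dest buffer descriptors. */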
+
+/** buffer_deallocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     0 for success else failure
+ *
+ *  Description:
+ *      deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_test_t *p_buffer)
+{
+  int lrc = 0;
+  int lsize = (p_buffer->size + 4095) & (~4095);
+
+  struct ion_handle_data lhandle_data;
+  lrc = munmap(p_buffer->addr, lsize);
+
+  close(p_buffer->ion_info_fd.fd);
+
+  lhandle_data.handle = p_buffer->ion_info_fd.handle;
+  ioctl(p_buffer->ion_fd, ION_IOC_FREE, &lhandle_data);
+
+  close(p_buffer->ion_fd);
+  return lrc;
+}
+
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_ionbuf.h b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_ionbuf.h
new file mode 100644
index 0000000..d25156b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_ionbuf.h
@@ -0,0 +1,80 @@
+/* Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __MM_JPEG_IONBUF_H__
+#define __MM_JPEG_IONBUF_H__
+
+
+#include <stdio.h>
+#include <linux/msm_ion.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <errno.h>
+#include <fcntl.h>
+#include "mm_jpeg_dbg.h"
+
+typedef struct  {
+  struct ion_fd_data ion_info_fd;
+  struct ion_allocation_data alloc;
+  int p_pmem_fd;
+  long size;
+  int ion_fd;
+  uint8_t *addr;
+} buffer_test_t;
+
+/** buffer_allocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     buffer address
+ *
+ *  Description:
+ *      allocates ION buffer
+ *
+ **/
+void* buffer_allocate(buffer_test_t *p_buffer);
+
+/** buffer_deallocate:
+ *
+ *  Arguments:
+ *     @p_buffer: ION buffer
+ *
+ *  Return:
+ *     0 for success else failure
+ *
+ *  Description:
+ *      deallocates ION buffer
+ *
+ **/
+int buffer_deallocate(buffer_test_t *p_buffer);
+
+#endif
+
diff --git a/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c
new file mode 100644
index 0000000..8ffbb6b
--- /dev/null
+++ b/camera/QCamera2/stack/mm-jpeg-interface/test/mm_jpeg_test.c
@@ -0,0 +1,327 @@
+/* Copyright (c) 2013, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg_ionbuf.h"
+#include <sys/time.h>
+
+/** DUMP_TO_FILE:
+ *  @filename: file name
+ *  @p_addr: address of the buffer
+ *  @len: buffer length
+ *
+ *  dump the image to the file
+ **/
+#define DUMP_TO_FILE(filename, p_addr, len) ({ \
+  int rc = 0; \
+  FILE *fp = fopen(filename, "w+"); \
+  if (fp) { \
+    rc = fwrite(p_addr, 1, len, fp); \
+    fclose(fp); \
+  } else { \
+    CDBG_ERROR("%s:%d] cannot dump image", __func__, __LINE__); \
+  } \
+})
+
+static int g_count = 1, g_i;
+
+typedef struct {
+  char *filename;
+  int width;
+  int height;
+  char *out_filename;
+} jpeg_test_input_t;
+
+static jpeg_test_input_t jpeg_input[] = {
+  {"/data/test.yuv", 1280, 720, "/data/test.jpg"}
+};
+
+typedef struct {
+  char *filename;
+  int width;
+  int height;
+  char *out_filename;
+  pthread_mutex_t lock;
+  pthread_cond_t cond;
+  buffer_test_t input;
+  buffer_test_t output;
+  int use_ion;
+  uint32_t handle;
+  mm_jpeg_ops_t ops;
+  uint32_t job_id[5];
+  mm_jpeg_encode_params_t params;
+  mm_jpeg_job_t job;
+  uint32_t session_id;
+} mm_jpeg_intf_test_t;
+
+static void mm_jpeg_encode_callback(jpeg_job_status_t status,
+  uint32_t client_hdl,
+  uint32_t jobId,
+  mm_jpeg_output_t *p_output,
+  void *userData)
+{
+  mm_jpeg_intf_test_t *p_obj = (mm_jpeg_intf_test_t *)userData;
+
+  if (status == JPEG_JOB_STATUS_ERROR) {
+    CDBG_ERROR("%s:%d] Encode error", __func__, __LINE__);
+  } else {
+    CDBG_ERROR("%s:%d] Encode success file%s addr %p len %d",
+      __func__, __LINE__, p_obj->out_filename,
+      p_output->buf_vaddr, p_output->buf_filled_len);
+    DUMP_TO_FILE(p_obj->out_filename, p_output->buf_vaddr, p_output->buf_filled_len);
+  }
+  g_i++;
+  if (g_i >= g_count) {
+    CDBG_ERROR("%s:%d] Signal the thread", __func__, __LINE__);
+    pthread_cond_signal(&p_obj->cond);
+  }
+}
+
+int mm_jpeg_test_alloc(buffer_test_t *p_buffer, int use_pmem)
+{
+  int ret = 0;
+  /*Allocate buffers*/
+  if (use_pmem) {
+    p_buffer->addr = (uint8_t *)buffer_allocate(p_buffer);
+    if (NULL == p_buffer->addr) {
+      CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+      return -1;
+    }
+  } else {
+    /* Allocate heap memory */
+    p_buffer->addr = (uint8_t *)malloc(p_buffer->size);
+    if (NULL == p_buffer->addr) {
+      CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+      return -1;
+    }
+  }
+  return ret;
+}
+
+void mm_jpeg_test_free(buffer_test_t *p_buffer)
+{
+  if (p_buffer->addr == NULL)
+    return;
+
+  if (p_buffer->p_pmem_fd > 0)
+    buffer_deallocate(p_buffer);
+  else
+    free(p_buffer->addr);
+
+  memset(p_buffer, 0x0, sizeof(buffer_test_t));
+}
+
+int mm_jpeg_test_read(mm_jpeg_intf_test_t *p_obj)
+{
+  int rc = 0;
+  FILE *fp = NULL;
+  int file_size = 0;
+  fp = fopen(p_obj->filename, "rb");
+  if (!fp) {
+    CDBG_ERROR("%s:%d] error", __func__, __LINE__);
+    return -1;
+  }
+  fseek(fp, 0, SEEK_END);
+  file_size = ftell(fp);
+  fseek(fp, 0, SEEK_SET);
+  CDBG_ERROR("%s:%d] input file size is %d buf_size %ld",
+    __func__, __LINE__, file_size, p_obj->input.size);
+
+  if (p_obj->input.size > file_size) {
+    CDBG_ERROR("%s:%d] error", __func__, __LINE__);
+    fclose(fp);
+    return -1;
+  }
+  rc = fread(p_obj->input.addr, 1, p_obj->input.size, fp);
+  fclose(fp);
+  if (rc != (int)p_obj->input.size) {
+    CDBG_ERROR("%s:%d] short read", __func__, __LINE__);
+    return -1;
+  }
+  return 0;
+}
+
+static int encode_init(jpeg_test_input_t *p_input, mm_jpeg_intf_test_t *p_obj)
+{
+  int rc = -1;
+  int size = p_input->width * p_input->height;
+  mm_jpeg_encode_params_t *p_params = &p_obj->params;
+  mm_jpeg_encode_job_t *p_job_params = &p_obj->job.encode_job;
+
+  p_obj->filename = p_input->filename;
+  p_obj->width = p_input->width;
+  p_obj->height = p_input->height;
+  p_obj->out_filename = p_input->out_filename;
+  p_obj->use_ion = 1;
+
+  pthread_mutex_init(&p_obj->lock, NULL);
+  pthread_cond_init(&p_obj->cond, NULL);
+
+  /* allocate buffers */
+  p_obj->input.size = size * 3/2;
+  rc = mm_jpeg_test_alloc(&p_obj->input, p_obj->use_ion);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+    return -1;
+  }
+
+  p_obj->output.size = size * 3/2;
+  rc = mm_jpeg_test_alloc(&p_obj->output, 0);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+    return -1;
+  }
+
+  rc = mm_jpeg_test_read(p_obj);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+    return -1;
+  }
+
+  /* set encode parameters */
+  p_params->jpeg_cb = mm_jpeg_encode_callback;
+  p_params->userdata = p_obj;
+  p_params->color_format = MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2;
+
+  /* dest buffer config */
+  p_params->dest_buf[0].buf_size = p_obj->output.size;
+  p_params->dest_buf[0].buf_vaddr = p_obj->output.addr;
+  p_params->dest_buf[0].fd = p_obj->output.p_pmem_fd;
+  p_params->dest_buf[0].index = 0;
+  p_params->num_dst_bufs = 1;
+
+  /* src buffer config*/
+  p_params->src_main_buf[0].buf_size = p_obj->input.size;
+  p_params->src_main_buf[0].buf_vaddr = p_obj->input.addr;
+  p_params->src_main_buf[0].fd = p_obj->input.p_pmem_fd;
+  p_params->src_main_buf[0].index = 0;
+  p_params->src_main_buf[0].format = MM_JPEG_FMT_YUV;
+  p_params->src_main_buf[0].offset.mp[0].len = size;
+  p_params->src_main_buf[0].offset.mp[1].len = size >> 1;
+  p_params->num_src_bufs = 1;
+
+  p_params->encode_thumbnail = 1;
+  p_params->exif_info.numOfEntries = 0;
+  p_params->quality = 80;
+
+  p_job_params->dst_index = 0;
+  p_job_params->src_index = 0;
+  p_job_params->rotation = 0;
+
+  /* main dimension */
+  p_job_params->main_dim.src_dim.width = p_obj->width;
+  p_job_params->main_dim.src_dim.height = p_obj->height;
+  p_job_params->main_dim.dst_dim.width = p_obj->width;
+  p_job_params->main_dim.dst_dim.height = p_obj->height;
+  p_job_params->main_dim.crop.top = 0;
+  p_job_params->main_dim.crop.left = 0;
+  p_job_params->main_dim.crop.width = p_obj->width;
+  p_job_params->main_dim.crop.height = p_obj->height;
+
+  /* thumb dimension */
+  p_job_params->thumb_dim.src_dim.width = p_obj->width;
+  p_job_params->thumb_dim.src_dim.height = p_obj->height;
+  p_job_params->thumb_dim.dst_dim.width = 512;
+  p_job_params->thumb_dim.dst_dim.height = 384;
+  p_job_params->thumb_dim.crop.top = 0;
+  p_job_params->thumb_dim.crop.left = 0;
+  p_job_params->thumb_dim.crop.width = p_obj->width;
+  p_job_params->thumb_dim.crop.height = p_obj->height;
+  return 0;
+}
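+
+/* Buffer sizing used in encode_init(): for semi-planar YUV 4:2:0
+ * (MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2) the luma plane is width * height bytes
+ * and the interleaved chroma plane is half of that, so the 1280x720 default
+ * input needs 1280 * 720 * 3 / 2 = 1382400 bytes; mp[0].len and mp[1].len are
+ * set to those two plane lengths above. */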
+
+static int encode_test(jpeg_test_input_t *p_input)
+{
+  int rc = 0;
+  mm_jpeg_intf_test_t jpeg_obj;
+  int i = 0;
+
+  memset(&jpeg_obj, 0x0, sizeof(jpeg_obj));
+  rc = encode_init(p_input, &jpeg_obj);
+  if (rc) {
+    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+    return -1;
+  }
+
+  jpeg_obj.handle = jpeg_open(&jpeg_obj.ops);
+  if (jpeg_obj.handle == 0) {
+    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+    goto end;
+  }
+
+  rc = jpeg_obj.ops.create_session(jpeg_obj.handle, &jpeg_obj.params,
+    &jpeg_obj.job.encode_job.session_id);
+  if (jpeg_obj.job.encode_job.session_id == 0) {
+    CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+    goto end;
+  }
+
+  for (i = 0; i < g_count; i++) {
+    jpeg_obj.job.job_type = JPEG_JOB_TYPE_ENCODE;
+    rc = jpeg_obj.ops.start_job(&jpeg_obj.job, &jpeg_obj.job_id[i]);
+    if (rc) {
+      CDBG_ERROR("%s:%d] Error",__func__, __LINE__);
+      goto end;
+    }
+  }
+
+  /*
+  usleep(5);
+  jpeg_obj.ops.abort_job(jpeg_obj.job_id[0]);
+  */
+  pthread_mutex_lock(&jpeg_obj.lock);
+  pthread_cond_wait(&jpeg_obj.cond, &jpeg_obj.lock);
+  pthread_mutex_unlock(&jpeg_obj.lock);
+
+  jpeg_obj.ops.destroy_session(jpeg_obj.job.encode_job.session_id);
+
+  jpeg_obj.ops.close(jpeg_obj.handle);
+
+
+end:
+  mm_jpeg_test_free(&jpeg_obj.input);
+  mm_jpeg_test_free(&jpeg_obj.output);
+  return 0;
+}
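+
+/* Call sequence exercised by encode_test():
+ *   jpeg_open() -> ops.create_session() -> ops.start_job() x g_count
+ *   -> wait for mm_jpeg_encode_callback() -> ops.destroy_session() -> ops.close()
+ * create_session() also returns a status in rc, but only the returned
+ * session_id is checked here; a production caller would normally check both. */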
+
+/** main:
+ *
+ *  Arguments:
+ *    @argc
+ *    @argv
+ *
+ *  Return:
+ *       0 on success, negative value on failure
+ *
+ *  Description:
+ *       main function
+ *
+ **/
+int main(int argc, char* argv[])
+{
+  return encode_test(&jpeg_input[0]);
+}
+
+
diff --git a/camera/QCamera2/util/QCameraCmdThread.cpp b/camera/QCamera2/util/QCameraCmdThread.cpp
new file mode 100644
index 0000000..c5be4ad
--- /dev/null
+++ b/camera/QCamera2/util/QCameraCmdThread.cpp
@@ -0,0 +1,210 @@
+/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <sys/prctl.h>
+#include "QCameraCmdThread.h"
+
+using namespace android;
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraCmdThread
+ *
+ * DESCRIPTION: default constructor of QCameraCmdThread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCmdThread::QCameraCmdThread() :
+    cmd_queue()
+{
+    cmd_pid = 0;
+    cam_sem_init(&sync_sem, 0);
+    cam_sem_init(&cmd_sem, 0);
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraCmdThread
+ *
+ * DESCRIPTION: destructor of QCameraCmdThread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraCmdThread::~QCameraCmdThread()
+{
+    cam_sem_destroy(&sync_sem);
+    cam_sem_destroy(&cmd_sem);
+}
+
+/*===========================================================================
+ * FUNCTION   : launch
+ *
+ * DESCRIPTION: launch Cmd Thread
+ *
+ * PARAMETERS :
+ *   @start_routine : thread routine function ptr
+ *   @user_data     : user data ptr
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::launch(void *(*start_routine)(void *),
+                                 void* user_data)
+{
+    /* launch the thread */
+    pthread_create(&cmd_pid,
+                   NULL,
+                   start_routine,
+                   user_data);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : setName
+ *
+ * DESCRIPTION: name the cmd thread
+ *
+ * PARAMETERS :
+ *   @name : desired name for the thread
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::setName(const char* name)
+{
+    /* name the thread */
+    prctl(PR_SET_NAME, (unsigned long)name, 0, 0, 0);
+    return NO_ERROR;
+}
+
+/*===========================================================================
+ * FUNCTION   : sendCmd
+ *
+ * DESCRIPTION: send a command to the Cmd Thread
+ *
+ * PARAMETERS :
+ *   @cmd     : command to be executed.
+ *   @sync_cmd: flag to indicate if this is a synchronized cmd. If true, this call
+ *              will wait until signal is set after the command is completed.
+ *   @priority: flag to indicate if this is a cmd with priority. If true, the cmd
+ *              will be enqueued to the head with priority.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::sendCmd(camera_cmd_type_t cmd, uint8_t sync_cmd, uint8_t priority)
+{
+    camera_cmd_t *node = (camera_cmd_t *)malloc(sizeof(camera_cmd_t));
+    if (NULL == node) {
+        ALOGE("%s: No memory for camera_cmd_t", __func__);
+        return NO_MEMORY;
+    }
+    memset(node, 0, sizeof(camera_cmd_t));
+    node->cmd = cmd;
+
+    if (priority) {
+        cmd_queue.enqueueWithPriority((void *)node);
+    } else {
+        cmd_queue.enqueue((void *)node);
+    }
+    cam_sem_post(&cmd_sem);
+
+    /* if is a sync call, need to wait until it returns */
+    if (sync_cmd) {
+        cam_sem_wait(&sync_sem);
+    }
+    return NO_ERROR;
+}
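+
+/*
+ * Usage sketch (illustrative only; myProcRoutine and myCtx are hypothetical
+ * caller-defined names):
+ *
+ *   QCameraCmdThread thread;
+ *   thread.launch(myProcRoutine, &myCtx);
+ *   thread.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, 1, 0); // waits on sync_sem
+ *   thread.sendCmd(CAMERA_CMD_TYPE_DO_NEXT_JOB, 0, 0);     // fire-and-forget
+ *
+ * A command sent with sync_cmd set only returns after the processing routine
+ * posts sync_sem for it, so that routine must call cam_sem_post(&sync_sem)
+ * once per synchronous command it handles.
+ */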
+
+/*===========================================================================
+ * FUNCTION   : getCmd
+ *
+ * DESCRIPTION: dequeue a command from the cmd queue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : cmd dequeued
+ *==========================================================================*/
+camera_cmd_type_t QCameraCmdThread::getCmd()
+{
+    camera_cmd_type_t cmd = CAMERA_CMD_TYPE_NONE;
+    camera_cmd_t *node = (camera_cmd_t *)cmd_queue.dequeue();
+    if (NULL == node) {
+        ALOGD("%s: No notify avail", __func__);
+        return CAMERA_CMD_TYPE_NONE;
+    } else {
+        cmd = node->cmd;
+        free(node);
+    }
+    return cmd;
+}
+
+/*===========================================================================
+ * FUNCTION   : exit
+ *
+ * DESCRIPTION: exit the CMD thread
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCameraCmdThread::exit()
+{
+    int32_t rc = NO_ERROR;
+
+    if (cmd_pid == 0) {
+        return rc;
+    }
+
+    rc = sendCmd(CAMERA_CMD_TYPE_EXIT, 0, 1);
+    if (NO_ERROR != rc) {
+        ALOGE("%s: Error during exit, rc = %d", __func__, rc);
+        return rc;
+    }
+
+    /* wait until cmd thread exits */
+    if (pthread_join(cmd_pid, NULL) != 0) {
+        ALOGD("%s: pthread dead already\n", __func__);
+    }
+    cmd_pid = 0;
+    return rc;
+}
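+
+/* Note: exit() enqueues CAMERA_CMD_TYPE_EXIT with the priority flag set, so it
+ * lands at the head of cmd_queue and is the next command the processing
+ * routine dequeues even if other commands are still pending; the subsequent
+ * pthread_join() then waits for that routine to return. */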
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/util/QCameraCmdThread.h b/camera/QCamera2/util/QCameraCmdThread.h
new file mode 100644
index 0000000..a9511dc
--- /dev/null
+++ b/camera/QCamera2/util/QCameraCmdThread.h
@@ -0,0 +1,74 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_CMD_THREAD_H__
+#define __QCAMERA_CMD_THREAD_H__
+
+#include <pthread.h>
+#include <cam_semaphore.h>
+
+#include "cam_types.h"
+#include "QCameraQueue.h"
+
+namespace qcamera {
+
+typedef enum
+{
+    CAMERA_CMD_TYPE_NONE,
+    CAMERA_CMD_TYPE_START_DATA_PROC,
+    CAMERA_CMD_TYPE_STOP_DATA_PROC,
+    CAMERA_CMD_TYPE_DO_NEXT_JOB,
+    CAMERA_CMD_TYPE_EXIT,
+    CAMERA_CMD_TYPE_MAX
+} camera_cmd_type_t;
+
+typedef struct {
+    camera_cmd_type_t cmd;
+} camera_cmd_t;
+
+class QCameraCmdThread {
+public:
+    QCameraCmdThread();
+    ~QCameraCmdThread();
+
+    int32_t launch(void *(*start_routine)(void *), void* user_data);
+    int32_t setName(const char* name);
+    int32_t exit();
+    int32_t sendCmd(camera_cmd_type_t cmd, uint8_t sync_cmd, uint8_t priority);
+    camera_cmd_type_t getCmd();
+
+    QCameraQueue cmd_queue;      /* cmd queue */
+    pthread_t cmd_pid;           /* cmd thread ID */
+    cam_semaphore_t cmd_sem;               /* semaphore for cmd thread */
+    cam_semaphore_t sync_sem;              /* semaphore for synchronized call signal */
+};
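+
+/*
+ * Typical processing-routine shape (illustrative sketch only; the real
+ * routines are defined by the users of this class, and cmdThreadRoutine is a
+ * hypothetical name):
+ *
+ *   void *cmdThreadRoutine(void *data)
+ *   {
+ *       QCameraCmdThread *cmdThread = (QCameraCmdThread *)data;
+ *       int running = 1;
+ *       do {
+ *           cam_sem_wait(&cmdThread->cmd_sem);   // one wake-up per sendCmd()
+ *           switch (cmdThread->getCmd()) {
+ *           case CAMERA_CMD_TYPE_DO_NEXT_JOB:
+ *               // pop and handle one unit of work here
+ *               break;
+ *           case CAMERA_CMD_TYPE_EXIT:
+ *               running = 0;
+ *               break;
+ *           default:
+ *               break;
+ *           }
+ *       } while (running);
+ *       return NULL;
+ *   }
+ */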
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_CMD_THREAD_H__ */
diff --git a/camera/QCamera2/util/QCameraQueue.cpp b/camera/QCamera2/util/QCameraQueue.cpp
new file mode 100644
index 0000000..c6bb94e
--- /dev/null
+++ b/camera/QCamera2/util/QCameraQueue.cpp
@@ -0,0 +1,295 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions are
+* met:
+*     * Redistributions of source code must retain the above copyright
+*       notice, this list of conditions and the following disclaimer.
+*     * Redistributions in binary form must reproduce the above
+*       copyright notice, this list of conditions and the following
+*       disclaimer in the documentation and/or other materials provided
+*       with the distribution.
+*     * Neither the name of The Linux Foundation nor the names of its
+*       contributors may be used to endorse or promote products derived
+*       from this software without specific prior written permission.
+*
+* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+* ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*
+*/
+
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include "QCameraQueue.h"
+
+namespace qcamera {
+
+/*===========================================================================
+ * FUNCTION   : QCameraQueue
+ *
+ * DESCRIPTION: default constructor of QCameraQueue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::QCameraQueue()
+{
+    pthread_mutex_init(&m_lock, NULL);
+    cam_list_init(&m_head.list);
+    m_size = 0;
+    m_dataFn = NULL;
+    m_userData = NULL;
+}
+
+/*===========================================================================
+ * FUNCTION   : QCameraQueue
+ *
+ * DESCRIPTION: constructor of QCameraQueue
+ *
+ * PARAMETERS :
+ *   @data_rel_fn : function ptr to release node data internal resource
+ *   @user_data   : user data ptr
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::QCameraQueue(release_data_fn data_rel_fn, void *user_data)
+{
+    pthread_mutex_init(&m_lock, NULL);
+    cam_list_init(&m_head.list);
+    m_size = 0;
+    m_dataFn = data_rel_fn;
+    m_userData = user_data;
+}
+
+/*===========================================================================
+ * FUNCTION   : ~QCameraQueue
+ *
+ * DESCRIPTION: destructor of QCameraQueue
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+QCameraQueue::~QCameraQueue()
+{
+    flush();
+    pthread_mutex_destroy(&m_lock);
+}
+
+/*===========================================================================
+ * FUNCTION   : isEmpty
+ *
+ * DESCRIPTION: check whether the queue is empty
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : true -- queue is empty; false -- not empty
+ *==========================================================================*/
+bool QCameraQueue::isEmpty()
+{
+    bool flag = true;
+    pthread_mutex_lock(&m_lock);
+    if (m_size > 0) {
+        flag = false;
+    }
+    pthread_mutex_unlock(&m_lock);
+    return flag;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueue
+ *
+ * DESCRIPTION: enqueue data into the queue
+ *
+ * PARAMETERS :
+ *   @data    : data to be enqueued
+ *
+ * RETURN     : true -- success; false -- failed
+ *==========================================================================*/
+bool QCameraQueue::enqueue(void *data)
+{
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        ALOGE("%s: No memory for camera_q_node", __func__);
+        return false;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&m_lock);
+    cam_list_add_tail_node(&node->list, &m_head.list);
+    m_size++;
+    pthread_mutex_unlock(&m_lock);
+    return true;
+}
+
+/*===========================================================================
+ * FUNCTION   : enqueueWithPriority
+ *
+ * DESCRIPTION: enqueue data into queue with priority, will insert into the
+ *              head of the queue
+ *
+ * PARAMETERS :
+ *   @data    : data to be enqueued
+ *
+ * RETURN     : true -- success; false -- failed
+ *==========================================================================*/
+bool QCameraQueue::enqueueWithPriority(void *data)
+{
+    camera_q_node *node =
+        (camera_q_node *)malloc(sizeof(camera_q_node));
+    if (NULL == node) {
+        ALOGE("%s: No memory for camera_q_node", __func__);
+        return false;
+    }
+
+    memset(node, 0, sizeof(camera_q_node));
+    node->data = data;
+
+    pthread_mutex_lock(&m_lock);
+    struct cam_list *p_next = m_head.list.next;
+
+    m_head.list.next = &node->list;
+    p_next->prev = &node->list;
+    node->list.next = p_next;
+    node->list.prev = &m_head.list;
+
+    m_size++;
+    pthread_mutex_unlock(&m_lock);
+    return true;
+}
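+
+/* Note: m_head is a dummy node of a circular doubly-linked list, so the
+ * pointer updates above splice the new node in between m_head and the current
+ * first element, i.e. it becomes the next node returned by dequeue(true). */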
+
+/*===========================================================================
+ * FUNCTION   : dequeue
+ *
+ * DESCRIPTION: dequeue data from the queue
+ *
+ * PARAMETERS :
+ *   @bFromHead : if true, dequeue from the head
+ *                if false, dequeue from the tail
+ *
+ * RETURN     : data ptr. NULL if not any data in the queue.
+ *==========================================================================*/
+void* QCameraQueue::dequeue(bool bFromHead)
+{
+    camera_q_node* node = NULL;
+    void* data = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    if (bFromHead) {
+        pos = head->next;
+    } else {
+        pos = head->prev;
+    }
+    if (pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        cam_list_del_node(&node->list);
+        m_size--;
+    }
+    pthread_mutex_unlock(&m_lock);
+
+    if (NULL != node) {
+        data = node->data;
+        free(node);
+    }
+
+    return data;
+}
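+
+/* Note: with the default bFromHead == true this is FIFO with respect to
+ * enqueue(), which appends at the tail, while enqueueWithPriority() items are
+ * seen first; passing false pops from the tail instead. */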
+
+/*===========================================================================
+ * FUNCTION   : flush
+ *
+ * DESCRIPTION: flush all nodes from the queue, queue will be empty after this
+ *              operation.
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flush(){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        pos = pos->next;
+        cam_list_del_node(&node->list);
+        m_size--;
+
+        if (NULL != node->data) {
+            if (m_dataFn) {
+                m_dataFn(node->data, m_userData);
+            }
+            free(node->data);
+        }
+        free(node);
+
+    }
+    m_size = 0;
+    pthread_mutex_unlock(&m_lock);
+}
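+
+/* Note: for each node flush() first invokes m_dataFn (when one was registered
+ * via the constructor) and then frees node->data itself, so a release_data_fn
+ * should release resources referenced by the payload but must not free the
+ * payload pointer, or it would be freed twice here. */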
+
+/*===========================================================================
+ * FUNCTION   : flushNodes
+ *
+ * DESCRIPTION: flush only specific nodes, depending on
+ *              the given matching function.
+ *
+ * PARAMETERS :
+ *   @match   : matching function
+ *
+ * RETURN     : None
+ *==========================================================================*/
+void QCameraQueue::flushNodes(match_fn match){
+    camera_q_node* node = NULL;
+    struct cam_list *head = NULL;
+    struct cam_list *pos = NULL;
+
+    if ( NULL == match ) {
+        return;
+    }
+
+    pthread_mutex_lock(&m_lock);
+    head = &m_head.list;
+    pos = head->next;
+
+    while(pos != head) {
+        node = member_of(pos, camera_q_node, list);
+        pos = pos->next;
+        if ( match(node->data, m_userData) ) {
+            cam_list_del_node(&node->list);
+            m_size--;
+
+            if (NULL != node->data) {
+                if (m_dataFn) {
+                    m_dataFn(node->data, m_userData);
+                }
+                free(node->data);
+            }
+            free(node);
+        }
+    }
+    pthread_mutex_unlock(&m_lock);
+}
+
+}; // namespace qcamera
diff --git a/camera/QCamera2/util/QCameraQueue.h b/camera/QCamera2/util/QCameraQueue.h
new file mode 100644
index 0000000..6e2c759
--- /dev/null
+++ b/camera/QCamera2/util/QCameraQueue.h
@@ -0,0 +1,67 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_QUEUE_H__
+#define __QCAMERA_QUEUE_H__
+
+#include <pthread.h>
+#include "cam_list.h"
+
+namespace qcamera {
+
+typedef void (*release_data_fn)(void* data, void *user_data);
+typedef bool (*match_fn)(void *data, void *user_data);
+
+class QCameraQueue {
+public:
+    QCameraQueue();
+    QCameraQueue(release_data_fn data_rel_fn, void *user_data);
+    virtual ~QCameraQueue();
+    bool enqueue(void *data);
+    bool enqueueWithPriority(void *data);
+    void flush();
+    void flushNodes(match_fn match);
+    void* dequeue(bool bFromHead = true);
+    bool isEmpty();
+private:
+    typedef struct {
+        struct cam_list list;
+        void* data;
+    } camera_q_node;
+
+    camera_q_node m_head; // dummy head
+    int m_size;
+    pthread_mutex_t m_lock;
+    release_data_fn m_dataFn;
+    void * m_userData;
+};
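+
+/*
+ * Usage sketch (illustrative only; releasePayload and my_payload_t are
+ * hypothetical caller-defined names):
+ *
+ *   static void releasePayload(void *data, void *user_data)
+ *   {
+ *       // release resources referenced by the payload; the queue itself
+ *       // frees the payload pointer when flushing
+ *       (void)data;
+ *       (void)user_data;
+ *   }
+ *
+ *   QCameraQueue q(releasePayload, NULL);
+ *   q.enqueue(malloc(sizeof(my_payload_t)));
+ *   void *item = q.dequeue();   // FIFO by default; caller frees dequeued items
+ *   free(item);
+ *   q.flush();                  // drains and releases anything left behind
+ */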
+
+}; // namespace qcamera
+
+#endif /* __QCAMERA_QUEUE_H__ */
diff --git a/camera/QCameraParameters.h b/camera/QCameraParameters.h
new file mode 100644
index 0000000..dd29dda
--- /dev/null
+++ b/camera/QCameraParameters.h
@@ -0,0 +1,257 @@
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+//#include <utils/KeyedVector.h>
+//#include <utils/String8.h>
+#include <camera/CameraParameters.h>
+
+namespace android {
+
+struct FPSRange{
+    int minFPS;
+    int maxFPS;
+    FPSRange(){
+        minFPS=0;
+        maxFPS=0;
+    };
+    FPSRange(int min,int max){
+        minFPS=min;
+        maxFPS=max;
+    };
+};
+class QCameraParameters: public CameraParameters
+{
+public:
+#if 1
+    QCameraParameters() : CameraParameters() {};
+    QCameraParameters(const String8 &params): CameraParameters(params) {};
+    #else
+    QCameraParameters() : CameraParameters() {};
+    QCameraParameters(const String8 &params) { unflatten(params); }
+#endif
+    ~QCameraParameters();
+
+    // Supported PREVIEW/RECORDING SIZES IN HIGH FRAME RATE recording, sizes in pixels.
+    // Example value: "800x480,432x320". Read only.
+    static const char KEY_SUPPORTED_HFR_SIZES[];
+    // The mode of preview frame rate.
+    // Example value: "frame-rate-auto, frame-rate-fixed".
+    static const char KEY_PREVIEW_FRAME_RATE_MODE[];
+    static const char KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+    static const char KEY_PREVIEW_FRAME_RATE_AUTO_MODE[];
+    static const char KEY_PREVIEW_FRAME_RATE_FIXED_MODE[];
+
+    static const char KEY_SKIN_TONE_ENHANCEMENT[] ;
+    static const char KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+    //Touch Af/AEC settings.
+    static const char KEY_TOUCH_AF_AEC[];
+    static const char KEY_SUPPORTED_TOUCH_AF_AEC[];
+    //Touch Index for AEC.
+    static const char KEY_TOUCH_INDEX_AEC[];
+    //Touch Index for AF.
+    static const char KEY_TOUCH_INDEX_AF[];
+    // Current auto scene detection mode.
+    // Example value: "off" or SCENE_DETECT_XXX constants. Read/write.
+    static const char KEY_SCENE_DETECT[];
+    // Supported auto scene detection settings.
+    // Example value: "off,backlight,snow/cloudy". Read only.
+    static const char KEY_SUPPORTED_SCENE_DETECT[];
+    // Returns true if video snapshot is supported, i.e. applications can take a
+    // picture while video recording is in progress. Read only.
+    static const char KEY_FULL_VIDEO_SNAP_SUPPORTED[];
+    static const char KEY_POWER_MODE_SUPPORTED[];
+
+    static const char KEY_ISO_MODE[];
+    static const char KEY_SUPPORTED_ISO_MODES[];
+    static const char KEY_LENSSHADE[] ;
+    static const char KEY_SUPPORTED_LENSSHADE_MODES[] ;
+
+    static const char KEY_AUTO_EXPOSURE[];
+    static const char KEY_SUPPORTED_AUTO_EXPOSURE[];
+
+    static const char KEY_GPS_LATITUDE_REF[];
+    static const char KEY_GPS_LONGITUDE_REF[];
+    static const char KEY_GPS_ALTITUDE_REF[];
+    static const char KEY_GPS_STATUS[];
+    static const char KEY_EXIF_DATETIME[];
+    static const char KEY_MEMORY_COLOR_ENHANCEMENT[];
+    static const char KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+
+
+    static const char KEY_POWER_MODE[];
+
+    static const char KEY_ZSL[];
+    static const char KEY_SUPPORTED_ZSL_MODES[];
+
+    static const char KEY_CAMERA_MODE[];
+
+    static const char KEY_VIDEO_HIGH_FRAME_RATE[];
+    static const char KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+    static const char KEY_HIGH_DYNAMIC_RANGE_IMAGING[];
+    static const char KEY_SUPPORTED_HDR_IMAGING_MODES[];
+    static const char KEY_AE_BRACKET_HDR[];
+
+
+    // DENOISE
+    static const char KEY_DENOISE[];
+    static const char KEY_SUPPORTED_DENOISE[];
+
+    //Selectable zone AF.
+    static const char KEY_SELECTABLE_ZONE_AF[];
+    static const char KEY_SUPPORTED_SELECTABLE_ZONE_AF[];
+
+    //Face Detection
+    static const char KEY_FACE_DETECTION[];
+    static const char KEY_SUPPORTED_FACE_DETECTION[];
+
+    //Redeye Reduction
+    static const char KEY_REDEYE_REDUCTION[];
+    static const char KEY_SUPPORTED_REDEYE_REDUCTION[];
+    static const char EFFECT_EMBOSS[];
+    static const char EFFECT_SKETCH[];
+    static const char EFFECT_NEON[];
+
+    // Values for Touch AF/AEC
+    static const char TOUCH_AF_AEC_OFF[] ;
+    static const char TOUCH_AF_AEC_ON[] ;
+    static const char SCENE_MODE_ASD[];
+    static const char SCENE_MODE_BACKLIGHT[];
+    static const char SCENE_MODE_FLOWERS[];
+    static const char SCENE_MODE_AR[];
+    static const char SCENE_MODE_HDR[];
+    static const char SCENE_DETECT_OFF[];
+    static const char SCENE_DETECT_ON[];
+    static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+    static const char PIXEL_FORMAT_RAW[];
+    static const char PIXEL_FORMAT_YV12[]; // YV12
+    static const char PIXEL_FORMAT_NV12[]; // NV12
+    // Normal focus mode. Applications should call
+    // CameraHardwareInterface.autoFocus to start the focus in this mode.
+    static const char FOCUS_MODE_NORMAL[];
+    static const char ISO_AUTO[];
+    static const char ISO_HJR[] ;
+    static const char ISO_100[];
+    static const char ISO_200[] ;
+    static const char ISO_400[];
+    static const char ISO_800[];
+    static const char ISO_1600[];
+    // Values for Lens Shading
+    static const char LENSSHADE_ENABLE[] ;
+    static const char LENSSHADE_DISABLE[] ;
+
+    // Values for auto exposure settings.
+    static const char AUTO_EXPOSURE_FRAME_AVG[];
+    static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+    static const char AUTO_EXPOSURE_SPOT_METERING[];
+
+    static const char KEY_SHARPNESS[];
+    static const char KEY_MAX_SHARPNESS[];
+    static const char KEY_CONTRAST[];
+    static const char KEY_MAX_CONTRAST[];
+    static const char KEY_SATURATION[];
+    static const char KEY_MAX_SATURATION[];
+
+    static const char KEY_HISTOGRAM[] ;
+    static const char KEY_SUPPORTED_HISTOGRAM_MODES[] ;
+    // Values for HISTOGRAM
+    static const char HISTOGRAM_ENABLE[] ;
+    static const char HISTOGRAM_DISABLE[] ;
+
+    // Values for SKIN TONE ENHANCEMENT
+    static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+    static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+    // Values for Denoise
+    static const char DENOISE_OFF[] ;
+    static const char DENOISE_ON[] ;
+
+    // Values for auto exposure settings.
+    static const char SELECTABLE_ZONE_AF_AUTO[];
+    static const char SELECTABLE_ZONE_AF_SPOT_METERING[];
+    static const char SELECTABLE_ZONE_AF_CENTER_WEIGHTED[];
+    static const char SELECTABLE_ZONE_AF_FRAME_AVERAGE[];
+
+    // Values for Face Detection settings.
+    static const char FACE_DETECTION_OFF[];
+    static const char FACE_DETECTION_ON[];
+
+    // Values for MCE settings.
+    static const char MCE_ENABLE[];
+    static const char MCE_DISABLE[];
+
+    // Values for ZSL settings.
+    static const char ZSL_OFF[];
+    static const char ZSL_ON[];
+
+    // Values for HDR Bracketing settings.
+    static const char AE_BRACKET_HDR_OFF[];
+    static const char AE_BRACKET_HDR[];
+    static const char AE_BRACKET[];
+
+    // Values for Power mode settings.
+    static const char LOW_POWER[];
+    static const char NORMAL_POWER[];
+
+    // Values for HFR settings.
+    static const char VIDEO_HFR_OFF[];
+    static const char VIDEO_HFR_2X[];
+    static const char VIDEO_HFR_3X[];
+    static const char VIDEO_HFR_4X[];
+
+    // Values for Redeye Reduction settings.
+    static const char REDEYE_REDUCTION_ENABLE[];
+    static const char REDEYE_REDUCTION_DISABLE[];
+    // Values for HDR settings.
+    static const char HDR_ENABLE[];
+    static const char HDR_DISABLE[];
+
+   // Values for Redeye Reduction settings.
+   // static const char REDEYE_REDUCTION_ENABLE[];
+   // static const char REDEYE_REDUCTION_DISABLE[];
+   // Values for HDR settings.
+   //    static const char HDR_ENABLE[];
+   //    static const char HDR_DISABLE[];
+
+
+   static const char KEY_SINGLE_ISP_OUTPUT_ENABLED[];
+   static const char KEY_SUPPORTED_CAMERA_FEATURES[];
+   static const char KEY_MAX_NUM_REQUESTED_FACES[];
+
+    enum {
+        CAMERA_ORIENTATION_UNKNOWN = 0,
+        CAMERA_ORIENTATION_PORTRAIT = 1,
+        CAMERA_ORIENTATION_LANDSCAPE = 2,
+    };
+    int getOrientation() const;
+    void setOrientation(int orientation);
+    void getSupportedHfrSizes(Vector<Size> &sizes) const;
+    void setPreviewFpsRange(int minFPS, int maxFPS);
+    void setPreviewFrameRateMode(const char *mode);
+    const char *getPreviewFrameRateMode() const;
+    void setTouchIndexAec(int x, int y);
+    void getTouchIndexAec(int *x, int *y) const;
+    void setTouchIndexAf(int x, int y);
+    void getTouchIndexAf(int *x, int *y) const;
+    void getMeteringAreaCenter(int * x, int *y) const;
+
+};
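+
+/*
+ * Usage sketch (illustrative only): this class adds vendor key and value name
+ * constants on top of CameraParameters, so values are still read and written
+ * through the inherited string accessors, e.g.
+ *
+ *   QCameraParameters params(flatStr);   // flatStr: a caller-supplied String8
+ *   const char *zsl = params.get(QCameraParameters::KEY_ZSL);
+ *   params.set(QCameraParameters::KEY_ISO_MODE, QCameraParameters::ISO_AUTO);
+ */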
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCamera_Intf.h b/camera/QCamera_Intf.h
new file mode 100755
index 0000000..23faa55
--- /dev/null
+++ b/camera/QCamera_Intf.h
@@ -0,0 +1,1147 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *     * Redistributions of source code must retain the above copyright
+ *       notice, this list of conditions and the following disclaimer.
+ *     * Redistributions in binary form must reproduce the above
+ *       copyright notice, this list of conditions and the following
+ *       disclaimer in the documentation and/or other materials provided
+ *       with the distribution.
+ *     * Neither the name of The Linux Foundation nor the names of its
+ *       contributors may be used to endorse or promote products derived
+ *       from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+#include <stdint.h>
+#include <pthread.h>
+#include <inttypes.h>
+
+#define PAD_TO_WORD(a)               (((a)+3)&~3)
+#define PAD_TO_2K(a)                 (((a)+2047)&~2047)
+#define PAD_TO_4K(a)                 (((a)+4095)&~4095)
+#define PAD_TO_8K(a)                 (((a)+8191)&~8191)
+
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X)  (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X)  (((X) + 0x0001) & 0xFFFE)
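+
+/* Each macro above rounds its argument up to the given alignment (values that
+ * are already aligned are unchanged), e.g.
+ *   PAD_TO_4K(5000) = (5000 + 4095) & ~4095 = 8192
+ *   CEILING16(23)   = (23 + 15) & 0xFFF0    = 32 */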
+
+#define MAX_ROI 2
+#define MAX_NUM_PARM 5
+#define MAX_NUM_OPS 2
+#define VIDEO_MAX_PLANES 8
+#define MAX_SNAPSHOT_BUFFERS 5
+#define MAX_EXP_BRACKETING_LENGTH 32
+
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t;
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t;
+
+typedef enum {
+  BACK_CAMERA,
+  FRONT_CAMERA,
+}cam_position_t;
+
+typedef enum {
+  CAM_CTRL_FAILED,        /* Failure in doing operation */
+  CAM_CTRL_SUCCESS,       /* Operation Succeeded */
+  CAM_CTRL_INVALID_PARM,  /* Invalid parameter provided */
+  CAM_CTRL_NOT_SUPPORTED, /* Parameter/operation not supported */
+  CAM_CTRL_ACCEPTED,      /* Parameter accepted */
+  CAM_CTRL_MAX,
+} cam_ctrl_status_t;
+
+typedef enum {
+  CAMERA_YUV_420_NV12,
+  CAMERA_YUV_420_NV21,
+  CAMERA_YUV_420_NV21_ADRENO,
+  CAMERA_BAYER_SBGGR10,
+  CAMERA_RDI,
+  CAMERA_YUV_420_YV12,
+  CAMERA_YUV_422_NV16,
+  CAMERA_YUV_422_NV61,
+  CAMERA_YUV_422_YUYV,
+  CAMERA_SAEC,
+  CAMERA_SAWB,
+  CAMERA_SAFC,
+  CAMERA_SHST,
+} cam_format_t;
+
+typedef enum {
+  CAMERA_PAD_NONE,
+  CAMERA_PAD_TO_WORD,   /*2 bytes*/
+  CAMERA_PAD_TO_LONG_WORD, /*4 bytes*/
+  CAMERA_PAD_TO_8, /*8 bytes*/
+  CAMERA_PAD_TO_16, /*16 bytes*/
+
+  CAMERA_PAD_TO_1K, /*1k bytes*/
+  CAMERA_PAD_TO_2K, /*2k bytes*/
+  CAMERA_PAD_TO_4K,
+  CAMERA_PAD_TO_8K
+} cam_pad_format_t;
+
+typedef struct {
+  int ext_mode;   /* preview, main, thumbnail, video, raw, etc */
+  int frame_idx;  /* frame index */
+  int fd;         /* origin fd */
+  uint32_t size;
+  uint8_t is_hist; /* is hist mapping? */
+} mm_camera_frame_map_type;
+
+typedef struct {
+  int ext_mode;   /* preview, main, thumbnail, video, raw, etc */
+  int frame_idx;  /* frame index */
+  uint8_t is_hist; /* is hist unmapping? */
+} mm_camera_frame_unmap_type;
+
+typedef enum {
+  CAM_SOCK_MSG_TYPE_FD_MAPPING,
+  CAM_SOCK_MSG_TYPE_FD_UNMAPPING,
+  CAM_SOCK_MSG_TYPE_WDN_START,
+  CAM_SOCK_MSG_TYPE_HDR_START,
+  CAM_SOCK_MSG_TYPE_HIST_MAPPING,
+  CAM_SOCK_MSG_TYPE_HIST_UNMAPPING,
+  CAM_SOCK_MSG_TYPE_MAX
+}mm_camera_socket_msg_type;
+#define MAX_HDR_EXP_FRAME_NUM 5
+typedef struct {
+  unsigned long cookie;
+  int num_hdr_frames;
+  int hdr_main_idx[MAX_HDR_EXP_FRAME_NUM];
+  int hdr_thm_idx[MAX_HDR_EXP_FRAME_NUM];
+  int exp[MAX_HDR_EXP_FRAME_NUM];
+} mm_camera_hdr_start_type;
+
+#define MM_MAX_WDN_NUM 2
+typedef struct {
+  unsigned long cookie;
+  int num_frames;
+  int ext_mode[MM_MAX_WDN_NUM];
+  int frame_idx[MM_MAX_WDN_NUM];
+} mm_camera_wdn_start_type;
+
+typedef struct {
+  mm_camera_socket_msg_type msg_type;
+  union {
+    mm_camera_frame_map_type frame_fd_map;
+    mm_camera_frame_unmap_type frame_fd_unmap;
+    mm_camera_wdn_start_type wdn_start;
+    mm_camera_hdr_start_type hdr_pkg;
+  } payload;
+} cam_sock_packet_t;
+
+typedef enum {
+  CAM_VIDEO_FRAME,
+  CAM_SNAPSHOT_FRAME,
+  CAM_PREVIEW_FRAME,
+}cam_frame_type_t;
+
+
+typedef enum {
+  CAMERA_MODE_2D = (1<<0),
+  CAMERA_MODE_3D = (1<<1),
+  CAMERA_NONZSL_MODE = (1<<2),
+  CAMERA_ZSL_MODE = (1<<3),
+  CAMERA_MODE_MAX = CAMERA_ZSL_MODE,
+} camera_mode_t;
+
+
+typedef struct {
+  int  modes_supported;
+  int8_t camera_id;
+  cam_position_t position;
+  uint32_t sensor_mount_angle;
+}camera_info_t;
+
+typedef struct {
+  camera_mode_t mode;
+  int8_t camera_id;
+  camera_mode_t cammode;
+}config_params_t;
+
+typedef struct {
+  uint32_t len;
+  uint32_t y_offset;
+  uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+  uint32_t len;
+  uint32_t offset;
+} cam_mp_len_offset_t;
+
+typedef struct {
+  int num_planes;
+  union {
+    cam_sp_len_offset_t sp;
+    cam_mp_len_offset_t mp[8];
+  };
+  uint32_t frame_len;
+} cam_frame_len_offset_t;
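+
+/* Example (informational): a contiguous semi-planar YUV 4:2:0 buffer of width
+ * W and height H would typically be described with num_planes = 2,
+ * mp[0].len = W * H (luma), mp[1].len = W * H / 2 (chroma), per-plane offsets
+ * into the same allocation, and frame_len = W * H * 3 / 2. */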
+
+typedef struct {
+  uint32_t parm[MAX_NUM_PARM];
+  uint32_t ops[MAX_NUM_OPS];
+  uint8_t yuv_output;
+  uint8_t jpeg_capture;
+  uint32_t max_pict_width;
+  uint32_t max_pict_height;
+  uint32_t max_preview_width;
+  uint32_t max_preview_height;
+  uint32_t max_video_width;
+  uint32_t max_video_height;
+  uint32_t effect;
+  camera_mode_t modes;
+  uint8_t preview_format;
+  uint32_t preview_sizes_cnt;
+  uint32_t thumb_sizes_cnt;
+  uint32_t video_sizes_cnt;
+  uint32_t hfr_sizes_cnt;
+  uint8_t vfe_output_enable;
+  uint8_t hfr_frame_skip;
+  uint32_t default_preview_width;
+  uint32_t default_preview_height;
+  uint32_t bestshot_reconfigure;
+  uint32_t pxlcode;
+}cam_prop_t;
+
+typedef struct {
+  uint16_t video_width;         /* Video width seen by VFE could be different than orig. Ex. DIS */
+  uint16_t video_height;        /* Video height seen by VFE */
+  uint16_t picture_width;       /* Picture width seen by VFE */
+  uint16_t picture_height;      /* Picture height seen by VFE */
+  uint16_t display_width;       /* width of display */
+  uint16_t display_height;      /* height of display */
+  uint16_t orig_video_width;    /* original video width received */
+  uint16_t orig_video_height;   /* original video height received */
+  uint16_t orig_picture_dx;     /* original picture width received */
+  uint16_t orig_picture_dy;     /* original picture height received */
+  uint16_t ui_thumbnail_height; /* Just like orig_picture_dx */
+  uint16_t ui_thumbnail_width;  /* Just like orig_picture_dy */
+  uint16_t thumbnail_height;
+  uint16_t thumbnail_width;
+  uint16_t orig_picture_width;
+  uint16_t orig_picture_height;
+  uint16_t orig_thumb_width;
+  uint16_t orig_thumb_height;
+  uint16_t raw_picture_height;
+  uint16_t raw_picture_width;
+  uint16_t rdi0_height;
+  uint16_t rdi0_width;
+  uint16_t rdi1_height;
+  uint16_t rdi1_width;
+  uint32_t hjr_xtra_buff_for_bayer_filtering;
+  cam_format_t    prev_format;
+  cam_format_t    enc_format;
+  cam_format_t    thumb_format;
+  cam_format_t    main_img_format;
+  cam_format_t    rdi0_format;
+  cam_format_t    rdi1_format;
+  cam_format_t    raw_img_format;
+  cam_pad_format_t prev_padding_format;
+  cam_pad_format_t enc_padding_format;
+  cam_pad_format_t thumb_padding_format;
+  cam_pad_format_t main_padding_format;
+  uint16_t display_luma_width;
+  uint16_t display_luma_height;
+  uint16_t display_chroma_width;
+  uint16_t display_chroma_height;
+  uint16_t video_luma_width;
+  uint16_t video_luma_height;
+  uint16_t video_chroma_width;
+  uint16_t video_chroma_height;
+  uint16_t thumbnail_luma_width;
+  uint16_t thumbnail_luma_height;
+  uint16_t thumbnail_chroma_width;
+  uint16_t thumbnail_chroma_height;
+  uint16_t main_img_luma_width;
+  uint16_t main_img_luma_height;
+  uint16_t main_img_chroma_width;
+  uint16_t main_img_chroma_height;
+  int rotation;
+  cam_frame_len_offset_t display_frame_offset;
+  cam_frame_len_offset_t video_frame_offset;
+  cam_frame_len_offset_t picture_frame_offset;
+  cam_frame_len_offset_t thumb_frame_offset;
+  uint32_t channel_interface_mask;
+} cam_ctrl_dimension_t;
+
+typedef struct {
+  uint16_t type;
+  uint16_t width;
+  uint16_t height;
+} cam_stats_buf_dimension_t;
+
+typedef struct {
+  uint8_t cid;
+  uint8_t dt;
+  uint32_t inst_handle;
+} cam_cid_entry_t;
+
+#define CAM_MAX_CID_NUM    8
+typedef struct {
+  /*should we hard code max CIDs? if not we need to have two CMD*/
+  uint8_t num_cids;
+  cam_cid_entry_t cid_entries[CAM_MAX_CID_NUM];
+} cam_cid_info_t;
+
+typedef struct {
+  /* we still use prev, video, main,
+   * thumb to interpret image types */
+  uint32_t image_mode;                 /* input */
+  cam_format_t format;                 /* input */
+  cam_pad_format_t padding_format;     /* input */
+  int rotation;                        /* input */
+  uint16_t width;                      /* input/output */
+  uint16_t height;                     /* input/output */
+  cam_frame_len_offset_t frame_offset; /* output */
+} cam_frame_resolution_t;
+
+typedef struct {
+  uint32_t instance_hdl; /* instance handler of the stream */
+  uint32_t frame_idx;    /* frame index */
+  uint16_t frame_width;
+  uint16_t frame_height;
+  cam_frame_len_offset_t frame_offset;
+} mm_camera_wnr_frame_info_t;
+
+#define MM_CAMEAR_MAX_STRAEM_BUNDLE 4
+typedef struct {
+    uint8_t num_frames;
+    mm_camera_wnr_frame_info_t frames[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+} mm_camera_wnr_info_t;
+
+typedef struct {
+  uint8_t num;
+  uint32_t stream_handles[MM_CAMEAR_MAX_STRAEM_BUNDLE]; /* instance handler */
+} cam_stream_bundle_t;
+
+/* Add enumerations at the bottom but before MM_CAMERA_PARM_MAX */
+typedef enum {
+    MM_CAMERA_PARM_PICT_SIZE,
+    MM_CAMERA_PARM_ZOOM_RATIO,
+    MM_CAMERA_PARM_HISTOGRAM,
+    MM_CAMERA_PARM_DIMENSION,
+    MM_CAMERA_PARM_FPS,
+    MM_CAMERA_PARM_FPS_MODE, /*5*/
+    MM_CAMERA_PARM_EFFECT,
+    MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+    MM_CAMERA_PARM_EXPOSURE,
+    MM_CAMERA_PARM_SHARPNESS,
+    MM_CAMERA_PARM_CONTRAST, /*10*/
+    MM_CAMERA_PARM_SATURATION,
+    MM_CAMERA_PARM_BRIGHTNESS,
+    MM_CAMERA_PARM_WHITE_BALANCE,
+    MM_CAMERA_PARM_LED_MODE,
+    MM_CAMERA_PARM_ANTIBANDING, /*15*/
+    MM_CAMERA_PARM_ROLLOFF,
+    MM_CAMERA_PARM_CONTINUOUS_AF,
+    MM_CAMERA_PARM_FOCUS_RECT,
+    MM_CAMERA_PARM_AEC_ROI,
+    MM_CAMERA_PARM_AF_ROI, /*20*/
+    MM_CAMERA_PARM_HJR,
+    MM_CAMERA_PARM_ISO,
+    MM_CAMERA_PARM_BL_DETECTION,
+    MM_CAMERA_PARM_SNOW_DETECTION,
+    MM_CAMERA_PARM_BESTSHOT_MODE, /*25*/
+    MM_CAMERA_PARM_ZOOM,
+    MM_CAMERA_PARM_VIDEO_DIS,
+    MM_CAMERA_PARM_VIDEO_ROT,
+    MM_CAMERA_PARM_SCE_FACTOR,
+    MM_CAMERA_PARM_FD, /*30*/
+    MM_CAMERA_PARM_MODE,
+    /* 2nd 32 bits */
+    MM_CAMERA_PARM_3D_FRAME_FORMAT,
+    MM_CAMERA_PARM_CAMERA_ID,
+    MM_CAMERA_PARM_CAMERA_INFO,
+    MM_CAMERA_PARM_PREVIEW_SIZE, /*35*/
+    MM_CAMERA_PARM_QUERY_FALSH4SNAP,
+    MM_CAMERA_PARM_FOCUS_DISTANCES,
+    MM_CAMERA_PARM_BUFFER_INFO,
+    MM_CAMERA_PARM_JPEG_ROTATION,
+    MM_CAMERA_PARM_JPEG_MAINIMG_QUALITY, /* 40 */
+    MM_CAMERA_PARM_JPEG_THUMB_QUALITY,
+    MM_CAMERA_PARM_ZSL_ENABLE,
+    MM_CAMERA_PARM_FOCAL_LENGTH,
+    MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+    MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE, /* 45 */
+    MM_CAMERA_PARM_MCE,
+    MM_CAMERA_PARM_RESET_LENS_TO_INFINITY,
+    MM_CAMERA_PARM_SNAPSHOTDATA,
+    MM_CAMERA_PARM_HFR,
+    MM_CAMERA_PARM_REDEYE_REDUCTION, /* 50 */
+    MM_CAMERA_PARM_WAVELET_DENOISE,
+    MM_CAMERA_PARM_3D_DISPLAY_DISTANCE,
+    MM_CAMERA_PARM_3D_VIEW_ANGLE,
+    MM_CAMERA_PARM_PREVIEW_FORMAT,
+    MM_CAMERA_PARM_RDI_FORMAT,
+    MM_CAMERA_PARM_HFR_SIZE, /* 55 */
+    MM_CAMERA_PARM_3D_EFFECT,
+    MM_CAMERA_PARM_3D_MANUAL_CONV_RANGE,
+    MM_CAMERA_PARM_3D_MANUAL_CONV_VALUE,
+    MM_CAMERA_PARM_ENABLE_3D_MANUAL_CONVERGENCE,
+    /* These are new parameters defined here */
+    MM_CAMERA_PARM_CH_IMAGE_FMT, /* 60 */       // mm_camera_ch_image_fmt_parm_t
+    MM_CAMERA_PARM_OP_MODE,             // camera state, sub state also
+    MM_CAMERA_PARM_SHARPNESS_CAP,       //
+    MM_CAMERA_PARM_SNAPSHOT_BURST_NUM,  // num shots per snapshot action
+    MM_CAMERA_PARM_LIVESHOT_MAIN,       // enable/disable full size live shot
+    MM_CAMERA_PARM_MAXZOOM, /* 65 */
+    MM_CAMERA_PARM_LUMA_ADAPTATION,     // enable/disable
+    MM_CAMERA_PARM_HDR,
+    MM_CAMERA_PARM_CROP,
+    MM_CAMERA_PARM_MAX_PICTURE_SIZE,
+    MM_CAMERA_PARM_MAX_PREVIEW_SIZE, /* 70 */
+    MM_CAMERA_PARM_ASD_ENABLE,
+    MM_CAMERA_PARM_RECORDING_HINT,
+    MM_CAMERA_PARM_CAF_ENABLE,
+    MM_CAMERA_PARM_FULL_LIVESHOT,
+    MM_CAMERA_PARM_DIS_ENABLE, /* 75 */
+    MM_CAMERA_PARM_AEC_LOCK,
+    MM_CAMERA_PARM_AWB_LOCK,
+    MM_CAMERA_PARM_AF_MTR_AREA,
+    MM_CAMERA_PARM_AEC_MTR_AREA,
+    MM_CAMERA_PARM_LOW_POWER_MODE,
+    MM_CAMERA_PARM_MAX_HFR_MODE, /* 80 */
+    MM_CAMERA_PARM_MAX_VIDEO_SIZE,
+    MM_CAMERA_PARM_DEF_PREVIEW_SIZES,
+    MM_CAMERA_PARM_DEF_VIDEO_SIZES,
+    MM_CAMERA_PARM_DEF_THUMB_SIZES,
+    MM_CAMERA_PARM_DEF_HFR_SIZES,
+    MM_CAMERA_PARM_PREVIEW_SIZES_CNT,
+    MM_CAMERA_PARM_VIDEO_SIZES_CNT,
+    MM_CAMERA_PARM_THUMB_SIZES_CNT,
+    MM_CAMERA_PARM_HFR_SIZES_CNT,
+    MM_CAMERA_PARM_GRALLOC_USAGE,
+    MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, // to check whether both outputs are
+    MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+    MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+    MM_CAMERA_PARM_FOCUS_MODE,
+    MM_CAMERA_PARM_HFR_FRAME_SKIP,
+    MM_CAMERA_PARM_CH_INTERFACE,
+    //or single output enabled to differentiate 7x27a with others
+    MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+    MM_CAMERA_PARM_MAX_NUM_FACES_DECT,
+    MM_CAMERA_PARM_FPS_RANGE,
+    MM_CAMERA_PARM_CID,
+    MM_CAMERA_PARM_FRAME_RESOLUTION,
+    MM_CAMERA_PARM_RAW_SNAPSHOT_FMT,
+    MM_CAMERA_PARM_FACIAL_FEATURE_INFO,
+    MM_CAMERA_PARM_MOBICAT,
+    MM_CAMERA_PARM_MAX
+} mm_camera_parm_type_t;
+
+typedef enum {
+  STREAM_NONE           =  0x0,
+  STREAM_IMAGE          =  0x1,
+  STREAM_RAW            =  0x2,
+  STREAM_RAW1           =  0x4,
+  STREAM_RAW2           =  0x8,
+} mm_camera_channel_stream_info_t;
+
+typedef enum {
+  CAMERA_SET_PARM_DISPLAY_INFO,
+  CAMERA_SET_PARM_DIMENSION,
+
+  CAMERA_SET_PARM_ZOOM,
+  CAMERA_SET_PARM_SENSOR_POSITION,
+  CAMERA_SET_PARM_FOCUS_RECT,
+  CAMERA_SET_PARM_LUMA_ADAPTATION,
+  CAMERA_SET_PARM_CONTRAST,
+  CAMERA_SET_PARM_BRIGHTNESS,
+  CAMERA_SET_PARM_EXPOSURE_COMPENSATION,
+  CAMERA_SET_PARM_SHARPNESS,
+  CAMERA_SET_PARM_HUE,  /* 10 */
+  CAMERA_SET_PARM_SATURATION,
+  CAMERA_SET_PARM_EXPOSURE,
+  CAMERA_SET_PARM_AUTO_FOCUS,
+  CAMERA_SET_PARM_WB,
+  CAMERA_SET_PARM_EFFECT,
+  CAMERA_SET_PARM_FPS,
+  CAMERA_SET_PARM_FLASH,
+  CAMERA_SET_PARM_NIGHTSHOT_MODE,
+  CAMERA_SET_PARM_REFLECT,
+  CAMERA_SET_PARM_PREVIEW_MODE,  /* 20 */
+  CAMERA_SET_PARM_ANTIBANDING,
+  CAMERA_SET_PARM_RED_EYE_REDUCTION,
+  CAMERA_SET_PARM_FOCUS_STEP,
+  CAMERA_SET_PARM_EXPOSURE_METERING,
+  CAMERA_SET_PARM_AUTO_EXPOSURE_MODE,
+  CAMERA_SET_PARM_ISO,
+  CAMERA_SET_PARM_BESTSHOT_MODE,
+  CAMERA_SET_PARM_ENCODE_ROTATION,
+
+  CAMERA_SET_PARM_PREVIEW_FPS,
+  CAMERA_SET_PARM_AF_MODE,  /* 30 */
+  CAMERA_SET_PARM_HISTOGRAM,
+  CAMERA_SET_PARM_FLASH_STATE,
+  CAMERA_SET_PARM_FRAME_TIMESTAMP,
+  CAMERA_SET_PARM_STROBE_FLASH,
+  CAMERA_SET_PARM_FPS_LIST,
+  CAMERA_SET_PARM_HJR,
+  CAMERA_SET_PARM_ROLLOFF,
+
+  CAMERA_STOP_PREVIEW,
+  CAMERA_START_PREVIEW,
+  CAMERA_START_SNAPSHOT, /* 40 */
+  CAMERA_START_RAW_SNAPSHOT,
+  CAMERA_STOP_SNAPSHOT,
+  CAMERA_EXIT,
+  CAMERA_ENABLE_BSM,
+  CAMERA_DISABLE_BSM,
+  CAMERA_GET_PARM_ZOOM,
+  CAMERA_GET_PARM_MAXZOOM,
+  CAMERA_GET_PARM_ZOOMRATIOS,
+  CAMERA_GET_PARM_AF_SHARPNESS,
+  CAMERA_SET_PARM_LED_MODE, /* 50 */
+  CAMERA_SET_MOTION_ISO,
+  CAMERA_AUTO_FOCUS_CANCEL,
+  CAMERA_GET_PARM_FOCUS_STEP,
+  CAMERA_ENABLE_AFD,
+  CAMERA_PREPARE_SNAPSHOT,
+  CAMERA_SET_FPS_MODE,
+  CAMERA_START_VIDEO,
+  CAMERA_STOP_VIDEO,
+  CAMERA_START_RECORDING,
+  CAMERA_STOP_RECORDING, /* 60 */
+  CAMERA_SET_VIDEO_DIS_PARAMS,
+  CAMERA_SET_VIDEO_ROT_PARAMS,
+  CAMERA_SET_PARM_AEC_ROI,
+  CAMERA_SET_CAF,
+  CAMERA_SET_PARM_BL_DETECTION_ENABLE,
+  CAMERA_SET_PARM_SNOW_DETECTION_ENABLE,
+  CAMERA_SET_PARM_STROBE_FLASH_MODE,
+  CAMERA_SET_PARM_AF_ROI,
+  CAMERA_START_LIVESHOT,
+  CAMERA_SET_SCE_FACTOR, /* 70 */
+  CAMERA_GET_CAPABILITIES,
+  CAMERA_GET_PARM_DIMENSION,
+  CAMERA_GET_PARM_LED_MODE,
+  CAMERA_SET_PARM_FD,
+  CAMERA_GET_PARM_3D_FRAME_FORMAT,
+  CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+  CAMERA_GET_PARM_FOCUS_DISTANCES,
+  CAMERA_START_ZSL,
+  CAMERA_STOP_ZSL,
+  CAMERA_ENABLE_ZSL, /* 80 */
+  CAMERA_GET_PARM_FOCAL_LENGTH,
+  CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+  CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+  CAMERA_SET_PARM_WAVELET_DENOISE,
+  CAMERA_SET_PARM_MCE,
+  CAMERA_ENABLE_STEREO_CAM,
+  CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+  CAMERA_GET_PARM_SNAPSHOTDATA,
+  CAMERA_SET_PARM_HFR,
+  CAMERA_SET_REDEYE_REDUCTION, /* 90 */
+  CAMERA_SET_PARM_3D_DISPLAY_DISTANCE,
+  CAMERA_SET_PARM_3D_VIEW_ANGLE,
+  CAMERA_SET_PARM_3D_EFFECT,
+  CAMERA_SET_PARM_PREVIEW_FORMAT,
+  CAMERA_GET_PARM_3D_DISPLAY_DISTANCE, /* 95 */
+  CAMERA_GET_PARM_3D_VIEW_ANGLE,
+  CAMERA_GET_PARM_3D_EFFECT,
+  CAMERA_GET_PARM_3D_MANUAL_CONV_RANGE,
+  CAMERA_SET_PARM_3D_MANUAL_CONV_VALUE,
+  CAMERA_ENABLE_3D_MANUAL_CONVERGENCE, /* 100 */
+  CAMERA_SET_PARM_HDR,
+  CAMERA_SET_ASD_ENABLE,
+  CAMERA_POSTPROC_ABORT,
+  CAMERA_SET_AEC_MTR_AREA,
+  CAMERA_SET_AEC_LOCK,       /*105*/
+  CAMERA_SET_AWB_LOCK,
+  CAMERA_SET_RECORDING_HINT,
+  CAMERA_SET_PARM_CAF,
+  CAMERA_SET_FULL_LIVESHOT,
+  CAMERA_SET_DIS_ENABLE,  /*110*/
+  CAMERA_GET_PARM_MAX_HFR_MODE,
+  CAMERA_SET_LOW_POWER_MODE,
+  CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+  CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+  CAMERA_GET_PARM_DEF_THUMB_SIZES, /*115*/
+  CAMERA_GET_PARM_DEF_HFR_SIZES,
+  CAMERA_GET_PARM_MAX_LIVESHOT_SIZE,
+  CAMERA_GET_PARM_FPS_RANGE,
+  CAMERA_SET_3A_CONVERGENCE,
+  CAMERA_SET_PREVIEW_HFR, /*120*/
+  CAMERA_GET_MAX_DIMENSION,
+  CAMERA_GET_MAX_NUM_FACES_DECT,
+  CAMERA_SET_CHANNEL_STREAM,
+  CAMERA_GET_CHANNEL_STREAM,
+  CAMERA_SET_PARM_CID, /*125*/
+  CAMERA_GET_PARM_FRAME_RESOLUTION,
+  CAMERA_GET_FACIAL_FEATURE_INFO,
+  CAMERA_GET_PP_MASK, /* get post-processing mask */
+  CAMERA_DO_PP_WNR,   /* do post-process WNR */
+  CAMERA_GET_PARM_HDR,
+  CAMERA_SEND_PP_PIPELINE_CMD, /* send offline pp cmd */
+  CAMERA_SET_BUNDLE, /* set stream bundle */
+  CAMERA_ENABLE_MOBICAT,
+  CAMERA_GET_PARM_MOBICAT,
+  CAMERA_CTRL_PARM_MAX
+} cam_ctrl_type;
+
+typedef enum {
+  CAMERA_ERROR_NO_MEMORY,
+  CAMERA_ERROR_EFS_FAIL,                /* Low-level operation failed */
+  CAMERA_ERROR_EFS_FILE_OPEN,           /* File already opened */
+  CAMERA_ERROR_EFS_FILE_NOT_OPEN,       /* File not opened */
+  CAMERA_ERROR_EFS_FILE_ALREADY_EXISTS, /* File already exists */
+  CAMERA_ERROR_EFS_NONEXISTENT_DIR,     /* User directory doesn't exist */
+  CAMERA_ERROR_EFS_NONEXISTENT_FILE,    /* User file doesn't exist */
+  CAMERA_ERROR_EFS_BAD_FILE_NAME,       /* Client specified invalid file/directory name*/
+  CAMERA_ERROR_EFS_BAD_FILE_HANDLE,     /* Client specified invalid file handle */
+  CAMERA_ERROR_EFS_SPACE_EXHAUSTED,     /* Out of file system space */
+  CAMERA_ERROR_EFS_OPEN_TABLE_FULL,     /* Out of open-file table slots                */
+  CAMERA_ERROR_EFS_OTHER_ERROR,         /* Other error                                 */
+  CAMERA_ERROR_CONFIG,
+  CAMERA_ERROR_EXIF_ENCODE,
+  CAMERA_ERROR_VIDEO_ENGINE,
+  CAMERA_ERROR_IPL,
+  CAMERA_ERROR_INVALID_FORMAT,
+  CAMERA_ERROR_TIMEOUT,
+  CAMERA_ERROR_ESD,
+  CAMERA_ERROR_MAX
+} camera_error_type;
+
+#if defined CAMERA_ANTIBANDING_OFF
+#undef CAMERA_ANTIBANDING_OFF
+#endif
+
+#if defined CAMERA_ANTIBANDING_60HZ
+#undef CAMERA_ANTIBANDING_60HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_50HZ
+#undef CAMERA_ANTIBANDING_50HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_AUTO
+#undef CAMERA_ANTIBANDING_AUTO
+#endif
+
+typedef enum {
+  CAMERA_PP_MASK_TYPE_WNR = 0x01
+} camera_pp_mask_type;
+
+typedef enum {
+  CAMERA_ANTIBANDING_OFF,
+  CAMERA_ANTIBANDING_60HZ,
+  CAMERA_ANTIBANDING_50HZ,
+  CAMERA_ANTIBANDING_AUTO,
+  CAMERA_ANTIBANDING_AUTO_50HZ,
+  CAMERA_ANTIBANDING_AUTO_60HZ,
+  CAMERA_MAX_ANTIBANDING,
+} camera_antibanding_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+  CAMERA_ISO_AUTO = 0,
+  CAMERA_ISO_DEBLUR,
+  CAMERA_ISO_100,
+  CAMERA_ISO_200,
+  CAMERA_ISO_400,
+  CAMERA_ISO_800,
+  CAMERA_ISO_1600,
+  CAMERA_ISO_MAX
+} camera_iso_mode_type;
+
+typedef enum {
+  MM_CAMERA_FACIAL_FEATURE_FD, // facial detection
+  MM_CAMERA_FACIAL_FEATURE_MAX
+} camera_facial_features;
+
+typedef enum {
+  AEC_ROI_OFF,
+  AEC_ROI_ON
+} aec_roi_ctrl_t;
+
+typedef enum {
+  AEC_ROI_BY_INDEX,
+  AEC_ROI_BY_COORDINATE,
+} aec_roi_type_t;
+
+typedef struct {
+  uint32_t x;
+  uint32_t y;
+} cam_coordinate_type_t;
+
+/*
+ * Define DRAW_RECTANGLES to draw rectangles on screen. Just for test purpose.
+ */
+//#define DRAW_RECTANGLES
+
+typedef struct {
+  uint16_t x;
+  uint16_t y;
+  uint16_t dx;
+  uint16_t dy;
+} roi_t;
+
+typedef struct {
+  aec_roi_ctrl_t aec_roi_enable;
+  aec_roi_type_t aec_roi_type;
+  union {
+    cam_coordinate_type_t coordinate;
+    uint32_t aec_roi_idx;
+  } aec_roi_position;
+} cam_set_aec_roi_t;
+
+typedef struct {
+  uint32_t frm_id;
+  uint8_t num_roi;
+  roi_t roi[MAX_ROI];
+  uint8_t is_multiwindow;
+} roi_info_t;
+
+/* Exif Tag Data Type */
+typedef enum
+{
+    EXIF_BYTE      = 1,
+    EXIF_ASCII     = 2,
+    EXIF_SHORT     = 3,
+    EXIF_LONG      = 4,
+    EXIF_RATIONAL  = 5,
+    EXIF_UNDEFINED = 7,
+    EXIF_SLONG     = 9,
+    EXIF_SRATIONAL = 10
+} exif_tag_type_t;
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+    uint32_t  num;    // Numerator
+    uint32_t  denom;  // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+    int32_t  num;    // Numerator
+    int32_t  denom;  // Denominator
+
+} srat_t;
+
+typedef struct
+{
+  exif_tag_type_t type;
+  uint8_t copy;
+  uint32_t count;
+  union
+  {
+    char      *_ascii;
+    uint8_t   *_bytes;
+    uint8_t    _byte;
+    uint16_t  *_shorts;
+    uint16_t   _short;
+    uint32_t  *_longs;
+    uint32_t   _long;
+    rat_t     *_rats;
+    rat_t      _rat;
+    uint8_t   *_undefined;
+    int32_t   *_slongs;
+    int32_t    _slong;
+    srat_t    *_srats;
+    srat_t     _srat;
+  } data;
+} exif_tag_entry_t;
+
+typedef struct {
+    uint32_t      tag_id;
+    exif_tag_entry_t  tag_entry;
+} exif_tags_info_t;
+
+
+typedef enum {
+ HDR_BRACKETING_OFF,
+ HDR_MODE,
+ EXP_BRACKETING_MODE
+ } hdr_mode;
+
+typedef struct {
+  hdr_mode mode;
+  uint32_t hdr_enable;
+  uint32_t total_frames;
+  uint32_t total_hal_frames;
+  char values[MAX_EXP_BRACKETING_LENGTH];  /* user defined values */
+} exp_bracketing_t;
+typedef struct {
+  roi_t      mtr_area[MAX_ROI];
+  uint32_t   num_area;
+  int        weight[MAX_ROI];
+} aec_mtr_area_t;
+
+typedef struct {
+  int denoise_enable;
+  int process_plates;
+} denoise_param_t;
+
+#ifndef HAVE_CAMERA_SIZE_TYPE
+  #define HAVE_CAMERA_SIZE_TYPE
+struct camera_size_type {
+  int width;
+  int height;
+};
+#endif
+
+typedef struct {
+  uint32_t yoffset;
+  uint32_t cbcr_offset;
+  uint32_t size;
+  struct camera_size_type resolution;
+}cam_buf_info_t;
+
+typedef struct {
+  int x;
+  int y;
+}cam_point_t;
+
+typedef struct {
+  /* AF parameters */
+  uint8_t focus_position;
+  /* AEC parameters */
+  uint32_t line_count;
+  uint8_t luma_target;
+  /* AWB parameters */
+  int32_t r_gain;
+  int32_t b_gain;
+  int32_t g_gain;
+  uint8_t exposure_mode;
+  uint8_t exposure_program;
+  float exposure_time;
+  uint32_t iso_speed;
+} snapshotData_info_t;
+
+
+typedef enum {
+  CAMERA_HFR_MODE_OFF = 1,
+  CAMERA_HFR_MODE_60FPS,
+  CAMERA_HFR_MODE_90FPS,
+  CAMERA_HFR_MODE_120FPS,
+  CAMERA_HFR_MODE_150FPS,
+} camera_hfr_mode_t;
+
+/* frame Q*/
+struct fifo_node
+{
+  struct fifo_node *next;
+  void *f;
+};
+
+struct fifo_queue
+{
+  int num_of_frames;
+  struct fifo_node *front;
+  struct fifo_node *back;
+  pthread_mutex_t mut;
+  pthread_cond_t wait;
+  char* name;
+};
+
+typedef struct {
+  uint32_t buf_len;
+  uint8_t num;
+  uint8_t pmem_type;
+  uint32_t vaddr[8];
+} mm_camera_histo_mem_info_t;
+
+typedef enum {
+  MM_CAMERA_CTRL_EVT_ZOOM_DONE,
+  MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE,
+  MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT,
+  MM_CAMERA_CTRL_EVT_SNAPSHOT_CONFIG_DONE,
+  MM_CAMERA_CTRL_EVT_WDN_DONE, // wavelet denoise done
+  MM_CAMERA_CTRL_EVT_HDR_DONE,
+  MM_CAMERA_CTRL_EVT_ERROR,
+  MM_CAMERA_CTRL_EVT_MAX
+}mm_camera_ctrl_event_type_t;
+
+typedef struct {
+  mm_camera_ctrl_event_type_t evt;
+  cam_ctrl_status_t status;
+  unsigned long cookie;
+} mm_camera_ctrl_event_t;
+
+typedef enum {
+  MM_CAMERA_CH_EVT_STREAMING_ON,
+  MM_CAMERA_CH_EVT_STREAMING_OFF,
+  MM_CAMERA_CH_EVT_STREAMING_ERR,
+  MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE,
+  MM_CAMERA_CH_EVT_DATA_REQUEST_MORE,
+  MM_CAMERA_CH_EVT_MAX
+}mm_camera_ch_event_type_t;
+
+typedef struct {
+  uint32_t ch;
+  mm_camera_ch_event_type_t evt;
+} mm_camera_ch_event_t;
+
+typedef struct {
+  uint32_t index;
+  /* TBD: need more fields for histo stats? */
+} mm_camera_stats_histo_t;
+
+typedef struct  {
+  uint32_t event_id;
+  union {
+    mm_camera_stats_histo_t    stats_histo;
+  } e;
+} mm_camera_stats_event_t;
+
+typedef enum {
+  FD_ROI_TYPE_HEADER,
+  FD_ROI_TYPE_DATA
+} fd_roi_type_t;
+
+typedef struct {
+  int fd_mode;
+  int num_fd;
+} fd_set_parm_t;
+
+typedef struct {
+  uint32_t frame_id;
+  int16_t num_face_detected;
+} fd_roi_header_type;
+
+struct fd_rect_t {
+  uint16_t x;
+  uint16_t y;
+  uint16_t dx;
+  uint16_t dy;
+};
+
+typedef struct {
+  struct fd_rect_t face_boundary;
+  uint16_t left_eye_center[2];
+  uint16_t right_eye_center[2];
+  uint16_t mouth_center[2];
+  uint8_t smile_degree;  //0 -100
+  uint8_t smile_confidence;  //
+  uint8_t blink_detected;  // 0 or 1
+  uint8_t is_face_recognised;  // 0 or 1
+  int8_t gaze_angle;  // -90, -45, 0, 45, 90 for head tilt from left to right
+  int8_t updown_dir;  // -90 to 90
+  int8_t leftright_dir;  //-90 to 90
+  int8_t roll_dir;  // -90 to 90
+  int8_t left_right_gaze;  // -50 to 50
+  int8_t top_bottom_gaze;  // -50 to 50
+  uint8_t left_blink;  // 0 - 100
+  uint8_t right_blink;  // 0 - 100
+  int8_t id;  // unique id for face tracking within view unless view changes
+  int8_t score;  // score of confidence( 0 -100)
+} fd_face_type;
+
+typedef struct {
+  uint32_t frame_id;
+  uint8_t idx;
+  fd_face_type face;
+} fd_roi_data_type;
+
+struct fd_roi_t {
+  fd_roi_type_t type;
+  union {
+    fd_roi_header_type hdr;
+    fd_roi_data_type data;
+  } d;
+};
+
+typedef struct  {
+  uint32_t event_id;
+  union {
+    mm_camera_histo_mem_info_t histo_mem_info;
+    struct fd_roi_t roi;
+  } e;
+} mm_camera_info_event_t;
+
+typedef struct  {
+  uint32_t trans_id;   /* transaction id */
+  uint32_t evt_type;   /* event type */
+  int32_t data_length; /* the length of valid data */
+  uint8_t evt_data[1]; /* buffer that holds the content of private event, must be flatten */
+} mm_camera_private_event_t;
+
+typedef enum {
+  MM_CAMERA_EVT_TYPE_CH,
+  MM_CAMERA_EVT_TYPE_CTRL,
+  MM_CAMERA_EVT_TYPE_STATS,
+  MM_CAMERA_EVT_TYPE_INFO,
+  MM_CAMERA_EVT_TYPE_PRIVATE_EVT,
+  MM_CAMERA_EVT_TYPE_MAX
+} mm_camera_event_type_t;
+
+typedef struct {
+  mm_camera_event_type_t event_type;
+  union {
+    mm_camera_ch_event_t ch;
+    mm_camera_ctrl_event_t ctrl;
+    mm_camera_stats_event_t stats;
+    mm_camera_info_event_t info;
+    mm_camera_private_event_t pri_evt;
+  } e;
+} mm_camera_event_t;
+
+typedef enum {
+  MM_CAMERA_REPRO_CMD_INVALID,
+  MM_CAMERA_REPRO_CMD_OPEN,
+  MM_CAMERA_REPRO_CMD_CONFIG,
+  MM_CAMERA_REPRO_CMD_ATTACH_DETACH,
+  MM_CAMERA_REPRO_CMD_START_STOP,
+  MM_CAMERA_REPRO_CMD_REPROCESS,
+  MM_CAMERA_REPRO_CMD_CLOSE,
+  MM_CAMERA_REPRO_CMD_MAX
+} mmcam_repro_cmd_type_t;
+
+/* re-process ISP type definition */
+typedef enum {
+  MM_CAMERA_REPRO_ISP_NOT_USED,
+  MM_CAMERA_REPRO_ISP_PIX,
+  MM_CAMERA_REPRO_ISP_CROP_AND_SCALING,
+  MM_CAMERA_REPRO_ISP_COLOR_CONVERSION,
+  MM_CAMERA_REPRO_ISP_DNOISE_AND_SHARPNESS,
+  MM_CAMERA_REPRO_ISP_MAX_NUM
+} mm_camera_repro_isp_type_t;
+
+typedef struct {
+  uint32_t addr_offset;
+  uint32_t length;
+  uint32_t data_offset;
+} mm_camera_repro_plane_t;
+
+typedef struct {
+  uint32_t repro_handle;  /* repro isp handle */
+  uint32_t inst_handle; /* instance handle */
+  int8_t   buf_idx;     /* buffer index    */
+  uint32_t frame_id;    /* frame id        */
+  uint32_t frame_len;   /* frame length    */
+  int8_t   num_planes;
+  mm_camera_repro_plane_t planes[VIDEO_MAX_PLANES];
+  struct timeval timestamp;
+} mm_camera_repro_cmd_reprocess_t;
+
+#define MM_CAMERA_MAX_NUM_REPROCESS_DEST 2
+
+typedef struct {
+  uint8_t  isp_type;      /* in: mm_camera_repro_isp_type_t */
+  uint32_t repro_handle;  /* out */
+} mm_camera_repro_cmd_open_t;
+
+typedef struct {
+  int image_mode;
+  int width;
+  int height;
+  cam_format_t format;
+  uint32_t inst_handle; /* stream handler */
+} mm_camera_repro_config_data_t;
+
+typedef struct {
+  uint32_t repro_handle;
+  int num_dest;
+  mm_camera_repro_config_data_t src;
+  mm_camera_repro_config_data_t dest[MM_CAMERA_MAX_NUM_REPROCESS_DEST];
+} mm_camera_repro_cmd_config_t;
+
+typedef struct {
+  uint32_t repro_handle;   /* repro isp handle */
+  uint32_t inst_handle;    /* instance handle of dest stream */
+  uint8_t  attach_flag;    /* flag: attach(TRUE)/detach(FALSE) */
+} mm_camera_repro_cmd_attach_detach_t;
+
+typedef struct {
+  uint32_t repro_handle;   /* repro isp handle */
+  uint32_t dest_handle;    /* Which destination to start/stop */
+  uint8_t  start_flag;     /* flag: start isp(TRUE)/stop isp(FALSE) */
+} mm_camera_repro_cmd_start_stop_t;
+
+typedef struct {
+  /* mm_camera_repro_cmd_type_t */
+  int cmd;
+  /* Union of the possible payloads for
+   * this reprocess command. */
+  union {
+    /* MM_CAMERA_REPRO_CMD_OPEN */
+    mm_camera_repro_cmd_open_t open;
+    /* MM_CAMERA_REPRO_CMD_CONFIG */
+    mm_camera_repro_cmd_config_t config;
+    /* MM_CAMERA_REPRO_CMD_ATTACH_DETACH */
+    mm_camera_repro_cmd_attach_detach_t attach_detach;
+    /* MM_CAMERA_REPRO_CMD_REPROCESS */
+    mm_camera_repro_cmd_reprocess_t reprocess;
+    /* MM_CAMERA_REPRO_CMD_START_STOP */
+    mm_camera_repro_cmd_start_stop_t start_stop;
+    /* MM_CAMERA_REPRO_CMD_CLOSE */
+    uint32_t repro_handle;
+  } payload;
+} mm_camera_repro_cmd_t;
+
+typedef struct {
+  /*input parameter*/
+  int enable;
+  /*output parameter*/
+  uint32_t mobicat_size;
+}mm_cam_mobicat_info_t;
+
+#define MAX_MOBICAT_SIZE 8092
+
+/*
+  WARNING: This data structure is large. Never declare it as a local
+  variable, or it can easily overflow the stack.
+  Always allocate it on the heap with malloc.
+*/
+typedef struct {
+  int max_len;   // tells the client the maximum size of the tags buffer (MAX_MOBICAT_SIZE)
+  int data_len;  // client returns the actual size, including the terminating '\0'
+  char tags[MAX_MOBICAT_SIZE];
+} cam_exif_tags_t;
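+/*
+ * Illustrative allocation sketch (not part of the original header; the
+ * surrounding mobicat query code is assumed):
+ *
+ *   cam_exif_tags_t *tags = (cam_exif_tags_t *)malloc(sizeof(cam_exif_tags_t));
+ *   if (tags) {
+ *     tags->max_len  = MAX_MOBICAT_SIZE;  // capacity offered to the client
+ *     tags->data_len = 0;                 // client fills in the real length
+ *     // ... pass to the query, then free(tags) when done ...
+ *   }
+ */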
+
+/******************************************************************************
+ * Function: exif_set_tag
+ * Description: Inserts or modifies an Exif tag to the Exif Info object. Typical
+ *              use is to call this function multiple times - to insert all the
+ *              desired Exif Tags individually to the Exif Info object and
+ *              then pass the info object to the Jpeg Encoder object so
+ *              the inserted tags would be emitted as tags in the Exif header.
+ * Input parameters:
+ *   obj       - The Exif Info object where the tag would be inserted to or
+ *               modified from.
+ *   tag_id    - The Exif Tag ID of the tag to be inserted/modified.
+ *   p_entry   - The pointer to the tag entry structure which contains the
+ *               details of tag. The pointer can be set to NULL to un-do
+ *               previous insertion for a certain tag.
+ * Return values:
+ *     JPEGERR_SUCCESS
+ *     JPEGERR_ENULLPTR
+ *     JPEGERR_EFAILED
+ * (See jpegerr.h for description of error values.)
+ * Notes: none
+ *****************************************************************************/
+int exif_set_tag(exif_info_obj_t    obj,
+                 exif_tag_id_t      tag_id,
+                 exif_tag_entry_t  *p_entry);
+
+
+#endif /* __QCAMERA_INTF_H__ */
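+/*
+ * Usage sketch for exif_set_tag (illustrative only; obj is assumed to have been
+ * created elsewhere, make is an assumed char* string, and some_tag_id stands for
+ * one of the tag IDs defined in qexif.h; memset/strlen come from <string.h>):
+ *
+ *   exif_tag_entry_t entry;
+ *   memset(&entry, 0, sizeof(entry));
+ *   entry.type        = EXIF_ASCII;
+ *   entry.copy        = 1;                 // ask the library to copy the string
+ *   entry.count       = strlen(make) + 1;  // string length (terminator assumed included)
+ *   entry.data._ascii = make;
+ *   rc = exif_set_tag(obj, some_tag_id, &entry);  // JPEGERR_SUCCESS on success
+ */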
diff --git a/camera/hdr/include/morpho_api.h b/camera/hdr/include/morpho_api.h
new file mode 100644
index 0000000..565e29f
--- /dev/null
+++ b/camera/hdr/include/morpho_api.h
@@ -0,0 +1,23 @@
+/**
+ * @file     morpho_api.h
+ * @brief    Macro for API function definitions
+ * @version  1.0.0
+ * @date     Tue Sep 21 17:37:35 2010
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_API_H
+#define MORPHO_API_H
+
+/**
+ * Used when declaring API functions.
+ * Can be switched by rewriting this macro, e.g. when building a DLL on Windows.
+ */
+#if defined(MORPHO_DLL) && defined(_WIN32)
+#define MORPHO_API(type) __declspec(dllexport) extern type
+#else
+#define MORPHO_API(type) extern type
+#endif
+
+#endif /* #ifndef MORPHO_API_H */
diff --git a/camera/hdr/include/morpho_easy_hdr.h b/camera/hdr/include/morpho_easy_hdr.h
new file mode 100644
index 0000000..a941762
--- /dev/null
+++ b/camera/hdr/include/morpho_easy_hdr.h
@@ -0,0 +1,769 @@
+/*******************************************************************
+ * morpho_easy_hdr.h
+ * [CP932/CRLF] { for automatic character-encoding detection }
+ *------------------------------------------------------------------
+ * Copyright (C) 2010-2012 Morpho,Inc.
+ *******************************************************************/
+
+#ifndef MORPHO_EASY_HDR_H
+#define MORPHO_EASY_HDR_H
+
+/*******************************************************************/
+
+#include "morpho_api.h"
+#include "morpho_error.h"
+#include "morpho_image_data.h"
+#include "morpho_rect_int.h"
+
+/*******************************************************************/
+
+#define MORPHO_EASY_HDR_VER "Morpho EasyHDR Ver.2.0.1 2012/07/18"
+
+/*-----------------------------------------------------------------*/
+
+/* Input limitations */
+
+#define MORPHO_EASY_HDR_MIN_IMAGE_WIDTH    100
+#define MORPHO_EASY_HDR_MAX_IMAGE_WIDTH   8192
+#define MORPHO_EASY_HDR_MIN_IMAGE_HEIGHT   100
+#define MORPHO_EASY_HDR_MAX_IMAGE_HEIGHT  8192
+#define MORPHO_EASY_HDR_MIN_NIMAGES   2
+#define MORPHO_EASY_HDR_MAX_NIMAGES  10
+
+/*-----------------------------------------------------------------*/
+
+/* Parameters */
+
+#define MORPHO_EASY_HDR_DISABLED 0
+#define MORPHO_EASY_HDR_ENABLED  1
+
+#define MORPHO_EASY_HDR_IMAGE_ALIGNMENT_DEFAULT  MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_GHOST_REMOVAL_DEFAULT  MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_AUTO_SCALING_DEFAULT  MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_FACE_DETECTION_DEFAULT  MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_FAIL_SOFT_MERGING_DEFAULT  MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_GHOST_DETECTION_SENSITIVITY_LEVEL_MIN      0
+#define MORPHO_EASY_HDR_GHOST_DETECTION_SENSITIVITY_LEVEL_MAX     10
+#define MORPHO_EASY_HDR_GHOST_DETECTION_SENSITIVITY_LEVEL_DEFAULT  7
+
+#define MORPHO_EASY_HDR_MERGE_SMOOTHNESS_LEVEL_MIN      0
+#define MORPHO_EASY_HDR_MERGE_SMOOTHNESS_LEVEL_MAX     10
+#define MORPHO_EASY_HDR_MERGE_SMOOTHNESS_LEVEL_DEFAULT  6
+
+#define MORPHO_EASY_HDR_MERGE_PARAM_MIN        0
+#define MORPHO_EASY_HDR_MERGE_PARAM_MAX      255
+#define MORPHO_EASY_HDR_MERGE_PARAM1_DEFAULT   0
+#define MORPHO_EASY_HDR_MERGE_PARAM2_DEFAULT 128
+#define MORPHO_EASY_HDR_MERGE_PARAM3_DEFAULT   0
+#define MORPHO_EASY_HDR_MERGE_PARAM4_DEFAULT 255
+
+#define MORPHO_EASY_HDR_RELIABLE_RECT_RATE_THRESHOLD_MIN       0
+#define MORPHO_EASY_HDR_RELIABLE_RECT_RATE_THRESHOLD_MAX     100
+#define MORPHO_EASY_HDR_RELIABLE_RECT_RATE_THRESHOLD_DEFAULT  80
+
+#define MORPHO_EASY_HDR_GHOST_RATE_THRESHOLD_MIN       0
+#define MORPHO_EASY_HDR_GHOST_RATE_THRESHOLD_MAX     100
+#define MORPHO_EASY_HDR_GHOST_RATE_THRESHOLD_DEFAULT  90
+
+#define MORPHO_EASY_HDR_CC_OFFSET_MIN          0
+#define MORPHO_EASY_HDR_CC_OFFSET_MAX        255
+#define MORPHO_EASY_HDR_CC_Y_OFFSET_DEFAULT    0
+#define MORPHO_EASY_HDR_CC_C_OFFSET_DEFAULT    0
+
+#define MORPHO_EASY_HDR_CC_GAIN_MIN        100
+#define MORPHO_EASY_HDR_CC_GAIN_MAX       2000
+#define MORPHO_EASY_HDR_CC_Y_GAIN_DEFAULT 1000
+#define MORPHO_EASY_HDR_CC_C_GAIN_DEFAULT 1000
+
+#define MORPHO_EASY_HDR_CC_GAMMA_MIN        100
+#define MORPHO_EASY_HDR_CC_GAMMA_MAX       2000
+#define MORPHO_EASY_HDR_CC_Y_GAMMA_DEFAULT 1000
+#define MORPHO_EASY_HDR_CC_C_GAMMA_DEFAULT 1000
+
+/*-----------------------------------------------------------------*/
+
+/* Merge status */
+
+#define MORPHO_EASY_HDR_OK                             0x00000000
+#define MORPHO_EASY_HDR_ERROR_IMAGE_ALIGNMENT_FAILURE  0x00000001
+#define MORPHO_EASY_HDR_ERROR_EXP_ESTIMATION_FAILURE   0x00000002
+#define MORPHO_EASY_HDR_ERROR_MOSTLY_GHOST             0x00000004
+#define MORPHO_EASY_HDR_ERROR_INTERNAL                 0x80000000
+
+/*******************************************************************/
+
+typedef struct _morpho_EasyHDR morpho_EasyHDR;
+typedef struct _morpho_EasyHDR_Callback morpho_EasyHDR_Callback;
+
+/*-----------------------------------------------------------------*/
+
+/** EasyHDR */
+struct _morpho_EasyHDR
+{
+    void *p; /**< Pointer to the internal structure */
+};
+
+/** EasyHDR Callback (for multi-thread processing) */
+struct _morpho_EasyHDR_Callback
+{
+    void *p; /**< Value passed as the first argument to the callback functions */
+
+    void * (* thread_create )(void *p, int index, void *(*start_routine)(void *arg), void *arg);
+    int    (* thread_destroy)(void *p, void *thread);
+    int    (* thread_join   )(void *p, void *thread, void **value_ptr);
+
+    void * (* mutex_create )(void *p);
+    int    (* mutex_destroy)(void *p, void *mutex);
+    int    (* mutex_lock   )(void *p, void *mutex);
+    int    (* mutex_trylock)(void *p, void *mutex);
+    int    (* mutex_unlock )(void *p, void *mutex);
+
+    void * (* cond_create   )(void *p);
+    int    (* cond_destroy  )(void *p, void *cond);
+    int    (* cond_wait     )(void *p, void *cond, void *lock);
+    int    (* cond_signal   )(void *p, void *cond);
+    int    (* cond_broadcast)(void *p, void *cond);
+};
+
+/*******************************************************************/
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Gets the version string.
+ *
+ * @return Version string (MORPHO_EASY_HDR_VER)
+ */
+MORPHO_API(char const *)
+morpho_EasyHDR_getVersion(void);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Gets the required memory size.
+ *
+ * @param[in]  max_width   Maximum width of the input images
+ * @param[in]  max_height  Maximum height of the input images
+ * @param[in]  format      Image format string
+ *
+ * @return Required memory size in bytes
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getBufferSize(
+    int max_width,
+    int max_height,
+    char const *format);
+
+/**
+ * Initialization.
+ *
+ * If the number of threads is set to 0 or less,
+ * processing is executed incrementally (step by step).
+ *
+ * If the number of threads is set to 1 or more,
+ * processing is executed in a single call.
+ *
+ * If the number of threads is set to 2 or more,
+ * processing is executed in parallel with multiple threads (single call);
+ * a valid callback set must be provided.
+ *
+ * [Execution state transition]
+ *     ?_UNKNOWN -> 0_INITIALIZED
+ *
+ * @param[in,out]  p            EasyHDR instance
+ * @param[out]     buffer       Pointer to the memory assigned to EasyHDR
+ * @param[in]      buffer_size  Size of the memory assigned to EasyHDR
+ * @param[in]      nthreads     Number of threads to use (number of cores)
+ * @param[in]      callback     Callback function set
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_initialize(
+    morpho_EasyHDR *p,
+    void *buffer,
+    int buffer_size,
+    int nthreads,
+    morpho_EasyHDR_Callback const *callback);
+
+/**
+ * Cleanup.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p  EasyHDR instance
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_finalize(
+    morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Starts/executes merging.
+ * Can be called after setImageFormat().
+ *
+ * [Execution state transition (single-call execution)]
+ *     0_INITIALIZED -> (1_PROCESSING) -> 0_INITIALIZED (processing completed)
+ *                                     -> 2_SUSPENDED   (suspend() called)
+ *
+ * [Execution state transition (incremental execution)]
+ *     0_INITIALIZED -> 3_PAUSED      (processing)
+ *                   -> 0_INITIALIZED (processing completed)
+ *
+ * @param[in,out]  p             EasyHDR instance
+ * @param[out]     output_image  Result image (the first input image may be specified)
+ * @param[in,out]  input_images  Input images (may be overwritten by the engine)
+ * @param[in]      nimages       Number of input images
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_merge(
+    morpho_EasyHDR *p,
+    morpho_ImageData *output_image,
+    morpho_ImageData *input_images[],
+    int nimages);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Continues merging.
+ *
+ * Can be called after merge().
+ *
+ * Valid only for incremental execution (nthreads set to 0 in initialize()).
+ *
+ * [Execution state transition (incremental execution)]
+ *     3_PAUSED -> 3_PAUSED      (processing)
+ *              -> 0_INITIALIZED (processing completed)
+ *
+ * @param[in,out]  p  EasyHDR instance
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_process(
+    morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Suspends merging (called from another context).
+ * Can be called while merge() is running.
+ *
+ * [Execution state transition (single-call execution)]
+ *     1_PROCESSING -> 2_SUSPENDED
+ *
+ * @param[in,out]  p  EasyHDR instance
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_suspend(
+    morpho_EasyHDR *p);
+
+/**
+ * Resumes merging.
+ * Can be called after suspend().
+ *
+ * [Execution state transition (single-call execution)]
+ *     2_SUSPENDED -> (1_PROCESSING) -> 0_INITIALIZED (processing completed)
+ *                                   -> 2_SUSPENDED   (suspend() called)
+ *
+ * @param[in,out]  p  EasyHDR instance
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_resume(
+    morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the image format.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p       EasyHDR instance
+ * @param[in]      format  Image format string
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setImageFormat(
+    morpho_EasyHDR *p,
+    char const *format);
+
+/**
+ * Gets the image format.
+ * Can be called after setImageFormat().
+ *
+ * @param[in,out]  p            EasyHDR instance
+ * @param[out]     buffer       Buffer that receives the image format string
+ * @param[in]      buffer_size  Size of the buffer (including the terminating character)
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getImageFormat(
+    morpho_EasyHDR *p,
+    char *buffer,
+    int buffer_size);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Enables/disables image alignment (camera-shake correction).
+ * Can be called after initialize().
+ *
+ * value:
+ *   MORPHO_EASY_HDR_ENABLED  : image alignment enabled
+ *   MORPHO_EASY_HDR_DISABLED : image alignment disabled
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setImageAlignmentStatus(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets whether image alignment (camera-shake correction) is enabled.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getImageAlignmentStatus(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Enables/disables ghost removal (subject-blur correction).
+ * Can be called after initialize().
+ *
+ * value:
+ *   MORPHO_EASY_HDR_ENABLED  : ghost removal enabled
+ *   MORPHO_EASY_HDR_DISABLED : ghost removal disabled
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setGhostRemovalStatus(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets whether ghost removal (subject-blur correction) is enabled.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getGhostRemovalStatus(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Enables/disables auto scaling (cropping).
+ * Can be called after initialize().
+ *
+ * value:
+ *   MORPHO_EASY_HDR_ENABLED  : auto scaling enabled
+ *   MORPHO_EASY_HDR_DISABLED : auto scaling disabled
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setAutoScalingStatus(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets whether auto scaling (cropping) is enabled.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getAutoScalingStatus(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Enables/disables face-detection correction.
+ * Can be called after initialize().
+ *
+ * value:
+ *   MORPHO_EASY_HDR_ENABLED  : face-detection correction enabled
+ *   MORPHO_EASY_HDR_DISABLED : face-detection correction disabled
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setFaceDetectionStatus(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets whether face-detection correction is enabled.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getFaceDetectionStatus(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Enables/disables fail-soft merging.
+ * Can be called after initialize().
+ *
+ * value:
+ *   MORPHO_EASY_HDR_ENABLED  : fail-soft merging enabled
+ *   MORPHO_EASY_HDR_DISABLED : fail-soft merging disabled
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setFailSoftMergingStatus(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets whether fail-soft merging is enabled.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getFailSoftMergingStatus(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the ghost detection sensitivity level.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setGhostDetectionSensitivityLevel(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets the ghost detection sensitivity level.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getGhostDetectionSensitivityLevel(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the merge smoothness level.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[in]      value  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setMergeSmoothnessLevel(
+    morpho_EasyHDR *p,
+    int value);
+
+/**
+ * Gets the merge smoothness level.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p      EasyHDR instance
+ * @param[out]     value  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getMergeSmoothnessLevel(
+    morpho_EasyHDR *p,
+    int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the merge parameters.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p       EasyHDR instance
+ * @param[in]      value1  Setting value
+ * @param[in]      value2  Setting value
+ * @param[in]      value3  Setting value
+ * @param[in]      value4  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setMergeParameters(
+    morpho_EasyHDR *p,
+    int value1,
+    int value2,
+    int value3,
+    int value4);
+
+/**
+ * Gets the merge parameters.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p       EasyHDR instance
+ * @param[out]     value1  Where the setting value is stored
+ * @param[out]     value2  Where the setting value is stored
+ * @param[out]     value3  Where the setting value is stored
+ * @param[out]     value4  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getMergeParameters(
+    morpho_EasyHDR *p,
+    int *value1,
+    int *value2,
+    int *value3,
+    int *value4);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the reliable-rectangle rate threshold.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p     EasyHDR instance
+ * @param[in]      rate  Setting value (central rate % rectangle)
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setReliableRectRateThreshold(
+    morpho_EasyHDR *p,
+    int rate);
+
+/**
+ * Gets the reliable-rectangle rate threshold.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p     EasyHDR instance
+ * @param[out]     rate  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getReliableRectRateThreshold(
+    morpho_EasyHDR *p,
+    int *rate);
+
+/**
+ * Gets the reliable rectangle.
+ * Can be called after initialize().
+ * (A valid value is only set after merge().)
+ *
+ * @param[in,out]  p     EasyHDR instance
+ * @param[out]     rect  Where the rectangle is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getReliableRect(
+    morpho_EasyHDR *p,
+    morpho_RectInt *rect);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the ghost-rate threshold.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p     EasyHDR instance
+ * @param[in]      rate  Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setGhostRateThreshold(
+    morpho_EasyHDR *p,
+    int rate);
+
+/**
+ * Gets the ghost-rate threshold.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p     EasyHDR instance
+ * @param[out]     rate  Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getGhostRateThreshold(
+    morpho_EasyHDR *p,
+    int *rate);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the color correction parameters.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p         EasyHDR instance
+ * @param[in]      y_offset  Setting value
+ * @param[in]      y_gain    Setting value
+ * @param[in]      y_gamma   Setting value
+ * @param[in]      c_offset  Setting value
+ * @param[in]      c_gain    Setting value
+ * @param[in]      c_gamma   Setting value
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setColorCorrectionParameters(
+    morpho_EasyHDR *p,
+    int y_offset,
+    int y_gain,
+    int y_gamma,
+    int c_offset,
+    int c_gain,
+    int c_gamma);
+
+/**
+ * Gets the color correction parameters.
+ * Can be called after initialize().
+ *
+ * @param[in,out]  p         EasyHDR instance
+ * @param[out]     y_offset  Where the setting value is stored
+ * @param[out]     y_gain    Where the setting value is stored
+ * @param[out]     y_gamma   Where the setting value is stored
+ * @param[out]     c_offset  Where the setting value is stored
+ * @param[out]     c_gain    Where the setting value is stored
+ * @param[out]     c_gamma   Where the setting value is stored
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getColorCorrectionParameters(
+    morpho_EasyHDR *p,
+    int *y_offset,
+    int *y_gain,
+    int *y_gamma,
+    int *c_offset,
+    int *c_gain,
+    int *c_gamma);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Gets the merge status.
+ * Can be called after initialize().
+ *
+ * Status codes:
+ *   MORPHO_EASY_HDR_OK
+ *   MORPHO_EASY_HDR_ERROR_*
+ *
+ * @param[in,out]  p  EasyHDR instance
+ *
+ * @return Status code (MORPHO_EASY_HDR_OK or MORPHO_EASY_HDR_ERROR_*)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getMergeStatus(
+    morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Creates a thumbnail (shrinks the output image).
+ * Can be called after morpho_EasyHDR_setImageFormat().
+ *
+ * @param[in,out]  p                EasyHDR instance
+ * @param[out]     thumbnail_image  Output (thumbnail) image
+ * @param[in]      output_image     Input (merged output) image
+ *
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_makeThumbnail(
+    morpho_EasyHDR *p,
+    morpho_ImageData *thumbnail_image,
+    morpho_ImageData const *output_image);
+
+/*-----------------------------------------------------------------*/
+
+#ifdef __cplusplus
+} /* extern "C" */
+#endif
+
+/*******************************************************************/
+
+#endif /* !MORPHO_EASY_HDR_H */
+
+/*******************************************************************/
+/* [EOF] */
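+/*
+ * Rough call-order sketch for the single-call (nthreads >= 1) path; not part of
+ * the original Morpho header. Frame setup, the format string fmt, max_w/max_h
+ * and error handling are simplified assumptions:
+ *
+ *   morpho_EasyHDR hdr;
+ *   int size  = morpho_EasyHDR_getBufferSize(max_w, max_h, fmt);
+ *   void *buf = malloc(size);
+ *   morpho_EasyHDR_initialize(&hdr, buf, size, 1, NULL);
+ *   morpho_EasyHDR_setImageFormat(&hdr, fmt);
+ *   morpho_EasyHDR_merge(&hdr, &out, frames, nframes);
+ *   int status = morpho_EasyHDR_getMergeStatus(&hdr);  // MORPHO_EASY_HDR_OK or ERROR_*
+ *   morpho_EasyHDR_finalize(&hdr);
+ *   free(buf);
+ */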

diff --git a/camera/hdr/include/morpho_easy_hdr_ext.h b/camera/hdr/include/morpho_easy_hdr_ext.h
new file mode 100644
index 0000000..2a4bd7c
--- /dev/null
+++ b/camera/hdr/include/morpho_easy_hdr_ext.h
@@ -0,0 +1,19 @@
+#ifndef MORPHO_EASY_HDR_EXT_H
+#define MORPHO_EASY_HDR_EXT_H
+
+#include "morpho_easy_hdr.h"
+/*
+return == 0 : OK
+return != 0 : NG (Please print the return value to check Error types)
+*/
+MORPHO_API(int)
+LINK_mm_camera_HDR(
+    unsigned char* yuvInput01,
+    unsigned char* yuvInput02,
+    unsigned char* yuvInput03,
+    unsigned char* pHDROutImage,
+    int width,
+    int height,
+    int indoor);
+
+#endif //MORPHO_EASY_HDR_EXT_H
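+/*
+ * Hedged call example (not part of the original header; yuv0..yuv2 and hdr_out
+ * are assumed to be caller-allocated YUV frames of identical width x height):
+ *
+ *   int rc = LINK_mm_camera_HDR(yuv0, yuv1, yuv2, hdr_out, width, height, 0);
+ *   if (rc != 0) {
+ *     // NG: print rc to identify the error type, as noted above
+ *   }
+ */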
diff --git a/camera/hdr/include/morpho_error.h b/camera/hdr/include/morpho_error.h
new file mode 100644
index 0000000..148216f
--- /dev/null
+++ b/camera/hdr/include/morpho_error.h
@@ -0,0 +1,29 @@
+/**
+ * @file     morpho_error.h
+ * @brief    Error code definitions
+ * @version  1.0.0
+ * @date     2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_ERROR_H
+#define MORPHO_ERROR_H
+
+/** Error codes. */
+#define MORPHO_OK                   (0x00000000)  /**< Success */
+#define MORPHO_DOPROCESS            (0x00000001)  /**< Processing */
+#define MORPHO_CANCELED             (0x00000002)  /**< Canceled */
+#define MORPHO_SUSPENDED            (0x00000008)  /**< Suspended */
+
+#define MORPHO_ERROR_GENERAL_ERROR  (0x80000000)  /**< General error. */
+#define MORPHO_ERROR_PARAM          (0x80000001)  /**< Invalid argument. */
+#define MORPHO_ERROR_STATE          (0x80000002)  /**< Invalid internal state or function call order. */
+#define MORPHO_ERROR_MALLOC         (0x80000004)  /**< Memory allocation error. */
+#define MORPHO_ERROR_IO             (0x80000008)  /**< Input/output error. */
+#define MORPHO_ERROR_UNSUPPORTED    (0x80000010)  /**< Unsupported feature. */
+#define MORPHO_ERROR_NOTFOUND       (0x80000020)  /**< Search target not found. */
+#define MORPHO_ERROR_INTERNAL       (0x80000040)  /**< Internal error. */
+#define MORPHO_ERROR_UNKNOWN        (0xC0000000)  /**< Any error other than the above. */
+
+#endif /* #ifndef MORPHO_ERROR_H */
diff --git a/camera/hdr/include/morpho_get_image_size.h b/camera/hdr/include/morpho_get_image_size.h
new file mode 100644
index 0000000..b0f538f
--- /dev/null
+++ b/camera/hdr/include/morpho_get_image_size.h
@@ -0,0 +1,89 @@
+/**
+ * @file     morpho_get_image_size.h
+ * @brief    Functions to get the memory size required for an image
+ * @version  1.0.0
+ * @date     2008-07-01
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_GET_IMAGE_SIZE_H
+#define MORPHO_GET_IMAGE_SIZE_H
+
+#include "morpho_api.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * Gets the memory size required to store an image, from its width,
+ * height and format name.
+ *
+ * @param width       Width
+ * @param height      Height
+ * @param p_format    Image format string
+ * @return            Memory size required for the image
+ */
+#define morpho_getImageSize mor_noise_reduction_IF_getImageSize
+
+MORPHO_API(int)
+morpho_getImageSize(int width, int height, const char *p_format);
+
+/**
+ * Gets the Y-plane data size.
+ *
+ * @param width       Width
+ * @param height      Height
+ * @param p_format    Image format string
+ * @return            Y-plane data size
+ */
+#define morpho_getImageSizeY mor_noise_reduction_IF_getImageSizeY
+
+MORPHO_API(int)
+morpho_getImageSizeY(int width, int height, const char *p_format);
+
+/**
+ * Gets the U-plane data size.
+ *
+ * @param width       Width
+ * @param height      Height
+ * @param p_format    Image format string
+ * @return            U-plane data size
+ */
+#define morpho_getImageSizeU mor_noise_reduction_IF_getImageSizeU
+
+MORPHO_API(int)
+morpho_getImageSizeU(int width, int height, const char *p_format);
+
+/**
+ * Gets the V-plane data size.
+ *
+ * @param width       Width
+ * @param height      Height
+ * @param p_format    Image format string
+ * @return            V-plane data size
+ */
+#define morpho_getImageSizeV mor_noise_reduction_IF_getImageSizeV
+
+MORPHO_API(int)
+morpho_getImageSizeV(int width, int height, const char *p_format);
+
+/**
+ * Gets the UV-plane data size.
+ *
+ * @param width       Width
+ * @param height      Height
+ * @param p_format    Image format string
+ * @return            UV-plane data size
+ */
+#define morpho_getImageSizeUV mor_noise_reduction_IF_getImageSizeUV
+
+MORPHO_API(int)
+morpho_getImageSizeUV(int width, int height, const char *p_format);
+
+
+#ifdef __cplusplus
+} /* extern "C" { */
+#endif
+
+#endif /* MORPHO_GET_IMAGE_SIZE_H */
diff --git a/camera/hdr/include/morpho_hdr_checker.h b/camera/hdr/include/morpho_hdr_checker.h
new file mode 100644
index 0000000..662cfc1
--- /dev/null
+++ b/camera/hdr/include/morpho_hdr_checker.h
@@ -0,0 +1,155 @@
+/*******************************************************************
+ * morpho_hdr_checker.h
+ * [CP932/CRLF] { for automatic character-encoding detection }
+ *------------------------------------------------------------------
+ * Copyright (C) 2011-2012 Morpho,Inc.
+ *******************************************************************/
+
+#ifndef MORPHO_HDR_CHECKER_H
+#define MORPHO_HDR_CHECKER_H
+
+/*******************************************************************/
+
+#include "morpho_api.h"
+#include "morpho_error.h"
+#include "morpho_image_data.h"
+
+/*******************************************************************/
+
+#define MORPHO_HDR_CHECKER_VER "Morpho DR Checker Ver.1.1.0 2012/1/17"
+
+/*-----------------------------------------------------------------*/
+
+#define MORPHO_HDR_CHECKER_MIN_IMAGE_WIDTH     2
+#define MORPHO_HDR_CHECKER_MAX_IMAGE_WIDTH  8192
+#define MORPHO_HDR_CHECKER_MIN_IMAGE_HEIGHT    2
+#define MORPHO_HDR_CHECKER_MAX_IMAGE_HEIGHT 8192
+
+/*******************************************************************/
+
+typedef struct _morpho_HDRChecker morpho_HDRChecker;
+
+/* HDR metric evaluator */
+struct _morpho_HDRChecker
+{
+    void *p; /**< Pointer to the internal structure */
+};
+
+/* Sensitivity for detecting blown highlights and blocked-up shadows */
+typedef enum {
+    MORPHO_HDR_CHECKER_SENSITIVITY_SENSITIVE,
+    MORPHO_HDR_CHECKER_SENSITIVITY_NORMAL,
+    MORPHO_HDR_CHECKER_SENSITIVITY_INSENSITIVE,
+} MORPHO_HDR_CHECKER_SENSITIVITY;
+
+/*******************************************************************/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * Gets the version string.
+ *
+ * @return Version string (MORPHO_HDR_CHECKER_VER)
+ */
+MORPHO_API(const char*)
+morpho_HDRChecker_getVersion(void);
+
+/**
+ * Gets the required memory size.
+ *
+ * @param[in] width  Width of the input image
+ * @param[in] height Height of the input image
+ * @param[in] format Format of the input image
+ * @return Required memory size in bytes
+ */
+MORPHO_API(int)
+morpho_HDRChecker_getBufferSize(
+    int width,
+    int height,
+    const char *format);
+
+/**
+ * Initialization.
+ *
+ * @param[in,out] p           HDRChecker instance
+ * @param[in]     buffer      Pointer to the memory assigned to HDRChecker
+ * @param[in]     buffer_size Size of the memory assigned to HDRChecker
+ * @param[in]     width       Width of the input image
+ * @param[in]     height      Height of the input image
+ * @param[in]     format      Format of the input image
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_initialize(
+    morpho_HDRChecker * const p,
+    void * const buffer,
+    const int buffer_size,
+    const int width,
+    const int height,
+    const char *format);
+
+/**
+ * Cleanup.
+ * Can be called after initialize().
+ *
+ * @param[in,out] p HDRChecker instance
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_finalize(
+    morpho_HDRChecker *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Sets the sensitivity of the HDR metric calculation.
+ * Can be called after initialize().
+ *
+ * @param[in,out] p           HDRChecker instance
+ * @param[in]     sensitivity Sensitivity (one of the MORPHO_HDR_CHECKER_SENSITIVITY values)
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_setSensitivity(
+    morpho_HDRChecker * const p,
+    MORPHO_HDR_CHECKER_SENSITIVITY sensitivity);
+
+/**
+ * Gets the sensitivity of the HDR metric calculation.
+ * Can be called after initialize().
+ *
+ * @param[in,out] p           HDRChecker instance
+ * @param[out]    sensitivity Pointer to the sensitivity
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_getSensitivity(
+    morpho_HDRChecker * const p,
+    MORPHO_HDR_CHECKER_SENSITIVITY *sensitivity);
+
+/**
+ * Evaluates the HDR metric.
+ * Can be called after initialize().
+ *
+ * @param[in,out] p      HDRChecker instance
+ * @param[out]    result Array (4 elements) that receives the evaluation result.
+ *                       A non-zero element means an image with the corresponding
+ *                       exposure is needed; results are stored in the order
+ *                       {+2, +1, -1, -2}.
+ * @param[in]     input_image Input image
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_evaluate(
+    morpho_HDRChecker * const p,
+    int * const result,
+    const morpho_ImageData * const input_image);
+
+/*-----------------------------------------------------------------*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* MORPHO_HDR_CHECKER_H */
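+/*
+ * Usage sketch (illustrative only; fmt, w, h and the frame are assumed to be
+ * prepared by the caller). A non-zero element of result[] asks for the
+ * corresponding {+2, +1, -1, -2} exposure:
+ *
+ *   morpho_HDRChecker chk;
+ *   int result[4] = {0, 0, 0, 0};
+ *   int size  = morpho_HDRChecker_getBufferSize(w, h, fmt);
+ *   void *buf = malloc(size);
+ *   morpho_HDRChecker_initialize(&chk, buf, size, w, h, fmt);
+ *   morpho_HDRChecker_setSensitivity(&chk, MORPHO_HDR_CHECKER_SENSITIVITY_NORMAL);
+ *   morpho_HDRChecker_evaluate(&chk, result, &frame);
+ *   morpho_HDRChecker_finalize(&chk);
+ *   free(buf);
+ */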

diff --git a/camera/hdr/include/morpho_image_data.h b/camera/hdr/include/morpho_image_data.h
new file mode 100644
index 0000000..2fc05aa
--- /dev/null
+++ b/camera/hdr/include/morpho_image_data.h
@@ -0,0 +1,43 @@
+/**
+ * @file     morpho_image_data.h
+ * @brief    Structure definitions for image data
+ * @version  1.0.0
+ * @date     2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_IMAGE_DATA_H
+#define MORPHO_IMAGE_DATA_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct{
+    void * y;               /**< Pointer to the beginning of the Y plane */
+    void * u;               /**< Pointer to the beginning of the U plane */
+    void * v;               /**< Pointer to the beginning of the V plane */
+} morpho_ImageYuvPlanar;
+
+typedef struct{
+    void * y;               /**< Pointer to the beginning of the Y plane */
+    void * uv;              /**< Pointer to the beginning of the UV plane */
+} morpho_ImageYuvSemiPlanar;
+
+/** Image data. */
+typedef struct {
+    int width;              /**< Width */
+    int height;             /**< Height */
+    union{
+        void * p;           /**< Pointer to the beginning of the image data */
+        morpho_ImageYuvPlanar planar;
+        morpho_ImageYuvSemiPlanar semi_planar;
+    } dat;
+} morpho_ImageData;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_IMAGE_DATA_H */
diff --git a/camera/hdr/include/morpho_image_data_ex.h b/camera/hdr/include/morpho_image_data_ex.h
new file mode 100644
index 0000000..ef33f35
--- /dev/null
+++ b/camera/hdr/include/morpho_image_data_ex.h
@@ -0,0 +1,51 @@
+/**
+ * @file     morpho_image_data_ex.h
+ * @brief    Structure definitions for image data
+ * @version  1.0.0
+ * @date     2010-03-30
+ *
+ * Copyright (C) 2010-2011 Morpho, Inc.
+ */
+
+#ifndef MORPHO_IMAGE_DATA_EX_H
+#define MORPHO_IMAGE_DATA_EX_H
+
+#include "morpho_image_data.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct{
+    int y;
+    int u;
+    int v;
+} morpho_ImageYuvPlanarPitch;
+
+typedef struct{
+    int y;
+    int uv;
+} morpho_ImageYuvSemiPlanarPitch;
+
+/** Image data. */
+typedef struct {
+    int width;              /**< Width */
+    int height;             /**< Height */
+    union{
+        void *p;            /**< Pointer to the beginning of the image data */
+        morpho_ImageYuvPlanar planar;
+        morpho_ImageYuvSemiPlanar semi_planar;
+    } dat;
+    union{
+        int p;              /**< Bytes from the start of one line to the start of the next (pitch) */
+        morpho_ImageYuvPlanarPitch planar;
+        morpho_ImageYuvSemiPlanarPitch semi_planar;
+    } pitch;
+} morpho_ImageDataEx;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_IMAGE_DATA_EX_H */
diff --git a/camera/hdr/include/morpho_motion_data.h b/camera/hdr/include/morpho_motion_data.h
new file mode 100644
index 0000000..4db78db
--- /dev/null
+++ b/camera/hdr/include/morpho_motion_data.h
@@ -0,0 +1,27 @@
+/**
+ * @file     morpho_motion_data.h
+ * @brief    Structure definition for motion data
+ * @version  1.0.0
+ * @date     2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_MOTION_DATA_H
+#define MORPHO_MOTION_DATA_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** Motion data. */
+typedef struct {
+    int v[6];  /**< Motion data */
+    int fcode; /**< Success: 0 / Failure: non-zero (reason of the failure) */
+} morpho_MotionData;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_MOTION_DATA_H */
diff --git a/camera/hdr/include/morpho_noise_reduction.h b/camera/hdr/include/morpho_noise_reduction.h
new file mode 100644
index 0000000..886c2c1
--- /dev/null
+++ b/camera/hdr/include/morpho_noise_reduction.h
@@ -0,0 +1,204 @@
+//====================================================================
+// morpho_noise_reduction.h
+// [SJIS/CRLF] { for automatic character-encoding detection }
+//
+// Copyright(c) 2006-2012 Morpho,Inc.
+//====================================================================
+
+#ifndef MORPHO_NOISE_REDUCTION_H
+# define MORPHO_NOISE_REDUCTION_H
+
+//--------------------------------------------------------------------
+
+# include "morpho_api.h"
+# include "morpho_error.h"
+# include "morpho_image_data.h"
+# include "morpho_motion_data.h"
+# include "morpho_rect_int.h"
+
+//--------------------------------------------------------------------
+
+# ifdef __cplusplus
+extern "C" {
+# endif
+
+//====================================================================
+
+/** Version string */
+# define MORPHO_NOISE_REDUCTION_VERSION "Morpho Noise Reduction Ver.0.9.0 2012/08/09"
+
+//--------------------------------------------------------------------
+/** Noise reducer */
+typedef struct
+{
+    void *p; /**< Pointer to the internal structure */
+} morpho_NoiseReduction;
+
+//--------------------------------------------------------------------
+
+/**
+ * Gets the version string.
+ *
+ * @return Version string (MORPHO_NOISE_REDUCTION_VERSION)
+ */
+MORPHO_API(const char *)
+morpho_NoiseReduction_getVersion(void);
+
+/**
+ * Gets the memory size required for noise reduction.
+ * See the TRM for the formats that can be specified.
+ *
+ * @param[in] width  Width of the input image
+ * @param[in] height Height of the input image
+ * @param[in] format Image format string
+ * @return Required memory size in bytes
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getBufferSize(
+    int width,
+    int height,
+    const char *format);
+
+/**
+ * Initializes the noise reducer.
+ *
+ * @param[out] reducer     Noise reducer
+ * @param[out] buffer      Pointer to the memory assigned to the noise reducer
+ * @param[in]  buffer_size Size of the memory assigned to the noise reducer.
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_initialize(
+    morpho_NoiseReduction *reducer,
+    void *buffer,
+    int buffer_size);
+
+/**
+ * Cleans up the noise reducer.
+ *
+ * @param[in,out] reducer Noise reducer
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_finalize(
+    morpho_NoiseReduction *reducer);
+
+/**
+ * Noise reduction: starts processing.
+ * The output image (output_image) may be the same as the first input image.
+ *
+ * @param[in,out] reducer       Noise reducer
+ * @param[out]    output_image  Output image
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_start(
+    morpho_NoiseReduction *reducer,
+    morpho_ImageData *output_image);
+
+/**
+ * Noise reduction: reduces noise.
+ *
+ * @param[in,out] reducer      Noise reducer
+ * @param[out]    input_image  Input image to be processed
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_reduceNoise(
+    morpho_NoiseReduction *reducer,
+    morpho_ImageData *input_image);
+
+/**
+ * Gets the image format.
+ * Can be retrieved after initialize().
+ * The buffer size must be at least 32.
+ *
+ * @param[in,out] reducer Noise reducer
+ * @param[out] format Receives the image format string
+ * @param[in] buffer_size Buffer size
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getImageFormat(
+    morpho_NoiseReduction *reducer,
+    char *format,
+    const int buffer_size);
+
+/**
+ * Gets the luma noise reduction strength level.
+ * Can be retrieved after initialize().
+ *
+ * @param[in,out] reducer Noise reducer
+ * @param[out] level Receives the luma noise reduction strength level
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getLumaNoiseReductionLevel(
+    morpho_NoiseReduction *reducer,
+    int *level);
+
+/**
+ * Gets the chroma noise reduction strength level.
+ * Can be retrieved after initialize().
+ *
+ * @param[in,out] reducer Noise reducer
+ * @param[out] level Receives the chroma noise reduction strength level
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getChromaNoiseReductionLevel(
+    morpho_NoiseReduction *reducer,
+    int *level);
+
+/**
+ * Sets the image format.
+ * Can be set after initialize() and before start().
+ * See the TRM for the formats that can be specified.
+ *
+ * @param[in,out] reducer Noise reducer
+ * @param[in] format Image format string
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_setImageFormat(
+    morpho_NoiseReduction *reducer,
+    const char *format);
+
+/**
+ * Sets the luma noise reduction strength level.
+ * Can be set after initialize() and before start().
+ *
+ * @param[in,out] reducer Noise reducer
+ * @param[in] level Luma noise reduction strength level (0-7)
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_setLumaNoiseReductionLevel(
+    morpho_NoiseReduction *reducer,
+    int level);
+
+/**
+ * Sets the chroma noise reduction strength level.
+ * Can be set after initialize() and before start().
+ *
+ * @param[in,out] reducer Noise reducer
+ * @param[in] level Chroma noise reduction strength level (0-7)
+ * @return Error code (morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_setChromaNoiseReductionLevel(
+    morpho_NoiseReduction *reducer,
+    int level);
+
+//====================================================================
+
+# ifdef __cplusplus
+} // extern "C"
+# endif
+
+//--------------------------------------------------------------------
+
+#endif // !MORPHO_NOISE_REDUCTION_H
+
+//====================================================================
+// [EOF]
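+/*
+ * Rough in-place usage sketch (not part of the original header; fmt, w, h, the
+ * frame and the 0-7 levels are caller-supplied assumptions):
+ *
+ *   morpho_NoiseReduction nr;
+ *   int size  = morpho_NoiseReduction_getBufferSize(w, h, fmt);
+ *   void *buf = malloc(size);
+ *   morpho_NoiseReduction_initialize(&nr, buf, size);
+ *   morpho_NoiseReduction_setImageFormat(&nr, fmt);
+ *   morpho_NoiseReduction_setLumaNoiseReductionLevel(&nr, y_level);
+ *   morpho_NoiseReduction_setChromaNoiseReductionLevel(&nr, c_level);
+ *   morpho_NoiseReduction_start(&nr, &frame);        // output may be the input frame
+ *   morpho_NoiseReduction_reduceNoise(&nr, &frame);
+ *   morpho_NoiseReduction_finalize(&nr);
+ *   free(buf);
+ */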

diff --git a/camera/hdr/include/morpho_noise_reduction_ext.h b/camera/hdr/include/morpho_noise_reduction_ext.h
new file mode 100644
index 0000000..4f15934
--- /dev/null
+++ b/camera/hdr/include/morpho_noise_reduction_ext.h
@@ -0,0 +1,17 @@
+#ifndef MORPHO_NR_EXT_H
+#define MORPHO_NR_EXT_H
+
+#include "morpho_noise_reduction.h"
+/*
+return == 0 : OK
+return != 0 : NG (Please print the return value to check Error types)
+*/
+MORPHO_API(int)
+LINK_mm_camera_morpho_noise_reduction(
+    unsigned char* yuvImage,
+    int width,
+    int height,
+    int y_level,
+    int c_level);
+
+#endif //MORPHO_NR_EXT_H
diff --git a/camera/hdr/include/morpho_rect_int.h b/camera/hdr/include/morpho_rect_int.h
new file mode 100644
index 0000000..b3534a4
--- /dev/null
+++ b/camera/hdr/include/morpho_rect_int.h
@@ -0,0 +1,37 @@
+/**
+ * @file     morpho_rect_int.h
+ * @brief    Structure definition for rectangle data
+ * @version  1.0.0
+ * @date     2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_RECT_INT_H
+#define MORPHO_RECT_INT_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** Rectangle data. */
+typedef struct {
+    int sx; /**< left */
+    int sy; /**< top */
+    int ex; /**< right */
+    int ey; /**< bottom */
+} morpho_RectInt;
+
+/** Sets the top-left corner (l,t) and bottom-right corner (r,b) of the rectangle rect. */
+#define morpho_RectInt_setRect(rect,l,t,r,b) do { \
+	(rect)->sx=(l);\
+	(rect)->sy=(t);\
+	(rect)->ex=(r);\
+	(rect)->ey=(b);\
+    } while(0)
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_RECT_INT_H */
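+/*
+ * Illustrative use of the macro (the width/height values are assumptions):
+ *
+ *   morpho_RectInt r;
+ *   morpho_RectInt_setRect(&r, 0, 0, width, height);  // left, top, right, bottom
+ */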

diff --git a/camera/mm-image-codec/Android.mk b/camera/mm-image-codec/Android.mk
new file mode 100644
index 0000000..582ddc9
--- /dev/null
+++ b/camera/mm-image-codec/Android.mk
@@ -0,0 +1,3 @@
+ifeq ($(TARGET_ARCH),arm)
+include $(call all-subdir-makefiles)
+endif
diff --git a/camera/mm-image-codec/qexif/qexif.h b/camera/mm-image-codec/qexif/qexif.h
new file mode 100644
index 0000000..91aedde
--- /dev/null
+++ b/camera/mm-image-codec/qexif/qexif.h
@@ -0,0 +1,1728 @@
+/*Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+
+#ifndef __QEXIF_H__
+#define __QEXIF_H__
+
+#include <stdio.h>
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t;
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t;
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+    uint32_t  num;    // Numerator
+    uint32_t  denom;  // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+    int32_t  num;    // Numerator
+    int32_t  denom;  // Denominator
+
+} srat_t;
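/* Illustrative sketch (not part of the patch): EXIF rationals carry values as
 * numerator/denominator pairs, e.g. an exposure time of 1/125 s or an
 * exposure bias of -1/3 EV.
 */
static const rat_t  example_exposure_time = { 1, 125 };  /* 1/125 s */
static const srat_t example_exposure_bias = { -1, 3 };   /* -1/3 EV */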
+
+/* Exif Tag Data Type */
+typedef enum
+{
+    EXIF_BYTE      = 1,
+    EXIF_ASCII     = 2,
+    EXIF_SHORT     = 3,
+    EXIF_LONG      = 4,
+    EXIF_RATIONAL  = 5,
+    EXIF_UNDEFINED = 7,
+    EXIF_SLONG     = 9,
+    EXIF_SRATIONAL = 10
+} exif_tag_type_t;
+
+/* Exif Tag Entry
+ * Used in exif_set_tag as an input argument and
+ * in exif_get_tag as an output argument. */
+typedef struct
+{
+    /* The Data Type of the Tag
+     * (Rational, etc.) */
+    exif_tag_type_t type;
+
+    /* Copy
+     * This field is used when a user passes this structure to
+     * be stored in an exif_info_t via the exif_set_tag method.
+     * The routine looks at this field and decides whether it is
+     * necessary to make a copy of the data pointed to by this
+     * structure (all string and array types).
+     * If this field is set to false, only a pointer to the actual
+     * data is retained, and it is the caller's responsibility to
+     * ensure the data remains valid until the exif_info_t object
+     * is destroyed.
+     */
+    uint8_t copy;
+
+    /* Data count
+     * This indicates the number of elements of the data. For example, if
+     * the type is EXIF_BYTE and the count is 1, that means the actual data
+     * is one byte and is accessible by data._byte. If the type is EXIF_BYTE
+     * and the count is more than one, the actual data is contained in an
+     * array and is accessible by data._bytes. In case of EXIF_ASCII, it
+     * indicates the string length and in case of EXIF_UNDEFINED, it indicates
+     * the length of the array.
+     */
+    uint32_t count;
+
+    /* Data
+     * A union which covers all possible data types. The user should pick
+     * the right field to use depending on the data type and the count.
+     * See in-line comment below.
+     */
+    union
+    {
+        char      *_ascii;      // EXIF_ASCII (count indicates string length)
+        uint8_t   *_bytes;      // EXIF_BYTE  (count > 1)
+        uint8_t    _byte;       // EXIF_BYTE  (count = 1)
+        uint16_t  *_shorts;     // EXIF_SHORT (count > 1)
+        uint16_t   _short;      // EXIF_SHORT (count = 1)
+        uint32_t  *_longs;      // EXIF_LONG  (count > 1)
+        uint32_t   _long;       // EXIF_LONG  (count = 1)
+        rat_t     *_rats;       // EXIF_RATIONAL  (count > 1)
+        rat_t      _rat;        // EXIF_RATIONAL  (count = 1)
+        uint8_t   *_undefined;  // EXIF_UNDEFINED (count indicates length)
+        int32_t   *_slongs;     // EXIF_SLONG (count > 1)
+        int32_t    _slong;      // EXIF_SLONG (count = 1)
+        srat_t    *_srats;      // EXIF_SRATIONAL (count > 1)
+        srat_t     _srat;       // EXIF_SRATIONAL (count = 1)
+
+    } data;
+
+} exif_tag_entry_t;
+
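/* Illustrative sketch (not part of the patch): populating an ASCII tag entry.
 * With copy set, the library is expected to duplicate the string (see the
 * field comments above), so handing the entry to the setter (e.g.
 * exif_set_tag, whose prototype is not part of this excerpt) is safe even for
 * a temporary buffer.
 */
#include <string.h>

static void example_fill_make(exif_tag_entry_t *entry)
{
    static const char make[] = "LGE";

    entry->type        = EXIF_ASCII;
    entry->copy        = 1;
    entry->count       = strlen(make) + 1;  /* includes the terminating NUL */
    entry->data._ascii = (char *)make;
}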
+/* =======================================================================
+**                          Macro Definitions
+** ======================================================================= */
+/* Enum defined to let compiler generate unique offset numbers for different
+ * tags - ordering matters! NOT INTENDED to be used by any application. */
+typedef enum
+{
+    // GPS IFD
+    GPS_VERSION_ID = 0,
+    GPS_LATITUDE_REF,
+    GPS_LATITUDE,
+    GPS_LONGITUDE_REF,
+    GPS_LONGITUDE,
+    GPS_ALTITUDE_REF,
+    GPS_ALTITUDE,
+    GPS_TIMESTAMP,
+    GPS_SATELLITES,
+    GPS_STATUS,
+    GPS_MEASUREMODE,
+    GPS_DOP,
+    GPS_SPEED_REF,
+    GPS_SPEED,
+    GPS_TRACK_REF,
+    GPS_TRACK,
+    GPS_IMGDIRECTION_REF,
+    GPS_IMGDIRECTION,
+    GPS_MAPDATUM,
+    GPS_DESTLATITUDE_REF,
+    GPS_DESTLATITUDE,
+    GPS_DESTLONGITUDE_REF,
+    GPS_DESTLONGITUDE,
+    GPS_DESTBEARING_REF,
+    GPS_DESTBEARING,
+    GPS_DESTDISTANCE_REF,
+    GPS_DESTDISTANCE,
+    GPS_PROCESSINGMETHOD,
+    GPS_AREAINFORMATION,
+    GPS_DATESTAMP,
+    GPS_DIFFERENTIAL,
+
+    // TIFF IFD
+    NEW_SUBFILE_TYPE,
+    SUBFILE_TYPE,
+    IMAGE_WIDTH,
+    IMAGE_LENGTH,
+    BITS_PER_SAMPLE,
+    COMPRESSION,
+    PHOTOMETRIC_INTERPRETATION,
+    THRESH_HOLDING,
+    CELL_WIDTH,
+    CELL_HEIGHT,
+    FILL_ORDER,
+    DOCUMENT_NAME,
+    IMAGE_DESCRIPTION,
+    MAKE,
+    MODEL,
+    STRIP_OFFSETS,
+    ORIENTATION,
+    SAMPLES_PER_PIXEL,
+    ROWS_PER_STRIP,
+    STRIP_BYTE_COUNTS,
+    MIN_SAMPLE_VALUE,
+    MAX_SAMPLE_VALUE,
+    X_RESOLUTION,
+    Y_RESOLUTION,
+    PLANAR_CONFIGURATION,
+    PAGE_NAME,
+    X_POSITION,
+    Y_POSITION,
+    FREE_OFFSET,
+    FREE_BYTE_COUNTS,
+    GRAY_RESPONSE_UNIT,
+    GRAY_RESPONSE_CURVE,
+    T4_OPTION,
+    T6_OPTION,
+    RESOLUTION_UNIT,
+    PAGE_NUMBER,
+    TRANSFER_FUNCTION,
+    SOFTWARE,
+    DATE_TIME,
+    ARTIST,
+    HOST_COMPUTER,
+    PREDICTOR,
+    WHITE_POINT,
+    PRIMARY_CHROMATICITIES,
+    COLOR_MAP,
+    HALFTONE_HINTS,
+    TILE_WIDTH,
+    TILE_LENGTH,
+    TILE_OFFSET,
+    TILE_BYTE_COUNTS,
+    INK_SET,
+    INK_NAMES,
+    NUMBER_OF_INKS,
+    DOT_RANGE,
+    TARGET_PRINTER,
+    EXTRA_SAMPLES,
+    SAMPLE_FORMAT,
+    TRANSFER_RANGE,
+    JPEG_PROC,
+    JPEG_INTERCHANGE_FORMAT,
+    JPEG_INTERCHANGE_FORMAT_LENGTH,
+    JPEG_RESTART_INTERVAL,
+    JPEG_LOSSLESS_PREDICTORS,
+    JPEG_POINT_TRANSFORMS,
+    JPEG_Q_TABLES,
+    JPEG_DC_TABLES,
+    JPEG_AC_TABLES,
+    YCBCR_COEFFICIENTS,
+    YCBCR_SUB_SAMPLING,
+    YCBCR_POSITIONING,
+    REFERENCE_BLACK_WHITE,
+    GAMMA,
+    ICC_PROFILE_DESCRIPTOR,
+    SRGB_RENDERING_INTENT,
+    IMAGE_TITLE,
+    COPYRIGHT,
+    EXIF_IFD,
+    ICC_PROFILE,
+    GPS_IFD,
+
+
+    // TIFF IFD (Thumbnail)
+    TN_IMAGE_WIDTH,
+    TN_IMAGE_LENGTH,
+    TN_BITS_PER_SAMPLE,
+    TN_COMPRESSION,
+    TN_PHOTOMETRIC_INTERPRETATION,
+    TN_IMAGE_DESCRIPTION,
+    TN_MAKE,
+    TN_MODEL,
+    TN_STRIP_OFFSETS,
+    TN_ORIENTATION,
+    TN_SAMPLES_PER_PIXEL,
+    TN_ROWS_PER_STRIP,
+    TN_STRIP_BYTE_COUNTS,
+    TN_X_RESOLUTION,
+    TN_Y_RESOLUTION,
+    TN_PLANAR_CONFIGURATION,
+    TN_RESOLUTION_UNIT,
+    TN_TRANSFER_FUNCTION,
+    TN_SOFTWARE,
+    TN_DATE_TIME,
+    TN_ARTIST,
+    TN_WHITE_POINT,
+    TN_PRIMARY_CHROMATICITIES,
+    TN_JPEGINTERCHANGE_FORMAT,
+    TN_JPEGINTERCHANGE_FORMAT_L,
+    TN_YCBCR_COEFFICIENTS,
+    TN_YCBCR_SUB_SAMPLING,
+    TN_YCBCR_POSITIONING,
+    TN_REFERENCE_BLACK_WHITE,
+    TN_COPYRIGHT,
+
+    // EXIF IFD
+    EXPOSURE_TIME,
+    F_NUMBER,
+    EXPOSURE_PROGRAM,
+    SPECTRAL_SENSITIVITY,
+    ISO_SPEED_RATING,
+    OECF,
+    EXIF_VERSION,
+    EXIF_DATE_TIME_ORIGINAL,
+    EXIF_DATE_TIME_DIGITIZED,
+    EXIF_COMPONENTS_CONFIG,
+    EXIF_COMPRESSED_BITS_PER_PIXEL,
+    SHUTTER_SPEED,
+    APERTURE,
+    BRIGHTNESS,
+    EXPOSURE_BIAS_VALUE,
+    MAX_APERTURE,
+    SUBJECT_DISTANCE,
+    METERING_MODE,
+    LIGHT_SOURCE,
+    FLASH,
+    FOCAL_LENGTH,
+    SUBJECT_AREA,
+    EXIF_MAKER_NOTE,
+    EXIF_USER_COMMENT,
+    SUBSEC_TIME,
+    SUBSEC_TIME_ORIGINAL,
+    SUBSEC_TIME_DIGITIZED,
+    EXIF_FLASHPIX_VERSION,
+    EXIF_COLOR_SPACE,
+    EXIF_PIXEL_X_DIMENSION,
+    EXIF_PIXEL_Y_DIMENSION,
+    RELATED_SOUND_FILE,
+    INTEROP,
+    FLASH_ENERGY,
+    SPATIAL_FREQ_RESPONSE,
+    FOCAL_PLANE_X_RESOLUTION,
+    FOCAL_PLANE_Y_RESOLUTION,
+    FOCAL_PLANE_RESOLUTION_UNIT,
+    SUBJECT_LOCATION,
+    EXPOSURE_INDEX,
+    SENSING_METHOD,
+    FILE_SOURCE,
+    SCENE_TYPE,
+    CFA_PATTERN,
+    CUSTOM_RENDERED,
+    EXPOSURE_MODE,
+    WHITE_BALANCE,
+    DIGITAL_ZOOM_RATIO,
+    FOCAL_LENGTH_35MM,
+    SCENE_CAPTURE_TYPE,
+    GAIN_CONTROL,
+    CONTRAST,
+    SATURATION,
+    SHARPNESS,
+    DEVICE_SETTINGS_DESCRIPTION,
+    SUBJECT_DISTANCE_RANGE,
+    IMAGE_UID,
+    PIM,
+
+    EXIF_TAG_MAX_OFFSET
+
+} exif_tag_offset_t;
+
+/* Below are the supported Tags (ID and structure for their data) */
+#define CONSTRUCT_TAGID(offset,ID) (offset << 16 | ID)
+
+// GPS tag version
+// Use EXIFTAGTYPE_GPS_VERSION_ID as the exif_tag_type (EXIF_BYTE)
+// Count should be 4
+#define _ID_GPS_VERSION_ID 0x0000
+#define EXIFTAGID_GPS_VERSION_ID \
+  CONSTRUCT_TAGID(GPS_VERSION_ID, _ID_GPS_VERSION_ID)
+#define EXIFTAGTYPE_GPS_VERSION_ID EXIF_BYTE
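/* Illustrative sketch (not part of the patch): CONSTRUCT_TAGID packs the
 * table offset (the enum above) into the upper 16 bits and the on-wire EXIF
 * tag number into the lower 16 bits, so both can be recovered from a tag ID.
 */
#include <stdint.h>

static void example_split_tag_id(exif_tag_id_t id)
{
    uint16_t exif_tag = (uint16_t)(id & 0xFFFF); /* e.g. 0x0000 for GPS_VERSION_ID */
    uint16_t offset   = (uint16_t)(id >> 16);    /* index into exif_tag_offset_t   */
    (void)exif_tag;
    (void)offset;
}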
+// North or South Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LATITUDE_REF 0x0001
+#define EXIFTAGID_GPS_LATITUDE_REF \
+  CONSTRUCT_TAGID(GPS_LATITUDE_REF, _ID_GPS_LATITUDE_REF)
+#define EXIFTAGTYPE_GPS_LATITUDE_REF EXIF_ASCII
+// Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LATITUDE 0x0002
+#define EXIFTAGID_GPS_LATITUDE CONSTRUCT_TAGID(GPS_LATITUDE, _ID_GPS_LATITUDE)
+#define EXIFTAGTYPE_GPS_LATITUDE EXIF_RATIONAL
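/* Illustrative sketch (not part of the patch): GPS latitude is carried as
 * three RATIONALs (degrees, minutes, seconds), with the hemisphere in the
 * separate GPS_LATITUDE_REF ASCII tag. Example: 37 deg 23' 15.6" North.
 */
static rat_t example_latitude[3] = { {37, 1}, {23, 1}, {156, 10} };

static void example_fill_latitude(exif_tag_entry_t *lat, exif_tag_entry_t *ref)
{
    lat->type        = EXIF_RATIONAL;
    lat->copy        = 1;
    lat->count       = 3;
    lat->data._rats  = example_latitude;

    ref->type        = EXIF_ASCII;
    ref->copy        = 1;
    ref->count       = 2;                 /* "N" plus the terminating NUL */
    ref->data._ascii = (char *)"N";
}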
+// East or West Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LONGITUDE_REF 0x0003
+#define EXIFTAGID_GPS_LONGITUDE_REF \
+  CONSTRUCT_TAGID(GPS_LONGITUDE_REF, _ID_GPS_LONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_LONGITUDE_REF EXIF_ASCII
+// Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LONGITUDE 0x0004
+#define EXIFTAGID_GPS_LONGITUDE \
+  CONSTRUCT_TAGID(GPS_LONGITUDE, _ID_GPS_LONGITUDE)
+#define EXIFTAGTYPE_GPS_LONGITUDE EXIF_RATIONAL
+// Altitude reference
+// Use EXIFTAGTYPE_GPS_ALTITUDE_REF as the exif_tag_type (EXIF_BYTE)
+#define _ID_GPS_ALTITUDE_REF 0x0005
+#define EXIFTAGID_GPS_ALTITUDE_REF \
+  CONSTRUCT_TAGID(GPS_ALTITUDE_REF, _ID_GPS_ALTITUDE_REF)
+#define EXIFTAGTYPE_GPS_ALTITUDE_REF EXIF_BYTE
+// Altitude
+// Use EXIFTAGTYPE_GPS_ALTITUDE as the exif_tag_type (EXIF_RATIONAL)
+#define _ID_GPS_ALTITUDE 0x0006
+#define EXIFTAGID_GPS_ALTITUDE CONSTRUCT_TAGID(GPS_ALTITUDE, _ID_GPS_ALTITUDE)
+#define EXIFTAGTYPE_GPS_ALTITUDE EXIF_RATIONAL
+// GPS time (atomic clock)
+// Use EXIFTAGTYPE_GPS_TIMESTAMP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_TIMESTAMP 0x0007
+#define EXIFTAGID_GPS_TIMESTAMP \
+  CONSTRUCT_TAGID(GPS_TIMESTAMP, _ID_GPS_TIMESTAMP)
+#define EXIFTAGTYPE_GPS_TIMESTAMP EXIF_RATIONAL
+// GPS Satellites
+// Use EXIFTAGTYPE_GPS_SATELLITES as the exif_tag_type (EXIF_ASCII)
+// Count can be anything.
+#define _ID_GPS_SATELLITES 0x0008
+#define EXIFTAGID_GPS_SATELLITES \
+ CONSTRUCT_TAGID(GPS_SATELLITES, _ID_GPS_SATELLITES)
+#define EXIFTAGTYPE_GPS_SATELLITES EXIF_ASCII
+// GPS Status
+// Use EXIFTAGTYPE_GPS_STATUS as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "A" - Measurement in progress
+// "V" - Measurement Interoperability
+// Other - Reserved
+#define _ID_GPS_STATUS 0x0009
+#define EXIFTAGID_GPS_STATUS CONSTRUCT_TAGID(GPS_STATUS, _ID_GPS_STATUS)
+#define EXIFTAGTYPE_GPS_STATUS EXIF_ASCII
+// GPS Measure Mode
+// Use EXIFTAGTYPE_GPS_MEASUREMODE as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "2" - 2-dimensional measurement
+// "3" - 3-dimensional measurement
+// Other - Reserved
+#define _ID_GPS_MEASUREMODE 0x000a
+#define EXIFTAGID_GPS_MEASUREMODE \
+  CONSTRUCT_TAGID(GPS_MEASUREMODE, _ID_GPS_MEASUREMODE)
+#define EXIFTAGTYPE_GPS_MEASUREMODE EXIF_ASCII
+// GPS Measurement precision (DOP)
+// Use EXIFTAGTYPE_GPS_DOP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DOP 0x000b
+#define EXIFTAGID_GPS_DOP CONSTRUCT_TAGID(GPS_DOP, _ID_GPS_DOP)
+#define EXIFTAGTYPE_GPS_DOP EXIF_RATIONAL
+// Speed Unit
+// Use EXIFTAGTYPE_GPS_SPEED_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers per hour
+// "M" - Miles per hour
+// "N" - Knots
+// Other - Reserved
+#define _ID_GPS_SPEED_REF 0x000c
+#define EXIFTAGID_GPS_SPEED_REF \
+  CONSTRUCT_TAGID(GPS_SPEED_REF, _ID_GPS_SPEED_REF)
+#define EXIFTAGTYPE_GPS_SPEED_REF EXIF_ASCII
+// Speed of GPS receiver
+// Use EXIFTAGTYPE_GPS_SPEED as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_SPEED 0x000d
+#define EXIFTAGID_GPS_SPEED CONSTRUCT_TAGID(GPS_SPEED, _ID_GPS_SPEED)
+#define EXIFTAGTYPE_GPS_SPEED EXIF_RATIONAL
+// Reference of direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_TRACK_REF 0x000e
+#define EXIFTAGID_GPS_TRACK_REF \
+  CONSTRUCT_TAGID(GPS_TRACK_REF, _ID_GPS_TRACK_REF)
+#define EXIFTAGTYPE_GPS_TRACK_REF EXIF_ASCII
+// Direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_TRACK 0x000f
+#define EXIFTAGID_GPS_TRACK CONSTRUCT_TAGID(GPS_TRACK, _ID_GPS_TRACK)
+#define EXIFTAGTYPE_GPS_TRACK EXIF_RATIONAL
+// Reference of direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_IMGDIRECTION_REF 0x0010
+#define EXIFTAGID_GPS_IMGDIRECTION_REF \
+  CONSTRUCT_TAGID(GPS_IMGDIRECTION_REF, _ID_GPS_IMGDIRECTION_REF)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION_REF EXIF_ASCII
+// Direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_IMGDIRECTION 0x0011
+#define EXIFTAGID_GPS_IMGDIRECTION \
+  CONSTRUCT_TAGID(GPS_IMGDIRECTION, _ID_GPS_IMGDIRECTION)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION EXIF_RATIONAL
+// Geodetic survey data used
+// Use EXIFTAGTYPE_GPS_MAPDATUM as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_GPS_MAPDATUM 0x0012
+#define EXIFTAGID_GPS_MAPDATUM CONSTRUCT_TAGID(GPS_MAPDATUM, _ID_GPS_MAPDATUM)
+#define EXIFTAGTYPE_GPS_MAPDATUM EXIF_ASCII
+// Reference for latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "N" - North latitude
+// "S" - South latitude
+// Other - Reserved
+#define _ID_GPS_DESTLATITUDE_REF 0x0013
+#define EXIFTAGID_GPS_DESTLATITUDE_REF \
+  CONSTRUCT_TAGID(GPS_DESTLATITUDE_REF, _ID_GPS_DESTLATITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE_REF EXIF_ASCII
+// Latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLATITUDE 0x0014
+#define EXIFTAGID_GPS_DESTLATITUDE \
+  CONSTRUCT_TAGID(GPS_DESTLATITUDE, _ID_GPS_DESTLATITUDE)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE EXIF_RATIONAL
+// Reference for longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "E" - East longitude
+// "W" - West longitude
+// Other - Reserved
+#define _ID_GPS_DESTLONGITUDE_REF 0x0015
+#define EXIFTAGID_GPS_DESTLONGITUDE_REF \
+  CONSTRUCT_TAGID(GPS_DESTLONGITUDE_REF, _ID_GPS_DESTLONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE_REF EXIF_ASCII
+// Longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLONGITUDE 0x0016
+#define EXIFTAGID_GPS_DESTLONGITUDE CONSTRUCT_TAGID(GPS_DESTLONGITUDE, _ID_GPS_DESTLONGITUDE)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE EXIF_RATIONAL
+// Reference for bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_DESTBEARING_REF 0x0017
+#define EXIFTAGID_GPS_DESTBEARING_REF \
+  CONSTRUCT_TAGID(GPS_DESTBEARING_REF, _ID_GPS_DESTBEARING_REF)
+#define EXIFTAGTYPE_GPS_DESTBEARING_REF EXIF_ASCII
+// Bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTBEARING 0x0018
+#define EXIFTAGID_GPS_DESTBEARING \
+  CONSTRUCT_TAGID(GPS_DESTBEARING, _ID_GPS_DESTBEARING)
+#define EXIFTAGTYPE_GPS_DESTBEARING EXIF_RATIONAL
+// Reference for distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers
+// "M" - Miles
+// "N" - Knots
+// Other - Reserved
+#define _ID_GPS_DESTDISTANCE_REF 0x0019
+#define EXIFTAGID_GPS_DESTDISTANCE_REF \
+  CONSTRUCT_TAGID(GPS_DESTDISTANCE_REF, _ID_GPS_DESTDISTANCE_REF)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE_REF EXIF_ASCII
+// Distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTDISTANCE 0x001a
+#define EXIFTAGID_GPS_DESTDISTANCE \
+  CONSTRUCT_TAGID(GPS_DESTDISTANCE, _ID_GPS_DESTDISTANCE)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE EXIF_RATIONAL
+// Name of GPS processing method
+// Use EXIFTAGTYPE_GPS_PROCESSINGMETHOD as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_PROCESSINGMETHOD 0x001b
+#define EXIFTAGID_GPS_PROCESSINGMETHOD \
+  CONSTRUCT_TAGID(GPS_PROCESSINGMETHOD, _ID_GPS_PROCESSINGMETHOD)
+#define EXIFTAGTYPE_GPS_PROCESSINGMETHOD EXIF_UNDEFINED
+// Name of GPS area
+// Use EXIFTAGTYPE_GPS_AREAINFORMATION as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_AREAINFORMATION 0x001c
+#define EXIFTAGID_GPS_AREAINFORMATION \
+  CONSTRUCT_TAGID(GPS_AREAINFORMATION, _ID_GPS_AREAINFORMATION)
+#define EXIFTAGTYPE_GPS_AREAINFORMATION EXIF_UNDEFINED
+// GPS date
+// Use EXIFTAGTYPE_GPS_DATESTAMP as the exif_tag_type (EXIF_ASCII)
+// It should be 11 characters long including the null-terminating character.
+#define _ID_GPS_DATESTAMP 0x001d
+#define EXIFTAGID_GPS_DATESTAMP \
+  CONSTRUCT_TAGID(GPS_DATESTAMP, _ID_GPS_DATESTAMP)
+#define EXIFTAGTYPE_GPS_DATESTAMP EXIF_ASCII
+// GPS differential correction
+// Use EXIFTAGTYPE_GPS_DIFFERENTIAL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+// 0 - Measurement without differential correction
+// 1 - Differential correction applied
+// Other - Reserved
+#define _ID_GPS_DIFFERENTIAL 0x001e
+#define EXIFTAGID_GPS_DIFFERENTIAL \
+  CONSTRUCT_TAGID(GPS_DIFFERENTIAL, _ID_GPS_DIFFERENTIAL)
+#define EXIFTAGTYPE_GPS_DIFFERENTIAL EXIF_SHORT
+// Image width
+// Use EXIFTAGTYPE_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_IMAGE_WIDTH CONSTRUCT_TAGID(IMAGE_WIDTH, _ID_IMAGE_WIDTH)
+#define EXIFTAGTYPE_IMAGE_WIDTH EXIF_LONG
+// Image height
+// Use EXIFTAGTYPE_IMAGE_LENGTH as the exif_tag_type (EXIF_SHORT_OR_LONG)
+// Count should be 1
+#define _ID_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_IMAGE_LENGTH CONSTRUCT_TAGID(IMAGE_LENGTH, _ID_IMAGE_LENGTH)
+#define EXIFTAGTYPE_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component
+// Use EXIFTAGTYPE_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_BITS_PER_SAMPLE \
+  CONSTRUCT_TAGID(BITS_PER_SAMPLE, _ID_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme
+// Use EXIFTAGTYPE_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_COMPRESSION 0x0103
+#define EXIFTAGID_COMPRESSION CONSTRUCT_TAGID(COMPRESSION, _ID_COMPRESSION)
+#define EXIFTAGTYPE_COMPRESSION EXIF_SHORT
+// Pixel composition
+// Use EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_PHOTOMETRIC_INTERPRETATION \
+  CONSTRUCT_TAGID(PHOTOMETRIC_INTERPRETATION, _ID_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+
+// Thresholding
+// Use EXIFTAGTYPE_THRESH_HOLDING as the exif_tag_type (EXIF_SHORT)
+//
+//1 = No dithering or halftoning
+//2 = Ordered dither or halftone
+//3 = Randomized dither
+#define _ID_THRESH_HOLDING 0x0107
+#define EXIFTAGID_THRESH_HOLDING \
+  CONSTRUCT_TAGID(THRESH_HOLDING, _ID_THRESH_HOLDING)
+#define EXIFTAGTYPE_THRESH_HOLDING EXIF_SHORT
+
+// Cell Width
+// Use EXIFTAGTYPE_CELL_WIDTH as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_CELL_WIDTH 0x0108
+#define EXIFTAGID_CELL_WIDTH CONSTRUCT_TAGID(CELL_WIDTH, _ID_CELL_WIDTH)
+#define EXIFTAGTYPE_CELL_WIDTH EXIF_SHORT
+// Cell Height
+// Use EXIFTAGTYPE_CELL_HEIGHT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CELL_HEIGHT 0x0109
+#define EXIFTAGID_CELL_HEIGHT CONSTRUCT_TAGID(CELL_HEIGHT, _ID_CELL_HEIGHT)
+#define EXIFTAGTYPE_CELL_HEIGHT EXIF_SHORT
+// Fill Order
+// Use EXIFTAGTYPE_FILL_ORDER as the exif_tag_type (EXIF_SHORT)
+// 	1 = Normal
+//  2 = Reversed
+#define _ID_FILL_ORDER 0x010A
+#define EXIFTAGID_FILL_ORDER CONSTRUCT_TAGID(FILL_ORDER, _ID_FILL_ORDER)
+#define EXIFTAGTYPE_FILL_ORDER EXIF_SHORT
+
+// DOCUMENT NAME
+// Use EXIFTAGTYPE_DOCUMENT_NAME as the exif_tag_type (EXIF_ASCII)
+//
+#define _ID_DOCUMENT_NAME 0x010D
+#define EXIFTAGID_DOCUMENT_NAME CONSTRUCT_TAGID(DOCUMENT_NAME, _ID_DOCUMENT_NAME)
+#define EXIFTAGTYPE_DOCUMENT_NAME EXIF_ASCII
+
+// Image title
+// Use EXIFTAGTYPE_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_IMAGE_DESCRIPTION \
+  CONSTRUCT_TAGID(IMAGE_DESCRIPTION, _ID_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer
+// Use EXIFTAGTYPE_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MAKE 0x010f
+#define EXIFTAGID_MAKE CONSTRUCT_TAGID(MAKE, _ID_MAKE)
+#define EXIFTAGTYPE_MAKE EXIF_ASCII
+// Image input equipment model
+// Use EXIFTAGTYPE_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MODEL 0x0110
+#define EXIFTAGID_MODEL CONSTRUCT_TAGID(MODEL, _ID_MODEL)
+#define EXIFTAGTYPE_MODEL EXIF_ASCII
+// Image data location
+// Use EXIFTAGTYPE_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_STRIP_OFFSETS \
+  CONSTRUCT_TAGID(STRIP_OFFSETS, _ID_STRIP_OFFSETS)
+#define EXIFTAGTYPE_STRIP_OFFSETS EXIF_LONG
+// Orientation of image
+// Use EXIFTAGTYPE_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_ORIENTATION 0x0112
+#define EXIFTAGID_ORIENTATION CONSTRUCT_TAGID(ORIENTATION, _ID_ORIENTATION)
+#define EXIFTAGTYPE_ORIENTATION EXIF_SHORT
+// Number of components
+// Use EXIFTAGTYPE_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_SAMPLES_PER_PIXEL \
+  CONSTRUCT_TAGID(SAMPLES_PER_PIXEL, _ID_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip
+// Use EXIFTAGTYPE_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_ROWS_PER_STRIP \
+  CONSTRUCT_TAGID(ROWS_PER_STRIP, _ID_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip
+// Use EXIFTAGTYPE_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_STRIP_BYTE_COUNTS \
+  CONSTRUCT_TAGID(STRIP_BYTE_COUNTS, _ID_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_STRIP_BYTE_COUNTS EXIF_LONG
+// MinSampleValue
+// Use EXIFTAGTYPE_MIN_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MIN_SAMPLE_VALUE 0x0118
+#define EXIFTAGID_MIN_SAMPLE_VALUE  \
+  CONSTRUCT_TAGID(MIN_SAMPLE_VALUE, _ID_MIN_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MIN_SAMPLE_VALUE EXIF_SHORT
+// MaxSampleValue
+// Use EXIFTAGTYPE_MAX_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MAX_SAMPLE_VALUE 0x0119
+#define EXIFTAGID_MAX_SAMPLE_VALUE CONSTRUCT_TAGID(MAX_SAMPLE_VALUE, _ID_MAX_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MAX_SAMPLE_VALUE EXIF_SHORT
+
+// Image resolution in width direction
+// Use EXIFTAGTYPE_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_X_RESOLUTION 0x011a
+#define EXIFTAGID_X_RESOLUTION \
+  CONSTRUCT_TAGID(X_RESOLUTION, _ID_X_RESOLUTION)
+#define EXIFTAGTYPE_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction
+// Use EXIFTAGTYPE_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_Y_RESOLUTION 0x011b
+#define EXIFTAGID_Y_RESOLUTION \
+  CONSTRUCT_TAGID(Y_RESOLUTION, _ID_Y_RESOLUTION)
+#define EXIFTAGTYPE_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement
+// Use EXIFTAGTYPE_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_PLANAR_CONFIGURATION \
+  CONSTRUCT_TAGID(PLANAR_CONFIGURATION, _ID_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_PLANAR_CONFIGURATION EXIF_SHORT
+// PageName
+// Use EXIFTAGTYPE_PAGE_NAME as the exif_tag_type (EXIF_ASCII)
+// Count should be 1
+#define _ID_PAGE_NAME 0x011d
+#define EXIFTAGID_PAGE_NAME CONSTRUCT_TAGID(PAGE_NAME, _ID_PAGE_NAME)
+#define EXIFTAGTYPE_PAGE_NAME EXIF_ASCII
+// XPosition
+// Use EXIFTAGTYPE_X_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_X_POSITION 0x011e
+#define EXIFTAGID_X_POSITION CONSTRUCT_TAGID(X_POSITION, _ID_X_POSITION)
+#define EXIFTAGTYPE_X_POSITION EXIF_RATIONAL
+// YPosition
+// Use EXIFTAGTYPE_Y_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_Y_POSITION 0x011f
+#define EXIFTAGID_Y_POSITION CONSTRUCT_TAGID(Y_POSITION, _ID_Y_POSITION)
+#define EXIFTAGTYPE_Y_POSITION EXIF_RATIONAL
+
+// FREE_OFFSET
+// Use EXIFTAGTYPE_FREE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_OFFSET 0x0120
+#define EXIFTAGID_FREE_OFFSET CONSTRUCT_TAGID(FREE_OFFSET, _ID_FREE_OFFSET)
+#define EXIFTAGTYPE_FREE_OFFSET EXIF_LONG
+// FREE_BYTE_COUNTS
+// Use EXIFTAGTYPE_FREE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_BYTE_COUNTS 0x0121
+#define EXIFTAGID_FREE_BYTE_COUNTS \
+  CONSTRUCT_TAGID(FREE_BYTE_COUNTS, _ID_FREE_BYTE_COUNTS)
+#define EXIFTAGTYPE_FREE_BYTE_COUNTS EXIF_LONG
+
+// GrayResponseUnit
+// Use EXIFTAGTYPE_GRAY_RESPONSE_UNIT as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_UNIT 0x0122
+#define EXIFTAGID_GRAY_RESPONSE_UNIT \
+  CONSTRUCT_TAGID(GRAY_RESPONSE_UNIT, _ID_GRAY_RESPONSE_UNIT)
+#define EXIFTAGTYPE_GRAY_RESPONSE_UNIT EXIF_SHORT
+// GrayResponseCurve
+// Use EXIFTAGTYPE_GRAY_RESPONSE_CURVE  as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_CURVE 0x0123
+#define EXIFTAGID_GRAY_RESPONSE_CURVE \
+  CONSTRUCT_TAGID(GRAY_RESPONSE_CURVE , _ID_GRAY_RESPONSE_CURVE )
+#define EXIFTAGTYPE_GRAY_RESPONSE_CURVE EXIF_SHORT
+
+// T4_OPTION
+// Use EXIFTAGTYPE_T4_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T4_OPTION  0x0124
+#define EXIFTAGID_T4_OPTION CONSTRUCT_TAGID(T4_OPTION, _ID_T4_OPTION)
+#define EXIFTAGTYPE_T4_OPTION EXIF_LONG
+// T6_OPTION
+// Use EXIFTAGTYPE_T6_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T6_OPTION 0x0125
+#define EXIFTAGID_T6_OPTION CONSTRUCT_TAGID(T6_OPTION, _ID_T6_OPTION)
+#define EXIFTAGTYPE_T6_OPTION EXIF_LONG
+
+// Unit of X and Y resolution
+// Use EXIFTAGTYPE_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_RESOLUTION_UNIT 0x0128
+#define EXIFTAGID_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(RESOLUTION_UNIT, _ID_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_RESOLUTION_UNIT EXIF_SHORT
+
+// Page Number
+// Use EXIFTAGTYPE_PAGE_NUMBER  as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PAGE_NUMBER 0x0129
+#define EXIFTAGID_PAGE_NUMBER CONSTRUCT_TAGID(PAGE_NUMBER, _ID_PAGE_NUMBER)
+#define EXIFTAGTYPE_PAGE_NUMBER EXIF_SHORT
+// Transfer function
+// Use EXIFTAGTYPE_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TRANSFER_FUNCTION \
+  CONSTRUCT_TAGID(TRANSFER_FUNCTION, _ID_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TRANSFER_FUNCTION EXIF_SHORT
+// Software used
+// Use EXIFTAGTYPE_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SOFTWARE 0x0131
+#define EXIFTAGID_SOFTWARE CONSTRUCT_TAGID(SOFTWARE, _ID_SOFTWARE)
+#define EXIFTAGTYPE_SOFTWARE EXIF_ASCII
+// File change date and time
+// Use EXIFTAGTYPE_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_DATE_TIME 0x0132
+#define EXIFTAGID_DATE_TIME CONSTRUCT_TAGID(DATE_TIME, _ID_DATE_TIME)
+#define EXIFTAGTYPE_DATE_TIME EXIF_ASCII
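/* Illustrative sketch (not part of the patch): the count of 20 matches the
 * EXIF date/time layout "YYYY:MM:DD HH:MM:SS" -- 19 characters plus the
 * terminating NUL.
 */
#include <time.h>

static void example_format_date_time(char out[20])
{
    time_t now = time(NULL);
    strftime(out, 20, "%Y:%m:%d %H:%M:%S", localtime(&now));
}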
+// ARTIST, person who created this image
+// Use EXIFTAGTYPE_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_ARTIST 0x013b
+#define EXIFTAGID_ARTIST CONSTRUCT_TAGID(ARTIST, _ID_ARTIST)
+#define EXIFTAGTYPE_ARTIST EXIF_ASCII
+// Host Computer Name
+// Use EXIFTAGTYPE_HOST_COMPUTER as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_HOST_COMPUTER 0x013c
+#define EXIFTAGID_HOST_COMPUTER \
+  CONSTRUCT_TAGID(HOST_COMPUTER , _ID_HOST_COMPUTER )
+#define EXIFTAGTYPE_HOST_COMPUTER EXIF_ASCII
+// Predictor
+// Use EXIFTAGTYPE_PREDICTOR as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_PREDICTOR 0x013d
+#define EXIFTAGID_PREDICTOR CONSTRUCT_TAGID(PREDICTOR , _ID_PREDICTOR )
+#define EXIFTAGTYPE_PREDICTOR EXIF_SHORT
+// White point chromaticity
+// Use EXIFTAGTYPE_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_WHITE_POINT 0x013e
+#define EXIFTAGID_WHITE_POINT CONSTRUCT_TAGID(WHITE_POINT, _ID_WHITE_POINT)
+#define EXIFTAGTYPE_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries
+// Use EXIFTAGTYPE_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_PRIMARY_CHROMATICITIES \
+  CONSTRUCT_TAGID(PRIMARY_CHROMATICITIES, _ID_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+
+// COLOR_MAP
+// Use EXIFTAGTYPE_COLOR_MAP as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_COLOR_MAP 0x0140
+#define EXIFTAGID_COLOR_MAP CONSTRUCT_TAGID(COLOR_MAP, _ID_COLOR_MAP)
+#define EXIFTAGTYPE_COLOR_MAP EXIF_SHORT
+// HALFTONE_HINTS
+// Use EXIFTAGTYPE_HALFTONE_HINTS as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_HALFTONE_HINTS 0x0141
+#define EXIFTAGID_HALFTONE_HINTS \
+  CONSTRUCT_TAGID(HALFTONE_HINTS, _ID_HALFTONE_HINTS)
+#define EXIFTAGTYPE_HALFTONE_HINTS EXIF_SHORT
+
+// TILE_WIDTH
+// Use EXIFTAGTYPE_TILE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 6
+#define _ID_TILE_WIDTH 0x0142
+#define EXIFTAGID_TILE_WIDTH CONSTRUCT_TAGID(TILE_WIDTH, _ID_TILE_WIDTH)
+#define EXIFTAGTYPE_TILE_WIDTH EXIF_LONG
+// TILE_LENGTH
+// Use EXIFTAGTYPE_TILE_LENGTH  as the exif_tag_type (EXIF_LONG)
+// Count should be 6
+#define _ID_TILE_LENGTH 0x0143
+#define EXIFTAGID_TILE_LENGTH CONSTRUCT_TAGID(TILE_LENGTH , _ID_TILE_LENGTH )
+#define EXIFTAGTYPE_TILE_LENGTH EXIF_LONG
+// TILE_OFFSET
+// Use EXIFTAGTYPE_TILE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_OFFSET 0x0144
+#define EXIFTAGID_TILE_OFFSET CONSTRUCT_TAGID(TILE_OFFSET , _ID_TILE_OFFSET )
+#define EXIFTAGTYPE_TILE_OFFSET EXIF_LONG
+// Tile Byte Counts
+// Use EXIFTAGTYPE_TILE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_BYTE_COUNTS 0x0145
+#define EXIFTAGID_TILE_BYTE_COUNTS  \
+  CONSTRUCT_TAGID(TILE_BYTE_COUNTS  , _ID_TILE_BYTE_COUNTS  )
+#define EXIFTAGTYPE_TILE_BYTE_COUNTS EXIF_LONG
+
+// INK_SET
+// Use EXIFTAGTYPE_INK_SET as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_INK_SET 0x014c
+#define EXIFTAGID_INK_SET CONSTRUCT_TAGID(INK_SET , _ID_INK_SET )
+#define EXIFTAGTYPE_INK_SET EXIF_SHORT
+// INK_NAMES
+// Use EXIFTAGTYPE_INK_NAMES  as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_INK_NAMES 0x014D
+#define EXIFTAGID_INK_NAMES CONSTRUCT_TAGID(INK_NAMES , _ID_INK_NAMES)
+#define EXIFTAGTYPE_INK_NAMES EXIF_ASCII
+// NUMBER_OF_INKS
+// Use EXIFTAGTYPE_NUMBER_OF_INKS  as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_NUMBER_OF_INKS 0x014e
+#define EXIFTAGID_NUMBER_OF_INKS \
+  CONSTRUCT_TAGID(NUMBER_OF_INKS , _ID_NUMBER_OF_INKS )
+#define EXIFTAGTYPE_NUMBER_OF_INKS EXIF_SHORT
+
+// DOT_RANGE
+// Use EXIFTAGTYPE_DOT_RANGE  as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_DOT_RANGE 0x0150
+#define EXIFTAGID_DOT_RANGE CONSTRUCT_TAGID(DOT_RANGE , _ID_DOT_RANGE )
+#define EXIFTAGTYPE_DOT_RANGE EXIF_ASCII
+
+// TARGET_PRINTER
+// Use EXIFTAGTYPE_TARGET_PRINTER  as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_TARGET_PRINTER 0x0151
+#define EXIFTAGID_TARGET_PRINTER \
+  CONSTRUCT_TAGID(TARGET_PRINTER , _ID_TARGET_PRINTER)
+#define EXIFTAGTYPE_TARGET_PRINTER EXIF_ASCII
+// EXTRA_SAMPLES
+// Use EXIFTAGTYPE_EXTRA_SAMPLES as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_EXTRA_SAMPLES 0x0152
+#define EXIFTAGID_EXTRA_SAMPLES \
+  CONSTRUCT_TAGID(EXTRA_SAMPLES , _ID_EXTRA_SAMPLES )
+#define EXIFTAGTYPE_EXTRA_SAMPLES EXIF_SHORT
+
+// SAMPLE_FORMAT
+// Use EXIFTAGTYPE_SAMPLE_FORMAT  as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_SAMPLE_FORMAT 0x0153
+#define EXIFTAGID_SAMPLE_FORMAT \
+  CONSTRUCT_TAGID(SAMPLE_FORMAT , _ID_SAMPLE_FORMAT )
+#define EXIFTAGTYPE_SAMPLE_FORMAT EXIF_SHORT
+
+// Table of values that extends the range of the transfer function.
+// Use EXIFTAGTYPE_TRANSFER_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_TRANSFER_RANGE 0x0156
+#define EXIFTAGID_TRANSFER_RANGE \
+  CONSTRUCT_TAGID(TRANSFER_RANGE , _ID_TRANSFER_RANGE )
+#define EXIFTAGTYPE_TRANSFER_RANGE EXIF_SHORT
+
+// JPEG compression process.
+// Use EXIFTAGTYPE_JPEG_PROC as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_JPEG_PROC 0x0200
+#define EXIFTAGID_JPEG_PROC CONSTRUCT_TAGID(JPEG_PROC , _ID_JPEG_PROC )
+#define EXIFTAGTYPE_JPEG_PROC EXIF_SHORT
+
+
+// Offset to JPEG SOI
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT \
+  CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT, _ID_JPEG_INTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT_LENGTH 0x0202
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT_LENGTH \
+  CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT_LENGTH, \
+  _ID_JPEG_INTERCHANGE_FORMAT_LENGTH)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH EXIF_LONG
+
+// Length of the restart interval.
+// Use EXIFTAGTYPE_JPEG_RESTART_INTERVAL as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_RESTART_INTERVAL 0x0203
+#define EXIFTAGID_JPEG_RESTART_INTERVAL \
+  CONSTRUCT_TAGID(JPEG_RESTART_INTERVAL, _ID_JPEG_RESTART_INTERVAL)
+#define EXIFTAGTYPE_JPEG_RESTART_INTERVAL EXIF_SHORT
+
+// JPEGLosslessPredictors
+// Use EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_LOSSLESS_PREDICTORS 0x0205
+#define EXIFTAGID_JPEG_LOSSLESS_PREDICTORS  \
+  CONSTRUCT_TAGID(JPEG_LOSSLESS_PREDICTORS, _ID_JPEG_LOSSLESS_PREDICTORS)
+#define EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS EXIF_SHORT
+
+// JPEGPointTransforms
+// Use EXIFTAGTYPE_JPEG_POINT_TRANSFORMS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_POINT_TRANSFORMS 0x0206
+#define EXIFTAGID_JPEG_POINT_TRANSFORMS  \
+  CONSTRUCT_TAGID(JPEG_POINT_TRANSFORMS, _ID_JPEG_POINT_TRANSFORMS)
+#define EXIFTAGTYPE_JPEG_POINT_TRANSFORMS EXIF_SHORT
+
+// JPEG_Q_TABLES
+// Use EXIFTAGTYPE_JPEG_Q_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_Q_TABLES 0x0207
+#define EXIFTAGID_JPEG_Q_TABLES \
+  CONSTRUCT_TAGID(JPEG_Q_TABLES, _ID_JPEG_Q_TABLES)
+#define EXIFTAGTYPE_JPEG_Q_TABLES EXIF_LONG
+// JPEG_DC_TABLES
+// Use EXIFTAGTYPE_JPEG_DC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_DC_TABLES 0x0208
+#define EXIFTAGID_JPEG_DC_TABLES \
+  CONSTRUCT_TAGID(JPEG_DC_TABLES, _ID_JPEG_DC_TABLES)
+#define EXIFTAGTYPE_JPEG_DC_TABLES EXIF_LONG
+// JPEG_AC_TABLES
+// Use EXIFTAGTYPE_JPEG_AC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_AC_TABLES 0x0209
+#define EXIFTAGID_JPEG_AC_TABLES \
+  CONSTRUCT_TAGID(JPEG_AC_TABLES, _ID_JPEG_AC_TABLES)
+#define EXIFTAGTYPE_JPEG_AC_TABLES EXIF_LONG
+
+// Color space transformation matrix coefficients
+// Use EXIFTAGTYPE_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_YCBCR_COEFFICIENTS \
+  CONSTRUCT_TAGID(YCBCR_COEFFICIENTS, _ID_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C
+// Use EXIFTAGTYPE_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_YCBCR_SUB_SAMPLING  \
+  CONSTRUCT_TAGID(YCBCR_SUB_SAMPLING, _ID_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning
+// Use EXIFTAGTYPE_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_YCBCR_POSITIONING  \
+  CONSTRUCT_TAGID(YCBCR_POSITIONING, _ID_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_YCBCR_POSITIONING EXIF_SHORT
+// Pair of black and white reference values
+// Use EXIFTAGTYPE_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_REFERENCE_BLACK_WHITE \
+  CONSTRUCT_TAGID(REFERENCE_BLACK_WHITE, _ID_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// GAMMA
+// Use EXIFTAGTYPE_GAMMA as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_GAMMA 0x0301
+#define EXIFTAGID_GAMMA CONSTRUCT_TAGID(GAMMA, _ID_GAMMA)
+#define EXIFTAGTYPE_GAMMA EXIF_RATIONAL
+// Null-terminated character string that identifies an ICC profile.
+// Use EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_ICC_PROFILE_DESCRIPTOR 0x0302
+#define EXIFTAGID_ICC_PROFILE_DESCRIPTOR \
+  CONSTRUCT_TAGID(ICC_PROFILE_DESCRIPTOR, _ID_ICC_PROFILE_DESCRIPTOR)
+#define EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR EXIF_ASCII
+// SRGB_RENDERING_INTENT
+// Use EXIFTAGTYPE_SRGB_RENDERING_INTENT as the exif_tag_type (EXIF_BYTE)
+// Count should be 6
+#define _ID_SRGB_RENDERING_INTENT 0x0303
+#define EXIFTAGID_SRGB_RENDERING_INTENT \
+  CONSTRUCT_TAGID(SRGB_RENDERING_INTENT, _ID_SRGB_RENDERING_INTENT)
+#define EXIFTAGTYPE_SRGB_RENDERING_INTENT EXIF_BYTE
+
+// Null-terminated character string that specifies the title of the image.
+// Use EXIFTAGTYPE_IMAGE_TITLE as the exif_tag_type (EXIF_ASCII)
+//
+#define _ID_IMAGE_TITLE 0x0320
+#define EXIFTAGID_IMAGE_TITLE CONSTRUCT_TAGID(IMAGE_TITLE, _ID_IMAGE_TITLE)
+#define EXIFTAGTYPE_IMAGE_TITLE EXIF_ASCII
+
+// Copyright holder
+// Use EXIFTAGTYPE_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_COPYRIGHT 0x8298
+#define EXIFTAGID_COPYRIGHT CONSTRUCT_TAGID(COPYRIGHT, _ID_COPYRIGHT)
+#define EXIFTAGTYPE_COPYRIGHT EXIF_ASCII
+// New Subfile Type
+// Use EXIFTAGTYPE_NEW_SUBFILE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_NEW_SUBFILE_TYPE 0x00fe
+#define EXIFTAGID_NEW_SUBFILE_TYPE \
+  CONSTRUCT_TAGID(NEW_SUBFILE_TYPE, _ID_NEW_SUBFILE_TYPE)
+#define EXIFTAGTYPE_NEW_SUBFILE_TYPE EXIF_SHORT
+
+// Subfile Type (old-style)
+// Use EXIFTAGTYPE_SUBFILE_TYPE as the exif_tag_type (EXIF_LONG)
+// Count can be any
+#define _ID_SUBFILE_TYPE 0x00ff
+#define EXIFTAGID_SUBFILE_TYPE CONSTRUCT_TAGID(SUBFILE_TYPE, _ID_SUBFILE_TYPE)
+#define EXIFTAGTYPE_SUBFILE_TYPE EXIF_LONG
+
+// Image width (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_TN_IMAGE_WIDTH \
+  CONSTRUCT_TAGID(TN_IMAGE_WIDTH, _ID_TN_IMAGE_WIDTH)
+#define EXIFTAGTYPE_TN_IMAGE_WIDTH EXIF_LONG
+// Image height (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_LENGTH as the exif_tag_type (EXIF_SHORT_OR_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_TN_IMAGE_LENGTH \
+  CONSTRUCT_TAGID(TN_IMAGE_LENGTH, _ID_TN_IMAGE_LENGTH)
+#define EXIFTAGTYPE_TN_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component (of thumbnail)
+// Use EXIFTAGTYPE_TN_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_TN_BITS_PER_SAMPLE \
+  CONSTRUCT_TAGID(TN_BITS_PER_SAMPLE, _ID_TN_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_TN_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme (of thumbnail)
+// Use EXIFTAGTYPE_TN_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_COMPRESSION 0x0103
+#define EXIFTAGID_TN_COMPRESSION \
+  CONSTRUCT_TAGID(TN_COMPRESSION, _ID_TN_COMPRESSION)
+#define EXIFTAGTYPE_TN_COMPRESSION EXIF_SHORT
+// Pixel composition (of thumbnail)
+// Use EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION as the
+// exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_TN_PHOTOMETRIC_INTERPRETATION \
+  CONSTRUCT_TAGID(TN_PHOTOMETRIC_INTERPRETATION, \
+  _ID_TN_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+// Image title (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_TN_IMAGE_DESCRIPTION \
+  CONSTRUCT_TAGID(TN_IMAGE_DESCRIPTION, _ID_TN_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_TN_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer (of thumbnail)
+// Use EXIFTAGTYPE_TN_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MAKE 0x010f
+#define EXIFTAGID_TN_MAKE CONSTRUCT_TAGID(TN_MAKE, _ID_TN_MAKE)
+#define EXIFTAGTYPE_TN_MAKE EXIF_ASCII
+// Image input equipment model (of thumbnail)
+// Use EXIFTAGTYPE_TN_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MODEL 0x0110
+#define EXIFTAGID_TN_MODEL CONSTRUCT_TAGID(TN_MODEL, _ID_TN_MODEL)
+#define EXIFTAGTYPE_TN_MODEL EXIF_ASCII
+// Image data location (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_TN_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_TN_STRIP_OFFSETS \
+  CONSTRUCT_TAGID(TN_STRIP_OFFSETS, _ID_TN_STRIP_OFFSETS)
+#define EXIFTAGTYPE_TN_STRIP_OFFSETS EXIF_LONG
+// Orientation of image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_ORIENTATION 0x0112
+#define EXIFTAGID_TN_ORIENTATION \
+  CONSTRUCT_TAGID(TN_ORIENTATION, _ID_TN_ORIENTATION)
+#define EXIFTAGTYPE_TN_ORIENTATION EXIF_SHORT
+// Number of components (of thumbnail)
+// Use EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_TN_SAMPLES_PER_PIXEL \
+  CONSTRUCT_TAGID(TN_SAMPLES_PER_PIXEL, _ID_TN_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_TN_ROWS_PER_STRIP \
+  CONSTRUCT_TAGID(TN_ROWS_PER_STRIP, _ID_TN_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_TN_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage                    when PlanarConfiguration = 1
+//       = SamplesPerPixel * StripsPerImage  when PlanarConfiguration = 2
+#define _ID_TN_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_TN_STRIP_BYTE_COUNTS \
+  CONSTRUCT_TAGID(TN_STRIP_BYTE_COUNTS, _ID_TN_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS EXIF_LONG
+// Image resolution in width direction (of thumbnail)
+// Use EXIFTAGTYPE_TN_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_X_RESOLUTION 0x011a
+#define EXIFTAGID_TN_X_RESOLUTION \
+  CONSTRUCT_TAGID(TN_X_RESOLUTION, _ID_TN_X_RESOLUTION)
+#define EXIFTAGTYPE_TN_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction  (of thumbnail)
+// Use EXIFTAGTYPE_TN_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_Y_RESOLUTION 0x011b
+#define EXIFTAGID_TN_Y_RESOLUTION \
+  CONSTRUCT_TAGID(TN_Y_RESOLUTION, _ID_TN_Y_RESOLUTION)
+#define EXIFTAGTYPE_TN_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement (of thumbnail)
+// Use EXIFTAGTYPE_TN_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_TN_PLANAR_CONFIGURATION \
+  CONSTRUCT_TAGID(TN_PLANAR_CONFIGURATION, _ID_TN_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_TN_PLANAR_CONFIGURATION EXIF_SHORT
+// Unit of X and Y resolution (of thumbnail)
+// Use EXIFTAGTYPE_TN_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_RESOLUTION_UNIT 0x128
+#define EXIFTAGID_TN_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(TN_RESOLUTION_UNIT, _ID_TN_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_TN_RESOLUTION_UNIT EXIF_SHORT
+// Transfer function (of thumbnail)
+// Use EXIFTAGTYPE_TN_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TN_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TN_TRANSFER_FUNCTION \
+  CONSTRUCT_TAGID(TN_TRANSFER_FUNCTION, _ID_TN_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TN_TRANSFER_FUNCTION EXIF_SHORT
+// Software used (of thumbnail)
+// Use EXIFTAGTYPE_TN_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_SOFTWARE 0x0131
+#define EXIFTAGID_TN_SOFTWARE CONSTRUCT_TAGID(TN_SOFTWARE, _ID_TN_SOFTWARE)
+#define EXIFTAGTYPE_TN_SOFTWARE EXIF_ASCII
+// File change date and time (of thumbnail)
+// Use EXIFTAGTYPE_TN_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_TN_DATE_TIME 0x0132
+#define EXIFTAGID_TN_DATE_TIME CONSTRUCT_TAGID(TN_DATE_TIME, _ID_TN_DATE_TIME)
+#define EXIFTAGTYPE_TN_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_ARTIST 0x013b
+#define EXIFTAGID_TN_ARTIST CONSTRUCT_TAGID(TN_ARTIST, _ID_TN_ARTIST)
+#define EXIFTAGTYPE_TN_ARTIST EXIF_ASCII
+// White point chromaticity (of thumbnail)
+// Use EXIFTAGTYPE_TN_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_TN_WHITE_POINT 0x013e
+#define EXIFTAGID_TN_WHITE_POINT \
+  CONSTRUCT_TAGID(TN_WHITE_POINT, _ID_TN_WHITE_POINT)
+#define EXIFTAGTYPE_TN_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries (of thumbnail)
+// Use EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_TN_PRIMARY_CHROMATICITIES \
+  CONSTRUCT_TAGID(TN_PRIMARY_CHROMATICITIES, _ID_TN_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+// Offset to JPEG SOI (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT \
+  CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT, _ID_TN_JPEGINTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT_L 0x0202
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT_L \
+  CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT_L, _ID_TN_JPEGINTERCHANGE_FORMAT_L)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L EXIF_LONG
+// Color space transformation matrix coefficients (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_TN_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_TN_YCBCR_COEFFICIENTS \
+  CONSTRUCT_TAGID(TN_YCBCR_COEFFICIENTS, _ID_TN_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_TN_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_TN_YCBCR_SUB_SAMPLING \
+  CONSTRUCT_TAGID(TN_YCBCR_SUB_SAMPLING, _ID_TN_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_TN_YCBCR_POSITIONING \
+  CONSTRUCT_TAGID(TN_YCBCR_POSITIONING, _ID_TN_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_TN_YCBCR_POSITIONING    EXIF_SHORT
+// Pair of black and white reference values (of thumbnail)
+// Use EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_TN_REFERENCE_BLACK_WHITE \
+  CONSTRUCT_TAGID(TN_REFERENCE_BLACK_WHITE, _ID_TN_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// Copyright holder (of thumbnail)
+// Use EXIFTAGTYPE_TN_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_COPYRIGHT 0x8298
+#define EXIFTAGID_TN_COPYRIGHT CONSTRUCT_TAGID(TN_COPYRIGHT, _ID_TN_COPYRIGHT)
+#define EXIFTAGTYPE_TN_COPYRIGHT EXIF_ASCII
+// Exposure time
+// Use EXIFTAGTYPE_EXPOSURE_TIME as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_TIME 0x829a
+#define EXIFTAGID_EXPOSURE_TIME \
+  CONSTRUCT_TAGID(EXPOSURE_TIME, _ID_EXPOSURE_TIME)
+#define EXIFTAGTYPE_EXPOSURE_TIME EXIF_RATIONAL
+// F number
+// Use EXIFTAGTYPE_F_NUMBER as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_F_NUMBER 0x829d
+#define EXIFTAGID_F_NUMBER \
+  CONSTRUCT_TAGID(F_NUMBER, _ID_F_NUMBER)
+#define EXIFTAGTYPE_F_NUMBER EXIF_RATIONAL
+// Exif IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_EXIF_IFD_PTR 0x8769
+#define EXIFTAGID_EXIF_IFD_PTR \
+  CONSTRUCT_TAGID(EXIF_IFD, _ID_EXIF_IFD_PTR)
+#define EXIFTAGTYPE_EXIF_IFD_PTR EXIF_LONG
+
+// ICC_PROFILE (NOT INTENDED to be accessible to user)
+#define _ID_ICC_PROFILE 0x8773
+#define EXIFTAGID_ICC_PROFILE CONSTRUCT_TAGID(ICC_PROFILE, _ID_ICC_PROFILE)
+#define EXIFTAGTYPE_ICC_PROFILE EXIF_LONG
+// Exposure program
+// Use EXIFTAGTYPE_EXPOSURE_PROGRAM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_PROGRAM 0x8822
+#define EXIFTAGID_EXPOSURE_PROGRAM \
+  CONSTRUCT_TAGID(EXPOSURE_PROGRAM, _ID_EXPOSURE_PROGRAM)
+#define EXIFTAGTYPE_EXPOSURE_PROGRAM EXIF_SHORT
+// Spectral sensitivity
+// Use EXIFTAGTYPE_SPECTRAL_SENSITIVITY as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SPECTRAL_SENSITIVITY 0x8824
+#define EXIFTAGID_SPECTRAL_SENSITIVITY \
+  CONSTRUCT_TAGID(SPECTRAL_SENSITIVITY, _ID_SPECTRAL_SENSITIVITY)
+#define EXIFTAGTYPE_SPECTRAL_SENSITIVITY EXIF_ASCII
+// GPS IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_GPS_IFD_PTR 0x8825
+#define EXIFTAGID_GPS_IFD_PTR \
+  CONSTRUCT_TAGID(GPS_IFD, _ID_GPS_IFD_PTR)
+#define EXIFTAGTYPE_GPS_IFD_PTR EXIF_LONG
+// ISO Speed Rating
+// Use EXIFTAGTYPE_ISO_SPEED_RATING as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_ISO_SPEED_RATING 0x8827
+#define EXIFTAGID_ISO_SPEED_RATING \
+  CONSTRUCT_TAGID(ISO_SPEED_RATING, _ID_ISO_SPEED_RATING)
+#define EXIFTAGTYPE_ISO_SPEED_RATING EXIF_SHORT
+// Optoelectric conversion factor
+// Use EXIFTAGTYPE_OECF as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_OECF 0x8828
+#define EXIFTAGID_OECF CONSTRUCT_TAGID(OECF, _ID_OECF)
+#define EXIFTAGTYPE_OECF EXIF_UNDEFINED
+// Exif version
+// Use EXIFTAGTYPE_EXIF_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_VERSION 0x9000
+#define EXIFTAGID_EXIF_VERSION \
+  CONSTRUCT_TAGID(EXIF_VERSION, _ID_EXIF_VERSION)
+#define EXIFTAGTYPE_EXIF_VERSION EXIF_UNDEFINED
+// Date and time of original data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_ORIGINAL 0x9003
+#define EXIFTAGID_EXIF_DATE_TIME_ORIGINAL \
+  CONSTRUCT_TAGID(EXIF_DATE_TIME_ORIGINAL, _ID_EXIF_DATE_TIME_ORIGINAL)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL EXIF_ASCII
+// Date and time of digital data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_DIGITIZED 0x9004
+#define EXIFTAGID_EXIF_DATE_TIME_DIGITIZED \
+  CONSTRUCT_TAGID(EXIF_DATE_TIME_DIGITIZED, _ID_EXIF_DATE_TIME_DIGITIZED)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED EXIF_ASCII
+// Meaning of each component
+// Use EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_COMPONENTS_CONFIG 0x9101
+#define EXIFTAGID_EXIF_COMPONENTS_CONFIG \
+  CONSTRUCT_TAGID(EXIF_COMPONENTS_CONFIG, _ID_EXIF_COMPONENTS_CONFIG)
+#define EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG EXIF_UNDEFINED
+// Image compression mode
+// Use EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXIF_COMPRESSED_BITS_PER_PIXEL 0x9102
+#define EXIFTAGID_EXIF_COMPRESSED_BITS_PER_PIXEL \
+  CONSTRUCT_TAGID(EXIF_COMPRESSED_BITS_PER_PIXEL, _ID_EXIF_COMPRESSED_BITS_PER_PIXEL)
+#define EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL EXIF_RATIONAL
+// Shutter speed
+// Use EXIFTAGTYPE_SHUTTER_SPEED as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_SHUTTER_SPEED 0x9201
+#define EXIFTAGID_SHUTTER_SPEED \
+  CONSTRUCT_TAGID(SHUTTER_SPEED, _ID_SHUTTER_SPEED)
+#define EXIFTAGTYPE_SHUTTER_SPEED EXIF_SRATIONAL
+// Aperture
+// Use EXIFTAGTYPE_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_APERTURE 0x9202
+#define EXIFTAGID_APERTURE CONSTRUCT_TAGID(APERTURE, _ID_APERTURE)
+#define EXIFTAGTYPE_APERTURE EXIF_RATIONAL
+// Brightness
+// Use EXIFTAGTYPE_BRIGHTNESS as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_BRIGHTNESS 0x9203
+#define EXIFTAGID_BRIGHTNESS CONSTRUCT_TAGID(BRIGHTNESS, _ID_BRIGHTNESS)
+#define EXIFTAGTYPE_BRIGHTNESS EXIF_SRATIONAL
+// Exposure bias
+// Use EXIFTAGTYPE_EXPOSURE_BIAS_VALUE as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_BIAS_VALUE 0x9204
+#define EXIFTAGID_EXPOSURE_BIAS_VALUE \
+  CONSTRUCT_TAGID(EXPOSURE_BIAS_VALUE, _ID_EXPOSURE_BIAS_VALUE)
+#define EXIFTAGTYPE_EXPOSURE_BIAS_VALUE EXIF_SRATIONAL
+// Maximum lens aperture
+// Use EXIFTAGTYPE_MAX_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_MAX_APERTURE 0x9205
+#define EXIFTAGID_MAX_APERTURE CONSTRUCT_TAGID(MAX_APERTURE, _ID_MAX_APERTURE)
+#define EXIFTAGTYPE_MAX_APERTURE EXIF_RATIONAL
+// Subject distance
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE 0x9206
+#define EXIFTAGID_SUBJECT_DISTANCE \
+  CONSTRUCT_TAGID(SUBJECT_DISTANCE, _ID_SUBJECT_DISTANCE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE EXIF_RATIONAL
+// Metering mode
+// Use EXIFTAGTYPE_METERING_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_METERING_MODE 0x9207
+#define EXIFTAGID_METERING_MODE \
+  CONSTRUCT_TAGID(METERING_MODE, _ID_METERING_MODE)
+#define EXIFTAGTYPE_METERING_MODE EXIF_SHORT
+// Light source
+// Use EXIFTAGTYPE_LIGHT_SOURCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_LIGHT_SOURCE 0x9208
+#define EXIFTAGID_LIGHT_SOURCE CONSTRUCT_TAGID(LIGHT_SOURCE, _ID_LIGHT_SOURCE)
+#define EXIFTAGTYPE_LIGHT_SOURCE EXIF_SHORT
+// Flash
+// Use EXIFTAGTYPE_FLASH as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FLASH 0x9209
+#define EXIFTAGID_FLASH CONSTRUCT_TAGID(FLASH, _ID_FLASH)
+#define EXIFTAGTYPE_FLASH EXIF_SHORT
+// Lens focal length
+// Use EXIFTAGTYPE_FOCAL_LENGTH as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_LENGTH 0x920a
+#define EXIFTAGID_FOCAL_LENGTH CONSTRUCT_TAGID(FOCAL_LENGTH, _ID_FOCAL_LENGTH)
+#define EXIFTAGTYPE_FOCAL_LENGTH EXIF_RATIONAL
+// Subject area
+// Use EXIFTAGTYPE_SUBJECT_AREA as exif_tag_type (EXIF_SHORT)
+// Count should be 2 or 3 or 4
+#define _ID_SUBJECT_AREA 0x9214
+#define EXIFTAGID_SUBJECT_AREA CONSTRUCT_TAGID(SUBJECT_AREA, _ID_SUBJECT_AREA)
+#define EXIFTAGTYPE_SUBJECT_AREA EXIF_SHORT
+// Maker note
+// Use EXIFTAGTYPE_EXIF_MAKER_NOTE as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_MAKER_NOTE 0x927c
+#define EXIFTAGID_EXIF_MAKER_NOTE \
+  CONSTRUCT_TAGID(EXIF_MAKER_NOTE, _ID_EXIF_MAKER_NOTE)
+#define EXIFTAGTYPE_EXIF_MAKER_NOTE EXIF_UNDEFINED
+// User comments
+// Use EXIFTAGTYPE_EXIF_USER_COMMENT as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_USER_COMMENT 0x9286
+#define EXIFTAGID_EXIF_USER_COMMENT \
+  CONSTRUCT_TAGID(EXIF_USER_COMMENT, _ID_EXIF_USER_COMMENT)
+#define EXIFTAGTYPE_EXIF_USER_COMMENT EXIF_UNDEFINED
+// Date time sub-seconds
+// Use EXIFTAGTYPE_SUBSEC_TIME as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME 0x9290
+#define EXIFTAGID_SUBSEC_TIME CONSTRUCT_TAGID(SUBSEC_TIME, _ID_SUBSEC_TIME)
+#define EXIFTAGTYPE_SEBSEC_TIME EXIF_ASCII
+// Date time original sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_ORIGINAL 0x9291
+#define EXIFTAGID_SUBSEC_TIME_ORIGINAL \
+  CONSTRUCT_TAGID(SUBSEC_TIME_ORIGINAL, _ID_SUBSEC_TIME_ORIGINAL)
+#define EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL EXIF_ASCII
+// Date time digitized sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_DIGITIZED 0x9292
+#define EXIFTAGID_SUBSEC_TIME_DIGITIZED \
+  CONSTRUCT_TAGID(SUBSEC_TIME_DIGITIZED, _ID_SUBSEC_TIME_DIGITIZED)
+#define EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED EXIF_ASCII
+// Supported Flashpix version
+// Use EXIFTAGTYPE_EXIF_FLASHPIX_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_FLASHPIX_VERSION 0xa000
+#define EXIFTAGID_EXIF_FLASHPIX_VERSION \
+  CONSTRUCT_TAGID(EXIF_FLASHPIX_VERSION, _ID_EXIF_FLASHPIX_VERSION)
+#define EXIFTAGTYPE_EXIF_FLASHPIX_VERSION EXIF_UNDEFINED
+//  Color space information
+// Use EXIFTAGTYPE_EXIF_COLOR_SPACE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_COLOR_SPACE 0xa001
+#define EXIFTAGID_EXIF_COLOR_SPACE \
+  CONSTRUCT_TAGID(EXIF_COLOR_SPACE, _ID_EXIF_COLOR_SPACE)
+#define EXIFTAGTYPE_EXIF_COLOR_SPACE EXIF_SHORT
+//  Valid image width
+// Use EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_X_DIMENSION 0xa002
+#define EXIFTAGID_EXIF_PIXEL_X_DIMENSION \
+  CONSTRUCT_TAGID(EXIF_PIXEL_X_DIMENSION, _ID_EXIF_PIXEL_X_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION EXIF_SHORT
+// Valid image height
+// Use EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_Y_DIMENSION 0xa003
+#define EXIFTAGID_EXIF_PIXEL_Y_DIMENSION \
+  CONSTRUCT_TAGID(EXIF_PIXEL_Y_DIMENSION, _ID_EXIF_PIXEL_Y_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION  EXIF_SHORT
+// Related audio file
+// Use EXIFTAGTYPE_RELATED_SOUND_FILE as the exif_tag_type (EXIF_ASCII)
+// Count should be 13
+#define _ID_RELATED_SOUND_FILE 0xa004
+#define EXIFTAGID_RELATED_SOUND_FILE \
+  CONSTRUCT_TAGID(RELATED_SOUND_FILE, _ID_RELATED_SOUND_FILE)
+#define EXIFTAGTYPE_RELATED_SOUND_FILE EXIF_ASCII
+// Interop IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_INTEROP_IFD_PTR 0xa005
+#define EXIFTAGID_INTEROP_IFD_PTR CONSTRUCT_TAGID(INTEROP, _ID_INTEROP_IFD_PTR)
+#define EXIFTAGTYPE_INTEROP_IFD_PTR EXIF_LONG
+// Flash energy
+// Use EXIFTAGTYPE_EXIF_FLASH_ENERGY as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FLASH_ENERGY 0xa20b
+#define EXIFTAGID_FLASH_ENERGY CONSTRUCT_TAGID(FLASH_ENERGY, _ID_FLASH_ENERGY)
+#define EXIFTAGTYPE_FLASH_ENERGY EXIF_RATIONAL
+// Spatial frequency response
+// Use EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE as exif_tag_type (EXIF_UNDEFINED)
+// Count would be any
+#define _ID_SPATIAL_FREQ_RESPONSE 0xa20c
+#define EXIFTAGID_SPATIAL_FREQ_RESPONSE \
+  CONSTRUCT_TAGID(SPATIAL_FREQ_RESPONSE, _ID_SPATIAL_FREQ_RESPONSE)
+#define EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE EXIF_UNDEFINED
+// Focal plane x resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_X_RESOLUTION 0xa20e
+#define EXIFTAGID_FOCAL_PLANE_X_RESOLUTION \
+  CONSTRUCT_TAGID(FOCAL_PLANE_X_RESOLUTION, _ID_FOCAL_PLANE_X_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION EXIF_RATIONAL
+// Focal plane y resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_Y_RESOLUTION 0xa20f
+#define EXIFTAGID_FOCAL_PLANE_Y_RESOLUTION \
+  CONSTRUCT_TAGID(FOCAL_PLANE_Y_RESOLUTION, _ID_FOCAL_PLANE_Y_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION EXIF_RATIONAL
+// Focal plane  resolution unit
+// Use EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT as exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_PLANE_RESOLUTION_UNIT 0xa210
+#define EXIFTAGID_FOCAL_PLANE_RESOLUTION_UNIT \
+  CONSTRUCT_TAGID(FOCAL_PLANE_RESOLUTION_UNIT, _ID_FOCAL_PLANE_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT EXIF_SHORT
+// Subject location
+// Use EXIFTAGTYPE_SUBJECT_LOCATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_SUBJECT_LOCATION 0xa214
+#define EXIFTAGID_SUBJECT_LOCATION \
+  CONSTRUCT_TAGID(SUBJECT_LOCATION, _ID_SUBJECT_LOCATION)
+#define EXIFTAGTYPE_SUBJECT_LOCATION EXIF_SHORT
+// Exposure index
+// Use EXIFTAGTYPE_EXPOSURE_INDEX as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_INDEX 0xa215
+#define EXIFTAGID_EXPOSURE_INDEX \
+  CONSTRUCT_TAGID(EXPOSURE_INDEX, _ID_EXPOSURE_INDEX)
+#define EXIFTAGTYPE_EXPOSURE_INDEX EXIF_RATIONAL
+// Sensing method
+// Use EXIFTAGTYPE_SENSING_METHOD as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SENSING_METHOD 0xa217
+#define EXIFTAGID_SENSING_METHOD \
+  CONSTRUCT_TAGID(SENSING_METHOD, _ID_SENSING_METHOD)
+#define EXIFTAGTYPE_SENSING_METHOD EXIF_SHORT
+// File source
+// Use EXIFTAGTYPE_FILE_SOURCE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_FILE_SOURCE 0xa300
+#define EXIFTAGID_FILE_SOURCE CONSTRUCT_TAGID(FILE_SOURCE, _ID_FILE_SOURCE)
+#define EXIFTAGTYPE_FILE_SOURCE EXIF_UNDEFINED
+// Scene type
+// Use EXIFTAGTYPE_SCENE_TYPE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_SCENE_TYPE 0xa301
+#define EXIFTAGID_SCENE_TYPE CONSTRUCT_TAGID(SCENE_TYPE, _ID_SCENE_TYPE)
+#define EXIFTAGTYPE_SCENE_TYPE EXIF_UNDEFINED
+// CFA pattern
+// Use EXIFTAGTYPE_CFA_PATTERN as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_CFA_PATTERN 0xa302
+#define EXIFTAGID_CFA_PATTERN CONSTRUCT_TAGID(CFA_PATTERN, _ID_CFA_PATTERN)
+#define EXIFTAGTYPE_CFA_PATTERN EXIF_UNDEFINED
+// Custom image processing
+// Use EXIFTAGTYPE_CUSTOM_RENDERED as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CUSTOM_RENDERED 0xa401
+#define EXIFTAGID_CUSTOM_RENDERED \
+  CONSTRUCT_TAGID(CUSTOM_RENDERED, _ID_CUSTOM_RENDERED)
+#define EXIFTAGTYPE_CUSTOM_RENDERED EXIF_SHORT
+// Exposure mode
+// Use EXIFTAGTYPE_EXPOSURE_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_MODE 0xa402
+#define EXIFTAGID_EXPOSURE_MODE \
+  CONSTRUCT_TAGID(EXPOSURE_MODE, _ID_EXPOSURE_MODE)
+#define EXIFTAGTYPE_EXPOSURE_MODE EXIF_SHORT
+// White balance
+// Use EXIFTAGTYPE_WHITE_BALANCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_WHITE_BALANCE 0xa403
+#define EXIFTAGID_WHITE_BALANCE \
+  CONSTRUCT_TAGID(WHITE_BALANCE, _ID_WHITE_BALANCE)
+#define EXIFTAGTYPE_WHITE_BALANCE EXIF_SHORT
+// Digital zoom ratio
+// Use EXIFTAGTYPE_DIGITAL_ZOOM_RATIO as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_DIGITAL_ZOOM_RATIO 0xa404
+#define EXIFTAGID_DIGITAL_ZOOM_RATIO \
+  CONSTRUCT_TAGID(DIGITAL_ZOOM_RATIO, _ID_DIGITAL_ZOOM_RATIO)
+#define EXIFTAGTYPE_DIGITAL_ZOOM_RATIO EXIF_RATIONAL
+// Focal length in 35mm film
+// Use EXIFTAGTYPE_FOCAL_LENGTH_35MM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_LENGTH_35MM 0xa405
+#define EXIFTAGID_FOCAL_LENGTH_35MM CONSTRUCT_TAGID(FOCAL_LENGTH_35MM, _ID_FOCAL_LENGTH_35MM)
+#define EXIFTAGTYPE_FOCAL_LENGTH_35MM EXIF_SHORT
+// Scene capture type
+// Use EXIFTAGTYPE_SCENE_CAPTURE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SCENE_CAPTURE_TYPE 0xa406
+#define EXIFTAGID_SCENE_CAPTURE_TYPE \
+  CONSTRUCT_TAGID(SCENE_CAPTURE_TYPE, _ID_SCENE_CAPTURE_TYPE)
+#define EXIFTAGTYPE_SCENE_CAPTURE_TYPE EXIF_SHORT
+// Gain control
+// Use EXIFTAGTYPE_GAIN_CONTROL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_GAIN_CONTROL 0xa407
+#define EXIFTAGID_GAIN_CONTROL CONSTRUCT_TAGID(GAIN_CONTROL, _ID_GAIN_CONTROL)
+#define EXIFTAGTYPE_GAIN_CONTROL EXIF_SHORT
+// Contrast
+// Use EXIFTAGTYPE_CONTRAST as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CONTRAST 0xa408
+#define EXIFTAGID_CONTRAST CONSTRUCT_TAGID(CONTRAST, _ID_CONTRAST)
+#define EXIFTAGTYPE_CONTRAST EXIF_SHORT
+// Saturation
+// Use EXIFTAGTYPE_SATURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SATURATION  0xa409
+#define EXIFTAGID_SATURATION CONSTRUCT_TAGID(SATURATION, _ID_SATURATION)
+#define EXIFTAGTYPE_SATURATION EXIF_SHORT
+// Sharpness
+// Use EXIFTAGTYPE_SHARPNESS as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SHARPNESS 0xa40a
+#define EXIFTAGID_SHARPNESS CONSTRUCT_TAGID(SHARPNESS, _ID_SHARPNESS)
+#define EXIFTAGTYPE_SHARPNESS EXIF_SHORT
+// Device settings description
+// Use EXIFTAGID_DEVICE_SETTINGS_DESCRIPTION as exif_tag_type (EXIF_UNDEFINED)
+// Count could be any
+#define _ID_DEVICE_SETTINGS_DESCRIPTION 0xa40b
+#define EXIFTAGID_DEVICE_SETTINGS_DESCRIPTION \
+  CONSTRUCT_TAGID(DEVICE_SETTINGS_DESCRIPTION, _ID_DEVICE_SETTINGS_DESCRIPTION)
+#define EXIFTAGTYPE_DEVIC_SETTIGNS_DESCRIPTION EXIF_UNDEFINED
+// Subject distance range
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE_RANGE 0xa40c
+#define EXIFTAGID_SUBJECT_DISTANCE_RANGE \
+  CONSTRUCT_TAGID(SUBJECT_DISTANCE_RANGE, _ID_SUBJECT_DISTANCE_RANGE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE EXIF_SHORT
+// Unique image id
+// Use EXIFTAGTYPE_IMAGE_UID as the exif_tag_type (EXIF_ASCII)
+// Count should be 33
+#define _ID_IMAGE_UID 0xa420
+#define EXIFTAGID_IMAGE_UID CONSTRUCT_TAGID(IMAGE_UID, _ID_IMAGE_UID)
+#define EXIFTAGTYPE_IMAGE_UID EXIF_ASCII
+// PIM tag
+// Use EXIFTAGTYPE_PIM_TAG as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_PIM 0xc4a5
+#define EXIFTAGID_PIM_TAG CONSTRUCT_TAGID(PIM, _ID_PIM)
+#define EXIFTAGTYPE_PIM_TAG EXIF_UNDEFINED
+#endif // __QEXIF_H__
+
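The macro triplets above (_ID_*, EXIFTAGID_*, EXIFTAGTYPE_*) are consumed together when a tag entry is built. As a rough sketch only (not part of the change itself), the snippet below fills a focal-length entry; the exif_tag_entry_t field names (type, count, copy, data._rat) belong to the part of qexif.h this hunk does not show and are assumed here.

#include <stdint.h>
#include "qexif.h"

/* Sketch: build one EXIF_RATIONAL tag for the lens focal length.
 * The exif_tag_entry_t layout is assumed from the unshown top of qexif.h. */
static void fill_focal_length(exif_tag_entry_t *entry, exif_tag_id_t *id,
                              uint32_t num, uint32_t denom)
{
  *id = EXIFTAGID_FOCAL_LENGTH;            /* tag id constructed above */
  entry->type = EXIFTAGTYPE_FOCAL_LENGTH;  /* documented type: EXIF_RATIONAL */
  entry->count = 1;                        /* documented count: 1 */
  entry->copy = 1;                         /* assumed deep-copy flag */
  entry->data._rat.num = num;              /* e.g. 3970 */
  entry->data._rat.denom = denom;          /* e.g. 1000 -> 3.97 mm */
}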
diff --git a/camera/mm-image-codec/qomx_core/Android.mk b/camera/mm-image-codec/qomx_core/Android.mk
new file mode 100644
index 0000000..ad414c9
--- /dev/null
+++ b/camera/mm-image-codec/qomx_core/Android.mk
@@ -0,0 +1,27 @@
+OMX_CORE_PATH := $(call my-dir)
+
+# ------------------------------------------------------------------------------
+#                Make the shared library (libqomx_core)
+# ------------------------------------------------------------------------------
+
+include $(CLEAR_VARS)
+LOCAL_PATH := $(OMX_CORE_PATH)
+LOCAL_MODULE_TAGS := optional
+
+omx_core_defines:= -Werror \
+                   -g -O0
+
+LOCAL_CFLAGS := $(omx_core_defines)
+
+OMX_HEADER_DIR := frameworks/native/include/media/openmax
+
+LOCAL_C_INCLUDES := $(OMX_HEADER_DIR)
+LOCAL_C_INCLUDES += $(LOCAL_PATH)/../qexif
+
+LOCAL_SRC_FILES := qomx_core.c
+
+LOCAL_MODULE           := libqomx_core
+LOCAL_PRELINK_MODULE   := false
+LOCAL_SHARED_LIBRARIES := libcutils libdl
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/camera/mm-image-codec/qomx_core/QOMX_JpegExtensions.h b/camera/mm-image-codec/qomx_core/QOMX_JpegExtensions.h
new file mode 100644
index 0000000..959e991
--- /dev/null
+++ b/camera/mm-image-codec/qomx_core/QOMX_JpegExtensions.h
@@ -0,0 +1,235 @@
+/*Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#ifndef __QOMX_EXTENSIONS_H__
+#define __QOMX_EXTENSIONS_H__
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <OMX_Image.h>
+#include <qexif.h>
+
+/** qomx_image_events
+*  Qcom specific events extended from OMX_EVENT
+*  @ OMX_EVENT_THUMBNAIL_DROPPED - Indicates that the thumbnail
+*                                 size is too big to be included
+*                                 in the exif and will be
+*                                 dropped
+**/
+typedef enum {
+ OMX_EVENT_THUMBNAIL_DROPPED = OMX_EventVendorStartUnused+1
+} QOMX_IMAGE_EXT_EVENTS;
+
+/**
+*  The following macros defines the string to be used for
+*  getting the extension indices.
+**/
+#define QOMX_IMAGE_EXT_EXIF_NAME                  "OMX.QCOM.image.exttype.exif"
+#define QOMX_IMAGE_EXT_THUMBNAIL_NAME        "OMX.QCOM.image.exttype.thumbnail"
+#define QOMX_IMAGE_EXT_BUFFER_OFFSET_NAME "OMX.QCOM.image.exttype.bufferOffset"
+#define QOMX_IMAGE_EXT_MOBICAT_NAME            "OMX.QCOM.image.exttype.mobicat"
+#define QOMX_IMAGE_EXT_ENCODING_MODE_NAME        "OMX.QCOM.image.encoding.mode"
+#define QOMX_IMAGE_EXT_WORK_BUFFER_NAME      "OMX.QCOM.image.exttype.workbuffer"
+
+/** QOMX_IMAGE_EXT_INDEXTYPE
+*  This enum is an extension of the OMX_INDEXTYPE enum and
+*  specifies Qcom supported extension indexes. These indexes are
+*  associated with the extension names and can be used as
+*  indexes in the SetParameter and GetParameter functions to set
+*  or get values from qcom specific data structures
+**/
+typedef enum {
+  //Name: OMX.QCOM.image.exttype.exif
+  QOMX_IMAGE_EXT_EXIF = 0x07F00000,
+
+  //Name: OMX.QCOM.image.exttype.thumbnail
+  QOMX_IMAGE_EXT_THUMBNAIL = 0x07F00001,
+
+  //Name: OMX.QCOM.image.exttype.bufferOffset
+  QOMX_IMAGE_EXT_BUFFER_OFFSET = 0x07F00002,
+
+  //Name: OMX.QCOM.image.exttype.mobicat
+  QOMX_IMAGE_EXT_MOBICAT = 0x07F00003,
+
+  //Name: OMX.QCOM.image.encoding.mode
+  QOMX_IMAGE_EXT_ENCODING_MODE = 0x07F00004,
+
+  //Name: OMX.QCOM.image.exttype.workbuffer
+  QOMX_IMAGE_EXT_WORK_BUFFER = 0x07F00004,
+
+} QOMX_IMAGE_EXT_INDEXTYPE;
+
+/** QOMX_BUFFER_INFO
+*  The structure specifies information
+*  associated with the buffers and should be passed as appData
+*  in UseBuffer calls to the OMX component with buffer specific data.
+*  @fd - FD of the buffer allocated. If the buffer is
+*        allocated on the heap, it can be zero.
+*  @offset - Buffer offset
+**/
+
+typedef struct {
+  OMX_U32 fd;
+  OMX_U32 offset;
+} QOMX_BUFFER_INFO;
+
+/** QEXIF_INFO_DATA
+*   The basic exif structure used to construct
+*   information for a single exif tag.
+*   @tag_entry
+*   @tag_id
+**/
+typedef struct{
+  exif_tag_entry_t tag_entry;
+  exif_tag_id_t tag_id;
+} QEXIF_INFO_DATA;
+
+/**QOMX_EXIF_INFO
+*  The structure contains an array of exif tag
+*  structures (QEXIF_INFO_DATA) and should be passed to the OMX
+*  layer by the OMX client using the extension index.
+*  @exif_data - Array of exif tags
+*  @numOfEntries - Number of exif tag entries being passed in
+*                 the array
+**/
+typedef struct {
+  QEXIF_INFO_DATA *exif_data;
+  OMX_U32 numOfEntries;
+} QOMX_EXIF_INFO;
+
+/**QOMX_YUV_FRAME_INFO
+*  The structure contains all the offsets
+*  associated with the Y and cbcr buffers.
+*  @yOffset - Offset within the Y buffer
+*  @cbcrOffset - Offset within the cb/cr buffer. The array
+*                should be populated in order depending on cb
+*                first or cr first in case of planar data. For
+*                pseudo-planar data, only the first array element
+*                needs to be filled and the second element should
+*                be set to zero.
+*  @cbcrStartOffset - Start offset of the cb/cr buffer,
+*                     starting from the Y buffer. The array
+*                     should be populated in order depending on
+*                     cb first or cr first in case of planar
+*                     data. For pseudo-planar data, only the first
+*                     array element needs to be filled and the
+*                     second element should be set to zero.
+**/
+typedef struct {
+  OMX_U32 yOffset;
+  OMX_U32 cbcrOffset[2];
+  OMX_U32 cbcrStartOffset[2];
+} QOMX_YUV_FRAME_INFO;
+
+/** qomx_thumbnail_info
+*  Includes all information associated with the thumbnail
+*  @input_width - Width of the input thumbnail buffer
+*  @input_height - Height of the input thumbnail buffer
+*  @scaling_enabled - Flag indicating if thumbnail scaling is
+*  enabled.
+*  @quality - JPEG Q factor value in the range of 1-100. A factor of 1
+ *               produces the smallest, worst quality images, and a factor
+ *               of 100 produces the largest, best quality images.  A
+ *               typical default is 75 for small good quality images.
+*  @crop_info - Includes the crop width, crop height,
+*               horizontal and vertical offsets.
+*  @output_width - Output width of the thumbnail. This is
+*                the width after scaling if scaling is enabled,
+*                the width after cropping if only cropping is
+*                enabled, or the same as the input width otherwise
+*  @output_height - Output height of the thumbnail. This is
+*                the height after scaling if scaling is enabled,
+*                the height after cropping if only cropping is
+*                enabled, or the same as the input height otherwise
+**/
+typedef struct {
+  OMX_U32 input_width;
+  OMX_U32 input_height;
+  OMX_U8 scaling_enabled;
+  OMX_U32 quality;
+  OMX_CONFIG_RECTTYPE crop_info;
+  OMX_U32 output_width;
+  OMX_U32 output_height;
+  QOMX_YUV_FRAME_INFO tmbOffset;
+} QOMX_THUMBNAIL_INFO;
+
+/**qomx_mobicat
+*  Mobicat data to be passed to the OMX layer
+*  @mobicatData - Mobicat data
+*  @mobicatDataLength - length of the mobicat data
+**/
+typedef struct {
+  OMX_U8 *mobicatData;
+  OMX_U32 mobicatDataLength;
+} QOMX_MOBICAT;
+
+/**qomx_workbuffer
+*  Ion buffer to be used for the H/W encoder
+*  @fd - FD of the buffer allocated
+*  @vaddr - Buffer address
+**/
+typedef struct {
+  int fd;
+  uint8_t *vaddr;
+} QOMX_WORK_BUFFER;
+
+/** QOMX_IMG_COLOR_FORMATTYPE
+*  This enum is an extension of the OMX_COLOR_FORMATTYPE enum.
+*  It specifies Qcom supported color formats.
+**/
+typedef enum QOMX_IMG_COLOR_FORMATTYPE {
+  OMX_QCOM_IMG_COLOR_FormatYVU420SemiPlanar = OMX_COLOR_FormatVendorStartUnused + 0x300,
+  OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar,
+  OMX_QCOM_IMG_COLOR_FormatYVU422SemiPlanar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYUV422SemiPlanar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYVU444SemiPlanar,
+  OMX_QCOM_IMG_COLOR_FormatYUV444SemiPlanar,
+  OMX_QCOM_IMG_COLOR_FormatYVU420Planar,
+  OMX_QCOM_IMG_COLOR_FormatYVU422Planar,
+  OMX_QCOM_IMG_COLOR_FormatYVU422Planar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYUV422Planar_h1v2,
+  OMX_QCOM_IMG_COLOR_FormatYVU444Planar,
+  OMX_QCOM_IMG_COLOR_FormatYUV444Planar
+} QOMX_IMG_COLOR_FORMATTYPE;
+
+/** QOMX_ENCODING_MODE
+*  This enum is used to select parallel encoding
+*  or sequential encoding for the thumbnail and
+*  main image
+**/
+typedef enum {
+  OMX_Serial_Encoding,
+  OMX_Parallel_Encoding
+} QOMX_ENCODING_MODE;
+
+#ifdef __cplusplus
+ }
+#endif
+
+#endif
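One way a client is expected to use the exif extension above: resolve the vendor index by its string name, then hand the encoder a QOMX_EXIF_INFO through the standard IL setter. A minimal sketch (not part of the change), assuming `handle` was already obtained from OMX_GetHandle and `tags` was filled as in the earlier qexif sketch:

#include <OMX_Core.h>
#include "QOMX_JpegExtensions.h"

static OMX_ERRORTYPE set_exif_tags(OMX_HANDLETYPE handle,
                                   QEXIF_INFO_DATA *tags, OMX_U32 num_tags)
{
  OMX_INDEXTYPE exif_idx;
  QOMX_EXIF_INFO exif_info;
  OMX_ERRORTYPE rc;

  /* Resolve the vendor extension index from its string name. */
  rc = OMX_GetExtensionIndex(handle, QOMX_IMAGE_EXT_EXIF_NAME, &exif_idx);
  if (rc != OMX_ErrorNone)
    return rc;

  exif_info.exif_data = tags;
  exif_info.numOfEntries = num_tags;
  return OMX_SetParameter(handle, exif_idx, &exif_info);
}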
diff --git a/camera/mm-image-codec/qomx_core/qomx_core.c b/camera/mm-image-codec/qomx_core/qomx_core.c
new file mode 100644
index 0000000..3fbd3e9
--- /dev/null
+++ b/camera/mm-image-codec/qomx_core/qomx_core.c
@@ -0,0 +1,345 @@
+/*Copyright (c) 2012, 2014, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "qomx_image_core"
+#include <utils/Log.h>
+
+#include "qomx_core.h"
+
+#define BUFF_SIZE 255
+
+static omx_core_t *g_omxcore;
+
+//Map the library name with the component name
+static const comp_info_t g_comp_info[] =
+{
+  { "OMX.qcom.image.jpeg.encoder", "libqomx_jpegenc.so" },
+};
+
+static int get_idx_from_handle(OMX_IN OMX_HANDLETYPE *ahComp, int *acompIndex,
+  int *ainstanceIndex);
+
+/*==============================================================================
+* Function : OMX_Init
+* Parameters: None
+* Description: This is the first call that is made to the OMX Core
+* and initializes the OMX IL core
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Init()
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int i = 0;
+  int comp_cnt = sizeof(g_comp_info)/sizeof(g_comp_info[0]);
+
+  /* check if core is created */
+  if (g_omxcore)
+    return rc;
+
+  if (comp_cnt > OMX_COMP_MAX_NUM) {
+    ALOGE("%s:%d] cannot exceed max number of components",
+      __func__, __LINE__);
+    return OMX_ErrorUndefined;
+  }
+  /* create new global object */
+  g_omxcore = malloc(sizeof(omx_core_t));
+  if (g_omxcore) {
+    memset(g_omxcore, 0x0, sizeof(omx_core_t));
+    pthread_mutex_init(&g_omxcore->core_lock, NULL);
+
+    /* populate the library name and component name */
+    for (i = 0; i < comp_cnt; i++) {
+      g_omxcore->component[i].comp_name = g_comp_info[i].comp_name;
+      g_omxcore->component[i].lib_name = g_comp_info[i].lib_name;
+    }
+    g_omxcore->comp_cnt = comp_cnt;
+  } else {
+    rc = OMX_ErrorInsufficientResources;
+  }
+  ALOGI("%s:%d] Complete %d", __func__, __LINE__, comp_cnt);
+  return rc;
+}
+
+/*==============================================================================
+* Function : OMX_Deinit
+* Parameters: None
+* Return Value : OMX_ERRORTYPE
+* Description: Deinit all the OMX components
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_Deinit()
+{
+  if (g_omxcore) {
+    pthread_mutex_destroy(&g_omxcore->core_lock);
+    free(g_omxcore);
+    g_omxcore = NULL;
+  }
+  ALOGI("%s:%d] Complete", __func__, __LINE__);
+  return OMX_ErrorNone;
+}
+
+/*==============================================================================
+* Function : get_comp_from_list
+* Parameters: componentName
+* Return Value : component_index
+* Description: If the component is already present in the list, return the
+* component index. If not found, return -1.
+==============================================================================*/
+static int get_comp_from_list(char *comp_name)
+{
+  int index = -1, i = 0;
+
+  if (NULL == comp_name)
+    return -1;
+
+  for (i = 0; i < g_omxcore->comp_cnt; i++) {
+    if (!strcmp(g_omxcore->component[i].comp_name, comp_name)) {
+      index = i;
+      break;
+    }
+  }
+  return index;
+}
+
+/*==============================================================================
+* Function : get_free_inst_idx
+* Parameters: p_comp
+* Return Value : The next instance index if available
+* Description: Get the next available index to store the new instance of the
+*            component being created.
+*============================================================================*/
+static int get_free_inst_idx(omx_core_component_t *p_comp)
+{
+  int idx = -1, i = 0;
+
+  for (i = 0; i < OMX_COMP_MAX_INSTANCES; i++) {
+    if (NULL == p_comp->handle[i]) {
+      idx = i;
+      break;
+    }
+  }
+  return idx;
+}
+
+/*==============================================================================
+* Function : OMX_GetHandle
+* Parameters: handle, componentName, appData, callbacks
+* Return Value : OMX_ERRORTYPE
+* Description: Construct and load the requested omx library
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_GetHandle(
+  OMX_OUT OMX_HANDLETYPE* handle,
+  OMX_IN OMX_STRING componentName,
+  OMX_IN OMX_PTR appData,
+  OMX_IN OMX_CALLBACKTYPE* callBacks)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int comp_idx = 0, inst_idx = 0;
+  char libName[BUFF_SIZE] = {0};
+  void *p_obj = NULL;
+  OMX_COMPONENTTYPE *p_comp = NULL;
+  omx_core_component_t *p_core_comp = NULL;
+  OMX_BOOL close_handle = OMX_FALSE;
+
+  comp_idx = get_comp_from_list(componentName);
+  if (comp_idx < 0) {
+    ALOGE("%s:%d] Cannot find the component", __func__, __LINE__);
+    return OMX_ErrorInvalidComponent;
+  }
+
+  if (NULL == handle) {
+    ALOGE("%s:%d] Error invalid input ", __func__, __LINE__);
+    return OMX_ErrorBadParameter;
+  }
+  p_core_comp = &g_omxcore->component[comp_idx];
+
+  pthread_mutex_lock(&g_omxcore->core_lock);
+  *handle = NULL;
+
+  //If component already present get the instance index
+  inst_idx = get_free_inst_idx(p_core_comp);
+  if (inst_idx < 0) {
+    ALOGE("%s:%d] Cannot alloc new instance", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  if (FALSE == p_core_comp->open) {
+    /* load the library */
+    p_core_comp->lib_handle = dlopen(p_core_comp->lib_name, RTLD_NOW);
+    if (NULL == p_core_comp->lib_handle) {
+      ALOGE("%s:%d] Cannot load the library", __func__, __LINE__);
+      rc = OMX_ErrorInvalidComponent;
+      goto error;
+    }
+
+    p_core_comp->open = TRUE;
+    /* Init the component and get component functions */
+    p_core_comp->create_comp_func = dlsym(p_core_comp->lib_handle,
+      "create_component_fns");
+    p_core_comp->get_instance = dlsym(p_core_comp->lib_handle, "getInstance");
+
+    close_handle = OMX_TRUE;
+    if (!p_core_comp->create_comp_func || !p_core_comp->get_instance) {
+      ALOGE("%s:%d] Cannot maps the symbols", __func__, __LINE__);
+      rc = OMX_ErrorInvalidComponent;
+      goto error;
+    }
+  }
+
+  /* Call the function from the address to create the obj */
+  p_obj = (*p_core_comp->get_instance)();
+  ALOGI("%s:%d] get instance pts is %p", __func__, __LINE__, p_obj);
+  if (NULL == p_obj) {
+    ALOGE("%s:%d] Error cannot create object", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  /* Call the function from the address to get the func ptrs */
+  p_comp = (*p_core_comp->create_comp_func)(p_obj);
+  if (NULL == p_comp) {
+    ALOGE("%s:%d] Error cannot create component", __func__, __LINE__);
+    rc = OMX_ErrorInvalidComponent;
+    goto error;
+  }
+
+  *handle = p_core_comp->handle[inst_idx] = (OMX_HANDLETYPE)p_comp;
+
+  ALOGD("%s:%d] handle = %x Instanceindex = %d,"
+    "comp_idx %d g_ptr %p", __func__, __LINE__,
+    (int)p_core_comp->handle[inst_idx], inst_idx,
+    comp_idx, g_omxcore);
+
+  p_comp->SetCallbacks(p_comp, callBacks, appData);
+  pthread_mutex_unlock(&g_omxcore->core_lock);
+  ALOGI("%s:%d] Success", __func__, __LINE__);
+  return OMX_ErrorNone;
+
+error:
+
+  if (OMX_TRUE == close_handle) {
+    dlclose(p_core_comp->lib_handle);
+    p_core_comp->lib_handle = NULL;
+  }
+  pthread_mutex_unlock(&g_omxcore->core_lock);
+  ALOGE("%s:%d] Error %d", __func__, __LINE__, rc);
+  return rc;
+}
+
+/*==============================================================================
+* Function : get_idx_from_handle
+* Parameters: handle,
+* Return Value : Component present - true or false, Instance Index, Component
+* Index
+* Description: Check if the handle is present in the list and get the component
+* index and instance index for the component handle.
+==============================================================================*/
+static int get_idx_from_handle(OMX_IN OMX_HANDLETYPE *ahComp, int *aCompIdx,
+  int *aInstIdx)
+{
+  int i = 0, j = 0;
+  for (i = 0; i < g_omxcore->comp_cnt; i++) {
+    for (j = 0; j < OMX_COMP_MAX_INSTANCES; j++) {
+      if ((OMX_COMPONENTTYPE *)g_omxcore->component[i].handle[j] ==
+        (OMX_COMPONENTTYPE *)ahComp) {
+        ALOGD("%s:%d] comp_idx %d inst_idx %d", __func__, __LINE__, i, j);
+        *aCompIdx = i;
+        *aInstIdx = j;
+        return TRUE;
+      }
+    }
+  }
+  return FALSE;
+}
+
+/*==============================================================================
+* Function : is_comp_active
+* Parameters: p_core_comp
+* Return Value : int
+* Description: Check if the component has any active instances
+==============================================================================*/
+static uint8_t is_comp_active(omx_core_component_t *p_core_comp)
+{
+  uint8_t i = 0;
+  for (i = 0; i < OMX_COMP_MAX_INSTANCES; i++) {
+    if (NULL != p_core_comp->handle[i]) {
+      return TRUE;
+    }
+  }
+  return FALSE;
+}
+
+/*==============================================================================
+* Function : OMX_FreeHandle
+* Parameters: hComp
+* Return Value : OMX_ERRORTYPE
+* Description: Deinit the omx component and remove it from the global list
+==============================================================================*/
+OMX_API OMX_ERRORTYPE OMX_APIENTRY OMX_FreeHandle(
+  OMX_IN OMX_HANDLETYPE hComp)
+{
+  OMX_ERRORTYPE rc = OMX_ErrorNone;
+  int comp_idx, inst_idx;
+  OMX_COMPONENTTYPE *p_comp = NULL;
+  omx_core_component_t *p_core_comp = NULL;
+
+  ALOGV("%s:%d] ", __func__, __LINE__);
+  if (hComp == NULL) {
+    return OMX_ErrorBadParameter;
+  }
+
+  p_comp = (OMX_COMPONENTTYPE *)hComp;
+  if (FALSE == get_idx_from_handle(hComp, &comp_idx, &inst_idx)) {
+    ALOGE("%s:%d] Error invalid component", __func__, __LINE__);
+    return OMX_ErrorInvalidComponent;
+  }
+
+  pthread_mutex_lock(&g_omxcore->core_lock);
+  //Deinit the component;
+  rc = p_comp->ComponentDeInit(hComp);
+  if (rc != OMX_ErrorNone) {
+    /* Remove the handle from the comp structure */
+    ALOGE("%s:%d] Error comp deinit failed", __func__, __LINE__);
+    pthread_mutex_unlock(&g_omxcore->core_lock);
+    return OMX_ErrorInvalidComponent;
+  }
+  p_core_comp = &g_omxcore->component[comp_idx];
+  p_core_comp->handle[inst_idx] = NULL;
+  if (!is_comp_active(p_core_comp)) {
+    rc = dlclose(p_core_comp->lib_handle);
+    p_core_comp->lib_handle = NULL;
+    p_core_comp->get_instance = NULL;
+    p_core_comp->create_comp_func = NULL;
+    p_core_comp->open = FALSE;
+  } else {
+    ALOGI("%s:%d] Error Component is still Active", __func__, __LINE__);
+  }
+  pthread_mutex_unlock(&g_omxcore->core_lock);
+  ALOGV("%s:%d] Success", __func__, __LINE__);
+  return rc;
+}
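Putting the core entry points together, a client of libqomx_core would typically run through the sequence below. This is a sketch rather than code from the patch; `my_callbacks` stands in for a real OMX_CALLBACKTYPE the client would define.

#include <OMX_Core.h>
#include <OMX_Component.h>

extern OMX_CALLBACKTYPE my_callbacks;   /* assumed to be defined by the client */

static int encode_once(void)
{
  OMX_HANDLETYPE handle = NULL;
  OMX_ERRORTYPE rc;

  if (OMX_Init() != OMX_ErrorNone)
    return -1;

  /* The name must match an entry in g_comp_info, otherwise
   * OMX_GetHandle() returns OMX_ErrorInvalidComponent. */
  rc = OMX_GetHandle(&handle, "OMX.qcom.image.jpeg.encoder",
                     NULL /* appData */, &my_callbacks);
  if (rc != OMX_ErrorNone) {
    OMX_Deinit();
    return -1;
  }

  /* ... configure ports, queue buffers, wait for the encode to finish ... */

  OMX_FreeHandle(handle);   /* runs ComponentDeInit and dlclose()s the library
                               once the last instance is released */
  OMX_Deinit();
  return 0;
}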
diff --git a/camera/mm-image-codec/qomx_core/qomx_core.h b/camera/mm-image-codec/qomx_core/qomx_core.h
new file mode 100644
index 0000000..f59f7cf
--- /dev/null
+++ b/camera/mm-image-codec/qomx_core/qomx_core.h
@@ -0,0 +1,96 @@
+/*Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+    * Redistributions of source code must retain the above copyright
+      notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+      copyright notice, this list of conditions and the following
+      disclaimer in the documentation and/or other materials provided
+      with the distribution.
+    * Neither the name of The Linux Foundation nor the names of its
+      contributors may be used to endorse or promote products derived
+      from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.*/
+
+#ifndef QOMX_CORE_H
+#define QOMX_CORE_H
+
+#include <stdio.h>
+#include <unistd.h>
+#include <malloc.h>
+#include <pthread.h>
+#include <dlfcn.h>
+#include <stdlib.h>
+#include "OMX_Component.h"
+
+#define TRUE 1
+#define FALSE 0
+#define OMX_COMP_MAX_INSTANCES 3
+#define OMX_CORE_MAX_ROLES 1
+#define OMX_COMP_MAX_NUM 3
+#define OMX_SPEC_VERSION 0x00000101
+
+typedef void *(*get_instance_t)(void);
+typedef void *(*create_comp_func_t)(OMX_PTR aobj);
+
+/** comp_info_t: Structure containing the mapping
+*    between the component name and the corresponding .so library name
+*    @comp_name: name of the component
+*    @lib_name: Name of the .so library
+**/
+typedef struct comp_info_t {
+  char *comp_name;
+  char *lib_name;
+} comp_info_t;
+
+/** omx_core_component_t: OMX Component structure
+*    @handle: array of handles, one per instance of the component
+*    @roles: array of roles played by the component
+*    @comp_name: Component name
+*    @lib_name: Name of the .so library implementing the component
+*    @open: Is the component active
+*    @lib_handle: Library handle after dlopen
+*    @get_instance: Function ptr to get an instance of the component
+*    @create_comp_func: Function ptr to map the functions in the
+*     OMX handle to its respective function implementation in
+*     the component
+**/
+typedef struct _omx_core_component_t {
+  OMX_HANDLETYPE *handle[OMX_COMP_MAX_INSTANCES];  //Instance handle
+  char *roles[OMX_CORE_MAX_ROLES];  //Roles played by the component
+  char *name;  //Component Name
+  uint8_t open;  //Is component active
+  void *lib_handle;
+  get_instance_t get_instance;
+  create_comp_func_t create_comp_func;
+  char *comp_name;
+  char *lib_name;
+} omx_core_component_t;
+
+/** omx_core_t: Global structure that contains all the active
+*   components
+*    @component: array of active components
+*    @comp_cnt: Number of components currently registered
+*    with the core
+*    @core_lock: Lock to synchronize the omx core operations
+**/
+typedef struct _omx_core_t {
+  omx_core_component_t component[OMX_COMP_MAX_NUM];  //Array of components
+  int comp_cnt;
+  pthread_mutex_t core_lock;
+} omx_core_t;
+
+#endif
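For a codec library to plug into this core, it has to export the two symbols that OMX_GetHandle() resolves with dlsym() ("getInstance" and "create_component_fns"), with signatures matching get_instance_t and create_comp_func_t above. A hypothetical encoder-side sketch; the internal state type is made up for illustration:

#include <stdlib.h>
#include "OMX_Component.h"

/* hypothetical per-instance state of the codec */
typedef struct {
  int state;
} my_encoder_t;

/* Matches get_instance_t: returns an opaque per-instance object. */
void *getInstance(void)
{
  return calloc(1, sizeof(my_encoder_t));
}

/* Matches create_comp_func_t: returns a populated OMX_COMPONENTTYPE whose
 * function pointers the core hands back to the client as the handle. */
void *create_component_fns(OMX_PTR obj)
{
  OMX_COMPONENTTYPE *comp = calloc(1, sizeof(OMX_COMPONENTTYPE));
  if (comp == NULL)
    return NULL;
  comp->pComponentPrivate = obj;
  /* comp->SetCallbacks, comp->ComponentDeInit, comp->SetParameter, ...
     would be wired to the encoder's implementation here. */
  return comp;
}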
diff --git a/device.mk b/device.mk
index 334ab6b..04b7126 100644
--- a/device.mk
+++ b/device.mk
@@ -21,7 +21,11 @@
 
 
 ifeq ($(TARGET_PREBUILT_KERNEL),)
+ifeq ($(USE_SVELTE_KERNEL),true)
+LOCAL_KERNEL := device/lge/hammerhead_svelte-kernel/zImage-dtb
+else
 LOCAL_KERNEL := device/lge/hammerhead-kernel/zImage-dtb
+endif
 else
 LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL)
 endif
@@ -56,6 +60,9 @@
     device/lge/hammerhead/mixer_paths.xml:system/etc/mixer_paths.xml
 
 PRODUCT_COPY_FILES += \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/lge/hammerhead/media_codecs.xml:system/etc/media_codecs.xml \
     device/lge/hammerhead/media_profiles.xml:system/etc/media_profiles.xml
 
@@ -83,7 +90,8 @@
     frameworks/native/data/etc/android.hardware.telephony.gsm.xml:system/etc/permissions/android.hardware.telephony.gsm.xml \
     frameworks/native/data/etc/android.hardware.audio.low_latency.xml:system/etc/permissions/android.hardware.audio.low_latency.xml \
     frameworks/native/data/etc/android.hardware.bluetooth_le.xml:system/etc/permissions/android.hardware.bluetooth_le.xml \
-    frameworks/native/data/etc/android.hardware.telephony.cdma.xml:system/etc/permissions/android.hardware.telephony.cdma.xml
+    frameworks/native/data/etc/android.hardware.telephony.cdma.xml:system/etc/permissions/android.hardware.telephony.cdma.xml \
+    frameworks/native/data/etc/android.hardware.ethernet.xml:system/etc/permissions/android.hardware.ethernet.xml
 
 # For GPS
 PRODUCT_COPY_FILES += \
@@ -97,7 +105,7 @@
     device/lge/hammerhead/nfc/libnfc-brcm-20791b05.conf:system/etc/libnfc-brcm-20791b05.conf
 
 PRODUCT_COPY_FILES += \
-    device/lge/hammerhead/thermal-engine-hammerhead.conf:system/etc/thermal-engine.conf
+    device/lge/hammerhead/thermal-engine-8974.conf:system/etc/thermal-engine-8974.conf
 
 PRODUCT_TAGS += dalvik.gc.type-precise
 
@@ -186,7 +194,9 @@
     libgeofence \
     libgps.utils \
     gps.msm8974 \
-    flp.msm8974
+    flp.msm8974 \
+    liblbs_core \
+    flp.conf
 
 # NFC packages
 PRODUCT_PACKAGES += \
@@ -209,12 +219,14 @@
 
 # for off charging mode
 PRODUCT_PACKAGES += \
-    charger \
     charger_res_images
 
 PRODUCT_PACKAGES += \
     bdAddrLoader
 
+PRODUCT_PACKAGES += \
+    power.hammerhead
+
 PRODUCT_PROPERTY_OVERRIDES += \
     ro.opengles.version=196608
 
@@ -296,14 +308,18 @@
 PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
     persist.radio.use_cc_names=true
 
+# If data_no_toggle is 1, active/dormancy indications are enabled at all times.
+# If data_no_toggle is 0, there are no reports while the screen is off.
+PRODUCT_PROPERTY_OVERRIDES += \
+    persist.radio.data_no_toggle=1
+
 # Audio Configuration
 PRODUCT_PROPERTY_OVERRIDES += \
     persist.audio.handset.mic.type=digital \
     persist.audio.dualmic.config=endfire \
     persist.audio.fluence.voicecall=true \
     persist.audio.fluence.voicerec=false \
-    persist.audio.fluence.speaker=false \
-    af.resampler.quality=4
+    persist.audio.fluence.speaker=false
 
 # Setup custom emergency number list based on the MCC. This is needed by RIL
 PRODUCT_PROPERTY_OVERRIDES += \
@@ -330,6 +346,18 @@
 PRODUCT_PROPERTY_OVERRIDES += \
     ro.input.noresample=1
 
+# Modem debugger
+ifneq (,$(filter userdebug eng, $(TARGET_BUILD_VARIANT)))
+PRODUCT_PACKAGES += \
+    QXDMLogger
+
+PRODUCT_COPY_FILES += \
+    device/lge/hammerhead/init.hammerhead.diag.rc.userdebug:root/init.hammerhead.diag.rc
+else
+PRODUCT_COPY_FILES += \
+    device/lge/hammerhead/init.hammerhead.diag.rc.user:root/init.hammerhead.diag.rc
+endif
+
 # setup dalvik vm configs.
 $(call inherit-product, frameworks/native/build/phone-xhdpi-2048-dalvik-heap.mk)
 
diff --git a/init.hammerhead.diag.rc.user b/init.hammerhead.diag.rc.user
new file mode 100644
index 0000000..9d68de7
--- /dev/null
+++ b/init.hammerhead.diag.rc.user
@@ -0,0 +1 @@
+# This file gets copied as /init.hammerhead.diag.rc
diff --git a/init.hammerhead.diag.rc.userdebug b/init.hammerhead.diag.rc.userdebug
new file mode 100644
index 0000000..eaa9fe6
--- /dev/null
+++ b/init.hammerhead.diag.rc.userdebug
@@ -0,0 +1,6 @@
+# This file gets copied as /init.hammerhead.diag.rc
+
+on boot
+    # Modem logging collection
+    mkdir /data/diag_logs 0777 system system
+    chmod 666 /dev/diag
diff --git a/init.hammerhead.rc b/init.hammerhead.rc
index 33d7938..edd9b82 100644
--- a/init.hammerhead.rc
+++ b/init.hammerhead.rc
@@ -15,6 +15,7 @@
 #
 
 import init.hammerhead.usb.rc
+import init.hammerhead_svelte.rc
 
 on early-init
     mount debugfs debugfs /sys/kernel/debug
@@ -43,7 +44,6 @@
     mount_all ./fstab.hammerhead
     restorecon_recursive /persist
     setprop ro.crypto.fuse_sdcard true
-    write /sys/kernel/boot_adsp/boot 1
 
 on early-boot
     # set RLIMIT_MEMLOCK to 64MB
@@ -145,6 +145,8 @@
     chmod 664 /sys/class/leds/lcd-backlight/brightness
 
 on post-fs-data
+    write /sys/kernel/boot_adsp/boot 1
+
     mkdir /data/media 0770 media_rw media_rw
 
     # NFC: create data/nfc for nv storage
@@ -289,6 +291,12 @@
     # enable slimport detection
     write /sys/module/slimport/parameters/enable_irq 1
 
+    # enable logging of wake up reasons to kernel logs
+    write /sys/module/msm_show_resume_irq/parameters/debug_mask 1
+
+on property:ro.debuggable=1
+    start ssr_ramdump
+
 service rmt_storage /system/bin/rmt_storage
     class core
     user root
@@ -322,8 +330,7 @@
 
 service p2p_supplicant /system/bin/wpa_supplicant \
     -iwlan0 -Dnl80211 -c/data/misc/wifi/wpa_supplicant.conf \
-    -I/system/etc/wifi/wpa_supplicant_overlay.conf \
-    -O/data/misc/wifi/sockets -N \
+    -I/system/etc/wifi/wpa_supplicant_overlay.conf -N \
     -ip2p0 -Dnl80211 -c/data/misc/wifi/p2p_supplicant.conf \
     -I/system/etc/wifi/p2p_supplicant_overlay.conf \
     -puse_p2p_group_interface=1 -e/data/misc/wifi/entropy.bin \
@@ -340,7 +347,6 @@
 service wpa_supplicant /system/bin/wpa_supplicant \
     -iwlan0 -Dnl80211 -c/data/misc/wifi/wpa_supplicant.conf \
     -I/system/etc/wifi/wpa_supplicant_overlay.conf \
-    -O/data/misc/wifi/sockets \
     -e/data/misc/wifi/entropy.bin -g@android:wpa_wlan0
     #   we will start as root and wpa_supplicant will switch to user wifi
     #   after setting up the capabilities required for WEXT
@@ -371,6 +377,16 @@
     disabled
     oneshot
 
+service dhcpcd_eth0 /system/bin/dhcpcd -aABDKL
+    class late_start
+    disabled
+    oneshot
+
+service iprenew_eth0 /system/bin/dhcpcd -n
+    class late_start
+    disabled
+    oneshot
+
 service dhcpcd_bnep0 /system/bin/dhcpcd -BKLG
     disabled
     oneshot
@@ -400,16 +416,17 @@
     oneshot
 
 service qseecomd /system/bin/qseecomd
-    class late_start
-    user system
-    group system
+    class core
+    user root
+    group root
 
 # virtual sdcard daemon running as media_rw (1023)
 service sdcard /system/bin/sdcard -u 1023 -g 1023 -l /data/media /mnt/shell/emulated
     class late_start
 
-service charger /charger
+service charger /sbin/healthd -c
     class charger
+    critical
     seclabel u:r:healthd:s0
 
 service qcamerasvr /system/bin/mm-qcamera-daemon
@@ -439,9 +456,9 @@
 
 service ssr_ramdump /system/bin/subsystem_ramdump -m -t emmc
     class main
-    disabled
     user root
     group root
+    disabled
 
 service thermal-engine /system/bin/thermal-engine-hh
    class main
diff --git a/init.hammerhead.usb.rc b/init.hammerhead.usb.rc
index 6fdaf22..b184663 100644
--- a/init.hammerhead.usb.rc
+++ b/init.hammerhead.usb.rc
@@ -18,9 +18,9 @@
     write /sys/class/android_usb/android0/f_rndis/wceis 1
 
 on boot
-    write /sys/class/android_usb/android0/iSerial $ro.serialno
-    write /sys/class/android_usb/android0/iManufacturer $ro.product.manufacturer
-    write /sys/class/android_usb/android0/iProduct $ro.product.model
+    write /sys/class/android_usb/android0/iSerial ${ro.serialno}
+    write /sys/class/android_usb/android0/iManufacturer ${ro.product.manufacturer}
+    write /sys/class/android_usb/android0/iProduct ${ro.product.model}
 
 # MTP
 on property:sys.usb.config=mtp
diff --git a/kernel-headers/media/msmb_isp.h b/kernel-headers/media/msmb_isp.h
index 98cbcb3..d19d92c 100644
--- a/kernel-headers/media/msmb_isp.h
+++ b/kernel-headers/media/msmb_isp.h
@@ -183,249 +183,251 @@
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
  STOP_STREAM,
  START_STREAM,
+ STOP_IMMEDIATELY,
 };
-struct msm_vfe_axi_stream_cfg_cmd {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct msm_vfe_axi_stream_cfg_cmd {
  uint8_t num_streams;
  uint32_t stream_handle[MAX_NUM_STREAM];
  enum msm_vfe_axi_stream_cmd cmd;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 enum msm_vfe_axi_stream_update_type {
  ENABLE_STREAM_BUF_DIVERT,
  DISABLE_STREAM_BUF_DIVERT,
- UPDATE_STREAM_FRAMEDROP_PATTERN,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ UPDATE_STREAM_FRAMEDROP_PATTERN,
  UPDATE_STREAM_REQUEST_FRAMES,
 };
 struct msm_vfe_axi_stream_update_cmd {
- uint32_t stream_handle;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t stream_handle;
  enum msm_vfe_axi_stream_update_type update_type;
  enum msm_vfe_frame_skip_pattern skip_pattern;
  uint32_t request_frm_num;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 enum msm_isp_stats_type {
  MSM_ISP_STATS_AEC,
  MSM_ISP_STATS_AF,
- MSM_ISP_STATS_AWB,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ MSM_ISP_STATS_AWB,
  MSM_ISP_STATS_RS,
  MSM_ISP_STATS_CS,
  MSM_ISP_STATS_IHIST,
- MSM_ISP_STATS_SKIN,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ MSM_ISP_STATS_SKIN,
  MSM_ISP_STATS_BG,
  MSM_ISP_STATS_BF,
  MSM_ISP_STATS_BE,
- MSM_ISP_STATS_BHIST,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ MSM_ISP_STATS_BHIST,
  MSM_ISP_STATS_MAX
 };
 struct msm_vfe_stats_stream_request_cmd {
- uint32_t session_id;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t session_id;
  uint32_t stream_id;
  enum msm_isp_stats_type stats_type;
  uint32_t composite_flag;
- uint32_t framedrop_pattern;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t framedrop_pattern;
  uint32_t irq_subsample_pattern;
  uint32_t buffer_offset;
  uint32_t stream_handle;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 struct msm_vfe_stats_stream_release_cmd {
  uint32_t stream_handle;
 };
-struct msm_vfe_stats_stream_cfg_cmd {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct msm_vfe_stats_stream_cfg_cmd {
  uint8_t num_streams;
  uint32_t stream_handle[MSM_ISP_STATS_MAX];
  uint8_t enable;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 enum msm_vfe_reg_cfg_type {
  VFE_WRITE,
  VFE_WRITE_MB,
- VFE_READ,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ VFE_READ,
  VFE_CFG_MASK,
  VFE_WRITE_DMI_16BIT,
  VFE_WRITE_DMI_32BIT,
- VFE_WRITE_DMI_64BIT,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ VFE_WRITE_DMI_64BIT,
  VFE_READ_DMI_16BIT,
  VFE_READ_DMI_32BIT,
  VFE_READ_DMI_64BIT,
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 struct msm_vfe_cfg_cmd2 {
  uint16_t num_cfg;
  uint16_t cmd_len;
- void __user *cfg_data;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ void __user *cfg_data;
  void __user *cfg_cmd;
 };
 struct msm_vfe_reg_rw_info {
- uint32_t reg_offset;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t reg_offset;
  uint32_t cmd_data_offset;
  uint32_t len;
 };
-struct msm_vfe_reg_mask_info {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct msm_vfe_reg_mask_info {
  uint32_t reg_offset;
  uint32_t mask;
  uint32_t val;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 struct msm_vfe_reg_dmi_info {
  uint32_t hi_tbl_offset;
  uint32_t lo_tbl_offset;
- uint32_t len;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t len;
 };
 struct msm_vfe_reg_cfg_cmd {
  union {
- struct msm_vfe_reg_rw_info rw_info;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct msm_vfe_reg_rw_info rw_info;
  struct msm_vfe_reg_mask_info mask_info;
  struct msm_vfe_reg_dmi_info dmi_info;
  } u;
- enum msm_vfe_reg_cfg_type cmd_type;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum msm_vfe_reg_cfg_type cmd_type;
 };
 enum msm_isp_buf_type {
  ISP_PRIVATE_BUF,
- ISP_SHARE_BUF,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ ISP_SHARE_BUF,
  MAX_ISP_BUF_TYPE,
 };
 struct msm_isp_buf_request {
- uint32_t session_id;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t session_id;
  uint32_t stream_id;
  uint8_t num_buf;
  uint32_t handle;
- enum msm_isp_buf_type buf_type;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ enum msm_isp_buf_type buf_type;
 };
 struct msm_isp_qbuf_info {
  uint32_t handle;
- int buf_idx;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ int buf_idx;
  struct v4l2_buffer buffer;
  uint32_t dirty_buf;
 };
-struct msm_vfe_axi_src_state {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct msm_vfe_axi_src_state {
  enum msm_vfe_input_src input_src;
  uint32_t src_active;
 };
-enum msm_isp_event_idx {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+enum msm_isp_event_idx {
  ISP_REG_UPDATE = 0,
  ISP_START_ACK = 1,
  ISP_STOP_ACK = 2,
- ISP_IRQ_VIOLATION = 3,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ ISP_IRQ_VIOLATION = 3,
  ISP_WM_BUS_OVERFLOW = 4,
  ISP_STATS_OVERFLOW = 5,
  ISP_CAMIF_ERROR = 6,
- ISP_SOF = 7,
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ ISP_SOF = 7,
  ISP_EOF = 8,
  ISP_FRAME_DROP = 9,
  ISP_EVENT_MAX = 10
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 #define ISP_EVENT_OFFSET 8
 #define ISP_EVENT_BASE (V4L2_EVENT_PRIVATE_START)
 #define ISP_BUF_EVENT_BASE (ISP_EVENT_BASE + (1 << ISP_EVENT_OFFSET))
-#define ISP_STATS_EVENT_BASE (ISP_EVENT_BASE + (2 << ISP_EVENT_OFFSET))
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ISP_STATS_EVENT_BASE (ISP_EVENT_BASE + (2 << ISP_EVENT_OFFSET))
 #define ISP_EVENT_REG_UPDATE (ISP_EVENT_BASE + ISP_REG_UPDATE)
 #define ISP_EVENT_START_ACK (ISP_EVENT_BASE + ISP_START_ACK)
 #define ISP_EVENT_STOP_ACK (ISP_EVENT_BASE + ISP_STOP_ACK)
-#define ISP_EVENT_IRQ_VIOLATION (ISP_EVENT_BASE + ISP_IRQ_VIOLATION)
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ISP_EVENT_IRQ_VIOLATION (ISP_EVENT_BASE + ISP_IRQ_VIOLATION)
 #define ISP_EVENT_WM_BUS_OVERFLOW (ISP_EVENT_BASE + ISP_WM_BUS_OVERFLOW)
 #define ISP_EVENT_STATS_OVERFLOW (ISP_EVENT_BASE + ISP_STATS_OVERFLOW)
 #define ISP_EVENT_CAMIF_ERROR (ISP_EVENT_BASE + ISP_CAMIF_ERROR)
-#define ISP_EVENT_SOF (ISP_EVENT_BASE + ISP_SOF)
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ISP_EVENT_SOF (ISP_EVENT_BASE + ISP_SOF)
 #define ISP_EVENT_EOF (ISP_EVENT_BASE + ISP_EOF)
 #define ISP_EVENT_FRAME_DROP (ISP_EVENT_BASE + ISP_FRAME_DROP)
 #define ISP_EVENT_BUF_DIVERT (ISP_BUF_EVENT_BASE)
-#define ISP_EVENT_STATS_NOTIFY (ISP_STATS_EVENT_BASE)
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define ISP_EVENT_STATS_NOTIFY (ISP_STATS_EVENT_BASE)
 #define ISP_EVENT_COMP_STATS_NOTIFY (ISP_EVENT_STATS_NOTIFY + MSM_ISP_STATS_MAX)
 struct msm_isp_buf_event {
  uint32_t session_id;
- uint32_t stream_id;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t stream_id;
  uint32_t handle;
  int8_t buf_idx;
 };
-struct msm_isp_stats_event {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct msm_isp_stats_event {
  uint32_t stats_mask;
  uint8_t stats_buf_idxs[MSM_ISP_STATS_MAX];
 };
-struct msm_isp_stream_ack {
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+struct msm_isp_stream_ack {
  uint32_t session_id;
  uint32_t stream_id;
  uint32_t handle;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 struct msm_isp_event_data {
  struct timeval timestamp;
  struct timeval mono_timestamp;
- uint32_t frame_id;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t frame_id;
  union {
  struct msm_isp_stream_ack stream_ack;
  enum msm_vfe_input_src input_src;
- struct msm_isp_stats_event stats;
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct msm_isp_stats_event stats;
  uint32_t irq_status_mask;
  struct msm_isp_buf_event buf_done;
  } u;
-};
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
 #define V4L2_PIX_FMT_QBGGR8 v4l2_fourcc('Q', 'B', 'G', '8')
 #define V4L2_PIX_FMT_QGBRG8 v4l2_fourcc('Q', 'G', 'B', '8')
 #define V4L2_PIX_FMT_QGRBG8 v4l2_fourcc('Q', 'G', 'R', '8')
-#define V4L2_PIX_FMT_QRGGB8 v4l2_fourcc('Q', 'R', 'G', '8')
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_PIX_FMT_QRGGB8 v4l2_fourcc('Q', 'R', 'G', '8')
 #define V4L2_PIX_FMT_QBGGR10 v4l2_fourcc('Q', 'B', 'G', '0')
 #define V4L2_PIX_FMT_QGBRG10 v4l2_fourcc('Q', 'G', 'B', '0')
 #define V4L2_PIX_FMT_QGRBG10 v4l2_fourcc('Q', 'G', 'R', '0')
-#define V4L2_PIX_FMT_QRGGB10 v4l2_fourcc('Q', 'R', 'G', '0')
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_PIX_FMT_QRGGB10 v4l2_fourcc('Q', 'R', 'G', '0')
 #define V4L2_PIX_FMT_QBGGR12 v4l2_fourcc('Q', 'B', 'G', '2')
 #define V4L2_PIX_FMT_QGBRG12 v4l2_fourcc('Q', 'G', 'B', '2')
 #define V4L2_PIX_FMT_QGRBG12 v4l2_fourcc('Q', 'G', 'R', '2')
-#define V4L2_PIX_FMT_QRGGB12 v4l2_fourcc('Q', 'R', 'G', '2')
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define V4L2_PIX_FMT_QRGGB12 v4l2_fourcc('Q', 'R', 'G', '2')
 #define VIDIOC_MSM_VFE_REG_CFG   _IOWR('V', BASE_VIDIOC_PRIVATE, struct msm_vfe_cfg_cmd2)
 #define VIDIOC_MSM_ISP_REQUEST_BUF   _IOWR('V', BASE_VIDIOC_PRIVATE+1, struct msm_isp_buf_request)
 #define VIDIOC_MSM_ISP_ENQUEUE_BUF   _IOWR('V', BASE_VIDIOC_PRIVATE+2, struct msm_isp_qbuf_info)
-#define VIDIOC_MSM_ISP_RELEASE_BUF   _IOWR('V', BASE_VIDIOC_PRIVATE+3, struct msm_isp_buf_request)
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define VIDIOC_MSM_ISP_RELEASE_BUF   _IOWR('V', BASE_VIDIOC_PRIVATE+3, struct msm_isp_buf_request)
 #define VIDIOC_MSM_ISP_REQUEST_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+4, struct msm_vfe_axi_stream_request_cmd)
 #define VIDIOC_MSM_ISP_CFG_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+5, struct msm_vfe_axi_stream_cfg_cmd)
 #define VIDIOC_MSM_ISP_RELEASE_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+6, struct msm_vfe_axi_stream_release_cmd)
-#define VIDIOC_MSM_ISP_INPUT_CFG   _IOWR('V', BASE_VIDIOC_PRIVATE+7, struct msm_vfe_input_cfg)
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define VIDIOC_MSM_ISP_INPUT_CFG   _IOWR('V', BASE_VIDIOC_PRIVATE+7, struct msm_vfe_input_cfg)
 #define VIDIOC_MSM_ISP_SET_SRC_STATE   _IOWR('V', BASE_VIDIOC_PRIVATE+8, struct msm_vfe_axi_src_state)
 #define VIDIOC_MSM_ISP_REQUEST_STATS_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+9,   struct msm_vfe_stats_stream_request_cmd)
 #define VIDIOC_MSM_ISP_CFG_STATS_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+10, struct msm_vfe_stats_stream_cfg_cmd)
-#define VIDIOC_MSM_ISP_RELEASE_STATS_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+11,   struct msm_vfe_stats_stream_release_cmd)
 /* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define VIDIOC_MSM_ISP_RELEASE_STATS_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+11,   struct msm_vfe_stats_stream_release_cmd)
 #define VIDIOC_MSM_ISP_UPDATE_STREAM   _IOWR('V', BASE_VIDIOC_PRIVATE+13, struct msm_vfe_axi_stream_update_cmd)
 #define VIDIOC_MSM_ISP_CONFIG_DONE   _IOWR('V', BASE_VIDIOC_PRIVATE+14, int)
 #endif
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
diff --git a/kernel-headers/sound/compress_offload.h b/kernel-headers/sound/compress_offload.h
new file mode 100644
index 0000000..c1f39ec
--- /dev/null
+++ b/kernel-headers/sound/compress_offload.h
@@ -0,0 +1,116 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ ***   This header was automatically generated from a Linux kernel header
+ ***   of the same name, to make information necessary for userspace to
+ ***   call into the kernel available to libc.  It contains only constants,
+ ***   structures, and macros generated from the original header, and thus,
+ ***   contains no copyrightable information.
+ ***
+ ***   To edit the content of this header, modify the corresponding
+ ***   source file (e.g. under external/kernel-headers/original/) then
+ ***   run bionic/libc/kernel/tools/update_all.py
+ ***
+ ***   Any manual change here will be lost the next time this script will
+ ***   be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef __COMPRESS_OFFLOAD_H
+#define __COMPRESS_OFFLOAD_H
+#include <linux/types.h>
+#include <sound/asound.h>
+
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#include <sound/compress_params.h>
+#define SNDRV_COMPRESS_VERSION SNDRV_PROTOCOL_VERSION(0, 1, 1)
+struct snd_compressed_buffer {
+ __u32 fragment_size;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 fragments;
+};
+struct snd_compr_params {
+ struct snd_compressed_buffer buffer;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct snd_codec codec;
+ __u8 no_wake_mode;
+};
+struct snd_compr_tstamp {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 byte_offset;
+ __u32 copied_total;
+ snd_pcm_uframes_t pcm_frames;
+ snd_pcm_uframes_t pcm_io_frames;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 sampling_rate;
+ uint64_t timestamp;
+};
+struct snd_compr_avail {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u64 avail;
+ struct snd_compr_tstamp tstamp;
+};
+enum snd_compr_direction {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ SND_COMPRESS_PLAYBACK = 0,
+ SND_COMPRESS_CAPTURE
+};
+struct snd_compr_caps {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 num_codecs;
+ __u32 direction;
+ __u32 min_fragment_size;
+ __u32 max_fragment_size;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 min_fragments;
+ __u32 max_fragments;
+ __u32 codecs[MAX_NUM_CODECS];
+ __u32 reserved[11];
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+struct snd_compr_codec_caps {
+ __u32 codec;
+ __u32 num_descriptors;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct snd_codec_desc descriptor[MAX_NUM_CODEC_DESCRIPTORS];
+};
+enum {
+ SNDRV_COMPRESS_ENCODER_PADDING = 1,
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ SNDRV_COMPRESS_ENCODER_DELAY = 2,
+};
+struct snd_compr_metadata {
+ __u32 key;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 value[8];
+};
+struct snd_compr_audio_info {
+ uint32_t frame_size;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ uint32_t reserved[15];
+};
+#define SNDRV_COMPRESS_IOCTL_VERSION _IOR('C', 0x00, int)
+#define SNDRV_COMPRESS_GET_CAPS _IOWR('C', 0x10, struct snd_compr_caps)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SNDRV_COMPRESS_GET_CODEC_CAPS _IOWR('C', 0x11,  struct snd_compr_codec_caps)
+#define SNDRV_COMPRESS_SET_PARAMS _IOW('C', 0x12, struct snd_compr_params)
+#define SNDRV_COMPRESS_GET_PARAMS _IOR('C', 0x13, struct snd_codec)
+#define SNDRV_COMPRESS_SET_METADATA _IOW('C', 0x14,  struct snd_compr_metadata)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SNDRV_COMPRESS_GET_METADATA _IOWR('C', 0x15,  struct snd_compr_metadata)
+#define SNDRV_COMPRESS_TSTAMP _IOR('C', 0x20, struct snd_compr_tstamp)
+#define SNDRV_COMPRESS_AVAIL _IOR('C', 0x21, struct snd_compr_avail)
+#define SNDRV_COMPRESS_PAUSE _IO('C', 0x30)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SNDRV_COMPRESS_RESUME _IO('C', 0x31)
+#define SNDRV_COMPRESS_START _IO('C', 0x32)
+#define SNDRV_COMPRESS_STOP _IO('C', 0x33)
+#define SNDRV_COMPRESS_DRAIN _IO('C', 0x34)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SNDRV_COMPRESS_NEXT_TRACK _IO('C', 0x35)
+#define SNDRV_COMPRESS_PARTIAL_DRAIN _IO('C', 0x36)
+#define SND_COMPR_TRIGGER_DRAIN 7
+#define SND_COMPR_TRIGGER_NEXT_TRACK 8
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_COMPR_TRIGGER_PARTIAL_DRAIN 9
+#endif
diff --git a/kernel-headers/sound/compress_params.h b/kernel-headers/sound/compress_params.h
new file mode 100644
index 0000000..f4e520c
--- /dev/null
+++ b/kernel-headers/sound/compress_params.h
@@ -0,0 +1,265 @@
+/****************************************************************************
+ ****************************************************************************
+ ***
+ ***   This header was automatically generated from a Linux kernel header
+ ***   of the same name, to make information necessary for userspace to
+ ***   call into the kernel available to libc.  It contains only constants,
+ ***   structures, and macros generated from the original header, and thus,
+ ***   contains no copyrightable information.
+ ***
+ ***   To edit the content of this header, modify the corresponding
+ ***   source file (e.g. under external/kernel-headers/original/) then
+ ***   run bionic/libc/kernel/tools/update_all.py
+ ***
+ ***   Any manual change here will be lost the next time this script will
+ ***   be run. You've been warned!
+ ***
+ ****************************************************************************
+ ****************************************************************************/
+#ifndef __SND_COMPRESS_PARAMS_H
+#define __SND_COMPRESS_PARAMS_H
+#define MAX_NUM_CODECS 32
+#define MAX_NUM_CODEC_DESCRIPTORS 32
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define MAX_NUM_BITRATES 32
+#define MAX_NUM_FRAMES_PER_BUFFER 1
+#define COMPRESSED_META_DATA_MODE 0x10
+#define META_DATA_LEN_BYTES 36
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define Q6_AC3_DECODER 0x00010BF6
+#define Q6_EAC3_DECODER 0x00010C3C
+#define Q6_DTS 0x00010D88
+#define Q6_DTS_LBR 0x00010DBB
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCODEC_PCM ((__u32) 0x00000001)
+#define SND_AUDIOCODEC_MP3 ((__u32) 0x00000002)
+#define SND_AUDIOCODEC_AMR ((__u32) 0x00000003)
+#define SND_AUDIOCODEC_AMRWB ((__u32) 0x00000004)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCODEC_AMRWBPLUS ((__u32) 0x00000005)
+#define SND_AUDIOCODEC_AAC ((__u32) 0x00000006)
+#define SND_AUDIOCODEC_WMA ((__u32) 0x00000007)
+#define SND_AUDIOCODEC_REAL ((__u32) 0x00000008)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCODEC_VORBIS ((__u32) 0x00000009)
+#define SND_AUDIOCODEC_FLAC ((__u32) 0x0000000A)
+#define SND_AUDIOCODEC_IEC61937 ((__u32) 0x0000000B)
+#define SND_AUDIOCODEC_G723_1 ((__u32) 0x0000000C)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCODEC_G729 ((__u32) 0x0000000D)
+#define SND_AUDIOCODEC_AC3 ((__u32) 0x0000000E)
+#define SND_AUDIOCODEC_DTS ((__u32) 0x0000000F)
+#define SND_AUDIOCODEC_AC3_PASS_THROUGH ((__u32) 0x00000010)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCODEC_WMA_PRO ((__u32) 0x00000011)
+#define SND_AUDIOCODEC_DTS_PASS_THROUGH ((__u32) 0x00000012)
+#define SND_AUDIOCODEC_DTS_LBR ((__u32) 0x00000013)
+#define SND_AUDIOCODEC_DTS_TRANSCODE_LOOPBACK ((__u32) 0x00000014)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCODEC_MAX SND_AUDIOCODEC_DTS_TRANSCODE_LOOPBACK
+#define SND_AUDIOPROFILE_PCM ((__u32) 0x00000001)
+#define SND_AUDIOCHANMODE_MP3_MONO ((__u32) 0x00000001)
+#define SND_AUDIOCHANMODE_MP3_STEREO ((__u32) 0x00000002)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOCHANMODE_MP3_JOINTSTEREO ((__u32) 0x00000004)
+#define SND_AUDIOCHANMODE_MP3_DUAL ((__u32) 0x00000008)
+#define SND_AUDIOPROFILE_AMR ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AMR_DTX_OFF ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_AMR_VAD1 ((__u32) 0x00000002)
+#define SND_AUDIOMODE_AMR_VAD2 ((__u32) 0x00000004)
+#define SND_AUDIOSTREAMFORMAT_UNDEFINED ((__u32) 0x00000000)
+#define SND_AUDIOSTREAMFORMAT_CONFORMANCE ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOSTREAMFORMAT_IF1 ((__u32) 0x00000002)
+#define SND_AUDIOSTREAMFORMAT_IF2 ((__u32) 0x00000004)
+#define SND_AUDIOSTREAMFORMAT_FSF ((__u32) 0x00000008)
+#define SND_AUDIOSTREAMFORMAT_RTPPAYLOAD ((__u32) 0x00000010)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOSTREAMFORMAT_ITU ((__u32) 0x00000020)
+#define SND_AUDIOPROFILE_AMRWB ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AMRWB_DTX_OFF ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AMRWB_VAD1 ((__u32) 0x00000002)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_AMRWB_VAD2 ((__u32) 0x00000004)
+#define SND_AUDIOPROFILE_AMRWBPLUS ((__u32) 0x00000001)
+#define SND_AUDIOPROFILE_AAC ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AAC_MAIN ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_AAC_LC ((__u32) 0x00000002)
+#define SND_AUDIOMODE_AAC_SSR ((__u32) 0x00000004)
+#define SND_AUDIOMODE_AAC_LTP ((__u32) 0x00000008)
+#define SND_AUDIOMODE_AAC_HE ((__u32) 0x00000010)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_AAC_SCALABLE ((__u32) 0x00000020)
+#define SND_AUDIOMODE_AAC_ERLC ((__u32) 0x00000040)
+#define SND_AUDIOMODE_AAC_LD ((__u32) 0x00000080)
+#define SND_AUDIOMODE_AAC_HE_PS ((__u32) 0x00000100)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_AAC_HE_MPS ((__u32) 0x00000200)
+#define SND_AUDIOSTREAMFORMAT_MP2ADTS ((__u32) 0x00000001)
+#define SND_AUDIOSTREAMFORMAT_MP4ADTS ((__u32) 0x00000002)
+#define SND_AUDIOSTREAMFORMAT_MP4LOAS ((__u32) 0x00000004)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOSTREAMFORMAT_MP4LATM ((__u32) 0x00000008)
+#define SND_AUDIOSTREAMFORMAT_ADIF ((__u32) 0x00000010)
+#define SND_AUDIOSTREAMFORMAT_MP4FF ((__u32) 0x00000020)
+#define SND_AUDIOSTREAMFORMAT_RAW ((__u32) 0x00000040)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOPROFILE_WMA7 ((__u32) 0x00000001)
+#define SND_AUDIOPROFILE_WMA8 ((__u32) 0x00000002)
+#define SND_AUDIOPROFILE_WMA9 ((__u32) 0x00000004)
+#define SND_AUDIOPROFILE_WMA10 ((__u32) 0x00000008)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_WMA_LEVEL1 ((__u32) 0x00000001)
+#define SND_AUDIOMODE_WMA_LEVEL2 ((__u32) 0x00000002)
+#define SND_AUDIOMODE_WMA_LEVEL3 ((__u32) 0x00000004)
+#define SND_AUDIOMODE_WMA_LEVEL4 ((__u32) 0x00000008)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_WMAPRO_LEVELM0 ((__u32) 0x00000010)
+#define SND_AUDIOMODE_WMAPRO_LEVELM1 ((__u32) 0x00000020)
+#define SND_AUDIOMODE_WMAPRO_LEVELM2 ((__u32) 0x00000040)
+#define SND_AUDIOMODE_WMAPRO_LEVELM3 ((__u32) 0x00000080)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOSTREAMFORMAT_WMA_ASF ((__u32) 0x00000001)
+#define SND_AUDIOSTREAMFORMAT_WMA_NOASF_HDR ((__u32) 0x00000002)
+#define SND_AUDIOPROFILE_REALAUDIO ((__u32) 0x00000001)
+#define SND_AUDIOMODE_REALAUDIO_G2 ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_REALAUDIO_8 ((__u32) 0x00000002)
+#define SND_AUDIOMODE_REALAUDIO_10 ((__u32) 0x00000004)
+#define SND_AUDIOMODE_REALAUDIO_SURROUND ((__u32) 0x00000008)
+#define SND_AUDIOPROFILE_VORBIS ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_VORBIS ((__u32) 0x00000001)
+#define SND_AUDIOPROFILE_FLAC ((__u32) 0x00000001)
+#define SND_AUDIOMODE_FLAC_LEVEL0 ((__u32) 0x00000001)
+#define SND_AUDIOMODE_FLAC_LEVEL1 ((__u32) 0x00000002)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_FLAC_LEVEL2 ((__u32) 0x00000004)
+#define SND_AUDIOMODE_FLAC_LEVEL3 ((__u32) 0x00000008)
+#define SND_AUDIOMODE_FLAC_LEVEL4 ((__u32) 0x00000010)
+#define SND_AUDIOMODE_FLAC_LEVEL5 ((__u32) 0x00000020)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_FLAC_LEVEL6 ((__u32) 0x00000040)
+#define SND_AUDIOMODE_FLAC_LEVEL7 ((__u32) 0x00000080)
+#define SND_AUDIOMODE_FLAC_LEVEL8 ((__u32) 0x00000100)
+#define SND_AUDIOSTREAMFORMAT_FLAC ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOSTREAMFORMAT_FLAC_OGG ((__u32) 0x00000002)
+#define SND_AUDIOPROFILE_IEC61937 ((__u32) 0x00000001)
+#define SND_AUDIOPROFILE_IEC61937_SPDIF ((__u32) 0x00000002)
+#define SND_AUDIOMODE_IEC_REF_STREAM_HEADER ((__u32) 0x00000000)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_IEC_LPCM ((__u32) 0x00000001)
+#define SND_AUDIOMODE_IEC_AC3 ((__u32) 0x00000002)
+#define SND_AUDIOMODE_IEC_MPEG1 ((__u32) 0x00000004)
+#define SND_AUDIOMODE_IEC_MP3 ((__u32) 0x00000008)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_IEC_MPEG2 ((__u32) 0x00000010)
+#define SND_AUDIOMODE_IEC_AACLC ((__u32) 0x00000020)
+#define SND_AUDIOMODE_IEC_DTS ((__u32) 0x00000040)
+#define SND_AUDIOMODE_IEC_ATRAC ((__u32) 0x00000080)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_IEC_SACD ((__u32) 0x00000100)
+#define SND_AUDIOMODE_IEC_EAC3 ((__u32) 0x00000200)
+#define SND_AUDIOMODE_IEC_DTS_HD ((__u32) 0x00000400)
+#define SND_AUDIOMODE_IEC_MLP ((__u32) 0x00000800)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_IEC_DST ((__u32) 0x00001000)
+#define SND_AUDIOMODE_IEC_WMAPRO ((__u32) 0x00002000)
+#define SND_AUDIOMODE_IEC_REF_CXT ((__u32) 0x00004000)
+#define SND_AUDIOMODE_IEC_HE_AAC ((__u32) 0x00008000)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_IEC_HE_AAC2 ((__u32) 0x00010000)
+#define SND_AUDIOMODE_IEC_MPEG_SURROUND ((__u32) 0x00020000)
+#define SND_AUDIOPROFILE_G723_1 ((__u32) 0x00000001)
+#define SND_AUDIOMODE_G723_1_ANNEX_A ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_G723_1_ANNEX_B ((__u32) 0x00000002)
+#define SND_AUDIOMODE_G723_1_ANNEX_C ((__u32) 0x00000004)
+#define SND_AUDIOPROFILE_G729 ((__u32) 0x00000001)
+#define SND_AUDIOMODE_G729_ANNEX_A ((__u32) 0x00000001)
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+#define SND_AUDIOMODE_G729_ANNEX_B ((__u32) 0x00000002)
+#define SND_RATECONTROLMODE_CONSTANTBITRATE ((__u32) 0x00000001)
+#define SND_RATECONTROLMODE_VARIABLEBITRATE ((__u32) 0x00000002)
+struct snd_enc_wma {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 super_block_align;
+ __u32 bits_per_sample;
+ __u32 channelmask;
+ __u32 encodeopt;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 encodeopt1;
+ __u32 encodeopt2;
+};
+struct snd_enc_vorbis {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __s32 quality;
+ __u32 managed;
+ __u32 max_bit_rate;
+ __u32 min_bit_rate;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 downmix;
+};
+struct snd_enc_real {
+ __u32 quant_bits;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 start_region;
+ __u32 num_regions;
+};
+struct snd_enc_flac {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 num;
+ __u32 gain;
+};
+struct snd_enc_generic {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 bw;
+ __s32 reserved[15];
+};
+union snd_codec_options {
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct snd_enc_wma wma;
+ struct snd_enc_vorbis vorbis;
+ struct snd_enc_real real;
+ struct snd_enc_flac flac;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ struct snd_enc_generic generic;
+};
+struct snd_codec_desc {
+ __u32 max_ch;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 sample_rates;
+ __u32 bit_rate[MAX_NUM_BITRATES];
+ __u32 num_bitrates;
+ __u32 rate_control;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 profiles;
+ __u32 modes;
+ __u32 formats;
+ __u32 min_buffer;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 reserved[15];
+};
+struct snd_codec {
+ __u32 id;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 ch_in;
+ __u32 ch_out;
+ __u32 sample_rate;
+ __u32 bit_rate;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 rate_control;
+ __u32 profile;
+ __u32 level;
+ __u32 ch_mode;
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+ __u32 format;
+ __u32 align;
+ union snd_codec_options options;
+ __u32 reserved[3];
+/* WARNING: DO NOT EDIT, AUTO-GENERATED CODE - SEE TOP FOR INSTRUCTIONS */
+};
+#endif
diff --git a/media_codecs.xml b/media_codecs.xml
index dd6114e..4584459 100644
--- a/media_codecs.xml
+++ b/media_codecs.xml
@@ -15,6 +15,8 @@
 -->
 
 <MediaCodecs>
+    <Include href="media_codecs_google_audio.xml" />
+    <Include href="media_codecs_google_telephony.xml" />
     <Encoders>
         <MediaCodec name="OMX.qcom.video.encoder.mpeg4" type="video/mp4v-es" >
             <Quirk name="requires-allocate-on-input-ports" />
@@ -36,21 +38,8 @@
             <Quirk name="requires-allocate-on-output-ports"/>
             <Quirk name="requires-loaded-to-idle-after-allocation"/>
         </MediaCodec>
-        <MediaCodec name="OMX.google.amrnb.encoder" type="audio/3gpp" />
-        <MediaCodec name="OMX.google.aac.encoder" type="audio/mp4a-latm" />
-        <MediaCodec name="OMX.google.amrwb.encoder" type="audio/amr-wb" />
-        <MediaCodec name="OMX.google.flac.encoder" type="audio/flac" />
-        <MediaCodec name="OMX.google.vp8.encoder" type="video/x-vnd.on2.vp8" />
     </Encoders>
     <Decoders>
-        <MediaCodec name="OMX.google.vorbis.decoder" type="audio/vorbis" />
-        <MediaCodec name="OMX.google.gsm.decoder" type="audio/gsm" />
-        <MediaCodec name="OMX.google.mp3.decoder" type="audio/mpeg" />
-        <MediaCodec name="OMX.google.amrnb.decoder" type="audio/3gpp" />
-        <MediaCodec name="OMX.google.amrwb.decoder" type="audio/amr-wb" />
-        <MediaCodec name="OMX.google.aac.decoder" type="audio/mp4a-latm" />
-        <MediaCodec name="OMX.google.g711.alaw.decoder" type="audio/g711-alaw" />
-        <MediaCodec name="OMX.google.g711.mlaw.decoder" type="audio/g711-mlaw" />
         <MediaCodec name="OMX.qcom.video.decoder.avc" type="video/avc" >
             <Quirk name="requires-allocate-on-input-ports" />
             <Quirk name="requires-allocate-on-output-ports"/>
@@ -71,10 +60,6 @@
             <Quirk name="requires-allocate-on-output-ports" />
             <Quirk name="defers-output-buffer-allocation" />
         </MediaCodec>
-        <MediaCodec name="OMX.google.vp8.decoder" type="video/x-vnd.on2.vp8" />
-        <MediaCodec name="OMX.google.vp9.decoder" type="video/x-vnd.on2.vp9" />
-        <MediaCodec name="OMX.google.h264.decoder" type="video/avc" />
-        <MediaCodec name="OMX.google.h263.decoder" type="video/3gpp" />
-        <MediaCodec name="OMX.google.mpeg4.decoder" type="video/mp4v-es" />
     </Decoders>
+    <Include href="media_codecs_google_video.xml" />
 </MediaCodecs>
diff --git a/mixer_paths.xml b/mixer_paths.xml
index 78fa53c..ef4e652 100644
--- a/mixer_paths.xml
+++ b/mixer_paths.xml
@@ -129,9 +129,11 @@
     <ctl name="AIF1_CAP Mixer SLIM TX8" value="0"/>
     <ctl name="CLASS_H_DSM MUX" value="ZERO" />
     <ctl name="SLIMBUS_0_RX Audio Mixer MultiMedia4" value="0" />
-    <ctl name="HDMI Mixer MultiMedia4" value="0" />
     <ctl name="SEC_AUX_PCM_RX Audio Mixer MultiMedia4" value="0" />
-
+    <ctl name="AUX PCM SampleRate" value="8000" />
+    <!-- echo reference -->
+    <ctl name="EC_REF_RX" value="NONE" />
+    <!-- echo reference end -->
 
     <!-- These are audio route (FE to BE) specific mixer settings -->
     <path name="deep-buffer-playback">
@@ -151,6 +153,11 @@
         <ctl name="SEC_AUX_PCM_RX Audio Mixer MultiMedia1" value="1" />
     </path>
 
+    <path name="deep-buffer-playback bt-sco-wb">
+        <ctl name="AUX PCM SampleRate" value="16000" />
+        <path name="deep-buffer-playback bt-sco" />
+    </path>
+
     <path name="low-latency-playback">
         <ctl name="SLIMBUS_0_RX Audio Mixer MultiMedia5" value="1" />
     </path>
@@ -163,6 +170,11 @@
         <ctl name="SEC_AUX_PCM_RX Audio Mixer MultiMedia5" value="1" />
     </path>
 
+    <path name="low-latency-playback bt-sco-wb">
+        <ctl name="AUX PCM SampleRate" value="16000" />
+        <path name="low-latency-playback bt-sco" />
+    </path>
+
     <path name="low-latency-playback speaker-and-hdmi">
         <path name="low-latency-playback hdmi" />
         <path name="low-latency-playback" />
@@ -184,6 +196,11 @@
         <ctl name="SEC_AUX_PCM_RX Audio Mixer MultiMedia4" value="1" />
     </path>
 
+    <path name="compress-offload-playback bt-sco-wb">
+        <ctl name="AUX PCM SampleRate" value="16000" />
+        <path name="compress-offload-playback bt-sco" />
+    </path>
+
     <path name="compress-offload-playback speaker-and-hdmi">
         <path name="compress-offload-playback hdmi" />
         <path name="compress-offload-playback" />
@@ -197,6 +214,11 @@
         <ctl name="MultiMedia1 Mixer SEC_AUX_PCM_UL_TX" value="1" />
     </path>
 
+    <path name="audio-record bt-sco-wb">
+        <ctl name="AUX PCM SampleRate" value="16000" />
+        <path name="audio-record bt-sco" />
+    </path>
+
     <path name="low-latency-record">
         <ctl name="MultiMedia5 Mixer SLIM_0_TX" value="1" />
     </path>
@@ -216,6 +238,11 @@
         <ctl name="Voice_Tx Mixer SEC_AUX_PCM_TX_Voice" value="1" />
     </path>
 
+    <path name="voice-call bt-sco-wb">
+        <ctl name="AUX PCM SampleRate" value="16000" />
+        <path name="voice-call bt-sco" />
+    </path>
+
     <!-- These are actual sound device specific mixer settings -->
     <path name="adc1">
         <ctl name="AIF1_CAP Mixer SLIM TX7" value="1"/>
@@ -391,6 +418,10 @@
         <ctl name="SLIM_0_TX Channels" value="Two" />
     </path>
 
+    <path name="echo-reference">
+        <ctl name="EC_REF_RX" value="SLIM_RX" />
+    </path>
+
     <path name="dmic-endfire">
         <path name="speaker-dmic-endfire" />
         <ctl name="IIR1 INP1 MUX" value="DEC6" />
@@ -476,4 +507,10 @@
         <path name="adc1" />
     </path>
 
+    <path name="bt-sco-mic-wb">
+    </path>
+
+    <path name="bt-sco-headset-wb">
+    </path>
+
 </mixer>
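The wideband (bt-sco-wb) paths added above work by retargeting the AUX PCM back end to 16 kHz before enabling the same front-end mix as the narrowband path. The following is a minimal, hedged sketch of how such controls map onto tinyalsa mixer calls; the audio HAL normally parses mixer_paths.xml itself, and the card number (0) as well as the integer-vs-enum handling are assumptions for illustration only.

#include <stdio.h>
#include <stdlib.h>
#include <tinyalsa/asoundlib.h>

/* Hedged sketch: apply controls from the "deep-buffer-playback bt-sco-wb"
 * path by hand. Card 0 is assumed; a real HAL drives this from the XML. */
static int apply_ctl(struct mixer *m, const char *name, const char *value)
{
    struct mixer_ctl *ctl = mixer_get_ctl_by_name(m, name);
    if (!ctl)
        return -1;
    /* Some kernels expose "AUX PCM SampleRate" as an enum control, others
     * as an integer; handle both, mirroring what an XML parser would do. */
    if (mixer_ctl_get_type(ctl) == MIXER_CTL_TYPE_ENUM)
        return mixer_ctl_set_enum_by_string(ctl, value);
    return mixer_ctl_set_value(ctl, 0, atoi(value));
}

int main(void)
{
    struct mixer *m = mixer_open(0);            /* assumed: sound card 0 */
    if (!m) {
        fprintf(stderr, "cannot open mixer\n");
        return 1;
    }
    apply_ctl(m, "AUX PCM SampleRate", "16000");                 /* wideband */
    apply_ctl(m, "SEC_AUX_PCM_RX Audio Mixer MultiMedia1", "1"); /* bt-sco FE */
    mixer_close(m);
    return 0;
}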
diff --git a/nfc/libnfc-brcm.conf b/nfc/libnfc-brcm.conf
index 4840a54..6f2a7c1 100644
--- a/nfc/libnfc-brcm.conf
+++ b/nfc/libnfc-brcm.conf
@@ -157,9 +157,9 @@
 ###############################################################################
 # When screen is turned off, specify the desired power state of the controller.
 # 0: power-off-sleep state; DEFAULT
-# 1: full-power state
+# 1: full-power state (note: this is still low-power ("snooze") on BRCM devices).
 # 2: screen-off card-emulation (CE4/CE3/CE1 modes are used)
-#SCREEN_OFF_POWER_STATE=0
+SCREEN_OFF_POWER_STATE=1
 
 ###############################################################################
 # SPD Debug mode
@@ -245,3 +245,13 @@
 #                   Technology A active mode
 #                   Technology F active mode
 #POLL_FREQUENCY={01:01:01:01:01:01:01:01}
+
+###############################################################################
+# Choose the presence-check algorithm for type-4 tag.  If not defined,
+# the default value is 1.
+# 0  NFA_RW_PRES_CHK_DEFAULT; Let the stack select an algorithm
+# 1  NFA_RW_PRES_CHK_I_BLOCK; ISO-DEP protocol's empty I-block
+# 2  NFA_RW_PRES_CHK_RESET; Deactivate to Sleep, then re-activate
+# 3  NFA_RW_PRES_CHK_RB_CH0; Type-4 tag protocol's ReadBinary command on channel 0
+# 4  NFA_RW_PRES_CHK_RB_CH3; Type-4 tag protocol's ReadBinary command on channel 3
+PRESENCE_CHECK_ALGORITHM=1
diff --git a/original-kernel-headers/media/msmb_isp.h b/original-kernel-headers/media/msmb_isp.h
index d7ce405..4dedb4d 100644
--- a/original-kernel-headers/media/msmb_isp.h
+++ b/original-kernel-headers/media/msmb_isp.h
@@ -154,6 +154,7 @@
 enum msm_vfe_axi_stream_cmd {
 	STOP_STREAM,
 	START_STREAM,
+	STOP_IMMEDIATELY,
 };
 
 struct msm_vfe_axi_stream_cfg_cmd {
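The new STOP_IMMEDIATELY value is consumed through the VIDIOC_MSM_ISP_CFG_STREAM ioctl listed earlier in the generated header. The sketch below is hedged: this patch does not show the body of struct msm_vfe_axi_stream_cfg_cmd, so the num_streams and stream_handle field names are assumptions taken from typical msm_isp trees.

/* Hedged sketch only: stop an active VFE AXI stream without waiting for the
 * frame boundary, using the STOP_IMMEDIATELY command added above. The
 * cfg-struct field names (num_streams, stream_handle) are assumptions. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <media/msmb_isp.h>

int stop_stream_now(int vfe_fd, uint32_t handle)
{
    struct msm_vfe_axi_stream_cfg_cmd cfg;

    memset(&cfg, 0, sizeof(cfg));
    cfg.cmd = STOP_IMMEDIATELY;      /* new command from this change */
    cfg.num_streams = 1;             /* assumed field name */
    cfg.stream_handle[0] = handle;   /* assumed field name */

    if (ioctl(vfe_fd, VIDIOC_MSM_ISP_CFG_STREAM, &cfg) < 0) {
        perror("VIDIOC_MSM_ISP_CFG_STREAM");
        return -1;
    }
    return 0;
}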
diff --git a/original-kernel-headers/sound/compress_offload.h b/original-kernel-headers/sound/compress_offload.h
new file mode 100644
index 0000000..3b2dace
--- /dev/null
+++ b/original-kernel-headers/sound/compress_offload.h
@@ -0,0 +1,201 @@
+/*
+ *  compress_offload.h - compress offload header definitions
+ *
+ *  Copyright (C) 2011 Intel Corporation
+ *  Authors:	Vinod Koul <vinod.koul@linux.intel.com>
+ *		Pierre-Louis Bossart <pierre-louis.bossart@linux.intel.com>
+ *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ *
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; version 2 of the License.
+ *
+ *  This program is distributed in the hope that it will be useful, but
+ *  WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License along
+ *  with this program; if not, write to the Free Software Foundation, Inc.,
+ *  59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
+ *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ *
+ */
+#ifndef __COMPRESS_OFFLOAD_H
+#define __COMPRESS_OFFLOAD_H
+
+#include <linux/types.h>
+#include <sound/asound.h>
+#include <sound/compress_params.h>
+
+
+#define SNDRV_COMPRESS_VERSION SNDRV_PROTOCOL_VERSION(0, 1, 1)
+/**
+ * struct snd_compressed_buffer: compressed buffer
+ * @fragment_size: size of buffer fragment in bytes
+ * @fragments: number of such fragments
+ */
+struct snd_compressed_buffer {
+	__u32 fragment_size;
+	__u32 fragments;
+};
+
+/**
+ * struct snd_compr_params: compressed stream params
+ * @buffer: buffer description
+ * @codec: codec parameters
+ * @no_wake_mode: dont wake on fragment elapsed
+ */
+struct snd_compr_params {
+	struct snd_compressed_buffer buffer;
+	struct snd_codec codec;
+	__u8 no_wake_mode;
+};
+
+/**
+ * struct snd_compr_tstamp: timestamp descriptor
+ * @byte_offset: Byte offset in ring buffer to DSP
+ * @copied_total: Total number of bytes copied from/to ring buffer to/by DSP
+ * @pcm_frames: Frames decoded or encoded by DSP. This field will evolve by
+ *	large steps and should only be used to monitor encoding/decoding
+ *	progress. It shall not be used for timing estimates.
+ * @pcm_io_frames: Frames rendered or received by DSP into a mixer or an audio
+ * output/input. This field should be used for A/V sync or time estimates.
+ * @sampling_rate: sampling rate of audio
+ */
+struct snd_compr_tstamp {
+	__u32 byte_offset;
+	__u32 copied_total;
+	snd_pcm_uframes_t pcm_frames;
+	snd_pcm_uframes_t pcm_io_frames;
+	__u32 sampling_rate;
+	uint64_t timestamp;
+};
+
+/**
+ * struct snd_compr_avail: avail descriptor
+ * @avail: Number of bytes available in ring buffer for writing/reading
+ * @tstamp: timestamp information
+ */
+struct snd_compr_avail {
+	__u64 avail;
+	struct snd_compr_tstamp tstamp;
+};
+
+enum snd_compr_direction {
+	SND_COMPRESS_PLAYBACK = 0,
+	SND_COMPRESS_CAPTURE
+};
+
+/**
+ * struct snd_compr_caps: caps descriptor
+ * @codecs: pointer to array of codecs
+ * @direction: direction supported. Of type snd_compr_direction
+ * @min_fragment_size: minimum fragment supported by DSP
+ * @max_fragment_size: maximum fragment supported by DSP
+ * @min_fragments: min fragments supported by DSP
+ * @max_fragments: max fragments supported by DSP
+ * @num_codecs: number of codecs supported
+ * @reserved: reserved field
+ */
+struct snd_compr_caps {
+	__u32 num_codecs;
+	__u32 direction;
+	__u32 min_fragment_size;
+	__u32 max_fragment_size;
+	__u32 min_fragments;
+	__u32 max_fragments;
+	__u32 codecs[MAX_NUM_CODECS];
+	__u32 reserved[11];
+};
+
+/**
+ * struct snd_compr_codec_caps: query capability of codec
+ * @codec: codec for which capability is queried
+ * @num_descriptors: number of codec descriptors
+ * @descriptor: array of codec capability descriptor
+ */
+struct snd_compr_codec_caps {
+	__u32 codec;
+	__u32 num_descriptors;
+	struct snd_codec_desc descriptor[MAX_NUM_CODEC_DESCRIPTORS];
+};
+
+/**
+ * @SNDRV_COMPRESS_ENCODER_PADDING: no of samples appended by the encoder at the
+ * end of the track
+ * @SNDRV_COMPRESS_ENCODER_DELAY: no of samples inserted by the encoder at the
+ * beginning of the track
+ */
+enum {
+	SNDRV_COMPRESS_ENCODER_PADDING = 1,
+	SNDRV_COMPRESS_ENCODER_DELAY = 2,
+};
+
+/**
+ * struct snd_compr_metadata: compressed stream metadata
+ * @key: key id
+ * @value: key value
+ */
+struct snd_compr_metadata {
+	 __u32 key;
+	 __u32 value[8];
+};
+
+/**
+ * struct snd_compr_audio_info: compressed input audio information
+ * @frame_size: length of the encoded frame with valid data
+ * @reserved: reserved for future use
+ */
+struct snd_compr_audio_info {
+	uint32_t frame_size;
+	uint32_t reserved[15];
+};
+
+/**
+ * compress path ioctl definitions
+ * SNDRV_COMPRESS_GET_CAPS: Query capability of DSP
+ * SNDRV_COMPRESS_GET_CODEC_CAPS: Query capability of a codec
+ * SNDRV_COMPRESS_SET_PARAMS: Set codec and stream parameters
+ * Note: only codec params can be changed at runtime; stream params cannot be
+ * SNDRV_COMPRESS_GET_PARAMS: Query codec params
+ * SNDRV_COMPRESS_TSTAMP: get the current timestamp value
+ * SNDRV_COMPRESS_AVAIL: get the current buffer avail value.
+ * This also queries the tstamp properties
+ * SNDRV_COMPRESS_PAUSE: Pause the running stream
+ * SNDRV_COMPRESS_RESUME: resume a paused stream
+ * SNDRV_COMPRESS_START: Start a stream
+ * SNDRV_COMPRESS_STOP: stop a running stream, discarding ring buffer content
+ * and the buffers currently with DSP
+ * SNDRV_COMPRESS_DRAIN: Play till end of buffers and stop after that
+ * SNDRV_COMPRESS_IOCTL_VERSION: Query the API version
+ */
+#define SNDRV_COMPRESS_IOCTL_VERSION	_IOR('C', 0x00, int)
+#define SNDRV_COMPRESS_GET_CAPS		_IOWR('C', 0x10, struct snd_compr_caps)
+#define SNDRV_COMPRESS_GET_CODEC_CAPS	_IOWR('C', 0x11,\
+						struct snd_compr_codec_caps)
+#define SNDRV_COMPRESS_SET_PARAMS	_IOW('C', 0x12, struct snd_compr_params)
+#define SNDRV_COMPRESS_GET_PARAMS	_IOR('C', 0x13, struct snd_codec)
+#define SNDRV_COMPRESS_SET_METADATA	_IOW('C', 0x14,\
+						 struct snd_compr_metadata)
+#define SNDRV_COMPRESS_GET_METADATA	_IOWR('C', 0x15,\
+						 struct snd_compr_metadata)
+#define SNDRV_COMPRESS_TSTAMP		_IOR('C', 0x20, struct snd_compr_tstamp)
+#define SNDRV_COMPRESS_AVAIL		_IOR('C', 0x21, struct snd_compr_avail)
+#define SNDRV_COMPRESS_PAUSE		_IO('C', 0x30)
+#define SNDRV_COMPRESS_RESUME		_IO('C', 0x31)
+#define SNDRV_COMPRESS_START		_IO('C', 0x32)
+#define SNDRV_COMPRESS_STOP		_IO('C', 0x33)
+#define SNDRV_COMPRESS_DRAIN		_IO('C', 0x34)
+#define SNDRV_COMPRESS_NEXT_TRACK	_IO('C', 0x35)
+#define SNDRV_COMPRESS_PARTIAL_DRAIN	_IO('C', 0x36)
+/*
+ * TODO
+ * 1. add mmap support
+ *
+ */
+#define SND_COMPR_TRIGGER_DRAIN 7 /*FIXME move this to pcm.h */
+#define SND_COMPR_TRIGGER_NEXT_TRACK 8
+#define SND_COMPR_TRIGGER_PARTIAL_DRAIN 9
+#endif
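These ioctls are issued against an ALSA compress device node. A short, hedged sketch of querying the driver's capabilities with SNDRV_COMPRESS_GET_CAPS follows; the /dev/snd/comprC0D0 node name follows the usual ALSA naming convention and is an assumption here, not something this patch defines.

/* Hedged sketch: query the DSP's compress-offload capabilities through the
 * ioctls defined above. The device node name is assumed. */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <unistd.h>
#include <sound/compress_offload.h>

int main(void)
{
    struct snd_compr_caps caps;
    int version = 0;
    int fd = open("/dev/snd/comprC0D0", O_RDWR);   /* assumed node */

    if (fd < 0) {
        perror("open compress device");
        return 1;
    }
    if (ioctl(fd, SNDRV_COMPRESS_IOCTL_VERSION, &version) == 0)
        printf("compress API version: 0x%x\n", version);

    memset(&caps, 0, sizeof(caps));
    if (ioctl(fd, SNDRV_COMPRESS_GET_CAPS, &caps) == 0)
        printf("%u codecs, fragments %u-%u bytes\n",
               caps.num_codecs, caps.min_fragment_size,
               caps.max_fragment_size);

    close(fd);
    return 0;
}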
diff --git a/original-kernel-headers/sound/compress_params.h b/original-kernel-headers/sound/compress_params.h
new file mode 100644
index 0000000..866c0f9
--- /dev/null
+++ b/original-kernel-headers/sound/compress_params.h
@@ -0,0 +1,419 @@
+/*
+ *  compress_params.h - codec types and parameters for compressed data
+ *  streaming interface
+ *
+ *  Copyright (C) 2011 Intel Corporation
+ *  Authors:	Pierre-Louis Bossart <pierre-louis.bossart@linux.intel.com>
+ *              Vinod Koul <vinod.koul@linux.intel.com>
+ *
+ *  ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ *
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; version 2 of the License.
+ *
+ *  This program is distributed in the hope that it will be useful, but
+ *  WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ *  General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License along
+ *  with this program; if not, write to the Free Software Foundation, Inc.,
+ *  59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
+ *
+ * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ *
+ * The definitions in this file are derived from the OpenMAX AL version 1.1
+ * and OpenMAX IL v 1.1.2 header files which contain the copyright notice below.
+ *
+ * Copyright (c) 2007-2010 The Khronos Group Inc.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and/or associated documentation files (the
+ * "Materials "), to deal in the Materials without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Materials, and to
+ * permit persons to whom the Materials are furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included
+ * in all copies or substantial portions of the Materials.
+ *
+ * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
+ *
+ */
+#ifndef __SND_COMPRESS_PARAMS_H
+#define __SND_COMPRESS_PARAMS_H
+
+/* AUDIO CODECS SUPPORTED */
+#define MAX_NUM_CODECS 32
+#define MAX_NUM_CODEC_DESCRIPTORS 32
+#define MAX_NUM_BITRATES 32
+
+/* compressed TX */
+#define MAX_NUM_FRAMES_PER_BUFFER 1
+#define COMPRESSED_META_DATA_MODE 0x10
+#define META_DATA_LEN_BYTES 36
+#define Q6_AC3_DECODER	0x00010BF6
+#define Q6_EAC3_DECODER 0x00010C3C
+#define Q6_DTS		0x00010D88
+#define Q6_DTS_LBR	0x00010DBB
+
+/* Codecs are listed linearly to allow for extensibility */
+#define SND_AUDIOCODEC_PCM                   ((__u32) 0x00000001)
+#define SND_AUDIOCODEC_MP3                   ((__u32) 0x00000002)
+#define SND_AUDIOCODEC_AMR                   ((__u32) 0x00000003)
+#define SND_AUDIOCODEC_AMRWB                 ((__u32) 0x00000004)
+#define SND_AUDIOCODEC_AMRWBPLUS             ((__u32) 0x00000005)
+#define SND_AUDIOCODEC_AAC                   ((__u32) 0x00000006)
+#define SND_AUDIOCODEC_WMA                   ((__u32) 0x00000007)
+#define SND_AUDIOCODEC_REAL                  ((__u32) 0x00000008)
+#define SND_AUDIOCODEC_VORBIS                ((__u32) 0x00000009)
+#define SND_AUDIOCODEC_FLAC                  ((__u32) 0x0000000A)
+#define SND_AUDIOCODEC_IEC61937              ((__u32) 0x0000000B)
+#define SND_AUDIOCODEC_G723_1                ((__u32) 0x0000000C)
+#define SND_AUDIOCODEC_G729                  ((__u32) 0x0000000D)
+#define SND_AUDIOCODEC_AC3                   ((__u32) 0x0000000E)
+#define SND_AUDIOCODEC_DTS                   ((__u32) 0x0000000F)
+#define SND_AUDIOCODEC_AC3_PASS_THROUGH      ((__u32) 0x00000010)
+#define SND_AUDIOCODEC_WMA_PRO               ((__u32) 0x00000011)
+#define SND_AUDIOCODEC_DTS_PASS_THROUGH      ((__u32) 0x00000012)
+#define SND_AUDIOCODEC_DTS_LBR               ((__u32) 0x00000013)
+#define SND_AUDIOCODEC_DTS_TRANSCODE_LOOPBACK ((__u32) 0x00000014)
+#define SND_AUDIOCODEC_MAX                   SND_AUDIOCODEC_DTS_TRANSCODE_LOOPBACK
+
+/*
+ * Profile and modes are listed with bit masks. This allows for a
+ * more compact representation of fields that will not evolve
+ * (in contrast to the list of codecs)
+ */
+
+#define SND_AUDIOPROFILE_PCM                 ((__u32) 0x00000001)
+
+/* MP3 modes are only useful for encoders */
+#define SND_AUDIOCHANMODE_MP3_MONO           ((__u32) 0x00000001)
+#define SND_AUDIOCHANMODE_MP3_STEREO         ((__u32) 0x00000002)
+#define SND_AUDIOCHANMODE_MP3_JOINTSTEREO    ((__u32) 0x00000004)
+#define SND_AUDIOCHANMODE_MP3_DUAL           ((__u32) 0x00000008)
+
+#define SND_AUDIOPROFILE_AMR                 ((__u32) 0x00000001)
+
+/* AMR modes are only useful for encoders */
+#define SND_AUDIOMODE_AMR_DTX_OFF            ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AMR_VAD1               ((__u32) 0x00000002)
+#define SND_AUDIOMODE_AMR_VAD2               ((__u32) 0x00000004)
+
+#define SND_AUDIOSTREAMFORMAT_UNDEFINED	     ((__u32) 0x00000000)
+#define SND_AUDIOSTREAMFORMAT_CONFORMANCE    ((__u32) 0x00000001)
+#define SND_AUDIOSTREAMFORMAT_IF1            ((__u32) 0x00000002)
+#define SND_AUDIOSTREAMFORMAT_IF2            ((__u32) 0x00000004)
+#define SND_AUDIOSTREAMFORMAT_FSF            ((__u32) 0x00000008)
+#define SND_AUDIOSTREAMFORMAT_RTPPAYLOAD     ((__u32) 0x00000010)
+#define SND_AUDIOSTREAMFORMAT_ITU            ((__u32) 0x00000020)
+
+#define SND_AUDIOPROFILE_AMRWB               ((__u32) 0x00000001)
+
+/* AMRWB modes are only useful for encoders */
+#define SND_AUDIOMODE_AMRWB_DTX_OFF          ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AMRWB_VAD1             ((__u32) 0x00000002)
+#define SND_AUDIOMODE_AMRWB_VAD2             ((__u32) 0x00000004)
+
+#define SND_AUDIOPROFILE_AMRWBPLUS           ((__u32) 0x00000001)
+
+#define SND_AUDIOPROFILE_AAC                 ((__u32) 0x00000001)
+
+/* AAC modes are required for encoders and decoders */
+#define SND_AUDIOMODE_AAC_MAIN               ((__u32) 0x00000001)
+#define SND_AUDIOMODE_AAC_LC                 ((__u32) 0x00000002)
+#define SND_AUDIOMODE_AAC_SSR                ((__u32) 0x00000004)
+#define SND_AUDIOMODE_AAC_LTP                ((__u32) 0x00000008)
+#define SND_AUDIOMODE_AAC_HE                 ((__u32) 0x00000010)
+#define SND_AUDIOMODE_AAC_SCALABLE           ((__u32) 0x00000020)
+#define SND_AUDIOMODE_AAC_ERLC               ((__u32) 0x00000040)
+#define SND_AUDIOMODE_AAC_LD                 ((__u32) 0x00000080)
+#define SND_AUDIOMODE_AAC_HE_PS              ((__u32) 0x00000100)
+#define SND_AUDIOMODE_AAC_HE_MPS             ((__u32) 0x00000200)
+
+/* AAC formats are required for encoders and decoders */
+#define SND_AUDIOSTREAMFORMAT_MP2ADTS        ((__u32) 0x00000001)
+#define SND_AUDIOSTREAMFORMAT_MP4ADTS        ((__u32) 0x00000002)
+#define SND_AUDIOSTREAMFORMAT_MP4LOAS        ((__u32) 0x00000004)
+#define SND_AUDIOSTREAMFORMAT_MP4LATM        ((__u32) 0x00000008)
+#define SND_AUDIOSTREAMFORMAT_ADIF           ((__u32) 0x00000010)
+#define SND_AUDIOSTREAMFORMAT_MP4FF          ((__u32) 0x00000020)
+#define SND_AUDIOSTREAMFORMAT_RAW            ((__u32) 0x00000040)
+
+#define SND_AUDIOPROFILE_WMA7                ((__u32) 0x00000001)
+#define SND_AUDIOPROFILE_WMA8                ((__u32) 0x00000002)
+#define SND_AUDIOPROFILE_WMA9                ((__u32) 0x00000004)
+#define SND_AUDIOPROFILE_WMA10               ((__u32) 0x00000008)
+
+#define SND_AUDIOMODE_WMA_LEVEL1             ((__u32) 0x00000001)
+#define SND_AUDIOMODE_WMA_LEVEL2             ((__u32) 0x00000002)
+#define SND_AUDIOMODE_WMA_LEVEL3             ((__u32) 0x00000004)
+#define SND_AUDIOMODE_WMA_LEVEL4             ((__u32) 0x00000008)
+#define SND_AUDIOMODE_WMAPRO_LEVELM0         ((__u32) 0x00000010)
+#define SND_AUDIOMODE_WMAPRO_LEVELM1         ((__u32) 0x00000020)
+#define SND_AUDIOMODE_WMAPRO_LEVELM2         ((__u32) 0x00000040)
+#define SND_AUDIOMODE_WMAPRO_LEVELM3         ((__u32) 0x00000080)
+
+#define SND_AUDIOSTREAMFORMAT_WMA_ASF        ((__u32) 0x00000001)
+/*
+ * Some implementations strip the ASF header and only send ASF packets
+ * to the DSP
+ */
+#define SND_AUDIOSTREAMFORMAT_WMA_NOASF_HDR  ((__u32) 0x00000002)
+
+#define SND_AUDIOPROFILE_REALAUDIO           ((__u32) 0x00000001)
+
+#define SND_AUDIOMODE_REALAUDIO_G2           ((__u32) 0x00000001)
+#define SND_AUDIOMODE_REALAUDIO_8            ((__u32) 0x00000002)
+#define SND_AUDIOMODE_REALAUDIO_10           ((__u32) 0x00000004)
+#define SND_AUDIOMODE_REALAUDIO_SURROUND     ((__u32) 0x00000008)
+
+#define SND_AUDIOPROFILE_VORBIS              ((__u32) 0x00000001)
+
+#define SND_AUDIOMODE_VORBIS                 ((__u32) 0x00000001)
+
+#define SND_AUDIOPROFILE_FLAC                ((__u32) 0x00000001)
+
+/*
+ * Define quality levels for FLAC encoders, from LEVEL0 (fast)
+ * to LEVEL8 (best)
+ */
+#define SND_AUDIOMODE_FLAC_LEVEL0            ((__u32) 0x00000001)
+#define SND_AUDIOMODE_FLAC_LEVEL1            ((__u32) 0x00000002)
+#define SND_AUDIOMODE_FLAC_LEVEL2            ((__u32) 0x00000004)
+#define SND_AUDIOMODE_FLAC_LEVEL3            ((__u32) 0x00000008)
+#define SND_AUDIOMODE_FLAC_LEVEL4            ((__u32) 0x00000010)
+#define SND_AUDIOMODE_FLAC_LEVEL5            ((__u32) 0x00000020)
+#define SND_AUDIOMODE_FLAC_LEVEL6            ((__u32) 0x00000040)
+#define SND_AUDIOMODE_FLAC_LEVEL7            ((__u32) 0x00000080)
+#define SND_AUDIOMODE_FLAC_LEVEL8            ((__u32) 0x00000100)
+
+#define SND_AUDIOSTREAMFORMAT_FLAC           ((__u32) 0x00000001)
+#define SND_AUDIOSTREAMFORMAT_FLAC_OGG       ((__u32) 0x00000002)
+
+/* IEC61937 payloads without CUVP and preambles */
+#define SND_AUDIOPROFILE_IEC61937            ((__u32) 0x00000001)
+/* IEC61937 with S/PDIF preambles+CUVP bits in 32-bit containers */
+#define SND_AUDIOPROFILE_IEC61937_SPDIF      ((__u32) 0x00000002)
+
+/*
+ * IEC modes are mandatory for decoders. Format autodetection
+ * will only happen on the DSP side with mode 0. The PCM mode should
+ * not be used, the PCM codec should be used instead.
+ */
+#define SND_AUDIOMODE_IEC_REF_STREAM_HEADER  ((__u32) 0x00000000)
+#define SND_AUDIOMODE_IEC_LPCM		     ((__u32) 0x00000001)
+#define SND_AUDIOMODE_IEC_AC3		     ((__u32) 0x00000002)
+#define SND_AUDIOMODE_IEC_MPEG1		     ((__u32) 0x00000004)
+#define SND_AUDIOMODE_IEC_MP3		     ((__u32) 0x00000008)
+#define SND_AUDIOMODE_IEC_MPEG2		     ((__u32) 0x00000010)
+#define SND_AUDIOMODE_IEC_AACLC		     ((__u32) 0x00000020)
+#define SND_AUDIOMODE_IEC_DTS		     ((__u32) 0x00000040)
+#define SND_AUDIOMODE_IEC_ATRAC		     ((__u32) 0x00000080)
+#define SND_AUDIOMODE_IEC_SACD		     ((__u32) 0x00000100)
+#define SND_AUDIOMODE_IEC_EAC3		     ((__u32) 0x00000200)
+#define SND_AUDIOMODE_IEC_DTS_HD	     ((__u32) 0x00000400)
+#define SND_AUDIOMODE_IEC_MLP		     ((__u32) 0x00000800)
+#define SND_AUDIOMODE_IEC_DST		     ((__u32) 0x00001000)
+#define SND_AUDIOMODE_IEC_WMAPRO	     ((__u32) 0x00002000)
+#define SND_AUDIOMODE_IEC_REF_CXT            ((__u32) 0x00004000)
+#define SND_AUDIOMODE_IEC_HE_AAC	     ((__u32) 0x00008000)
+#define SND_AUDIOMODE_IEC_HE_AAC2	     ((__u32) 0x00010000)
+#define SND_AUDIOMODE_IEC_MPEG_SURROUND	     ((__u32) 0x00020000)
+
+#define SND_AUDIOPROFILE_G723_1              ((__u32) 0x00000001)
+
+#define SND_AUDIOMODE_G723_1_ANNEX_A         ((__u32) 0x00000001)
+#define SND_AUDIOMODE_G723_1_ANNEX_B         ((__u32) 0x00000002)
+#define SND_AUDIOMODE_G723_1_ANNEX_C         ((__u32) 0x00000004)
+
+#define SND_AUDIOPROFILE_G729                ((__u32) 0x00000001)
+
+#define SND_AUDIOMODE_G729_ANNEX_A           ((__u32) 0x00000001)
+#define SND_AUDIOMODE_G729_ANNEX_B           ((__u32) 0x00000002)
+
+/* <FIXME: multichannel encoders aren't supported for now. Would need
+   an additional definition of channel arrangement> */
+
+/* VBR/CBR definitions */
+#define SND_RATECONTROLMODE_CONSTANTBITRATE  ((__u32) 0x00000001)
+#define SND_RATECONTROLMODE_VARIABLEBITRATE  ((__u32) 0x00000002)
+
+/* Encoder options */
+
+struct snd_enc_wma {
+	__u32 super_block_align; /* WMA Type-specific data */
+	__u32 bits_per_sample;
+	__u32 channelmask;
+	__u32 encodeopt;
+	__u32 encodeopt1;
+	__u32 encodeopt2;
+};
+
+
+/**
+ * struct snd_enc_vorbis
+ * @quality: Sets encoding quality to n, between -1 (low) and 10 (high).
+ * In the default mode of operation, the quality level is 3.
+ * Normal quality range is 0 - 10.
+ * @managed: Boolean. Set  bitrate  management  mode. This turns off the
+ * normal VBR encoding, but allows hard or soft bitrate constraints to be
+ * enforced by the encoder. This mode can be slower, and may also be
+ * lower quality. It is primarily useful for streaming.
+ * @max_bit_rate: Enabled only if managed is TRUE
+ * @min_bit_rate: Enabled only if managed is TRUE
+ * @downmix: Boolean. Downmix input from stereo to mono (has no effect on
+ * non-stereo streams). Useful for lower-bitrate encoding.
+ *
+ * These options were extracted from the OpenMAX IL spec and Gstreamer vorbisenc
+ * properties
+ *
+ * For best quality users should specify VBR mode and set quality levels.
+ */
+
+struct snd_enc_vorbis {
+	__s32 quality;
+	__u32 managed;
+	__u32 max_bit_rate;
+	__u32 min_bit_rate;
+	__u32 downmix;
+};
+
+
+/**
+ * struct snd_enc_real
+ * @quant_bits: number of coupling quantization bits in the stream
+ * @start_region: coupling start region in the stream
+ * @num_regions: number of regions value
+ *
+ * These options were extracted from the OpenMAX IL spec
+ */
+
+struct snd_enc_real {
+	__u32 quant_bits;
+	__u32 start_region;
+	__u32 num_regions;
+};
+
+/**
+ * struct snd_enc_flac
+ * @num: serial number, valid only for OGG formats
+ *	needs to be set by application
+ * @gain: Add replay gain tags
+ *
+ * These options were extracted from the FLAC online documentation
+ * at http://flac.sourceforge.net/documentation_tools_flac.html
+ *
+ * To make the API simpler, it is assumed that the user will select quality
+ * profiles. Additional options that affect encoding quality and speed can
+ * be added at a later stage if needed.
+ *
+ * By default the Subset format is used by encoders.
+ *
+ * TAGS such as pictures, etc, cannot be handled by an offloaded encoder and are
+ * not supported in this API.
+ */
+
+struct snd_enc_flac {
+	__u32 num;
+	__u32 gain;
+};
+
+struct snd_enc_generic {
+	__u32 bw;	/* encoder bandwidth */
+	__s32 reserved[15];
+};
+
+union snd_codec_options {
+	struct snd_enc_wma wma;
+	struct snd_enc_vorbis vorbis;
+	struct snd_enc_real real;
+	struct snd_enc_flac flac;
+	struct snd_enc_generic generic;
+};
+
+/** struct snd_codec_desc - description of codec capabilities
+ * @max_ch: Maximum number of audio channels
+ * @sample_rates: Sampling rates in Hz, use SNDRV_PCM_RATE_xxx for this
+ * @bit_rate: Indexed array containing supported bit rates
+ * @num_bitrates: Number of valid values in bit_rate array
+ * @rate_control: value is specified by SND_RATECONTROLMODE defines.
+ * @profiles: Supported profiles. See SND_AUDIOPROFILE defines.
+ * @modes: Supported modes. See SND_AUDIOMODE defines
+ * @formats: Supported formats. See SND_AUDIOSTREAMFORMAT defines
+ * @min_buffer: Minimum buffer size handled by codec implementation
+ * @reserved: reserved for future use
+ *
+ * This structure provides a scalar value for profiles, modes and stream
+ * format fields.
+ * If an implementation supports multiple combinations, they will be listed as
+ * codecs with different descriptors, for example there would be 2 descriptors
+ * for AAC-RAW and AAC-ADTS.
+ * This entails some redundancy but makes it easier to avoid invalid
+ * configurations.
+ *
+ */
+
+struct snd_codec_desc {
+	__u32 max_ch;
+	__u32 sample_rates;
+	__u32 bit_rate[MAX_NUM_BITRATES];
+	__u32 num_bitrates;
+	__u32 rate_control;
+	__u32 profiles;
+	__u32 modes;
+	__u32 formats;
+	__u32 min_buffer;
+	__u32 reserved[15];
+};
+
+/** struct snd_codec
+ * @id: Identifies the supported audio encoder/decoder.
+ *		See SND_AUDIOCODEC macros.
+ * @ch_in: Number of input audio channels
+ * @ch_out: Number of output channels. In case of contradiction between
+ *		this field and the channelMode field, the channelMode field
+ *		overrides.
+ * @sample_rate: Audio sample rate of input data
+ * @bit_rate: Bitrate of encoded data. May be ignored by decoders
+ * @rate_control: Encoding rate control. See SND_RATECONTROLMODE defines.
+ *               Encoders may rely on profiles for quality levels.
+ *		 May be ignored by decoders.
+ * @profile: Mandatory for encoders, can be mandatory for specific
+ *		decoders as well. See SND_AUDIOPROFILE defines.
+ * @level: Supported level (Only used by WMA at the moment)
+ * @ch_mode: Channel mode for encoder. See SND_AUDIOCHANMODE defines
+ * @format: Format of encoded bitstream. Mandatory when defined.
+ *		See SND_AUDIOSTREAMFORMAT defines.
+ * @align: Block alignment in bytes of an audio sample.
+ *		Only required for PCM or IEC formats.
+ * @options: encoder-specific settings
+ * @reserved: reserved for future use
+ */
+
+struct snd_codec {
+	__u32 id;
+	__u32 ch_in;
+	__u32 ch_out;
+	__u32 sample_rate;
+	__u32 bit_rate;
+	__u32 rate_control;
+	__u32 profile;
+	__u32 level;
+	__u32 ch_mode;
+	__u32 format;
+	__u32 align;
+	union snd_codec_options options;
+	__u32 reserved[3];
+};
+
+#endif
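Putting the two headers together, a playback client fills a struct snd_codec, wraps it in struct snd_compr_params, and pushes it to the driver before streaming. The sketch below assumes an MP3 offload stream on an already-open compress fd; the fragment sizing is illustrative, and some kernels expect sample_rate as a SNDRV_PCM_RATE_* mask rather than a value in Hz.

/* Hedged sketch: build codec/stream parameters for MP3 offload using the
 * structures and constants above, then hand them to the driver with
 * SNDRV_COMPRESS_SET_PARAMS. Fragment sizing is an illustrative assumption. */
#include <string.h>
#include <sys/ioctl.h>
#include <sound/compress_params.h>
#include <sound/compress_offload.h>

int set_mp3_params(int compr_fd)
{
    struct snd_compr_params params;

    memset(&params, 0, sizeof(params));
    params.codec.id = SND_AUDIOCODEC_MP3;
    params.codec.ch_in = 2;
    params.codec.ch_out = 2;
    params.codec.sample_rate = 44100;   /* Hz here; some trees use rate masks */
    params.codec.bit_rate = 320000;     /* bits per second */
    params.codec.rate_control = SND_RATECONTROLMODE_CONSTANTBITRATE;
    params.buffer.fragment_size = 32 * 1024;   /* assumed sizing */
    params.buffer.fragments = 4;

    return ioctl(compr_fd, SNDRV_COMPRESS_SET_PARAMS, &params);
}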
diff --git a/overlay/frameworks/base/core/res/res/values/config.xml b/overlay/frameworks/base/core/res/res/values/config.xml
index 380d225..9192a94 100644
--- a/overlay/frameworks/base/core/res/res/values/config.xml
+++ b/overlay/frameworks/base/core/res/res/values/config.xml
@@ -148,6 +148,14 @@
            <item>"bt-pan"</item>
     </string-array>
 
+    <!-- Max number of scan filters supported by bluetooth controller. 0 if the
+         device does not support hardware scan filters -->
+    <integer translatable="false" name="config_bluetooth_max_scan_filters">1</integer>
+
+    <!-- Max number of advertisers supported by bluetooth controller. 0 if the
+         device does not support multiple advertisements -->
+    <integer translatable="false" name="config_bluetooth_max_advertisers">4</integer>
+
     <!-- Array of allowable ConnectivityManager network types for tethering -->
     <!-- Common options are [1, 4] for TYPE_WIFI and TYPE_MOBILE_DUN or
          [0,1,5,7] for TYPE_MOBILE, TYPE_WIFI, TYPE_MOBILE_HIPRI and TYPE_BLUETOOTH -->
@@ -179,6 +187,7 @@
         <item>"mobile_cbs,12,0,2,60000,true"</item>
         <item>"mobile_ia,14,0,2,-1,true"</item>
         <item>"bluetooth,7,7,2,-1,true"</item>
+        <item>"ethernet,9,9,9,-1,true"</item>
     </string-array>
 
     <!-- This string array should be overridden by the device to present a list of radio
@@ -190,6 +199,7 @@
         <item>"1,1"</item>
         <item>"0,1"</item>
         <item>"7,1"</item>
+        <item>"9,1"</item>
     </string-array>
 
     <!-- Whether WiFi display is supported by this device.
@@ -258,4 +268,9 @@
 
     <!-- MMS user agent profile url -->
     <string name="config_mms_user_agent_profile_url" translatable="false">http://gsm.lge.com/html/gsm/Nexus5-M3.xml</string>
+
+    <!--  Maximum number of supported users -->
+    <integer name="config_multiuserMaximumUsers">4</integer>
+    <!--  Whether Multiuser UI should be shown -->
+    <bool name="config_enableMultiUserUI">true</bool>
 </resources>
diff --git a/overlay/frameworks/base/core/res/res/xml/power_profile.xml b/overlay/frameworks/base/core/res/res/xml/power_profile.xml
index 196d96a..9389538 100644
--- a/overlay/frameworks/base/core/res/res/xml/power_profile.xml
+++ b/overlay/frameworks/base/core/res/res/xml/power_profile.xml
@@ -19,23 +19,23 @@
 <device name="Android">
     <!-- All values are in mAh except as noted -->
     <item name="none">0</item>
-    <item name="screen.on">48.07</item>
-    <item name="screen.full">221.90</item>
-    <item name="bluetooth.active">93.52</item>
+    <item name="screen.on">82.75</item>
+    <item name="screen.full">201.16</item>
+    <item name="bluetooth.active">51.55</item>
     <item name="bluetooth.on">0.79</item>
     <item name="wifi.on">3.5</item>
     <item name="wifi.active">73.24</item>
     <item name="wifi.scan">75.48</item>
     <item name="dsp.audio">0.1</item>
     <item name="dsp.video">0.1</item>
-    <item name="gps.on">90.8</item>
+    <item name="gps.on">76.23</item>
     <item name="radio.active">185.19</item>
     <!-- The current consumed by the radio when it is scanning for a signal -->
     <item name="radio.scanning">99.2</item>
     <!-- Current consumed by the radio at different signal strengths, when paging -->
     <array name="radio.on"> <!-- Strength 0 to BINS-1 -->
-        <value>1.16</value>
-        <value>2.15</value>
+        <value>4.8</value>
+        <value>1.11</value>
     </array>
     <!-- Different CPU speeds as reported in
          /sys/devices/system/cpu/cpu0/cpufreq/stats/time_in_state -->
diff --git a/overlay/packages/services/Telephony/res/values-mcc310-mnc120/config.xml b/overlay/packages/services/Telephony/res/values-mcc310-mnc120/config.xml
index 3b42ab8..4d2f648 100644
--- a/overlay/packages/services/Telephony/res/values-mcc310-mnc120/config.xml
+++ b/overlay/packages/services/Telephony/res/values-mcc310-mnc120/config.xml
@@ -36,6 +36,6 @@
     <!-- Display carrier settings -->
     <bool name="config_carrier_settings_enable">true</bool>
     <!-- carrier settings menu -->
-    <string name="carrier_settings" translatable="false">com.lge.update</string>
-    <string name="carrier_settings_menu" translatable="false">com.lge.update.SprintUpdateSetting</string>
+    <string name="carrier_settings" translatable="false">com.android.sdm.plugins.sprintdm</string>
+    <string name="carrier_settings_menu" translatable="false">com.android.sdm.plugins.sprintdm.SprintCarrierSettingsActivity</string>
 </resources>
diff --git a/power/Android.mk b/power/Android.mk
new file mode 100644
index 0000000..60ffb11
--- /dev/null
+++ b/power/Android.mk
@@ -0,0 +1,25 @@
+#
+# Copyright 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_MODULE_RELATIVE_PATH := hw
+LOCAL_SRC_FILES := power_hammerhead.c
+LOCAL_SHARED_LIBRARIES := liblog libcutils
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := power.hammerhead
+include $(BUILD_SHARED_LIBRARY)
diff --git a/power/power_hammerhead.c b/power/power_hammerhead.c
new file mode 100644
index 0000000..a9f497c
--- /dev/null
+++ b/power/power_hammerhead.c
@@ -0,0 +1,409 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <errno.h>
+#include <string.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <sys/socket.h>
+#include <sys/un.h>
+#include <fcntl.h>
+#include <dlfcn.h>
+#include <cutils/uevent.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <sys/poll.h>
+#include <pthread.h>
+#include <linux/netlink.h>
+#include <stdlib.h>
+#include <stdbool.h>
+
+#define LOG_TAG "PowerHAL"
+#include <utils/Log.h>
+
+#include <hardware/hardware.h>
+#include <hardware/power.h>
+
+#define STATE_ON "state=1"
+#define STATE_OFF "state=0"
+#define STATE_HDR_ON "state=2"
+#define STATE_HDR_OFF "state=3"
+
+#define MAX_LENGTH         50
+#define BOOST_SOCKET       "/dev/socket/pb"
+
+#define UEVENT_MSG_LEN 1024
+#define TOTAL_CPUS 4
+#define RETRY_TIME_CHANGING_FREQ 20
+#define SLEEP_USEC_BETWN_RETRY 200
+#define LOW_POWER_MAX_FREQ "729600"
+#define LOW_POWER_MIN_FREQ "300000"
+#define NORMAL_MAX_FREQ "2265600"
+#define UEVENT_STRING "online@/devices/system/cpu/"
+
+static int client_sockfd;
+static struct sockaddr_un client_addr;
+static int last_state = -1;
+
+static struct pollfd pfd;
+static char *cpu_path_min[] = {
+    "/sys/devices/system/cpu/cpu0/cpufreq/scaling_min_freq",
+    "/sys/devices/system/cpu/cpu1/cpufreq/scaling_min_freq",
+    "/sys/devices/system/cpu/cpu2/cpufreq/scaling_min_freq",
+    "/sys/devices/system/cpu/cpu3/cpufreq/scaling_min_freq",
+};
+static char *cpu_path_max[] = {
+    "/sys/devices/system/cpu/cpu0/cpufreq/scaling_max_freq",
+    "/sys/devices/system/cpu/cpu1/cpufreq/scaling_max_freq",
+    "/sys/devices/system/cpu/cpu2/cpufreq/scaling_max_freq",
+    "/sys/devices/system/cpu/cpu3/cpufreq/scaling_max_freq",
+};
+static bool freq_set[TOTAL_CPUS];
+static bool low_power_mode = false;
+static pthread_mutex_t low_power_mode_lock = PTHREAD_MUTEX_INITIALIZER;
+
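+/* Lazily create the datagram socket and address used to send boost
+ * hints to the daemon listening on BOOST_SOCKET. */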
+static void socket_init()
+{
+    if (!client_sockfd) {
+        client_sockfd = socket(PF_UNIX, SOCK_DGRAM, 0);
+        if (client_sockfd < 0) {
+            ALOGE("%s: failed to open: %s", __func__, strerror(errno));
+            return;
+        }
+        memset(&client_addr, 0, sizeof(struct sockaddr_un));
+        client_addr.sun_family = AF_UNIX;
+        snprintf(client_addr.sun_path, UNIX_PATH_MAX, BOOST_SOCKET);
+    }
+}
+
+static int sysfs_write(const char *path, char *s)
+{
+    char buf[80];
+    int len;
+    int fd = open(path, O_WRONLY);
+
+    if (fd < 0) {
+        strerror_r(errno, buf, sizeof(buf));
+        ALOGE("Error opening %s: %s\n", path, buf);
+        return -1;
+    }
+
+    len = write(fd, s, strlen(s));
+    if (len < 0) {
+        strerror_r(errno, buf, sizeof(buf));
+        ALOGE("Error writing to %s: %s\n", path, buf);
+        close(fd);
+        return -1;
+    }
+
+    close(fd);
+    return 0;
+}
+
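+/* Handle one CPU online uevent: while low-power mode is active, re-apply the
+ * reduced min/max frequency caps to that core; otherwise restore its normal
+ * max frequency. Writes are retried a bounded number of times. */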
+static int uevent_event()
+{
+    char msg[UEVENT_MSG_LEN];
+    char *cp;
+    int n, cpu, ret, retry = RETRY_TIME_CHANGING_FREQ;
+
+    n = recv(pfd.fd, msg, UEVENT_MSG_LEN, MSG_DONTWAIT);
+    if (n <= 0) {
+        return -1;
+    }
+    if (n >= UEVENT_MSG_LEN) {   /* overflow -- discard */
+        return -1;
+    }
+    /* uevent payloads are not guaranteed to be NUL terminated */
+    msg[n] = '\0';
+
+    cp = msg;
+
+    if (strstr(cp, UEVENT_STRING)) {
+        n = strlen(cp);
+        errno = 0;
+        cpu = strtol(cp + n - 1, NULL, 10);
+
+        if (errno == EINVAL || errno == ERANGE || cpu < 0 || cpu >= TOTAL_CPUS) {
+            return -1;
+        }
+
+        pthread_mutex_lock(&low_power_mode_lock);
+        if (low_power_mode && !freq_set[cpu]) {
+            while (retry) {
+                sysfs_write(cpu_path_min[cpu], LOW_POWER_MIN_FREQ);
+                ret = sysfs_write(cpu_path_max[cpu], LOW_POWER_MAX_FREQ);
+                if (!ret) {
+                    freq_set[cpu] = true;
+                    break;
+                }
+                usleep(SLEEP_USEC_BETWN_RETRY);
+                retry--;
+            }
+        } else if (!low_power_mode && freq_set[cpu]) {
+            while (retry) {
+                ret = sysfs_write(cpu_path_max[cpu], NORMAL_MAX_FREQ);
+                if (!ret) {
+                    freq_set[cpu] = false;
+                    break;
+                }
+                usleep(SLEEP_USEC_BETWN_RETRY);
+                retry--;
+            }
+        }
+        pthread_mutex_unlock(&low_power_mode_lock);
+    }
+    return 0;
+}
+
+void *thread_uevent(__attribute__((unused)) void *x)
+{
+    while (1) {
+        int nevents, ret;
+
+        nevents = poll(&pfd, 1, -1);
+
+        if (nevents == -1) {
+            if (errno == EINTR)
+                continue;
+            ALOGE("powerhal: thread_uevent: poll_wait failed\n");
+            break;
+        }
+        ret = uevent_event();
+        if (ret < 0)
+            ALOGE("Error processing the uevent event");
+    }
+    return NULL;
+}
+
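+/* Open a netlink socket for kernel uevents and spawn the listener thread. */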
+static void uevent_init()
+{
+    struct sockaddr_nl client;
+    pthread_t tid;
+    pfd.fd = socket(PF_NETLINK, SOCK_DGRAM, NETLINK_KOBJECT_UEVENT);
+
+    if (pfd.fd < 0) {
+        ALOGE("%s: failed to open: %s", __func__, strerror(errno));
+        return;
+    }
+    memset(&client, 0, sizeof(struct sockaddr_nl));
+    pthread_create(&tid, NULL, thread_uevent, NULL);
+    client.nl_family = AF_NETLINK;
+    client.nl_pid = tid;
+    client.nl_groups = -1;
+    pfd.events = POLLIN;
+    bind(pfd.fd, (void *)&client, sizeof(struct sockaddr_nl));
+    return;
+}
+
+static void power_init(__attribute__((unused)) struct power_module *module)
+{
+    ALOGI("%s", __func__);
+    socket_init();
+    uevent_init();
+}
+
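+/* Send a "2:<pid>" (off == 0) or "3:<pid>" (off != 0) control message to the
+ * boost socket. */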
+static void sync_thread(int off)
+{
+    int rc;
+    pid_t client;
+    char data[MAX_LENGTH];
+
+    if (client_sockfd < 0) {
+        ALOGE("%s: boost socket not created", __func__);
+        return;
+    }
+
+    client = getpid();
+
+    if (!off) {
+        snprintf(data, MAX_LENGTH, "2:%d", client);
+        rc = sendto(client_sockfd, data, strlen(data), 0,
+            (const struct sockaddr *)&client_addr, sizeof(struct sockaddr_un));
+    } else {
+        snprintf(data, MAX_LENGTH, "3:%d", client);
+        rc = sendto(client_sockfd, data, strlen(data), 0,
+            (const struct sockaddr *)&client_addr, sizeof(struct sockaddr_un));
+    }
+
+    if (rc < 0) {
+        ALOGE("%s: failed to send: %s", __func__, strerror(errno));
+    }
+}
+
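+/* Send a "5:<pid>" (off == 0) or "6:<pid>" (off != 0) video-encode control
+ * message to the boost socket. */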
+static void enc_boost(int off)
+{
+    int rc;
+    pid_t client;
+    char data[MAX_LENGTH];
+
+    if (client_sockfd < 0) {
+        ALOGE("%s: boost socket not created", __func__);
+        return;
+    }
+
+    client = getpid();
+
+    if (!off) {
+        snprintf(data, MAX_LENGTH, "5:%d", client);
+        rc = sendto(client_sockfd, data, strlen(data), 0,
+            (const struct sockaddr *)&client_addr, sizeof(struct sockaddr_un));
+    } else {
+        snprintf(data, MAX_LENGTH, "6:%d", client);
+        rc = sendto(client_sockfd, data, strlen(data), 0,
+            (const struct sockaddr *)&client_addr, sizeof(struct sockaddr_un));
+    }
+
+    if (rc < 0) {
+        ALOGE("%s: failed to send: %s", __func__, strerror(errno));
+    }
+}
+
+static void process_video_encode_hint(void *metadata)
+{
+
+    socket_init();
+
+    if (client_sockfd < 0) {
+        ALOGE("%s: boost socket not created", __func__);
+        return;
+    }
+
+    if (metadata) {
+        if (!strncmp(metadata, STATE_ON, sizeof(STATE_ON))) {
+            /* Video encode started */
+            sync_thread(1);
+            enc_boost(1);
+        } else if (!strncmp(metadata, STATE_OFF, sizeof(STATE_OFF))) {
+            /* Video encode stopped */
+            sync_thread(0);
+            enc_boost(0);
+        } else if (!strncmp(metadata, STATE_HDR_ON, sizeof(STATE_HDR_ON))) {
+            /* HDR usecase started */
+        } else if (!strncmp(metadata, STATE_HDR_OFF, sizeof(STATE_HDR_OFF))) {
+            /* HDR usecase stopped */
+        } else
+            return;
+    } else {
+        return;
+    }
+}
+
+
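+/* Send a "1:<pid>" touch boost request to the boost socket. */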
+static void touch_boost()
+{
+    int rc;
+    pid_t client;
+    char data[MAX_LENGTH];
+
+    if (client_sockfd < 0) {
+        ALOGE("%s: boost socket not created", __func__);
+        return;
+    }
+
+    client = getpid();
+
+    snprintf(data, MAX_LENGTH, "1:%d", client);
+    rc = sendto(client_sockfd, data, strlen(data), 0,
+        (const struct sockaddr *)&client_addr, sizeof(struct sockaddr_un));
+    if (rc < 0) {
+        ALOGE("%s: failed to send: %s", __func__, strerror(errno));
+    }
+}
+
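+/* Ignore repeated notifications for the same state; on entering the
+ * interactive state call sync_thread(0) and touch_boost(), on leaving it
+ * call sync_thread(1). */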
+static void power_set_interactive(__attribute__((unused)) struct power_module *module, int on)
+{
+    if (last_state == -1) {
+        last_state = on;
+    } else {
+        if (last_state == on)
+            return;
+        else
+            last_state = on;
+    }
+
+    ALOGV("%s %s", __func__, (on ? "ON" : "OFF"));
+    if (on) {
+        sync_thread(0);
+        touch_boost();
+    } else {
+        sync_thread(1);
+    }
+}
+
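+/* Dispatch framework power hints: interaction triggers a touch boost, video
+ * encode start/stop drives sync_thread()/enc_boost(), and the low-power hint
+ * caps or restores CPU frequencies and adjusts the refresh rate via
+ * SurfaceFlinger. */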
+static void power_hint( __attribute__((unused)) struct power_module *module,
+                      power_hint_t hint, __attribute__((unused)) void *data)
+{
+    int cpu, ret;
+
+    switch (hint) {
+        case POWER_HINT_INTERACTION:
+            ALOGV("POWER_HINT_INTERACTION");
+            touch_boost();
+            break;
+#if 0
+        case POWER_HINT_VSYNC:
+            ALOGV("POWER_HINT_VSYNC %s", (data ? "ON" : "OFF"));
+            break;
+#endif
+        case POWER_HINT_VIDEO_ENCODE:
+            process_video_encode_hint(data);
+            break;
+
+        case POWER_HINT_LOW_POWER:
+             pthread_mutex_lock(&low_power_mode_lock);
+             if (data) {
+                 low_power_mode = true;
+                 for (cpu = 0; cpu < TOTAL_CPUS; cpu++) {
+                     sysfs_write(cpu_path_min[cpu], LOW_POWER_MIN_FREQ);
+                     ret = sysfs_write(cpu_path_max[cpu], LOW_POWER_MAX_FREQ);
+                     if (!ret) {
+                         freq_set[cpu] = true;
+                     }
+                 }
+                 // reduces the refresh rate
+                 system("service call SurfaceFlinger 1016 i32 1");
+             } else {
+                 low_power_mode = false;
+                 for (cpu = 0; cpu < TOTAL_CPUS; cpu++) {
+                     ret = sysfs_write(cpu_path_max[cpu], NORMAL_MAX_FREQ);
+                     if (!ret) {
+                         freq_set[cpu] = false;
+                     }
+                 }
+                 // restores the refresh rate
+                 system("service call SurfaceFlinger 1016 i32 0");
+             }
+             pthread_mutex_unlock(&low_power_mode_lock);
+             break;
+        default:
+             break;
+    }
+}
+
+static struct hw_module_methods_t power_module_methods = {
+    .open = NULL,
+};
+
+struct power_module HAL_MODULE_INFO_SYM = {
+    .common = {
+        .tag = HARDWARE_MODULE_TAG,
+        .module_api_version = POWER_MODULE_API_VERSION_0_2,
+        .hal_api_version = HARDWARE_HAL_API_VERSION,
+        .id = POWER_HARDWARE_MODULE_ID,
+        .name = "Hammerhead Power HAL",
+        .author = "The Android Open Source Project",
+        .methods = &power_module_methods,
+    },
+
+    .init = power_init,
+    .setInteractive = power_set_interactive,
+    .powerHint = power_hint,
+};
diff --git a/proprietary-blobs.txt b/proprietary-blobs.txt
index eaffaa8..5aa1535 100644
--- a/proprietary-blobs.txt
+++ b/proprietary-blobs.txt
@@ -16,9 +16,8 @@
 
 /system/app/qcrilmsgtunnel.apk
 /system/app/shutdownlistener.apk
-/system/app/SprintHiddenMenu.apk
+/system/app/SprintDM.apk
 /system/app/TimeService.apk
-/system/app/UpdateSetting.apk
 /system/bin/bridgemgrd
 /system/bin/diag_klog
 /system/bin/diag_mdlog
@@ -39,22 +38,20 @@
 /system/bin/time_daemon
 /system/bin/usbhub
 /system/bin/usbhub_init
-/system/etc/Bluetooth_cal.acdb
+/system/etc/acdbdata/MTP/MTP_Bluetooth_cal.acdb
 /system/etc/firmware/cpp_firmware_v1_1_1.fw
 /system/etc/firmware/cpp_firmware_v1_1_6.fw
 /system/etc/firmware/cpp_firmware_v1_2_0.fw
-/system/etc/General_cal.acdb
-/system/etc/Global_cal.acdb
-/system/etc/Handset_cal.acdb
-/system/etc/Hdmi_cal.acdb
-/system/etc/Headset_cal.acdb
+/system/etc/acdbdata/MTP/MTP_General_cal.acdb
+/system/etc/acdbdata/MTP/MTP_Global_cal.acdb
+/system/etc/acdbdata/MTP/MTP_Handset_cal.acdb
+/system/etc/acdbdata/MTP/MTP_Hdmi_cal.acdb
+/system/etc/acdbdata/MTP/MTP_Headset_cal.acdb
 /system/etc/permissions/qcrilhook.xml
-/system/etc/permissions/serviceitems.xml
 /system/etc/qcril.db
 /system/etc/sensor_def_hh.conf
-/system/etc/Speaker_cal.acdb
+/system/etc/acdbdata/MTP/MTP_Speaker_cal.acdb
 /system/framework/qcrilhook.jar
-/system/framework/serviceitems.jar
 /system/lib/hw/flp.msm8974.so
 /system/lib/hw/gps.msm8974.so
 /system/lib/libadsprpc.so
@@ -159,7 +156,8 @@
 /system/vendor/lib/libizat_core.so
 /system/vendor/lib/libjpegdhw.so
 /system/vendor/lib/libjpegehw.so
-/system/vendor/lib/libllvm-a3xx.so
+/system/vendor/lib/libllvm-qcom.so
+/system/vendor/lib/libbccQTI.so
 /system/vendor/lib/libloc_api_v02.so
 /system/vendor/lib/libloc_ds_api.so
 /system/vendor/lib/libmmcamera2_c2d_module.so
diff --git a/recovery/Android.mk b/recovery/Android.mk
deleted file mode 100644
index 1216adf..0000000
--- a/recovery/Android.mk
+++ /dev/null
@@ -1,13 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_MODULE_TAGS := eng
-LOCAL_C_INCLUDES += bootable/recovery
-LOCAL_SRC_FILES := recovery_ui.cpp
-
-# should match TARGET_RECOVERY_UI_LIB set in BoardConfig.mk
-LOCAL_MODULE := librecovery_ui_hammerhead
-
-include $(BUILD_STATIC_LIBRARY)
-
-include $(CLEAR_VARS)
diff --git a/recovery/recovery_ui.cpp b/recovery/recovery_ui.cpp
deleted file mode 100644
index 98bb24f..0000000
--- a/recovery/recovery_ui.cpp
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Copyright 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <linux/input.h>
-#include <sys/stat.h>
-#include <errno.h>
-#include <string.h>
-
-#include "common.h"
-#include "device.h"
-#include "screen_ui.h"
-
-const char* HEADERS[] = { "Volume up/down to move highlight;",
-                          "power button to select.",
-                          "",
-                          NULL };
-
-const char* ITEMS[] = { "reboot system now",
-                        "apply update from ADB",
-                        "wipe data/factory reset",
-                        "wipe cache partition",
-                        NULL };
-
-class HammerheadDevice : public Device
-{
-public:
-    HammerheadDevice() :
-        ui(new ScreenRecoveryUI) {
-    }
-
-    RecoveryUI* GetUI() { return ui; }
-
-    int HandleMenuKey(int key_code, int visible) {
-        if (visible) {
-            switch (key_code) {
-            case KEY_DOWN:
-            case KEY_VOLUMEDOWN:
-                return kHighlightDown;
-
-            case KEY_UP:
-            case KEY_VOLUMEUP:
-                return kHighlightUp;
-
-            case KEY_POWER:
-                return kInvokeItem;
-            }
-        }
-
-        return kNoAction;
-    }
-
-    BuiltinAction InvokeMenuItem(int menu_position) {
-        switch (menu_position) {
-        case 0: return REBOOT;
-        case 1: return APPLY_ADB_SIDELOAD;
-        case 2: return WIPE_DATA;
-        case 3: return WIPE_CACHE;
-        default: return NO_ACTION;
-        }
-    }
-
-    const char* const* GetMenuHeaders() { return HEADERS; }
-    const char* const* GetMenuItems() { return ITEMS; }
-
-private:
-    RecoveryUI* ui;
-};
-
-Device* make_device() {
-    return new HammerheadDevice;
-}
diff --git a/self-extractors/extract-lists.txt b/self-extractors/extract-lists.txt
index f342188..68cb154 100644
--- a/self-extractors/extract-lists.txt
+++ b/self-extractors/extract-lists.txt
@@ -8,19 +8,16 @@
   lge)
     TO_EXTRACT="\
             system/app/qcrilmsgtunnel.apk \
-            system/app/SprintHiddenMenu.apk \
-            system/app/UpdateSetting.apk \
-            system/etc/Bluetooth_cal.acdb \
-            system/etc/General_cal.acdb \
-            system/etc/Global_cal.acdb \
-            system/etc/Handset_cal.acdb \
-            system/etc/Hdmi_cal.acdb \
-            system/etc/Headset_cal.acdb \
-            system/etc/permissions/serviceitems.xml \
+            system/app/SprintDM.apk \
+            system/etc/acdbdata/MTP/MTP_Bluetooth_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_General_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Global_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Handset_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Hdmi_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Headset_cal.acdb \
             system/etc/qcril.db \
             system/etc/sensor_def_hh.conf \
-            system/etc/Speaker_cal.acdb \
-            system/framework/serviceitems.jar \
+            system/etc/acdbdata/MTP/MTP_Speaker_cal.acdb \
             system/vendor/bin/vss_init \
             system/vendor/firmware/bu24205_LGIT_VER_2_DATA1.bin \
             system/vendor/firmware/bu24205_LGIT_VER_2_DATA2.bin \
@@ -157,7 +154,8 @@
             system/vendor/lib/libizat_core.so \
             system/vendor/lib/libjpegdhw.so \
             system/vendor/lib/libjpegehw.so \
-            system/vendor/lib/libllvm-a3xx.so \
+            system/vendor/lib/libllvm-qcom.so \
+            system/vendor/lib/libbccQTI.so \
             system/vendor/lib/libloc_api_v02.so \
             system/vendor/lib/libloc_ds_api.so \
             system/vendor/lib/libmmcamera2_c2d_module.so \
diff --git a/self-extractors/generate-packages.sh b/self-extractors/generate-packages.sh
index 38c108a..dd94135 100755
--- a/self-extractors/generate-packages.sh
+++ b/self-extractors/generate-packages.sh
@@ -45,18 +45,18 @@
   lge)
     TO_EXTRACT="\
             system/app/qcrilmsgtunnel.apk \
-            system/app/SprintHiddenMenu.apk \
+            system/app/SprintDM.apk \
             system/app/UpdateSetting.apk \
-            system/etc/Bluetooth_cal.acdb \
-            system/etc/General_cal.acdb \
-            system/etc/Global_cal.acdb \
-            system/etc/Handset_cal.acdb \
-            system/etc/Hdmi_cal.acdb \
-            system/etc/Headset_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Bluetooth_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_General_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Global_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Handset_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Hdmi_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Headset_cal.acdb \
             system/etc/permissions/serviceitems.xml \
             system/etc/qcril.db \
             system/etc/sensor_def_hh.conf \
-            system/etc/Speaker_cal.acdb \
+            system/etc/acdbdata/MTP/MTP_Speaker_cal.acdb \
             system/framework/serviceitems.jar \
             system/vendor/bin/vss_init \
             system/vendor/firmware/bu24205_LGIT_VER_2_DATA1.bin \
@@ -194,7 +194,8 @@
             system/vendor/lib/libizat_core.so \
             system/vendor/lib/libjpegdhw.so \
             system/vendor/lib/libjpegehw.so \
-            system/vendor/lib/libllvm-a3xx.so \
+            system/vendor/lib/libllvm-qcom.so \
+            system/vendor/lib/libbccQTI.so \
             system/vendor/lib/libloc_api_v02.so \
             system/vendor/lib/libloc_ds_api.so \
             system/vendor/lib/libmmcamera2_c2d_module.so \
diff --git a/self-extractors/lge/staging/device-partial.mk b/self-extractors/lge/staging/device-partial.mk
index b1b4148..459f492 100644
--- a/self-extractors/lge/staging/device-partial.mk
+++ b/self-extractors/lge/staging/device-partial.mk
@@ -15,19 +15,16 @@
 # LGE blob(s) necessary for Hammerhead hardware
 PRODUCT_COPY_FILES := \
     vendor/lge/hammerhead/proprietary/qcrilmsgtunnel.apk:system/app/qcrilmsgtunnel.apk:lge \
-    vendor/lge/hammerhead/proprietary/SprintHiddenMenu.apk:system/app/SprintHiddenMenu.apk:lge \
-    vendor/lge/hammerhead/proprietary/UpdateSetting.apk:system/app/UpdateSetting.apk:lge \
-    vendor/lge/hammerhead/proprietary/Bluetooth_cal.acdb:system/etc/Bluetooth_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/General_cal.acdb:system/etc/General_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/Global_cal.acdb:system/etc/Global_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/Handset_cal.acdb:system/etc/Handset_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/Hdmi_cal.acdb:system/etc/Hdmi_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/Headset_cal.acdb:system/etc/Headset_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/serviceitems.xml:system/etc/permissions/serviceitems.xml:lge \
+    vendor/lge/hammerhead/proprietary/SprintDM.apk:system/app/SprintDM.apk:lge \
+    vendor/lge/hammerhead/proprietary/MTP_Bluetooth_cal.acdb:system/etc/MTP_Bluetooth_cal.acdb:lge \
+    vendor/lge/hammerhead/proprietary/MTP_General_cal.acdb:system/etc/MTP_General_cal.acdb:lge \
+    vendor/lge/hammerhead/proprietary/MTP_Global_cal.acdb:system/etc/MTP_Global_cal.acdb:lge \
+    vendor/lge/hammerhead/proprietary/MTP_Handset_cal.acdb:system/etc/MTP_Handset_cal.acdb:lge \
+    vendor/lge/hammerhead/proprietary/MTP_Hdmi_cal.acdb:system/etc/MTP_Hdmi_cal.acdb:lge \
+    vendor/lge/hammerhead/proprietary/MTP_Headset_cal.acdb:system/etc/MTP_Headset_cal.acdb:lge \
     vendor/lge/hammerhead/proprietary/qcril.db:system/etc/qcril.db:lge \
     vendor/lge/hammerhead/proprietary/sensor_def_hh.conf:system/etc/sensor_def_hh.conf:lge \
-    vendor/lge/hammerhead/proprietary/Speaker_cal.acdb:system/etc/Speaker_cal.acdb:lge \
-    vendor/lge/hammerhead/proprietary/serviceitems.jar:system/framework/serviceitems.jar:lge \
+    vendor/lge/hammerhead/proprietary/MTP_Speaker_cal.acdb:system/etc/MTP_Speaker_cal.acdb:lge \
     vendor/lge/hammerhead/proprietary/vss_init:system/vendor/bin/vss_init:lge \
     vendor/lge/hammerhead/proprietary/bu24205_LGIT_VER_2_DATA1.bin:system/vendor/firmware/bu24205_LGIT_VER_2_DATA1.bin:lge \
     vendor/lge/hammerhead/proprietary/bu24205_LGIT_VER_2_DATA2.bin:system/vendor/firmware/bu24205_LGIT_VER_2_DATA2.bin:lge \
diff --git a/self-extractors/qcom/staging/device-partial.mk b/self-extractors/qcom/staging/device-partial.mk
index 2fda57a..50223c9 100644
--- a/self-extractors/qcom/staging/device-partial.mk
+++ b/self-extractors/qcom/staging/device-partial.mk
@@ -126,7 +126,8 @@
     vendor/qcom/hammerhead/proprietary/libizat_core.so:system/vendor/lib/libizat_core.so:qcom \
     vendor/qcom/hammerhead/proprietary/libjpegdhw.so:system/vendor/lib/libjpegdhw.so:qcom \
     vendor/qcom/hammerhead/proprietary/libjpegehw.so:system/vendor/lib/libjpegehw.so:qcom \
-    vendor/qcom/hammerhead/proprietary/libllvm-a3xx.so:system/vendor/lib/libllvm-a3xx.so:qcom \
+    vendor/qcom/hammerhead/proprietary/libllvm-qcom.so:system/vendor/lib/libllvm-qcom.so:qcom \
+    vendor/qcom/hammerhead/proprietary/libbccQTI.so:system/vendor/lib/libbccQTI.so:qcom \
     vendor/qcom/hammerhead/proprietary/libloc_api_v02.so:system/vendor/lib/libloc_api_v02.so:qcom \
     vendor/qcom/hammerhead/proprietary/libloc_ds_api.so:system/vendor/lib/libloc_ds_api.so:qcom \
     vendor/qcom/hammerhead/proprietary/libmmcamera2_c2d_module.so:system/vendor/lib/libmmcamera2_c2d_module.so:qcom \
diff --git a/sepolicy/rmt.te b/sepolicy/rmt.te
index 0530a08..a69f192 100644
--- a/sepolicy/rmt.te
+++ b/sepolicy/rmt.te
@@ -5,8 +5,6 @@
 # Started by init
 init_daemon_domain(rmt)
 
-permissive rmt;
-
 # Drop (user, group) to (nobody, nobody)
 allow rmt self:capability { setuid setgid };
 
diff --git a/thermal-engine-8974.conf b/thermal-engine-8974.conf
new file mode 100644
index 0000000..c3a0289
--- /dev/null
+++ b/thermal-engine-8974.conf
@@ -0,0 +1,64 @@
+sampling         5000
+
+[battery_LCD_monitor]
+algo_type        monitor
+sensor           batt_therm
+sampling         10000
+thresholds       100000            340000           350000           360000           370000
+thresholds_clr   50000             330000           340000           350000           350000
+actions          override          override         override         override         override
+action_info      10000             7500             5000             2500             0
+
+[SKIN_THERMAL_management_1]
+algo_type        monitor
+sensor           xo_therm_pu2
+sampling         10000
+thresholds       40000               42000          44000
+thresholds_clr   38500               40500          42500
+actions          cpu+lcd             cpu+lcd        cpu+lcd
+action_info      1958400+229         1574400+204   1190400+178
+
+[battery_monitor]
+algo_type        monitor
+sensor           batt_therm
+sampling         10000
+thresholds       480000             550000
+thresholds_clr   460000             500000
+actions          battery            battery
+action_info      2                  3
+
+[CPU0_MONITOR]
+algo_type        monitor
+sensor           cpu0
+sampling         65
+thresholds       115000
+thresholds_clr   110000
+actions          shutdown
+action_info      0
+
+[CPU1_MONITOR]
+algo_type        monitor
+sensor           cpu1
+sampling         65
+thresholds       115000
+thresholds_clr   110000
+actions          shutdown
+action_info      0
+
+[CPU2_MONITOR]
+algo_type        monitor
+sensor           cpu2
+sampling         65
+thresholds       115000
+thresholds_clr   110000
+actions          shutdown
+action_info      0
+
+[CPU3_MONITOR]
+algo_type        monitor
+sensor           cpu3
+sampling         65
+thresholds       115000
+thresholds_clr   110000
+actions          shutdown
+action_info      0
diff --git a/thermal-engine-hammerhead.conf b/thermal-engine-hammerhead.conf
deleted file mode 100644
index d1cfcb3..0000000
--- a/thermal-engine-hammerhead.conf
+++ /dev/null
@@ -1,147 +0,0 @@
-sampling         5000
-
-[battery_LCD_monitor]
-algo_type        monitor
-sensor           batt_therm
-sampling         10000
-thresholds       100000            340000           350000           360000           370000
-thresholds_clr   50000             330000           340000           350000           350000
-actions          override          override         override         override         override
-action_info      10000             7500             5000             2500             0
-
-[SKIN_THERMAL_management_1]
-algo_type        monitor
-sensor           xo_therm_pu2
-sampling         10000
-thresholds       40000               42000          44000
-thresholds_clr   38500               40500          42500
-actions          cpu+lcd             cpu+lcd        cpu+lcd
-action_info      1958400+229         1574400+204   1190400+178
-
-[battery_monitor]
-algo_type        monitor
-sensor           batt_therm
-sampling         10000
-thresholds       480000             550000
-thresholds_clr   460000             500000
-actions          battery            battery
-action_info      2                  3
-
-[CPU0_MONITOR]
-algo_type        monitor
-sensor           cpu0
-sampling         65
-thresholds       115000
-thresholds_clr   110000
-actions          shutdown
-action_info      0
-
-[CPU1_MONITOR]
-algo_type        monitor
-sensor           cpu1
-sampling         65
-thresholds       115000
-thresholds_clr   110000
-actions          shutdown
-action_info      0
-
-[CPU2_MONITOR]
-algo_type        monitor
-sensor           cpu2
-sampling         65
-thresholds       115000
-thresholds_clr   110000
-actions          shutdown
-action_info      0
-
-[CPU3_MONITOR]
-algo_type        monitor
-sensor           cpu3
-sampling         65
-thresholds       115000
-thresholds_clr   110000
-actions          shutdown
-action_info      0
-
-[HOTPLUG-CPU1]
-algo_type        monitor
-sensor           cpu1
-sampling         65
-thresholds       105000
-thresholds_clr   85000
-actions          hotplug_1
-action_info      1
-
-[HOTPLUG-CPU2]
-algo_type        monitor
-sensor           cpu2
-sampling         65
-thresholds       105000
-thresholds_clr   85000
-actions          hotplug_2
-action_info      1
-
-[HOTPLUG-CPU3]
-algo_type        monitor
-sensor           cpu3
-sampling         65
-thresholds       105000
-thresholds_clr   85000
-actions          hotplug_3
-action_info      1
-
-[PID-CPU0]
-disable 1
-
-[PID-CPU1]
-disable 1
-
-[PID-CPU2]
-disable 1
-
-[PID-CPU3]
-disable 1
-
-[PID-POPMEM]
-disable 1
-
-[SS-CPU0]
-algo_type          ss
-sampling           65
-sensor             cpu0
-device             cpu
-set_point          80000
-set_point_clr      55000
-
-[SS-CPU1]
-algo_type          ss
-sampling           65
-sensor             cpu1
-device             cpu
-set_point          80000
-set_point_clr      55000
-
-[SS-CPU2]
-algo_type          ss
-sampling           65
-sensor             cpu2
-device             cpu
-set_point          80000
-set_point_clr      55000
-
-[SS-CPU3]
-algo_type          ss
-sampling           65
-sensor             cpu3
-device             cpu
-set_point          80000
-set_point_clr      55000
-
-[SS-POPMEM]
-algo_type          ss
-sampling           65
-sensor             pop_mem
-device             cpu
-set_point          80000
-set_point_clr      55000
-time_constant      16
diff --git a/vendor_owner_info.txt b/vendor_owner_info.txt
index d44914d..4a6bc8b 100644
--- a/vendor_owner_info.txt
+++ b/vendor_owner_info.txt
@@ -12,17 +12,15 @@
 system/vendor/firmware/bu24205_LGIT_VER_3_DATA3.bin:lge
 system/vendor/firmware/bu24205_LGIT_VER_3_CAL.bin:lge
 root/adb_keys:google
-system/etc/Bluetooth_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_Bluetooth_cal.acdb:lge
 system/etc/DxHDCP.cfg:lge
-system/etc/General_cal.acdb:lge
-system/etc/Global_cal.acdb:lge
-system/etc/Handset_cal.acdb:lge
-system/etc/Hdmi_cal.acdb:lge
-system/etc/Headset_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_General_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_Global_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_Handset_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_Hdmi_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_Headset_cal.acdb:lge
 system/etc/permissions/qcrilhook.xml:qcom
 system/framework/qcrilhook.jar:qcom
-system/etc/permissions/serviceitems.xml:lge
-system/framework/serviceitems.jar:lge
 system/etc/qcril.db:lge
 system/etc/sensor_def_hh.conf:lge
 system/lib/hw/flp.msm8974.so:qcom
@@ -44,11 +42,10 @@
 system/app/QXDMLogger.apk:google
 system/app/OmaDmclient.apk:lge
 system/app/qcrilmsgtunnel.apk:lge
-system/app/SprintHiddenMenu.apk:lge
-system/app/UpdateSetting.apk:lge
+system/app/SprintDM.apk:lge
 system/lib/libloc_core.so:qcom
 system/vendor/lib/egl/libq3dtools_adreno.so:qcom
-system/etc/Speaker_cal.acdb:lge
+system/etc/acdbdata/MTP/MTP_Speaker_cal.acdb:lge
 system/app/TimeService.apk:qcom
 system/vendor/firmware/bcm2079x-b5_firmware.ncd:broadcom
 system/vendor/firmware/bcm2079x-b5_pre_firmware.ncd:broadcom
@@ -187,7 +184,8 @@
 system/vendor/lib/libc2d30-a3xx.so:qcom
 system/vendor/lib/libc2d30.so:qcom
 system/vendor/lib/libgsl.so:qcom
-system/vendor/lib/libllvm-a3xx.so:qcom
+system/vendor/lib/libllvm-qcom.so:qcom
+system/vendor/lib/libbccQTI.so:qcom
 system/vendor/lib/libsc-a3xx.so:qcom
 system/vendor/lib/libCommandSvc.so:qcom
 system/vendor/lib/libacdbrtac.so:qcom