camera: update AU_LINUX_ANDROID_LA.BF64.1.2.9.05.01.00.089.173

a6b54d4 Camera3: Change the logic to decide IS type
109c2f6 Camera3: Refactor some reprocessing code
28385fd QCamera2/HAL3: Advertise only 120 fps or above as High Speed
85172e8 Merge changes I40aa6aec,I6380d43d into LA.BF64.1.2.9
a7f38db QCamera2/HAL3: Re-send META_STREAM_INFO when HFR fps changes
4f79f08 QCamera2/HAL3: Use HFR sensor mode when in HighSpeedMode
be284ff Camera3: Enable torchlight for PMIC driver

Change-Id: I4911722a150bb11fa323d73d0e19bfd2e9a6aa58
diff --git a/QCamera2/HAL3/QCamera3Channel.cpp b/QCamera2/HAL3/QCamera3Channel.cpp
index eadecdd..d5296c7 100644
--- a/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/QCamera2/HAL3/QCamera3Channel.cpp
@@ -643,7 +643,7 @@
         return;
     }
 
-    if(!super_frame) {
+    if(NULL == super_frame) {
          ALOGE("%s: Invalid Super buffer",__func__);
          return;
     }
@@ -652,7 +652,7 @@
          ALOGE("%s: Multiple streams are not supported",__func__);
          return;
     }
-    if(super_frame->bufs[0] == NULL ) {
+    if(NULL == super_frame->bufs[0]) {
          ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
                   __func__);
          return;
@@ -734,28 +734,6 @@
         setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat);
         startPostProc((NULL != pInputBuffer), reproc_cfg);
 
-        if (0 < mOfflineMetaMemory.getCnt()) {
-            mOfflineMetaMemory.deallocate();
-        }
-        if (0 < mOfflineMemory.getCnt()) {
-            mOfflineMemory.unregisterBuffers();
-        }
-
-        int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
-        if(input_index < 0) {
-            rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
-            if (NO_ERROR != rc) {
-                ALOGE("%s: On-the-fly input buffer registration failed %d",
-                        __func__, rc);
-                return rc;
-            }
-            input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
-            if (input_index < 0) {
-                ALOGE("%s: Could not find object among registered buffers",__func__);
-                return DEAD_OBJECT;
-            }
-        }
-
         qcamera_fwk_input_pp_data_t *src_frame = NULL;
         src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
                 sizeof(qcamera_fwk_input_pp_data_t));
@@ -763,44 +741,12 @@
             ALOGE("%s: No memory for src frame", __func__);
             return NO_MEMORY;
         }
-        src_frame->src_frame = *pInputBuffer;
-        rc = mOfflineMemory.getBufDef(reproc_cfg.input_stream_plane_info.plane_info,
-                src_frame->input_buffer, input_index);
-        if (rc != 0) {
-            free(src_frame);
-            return rc;
-        }
-        if (mYUVDump) {
-           dumpYUV(&src_frame->input_buffer, reproc_cfg.input_stream_dim,
-                   reproc_cfg.input_stream_plane_info.plane_info, 1);
-        }
-        cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
-        cam_stream_buf_plane_info_t meta_planes;
-        rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
-        if (rc != 0) {
-            ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
-            free(src_frame);
-            return rc;
-        }
-        rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
+        rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, buffer, frameNumber);
         if (NO_ERROR != rc) {
-            ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
+            ALOGE("%s: Error %d while setting framework input PP data", __func__, rc);
             free(src_frame);
             return rc;
         }
-        mm_camera_buf_def_t meta_buf;
-        cam_frame_len_offset_t offset = meta_planes.plane_info;
-        rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
-        if (NO_ERROR != rc) {
-            free(src_frame);
-            return rc;
-        }
-        memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
-        src_frame->metadata_buffer = meta_buf;
-        src_frame->reproc_config = reproc_cfg;
-        src_frame->output_buffer = buffer;
-        src_frame->frameNumber = frameNumber;
-
         CDBG_HIGH("%s: Post-process started", __func__);
         CDBG_HIGH("%s: Issue call to reprocess", __func__);
         m_postprocessor.processData(src_frame);
@@ -871,6 +817,7 @@
  *
  * PARAMETERS :
  *   @buffer     : buffer to be registered
+ *   @isType     : image stabilization type on the stream
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
@@ -912,11 +859,95 @@
 }
 
 /*===========================================================================
- * FUNCTION   : getStreamBufs
+ * FUNCTION   : setFwkInputPPData
  *
- * DESCRIPTION:
+ * DESCRIPTION: fill out the framework src frame information for reprocessing
  *
  * PARAMETERS :
+ *   @src_frame         : input pp data to be filled out
+ *   @pInputBuffer      : input buffer for reprocessing
+ *   @reproc_cfg        : pointer to the reprocess config
+ *   @metadata          : pointer to the metadata buffer
+ *   @output_buffer     : output buffer for reprocessing; could be NULL if not
+ *                        framework allocated
+ *   @frameNumber       : frame number of the request
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
+        camera3_stream_buffer_t *pInputBuffer, reprocess_config_t *reproc_cfg,
+        metadata_buffer_t *metadata, buffer_handle_t *output_buffer,
+        uint32_t frameNumber)
+{
+    int32_t rc = NO_ERROR;
+    if (0 < mOfflineMetaMemory.getCnt()) {
+        mOfflineMetaMemory.deallocate();
+    }
+    if (0 < mOfflineMemory.getCnt()) {
+        mOfflineMemory.unregisterBuffers();
+    }
+
+    int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
+    if(input_index < 0) {
+        rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
+        if (NO_ERROR != rc) {
+            ALOGE("%s: On-the-fly input buffer registration failed %d",
+                    __func__, rc);
+            return rc;
+        }
+        input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
+        if (input_index < 0) {
+            ALOGE("%s: Could not find object among registered buffers",__func__);
+            return DEAD_OBJECT;
+        }
+    }
+
+    src_frame->src_frame = *pInputBuffer;
+    rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info,
+            src_frame->input_buffer, input_index);
+    if (rc != 0) {
+        return rc;
+    }
+    if (mYUVDump) {
+       dumpYUV(&src_frame->input_buffer, reproc_cfg->input_stream_dim,
+               reproc_cfg->input_stream_plane_info.plane_info, 1);
+    }
+
+    cam_dimension_t dim = {sizeof(metadata_buffer_t), 1};
+    cam_stream_buf_plane_info_t meta_planes;
+    rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
+    if (rc != 0) {
+        ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
+        return rc;
+    }
+    rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
+    if (NO_ERROR != rc) {
+        ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
+        return rc;
+    }
+    mm_camera_buf_def_t meta_buf;
+    cam_frame_len_offset_t offset = meta_planes.plane_info;
+    rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
+    if (NO_ERROR != rc) {
+        return rc;
+    }
+    memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
+    src_frame->metadata_buffer = meta_buf;
+    src_frame->reproc_config = *reproc_cfg;
+    src_frame->output_buffer = output_buffer;
+    src_frame->frameNumber = frameNumber;
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : getStreamBufs
+ *
+ * DESCRIPTION: get the buffers allocated to the stream
+ *
+ * PARAMETERS :
+ * @len       : buffer length
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
@@ -931,7 +962,7 @@
 /*===========================================================================
  * FUNCTION   : putStreamBufs
  *
- * DESCRIPTION:
+ * DESCRIPTION: release the buffers allocated to the stream
  *
  * PARAMETERS : NONE
  *
@@ -980,9 +1011,7 @@
  * @config         : reprocessing configuration
  * @metadata       : metadata associated with the reprocessing request
  *
- * RETURN     : int32_t type of status
- *              NO_ERROR  -- success
- *              none-zero failure code
+ * RETURN     : NONE
  *==========================================================================*/
 void QCamera3ProcessingChannel::startPostProc(bool inputBufExists,
                                         const reprocess_config_t &config)
@@ -1040,6 +1069,61 @@
 }
 
 /*===========================================================================
+ * FUNCTION : translateStreamTypeAndFormat
+ *
+ * DESCRIPTION: translates the framework stream format into HAL stream type
+ *              and format
+ *
+ * PARAMETERS :
+ * @stream       : fwk stream to be translated
+ * @streamType   : translated HAL stream type (output)
+ * @streamFormat : translated HAL stream format (output)
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              non-zero failure code
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::translateStreamTypeAndFormat(camera3_stream_t *stream,
+        cam_stream_type_t &streamType, cam_format_t &streamFormat)
+{
+    switch (stream->format) {
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+            if(stream->stream_type == CAMERA3_STREAM_INPUT){
+                streamType = CAM_STREAM_TYPE_SNAPSHOT;
+                streamFormat = SNAPSHOT_FORMAT;
+            } else {
+                streamType = CAM_STREAM_TYPE_CALLBACK;
+                streamFormat = CALLBACK_FORMAT;
+            }
+            break;
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            if (stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
+                streamType = CAM_STREAM_TYPE_VIDEO;
+                streamFormat = VIDEO_FORMAT;
+            } else if(stream->stream_type == CAMERA3_STREAM_INPUT ||
+                    stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL){
+                streamType = CAM_STREAM_TYPE_SNAPSHOT;
+                streamFormat = SNAPSHOT_FORMAT;
+            } else {
+                streamType = CAM_STREAM_TYPE_PREVIEW;
+                streamFormat = PREVIEW_FORMAT;
+            }
+            break;
+        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+        case HAL_PIXEL_FORMAT_RAW16:
+        case HAL_PIXEL_FORMAT_RAW10:
+            streamType = CAM_STREAM_TYPE_RAW;
+            streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
+            break;
+        default:
+            return -EINVAL;
+    }
+    CDBG("%s: fwk_format = %d, streamType = %d, streamFormat = %d", __func__,
+            stream->format, streamType, streamFormat);
+    return NO_ERROR;
+}
+
+/*===========================================================================
  * FUNCTION : setReprocConfig
  *
  * DESCRIPTION: sets the reprocessing parameters for the input buffer
@@ -1082,14 +1166,44 @@
     reproc_cfg.stream_format = streamFormat;
     reproc_cfg.reprocess_type = getReprocessType();
 
-    //any input buffer will be of the ZSL format so use the snapshot offset calculations
-    reproc_cfg.stream_type = CAM_STREAM_TYPE_SNAPSHOT;
-    reproc_cfg.stream_format = SNAPSHOT_FORMAT;
-    rc = mm_stream_calc_offset_snapshot(streamFormat,
-                 &reproc_cfg.input_stream_dim, reproc_cfg.padding,
-                 &reproc_cfg.input_stream_plane_info);
-    CDBG("%s: reproc_cfg.stream_type = %d, reproc_cfg.stream_format = %d", __func__,
-              reproc_cfg.stream_type, reproc_cfg.stream_format);
+    //offset calculation
+    if (NULL != pInputBuffer) {
+        rc = translateStreamTypeAndFormat(pInputBuffer->stream,
+                reproc_cfg.stream_type, reproc_cfg.stream_format);
+        if (rc != NO_ERROR) {
+            ALOGE("%s: Stream format %d is not supported", __func__,
+                    pInputBuffer->stream->format);
+            return rc;
+        }
+        CDBG("%s: reproc_cfg.stream_type = %d, reproc_cfg.stream_format = %d", __func__,
+                reproc_cfg.stream_type, reproc_cfg.stream_format);
+
+        switch (reproc_cfg.stream_type) {
+            case CAM_STREAM_TYPE_CALLBACK:
+            case CAM_STREAM_TYPE_PREVIEW:
+                rc = mm_stream_calc_offset_preview(streamFormat,
+                        &reproc_cfg.input_stream_dim,
+                        &reproc_cfg.input_stream_plane_info);
+                break;
+            case CAM_STREAM_TYPE_VIDEO:
+                rc = mm_stream_calc_offset_video(&reproc_cfg.input_stream_dim,
+                        &reproc_cfg.input_stream_plane_info);
+                break;
+            case CAM_STREAM_TYPE_RAW:
+                rc = mm_stream_calc_offset_raw(streamFormat, &reproc_cfg.input_stream_dim,
+                        reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
+                break;
+            case CAM_STREAM_TYPE_SNAPSHOT:
+            default:
+                rc = mm_stream_calc_offset_snapshot(streamFormat, &reproc_cfg.input_stream_dim,
+                        reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
+                break;
+        }
+        if (rc != 0) {
+            ALOGE("%s: Stream %d plane info calculation failed!", __func__, mStreamType);
+            return rc;
+        }
+    }
     return rc;
 }
 
@@ -1211,31 +1325,10 @@
 
     mIsType  = isType;
 
-    switch (mCamera3Stream->format) {
-        case HAL_PIXEL_FORMAT_YCbCr_420_888:
-            mStreamFormat = CALLBACK_FORMAT;
-            break;
-        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
-            {
-                 if (mStreamType ==  CAM_STREAM_TYPE_VIDEO) {
-                     mStreamFormat = VIDEO_FORMAT;
-                 } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
-                     mStreamFormat = PREVIEW_FORMAT;
-                 } else {
-                    //TODO: Add a new flag in libgralloc for ZSL buffers, and
-                    //its size needs to be properly aligned and padded.
-                    mStreamFormat = DEFAULT_FORMAT;
-                 }
-            }
-            break;
-        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
-        case HAL_PIXEL_FORMAT_RAW16:
-        case HAL_PIXEL_FORMAT_RAW10:
-            mStreamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
-            break;
-        default:
-            ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__);
-            return -EINVAL;
+    rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType,
+            mStreamFormat);
+    if (rc != NO_ERROR) {
+        return -EINVAL;
     }
 
     if ((mStreamType == CAM_STREAM_TYPE_VIDEO) ||
@@ -1602,6 +1695,7 @@
  * DESCRIPTION: Initialize and add camera channel & stream
  *
  * PARAMETERS :
+ * @isType    : image stabilization type on the stream
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
@@ -2379,6 +2473,22 @@
     return rc;
 }
 
+/*===========================================================================
+ * FUNCTION   : request
+ *
+ * DESCRIPTION: handle the request - either with an input buffer or a direct
+ *              output request
+ *
+ * PARAMETERS :
+ * @buffer       : pointer to the output buffer
+ * @frameNumber  : frame number of the request
+ * @pInputBuffer : pointer to input buffer if an input request
+ * @metadata     : parameters associated with the request
+ *
+ * RETURN     : 0 on a successful start of capture
+ *              -EINVAL on invalid input
+ *              -ENODEV on serious error
+ *==========================================================================*/
 int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
         uint32_t frameNumber,
         camera3_stream_buffer_t *pInputBuffer,
@@ -2430,7 +2540,7 @@
 
     rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
 
-    //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
+    //Start the postprocessor for jpeg encoding. Pass mMemory as dest buffer
     mCurrentBufIndex = (uint32_t)index;
 
     // Start postprocessor
@@ -2451,78 +2561,22 @@
             rc = NOT_ENOUGH_DATA;
         }
     } else {
-        if (0 < mOfflineMetaMemory.getCnt()) {
-            mOfflineMetaMemory.deallocate();
-        }
-        if (0 < mOfflineMemory.getCnt()) {
-            mOfflineMemory.unregisterBuffers();
-        }
-
-        int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
-        if(input_index < 0) {
-            rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
-            if (NO_ERROR != rc) {
-                ALOGE("%s: On-the-fly input buffer registration failed %d",
-                        __func__, rc);
-                return rc;
-            }
-
-            input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
-            if (input_index < 0) {
-                ALOGE("%s: Could not find object among registered buffers",__func__);
-                return DEAD_OBJECT;
-            }
-        }
         qcamera_fwk_input_pp_data_t *src_frame = NULL;
-        src_frame = (qcamera_fwk_input_pp_data_t *)malloc(
+        src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
                 sizeof(qcamera_fwk_input_pp_data_t));
         if (src_frame == NULL) {
             ALOGE("%s: No memory for src frame", __func__);
             return NO_MEMORY;
         }
-        memset(src_frame, 0, sizeof(qcamera_fwk_input_pp_data_t));
-        src_frame->src_frame = *pInputBuffer;
-        rc = mOfflineMemory.getBufDef(reproc_cfg.input_stream_plane_info.plane_info,
-                src_frame->input_buffer, (uint32_t)input_index);
-        if (rc != 0) {
-            free(src_frame);
-            return rc;
-        }
-        if (mYUVDump) {
-            dumpYUV(&src_frame->input_buffer, reproc_cfg.input_stream_dim,
-                    reproc_cfg.input_stream_plane_info.plane_info, 1);
-        }
-        cam_dimension_t dim = {(int)sizeof(metadata_buffer_t), 1};
-        cam_stream_buf_plane_info_t meta_planes;
-        rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
-        if (rc != 0) {
-            ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
-            free(src_frame);
-            return rc;
-        }
-
-        rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
+        rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata,
+                NULL /*fwk output buffer*/, frameNumber);
         if (NO_ERROR != rc) {
-            ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
+            ALOGE("%s: Error %d while setting framework input PP data", __func__, rc);
             free(src_frame);
             return rc;
         }
-        mm_camera_buf_def_t meta_buf;
-        cam_frame_len_offset_t offset = meta_planes.plane_info;
-        rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
-        if (NO_ERROR != rc) {
-            free(src_frame);
-            return rc;
-        }
-        memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
-        src_frame->metadata_buffer = meta_buf;
-        src_frame->reproc_config = reproc_cfg;
-        src_frame->output_buffer = NULL;
-        src_frame->frameNumber = frameNumber;
-
         CDBG_HIGH("%s: Post-process started", __func__);
         CDBG_HIGH("%s: Issue call to reprocess", __func__);
-
         m_postprocessor.processData(src_frame);
     }
     return rc;
diff --git a/QCamera2/HAL3/QCamera3Channel.h b/QCamera2/HAL3/QCamera3Channel.h
index 232a1fb..8417301 100644
--- a/QCamera2/HAL3/QCamera3Channel.h
+++ b/QCamera2/HAL3/QCamera3Channel.h
@@ -176,10 +176,19 @@
 
     int32_t queueReprocMetadata(mm_camera_super_buf_t *metadata);
     int32_t metadataBufDone(mm_camera_super_buf_t *recvd_frame);
+    int32_t translateStreamTypeAndFormat(camera3_stream_t *stream,
+            cam_stream_type_t &streamType,
+            cam_format_t &streamFormat);
     int32_t setReprocConfig(reprocess_config_t &reproc_cfg,
             camera3_stream_buffer_t *pInputBuffer,
             metadata_buffer_t *metadata,
             cam_format_t streamFormat);
+    int32_t setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame,
+            camera3_stream_buffer_t *pInputBuffer,
+            reprocess_config_t *reproc_cfg,
+            metadata_buffer_t *metadata,
+            buffer_handle_t *output_buffer,
+            uint32_t frameNumber);
 
     QCamera3PostProcessor m_postprocessor; // post processor
 
diff --git a/QCamera2/HAL3/QCamera3HWI.cpp b/QCamera2/HAL3/QCamera3HWI.cpp
index cd6ed04..ba4ba1d 100755
--- a/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/QCamera2/HAL3/QCamera3HWI.cpp
@@ -69,6 +69,9 @@
 #define VIDEO_4K_WIDTH  3840
 #define VIDEO_4K_HEIGHT 2160
 
+#define MAX_EIS_WIDTH 1920
+#define MAX_EIS_HEIGHT 1080
+
 #define MAX_RAW_STREAMS        1
 #define MAX_STALLING_STREAMS   1
 #define MAX_PROCESSED_STREAMS  3
@@ -316,6 +319,7 @@
       mSupportChannel(NULL),
       mAnalysisChannel(NULL),
       mRawDumpChannel(NULL),
+      mDummyBatchChannel(NULL),
       mFirstRequest(false),
       mFirstConfiguration(true),
       mFlush(false),
@@ -338,7 +342,10 @@
       mBatchSize(0),
       mToBeQueuedVidBufs(0),
       mHFRVideoFps(DEFAULT_VIDEO_FPS),
-      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE)
+      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
+      mPrevUrgentFrameNumber(0),
+      mPrevFrameNumber(0),
+      mNeedSensorRestart(false)
 {
     getLogLevel();
     mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
@@ -430,6 +437,10 @@
         delete mRawDumpChannel;
         mRawDumpChannel = NULL;
     }
+    if (mDummyBatchChannel) {
+        delete mDummyBatchChannel;
+        mDummyBatchChannel = NULL;
+    }
     mPictureChannel = NULL;
 
     /* Clean up all channels */
@@ -461,7 +472,7 @@
     mPendingBuffersMap.mPendingBufferList.clear();
     mPendingReprocessResultList.clear();
     for (pendingRequestIterator i = mPendingRequestsList.begin();
-            i != mPendingRequestsList.end(); i++) {
+            i != mPendingRequestsList.end();) {
         i = erasePendingRequest(i);
     }
     for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
@@ -1069,12 +1080,8 @@
     }
 
     if (eisSupported) {
-        maxEisWidth = (uint32_t)
-            ((gCamCapability[mCameraId]->active_array_size.width * 1.0) /
-            (1+ gCamCapability[mCameraId]->supported_is_type_margins[margin_index]));
-         maxEisHeight = (uint32_t)
-            ((gCamCapability[mCameraId]->active_array_size.height * 1.0) /
-            (1+ gCamCapability[mCameraId]->supported_is_type_margins[margin_index]));
+        maxEisWidth = MAX_EIS_WIDTH;
+        maxEisHeight = MAX_EIS_HEIGHT;
     }
 
     /* EIS setprop control */
@@ -1161,6 +1168,11 @@
         }
     }
 
+    if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
+        !m_bIsVideo) {
+        m_bEisEnable = false;
+    }
+
     /* Check if num_streams is sane */
     if (stallStreamCnt > MAX_STALLING_STREAMS ||
             rawStreamCnt > MAX_RAW_STREAMS ||
@@ -1293,6 +1305,11 @@
         mAnalysisChannel = NULL;
     }
 
+    if (mDummyBatchChannel) {
+        delete mDummyBatchChannel;
+        mDummyBatchChannel = NULL;
+    }
+
     //Create metadata channel and initialize it
     mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
                     mCameraHandle->ops, captureResultCb,
@@ -1461,6 +1478,12 @@
                         ALOGI("%s: num video buffers in HFR mode: %d",
                                 __func__, numBuffers);
                     }
+                    /* Copy stream contents in the HFR preview-only case to
+                     * create a dummy batch channel */
+                    if (!m_bIsVideo && (streamList->operation_mode ==
+                            CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
+                        mDummyBatchStream = *newStream;
+                    }
                     channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
                             mCameraHandle->ops, captureResultCb,
                             &gCamCapability[mCameraId]->padding_info,
@@ -1646,12 +1669,42 @@
                 CAM_QCOM_FEATURE_NONE;
         mStreamConfigInfo.num_streams++;
     }
+    /* In HFR mode, if no video stream is added, create a dummy channel so that
+     * the ISP can still configure batch mode for the preview-only case. This
+     * channel is never started (no stream-on); it is only initialized */
+    if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
+            !m_bIsVideo) {
+        mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
+                mCameraHandle->ops, captureResultCb,
+                &gCamCapability[mCameraId]->padding_info,
+                this,
+                &mDummyBatchStream,
+                CAM_STREAM_TYPE_VIDEO,
+                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3,
+                mMetadataChannel);
+        if (NULL == mDummyBatchChannel) {
+            ALOGE("%s: creation of mDummyBatchChannel failed. "
+                    "Preview will use non-HFR sensor mode", __func__);
+        }
+    }
+    if (mDummyBatchChannel) {
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
+                mDummyBatchStream.width;
+        mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
+                mDummyBatchStream.height;
+        mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
+                CAM_STREAM_TYPE_VIDEO;
+        mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
+                CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
+        mStreamConfigInfo.num_streams++;
+    }
+
     mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
     mStreamConfigInfo.buffer_info.max_buffers = MAX_INFLIGHT_REQUESTS;
 
     /* Initialize mPendingRequestInfo and mPendnigBuffersMap */
     for (pendingRequestIterator i = mPendingRequestsList.begin();
-            i != mPendingRequestsList.end(); i++) {
+            i != mPendingRequestsList.end();) {
         i = erasePendingRequest(i);
     }
     mPendingFrameDropList.clear();
@@ -1918,7 +1971,7 @@
                     result.partial_result = PARTIAL_RESULT_COUNT;
                     mCallbackOps->process_capture_result(mCallbackOps, &result);
 
-                    k = erasePendingRequest(k);
+                    erasePendingRequest(k);
                     mPendingRequest--;
                     break;
                 }
@@ -1951,13 +2004,20 @@
         ALOGE("%s: metadata_buf is NULL", __func__);
         return;
     }
+    /* In batch mode, the metadata will contain the frame number and timestamp
+     * of the last frame in the batch. Eg: a batch containing buffers from
+     * requests 5, 6, 7 and 8 will have the frame number and timestamp of 8.
+     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
+     * multiple process_capture_results */
     metadata_buffer_t *metadata =
             (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
-    int32_t frame_number_valid, urgent_frame_number_valid;
+    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
     uint32_t last_frame_number, last_urgent_frame_number;
     uint32_t frame_number, urgent_frame_number = 0;
     int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
     bool invalid_metadata = false;
+    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
+    size_t loopCount = 1;
 
     int32_t *p_frame_number_valid =
             POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
@@ -1987,37 +2047,80 @@
     if (!last_frame_capture_time) {
         goto done_batch_metadata;
     }
+    /* In batch mode, when no video buffers are requested, set_parms is sent
+     * for every capture_request. The difference between consecutive urgent
+     * frame numbers (and frame numbers) is used to interpolate the
+     * corresponding frame numbers and timestamps */
+    if (urgent_frame_number_valid) {
+        /* Frame numbers start with 0, handle it in the else condition */
+        if (last_urgent_frame_number &&
+                (last_urgent_frame_number >= mPrevUrgentFrameNumber)) {
+            urgentFrameNumDiff = last_urgent_frame_number - mPrevUrgentFrameNumber;
+        } else {
+            urgentFrameNumDiff = 1;
+        }
+        mPrevUrgentFrameNumber = last_urgent_frame_number;
+    }
+    if (frame_number_valid) {
+        /* Frame numbers start with 0, handle it in the else condition */
+        if(last_frame_number && (last_frame_number >= mPrevFrameNumber)) {
+            frameNumDiff = last_frame_number - mPrevFrameNumber;
+        } else {
+            frameNumDiff = 1;
+        }
+        mPrevFrameNumber = last_frame_number;
+    }
+    if (urgent_frame_number_valid || frame_number_valid) {
+        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
+    }
 
-    for (size_t i = 0; i < mBatchSize; i++) {
+    CDBG("%s: urgent_frm: valid: %d frm_num: %d previous frm_num: %d",
+            __func__, urgent_frame_number_valid, last_urgent_frame_number,
+            mPrevUrgentFrameNumber);
+    CDBG("%s:        frm: valid: %d frm_num: %d previous frm_num: %d",
+            __func__, frame_number_valid, last_frame_number, mPrevFrameNumber);
+
+    //TODO: Need to ensure, metadata is not posted with the same frame numbers
+    //when urgentFrameNumDiff != frameNumDiff
+    for (size_t i = 0; i < loopCount; i++) {
         /* handleMetadataWithLock is called even for invalid_metadata for
          * pipeline depth calculation */
         if (!invalid_metadata) {
             /* Infer frame number. Batch metadata contains frame number of the
              * last frame */
             if (urgent_frame_number_valid) {
-                urgent_frame_number =
-                        last_urgent_frame_number + 1 - mBatchSize + i;
-                CDBG("%s: last urgent frame_number in batch: %d, "
-                        "inferred urgent frame_number: %d",
-                        __func__, last_urgent_frame_number, urgent_frame_number);
-                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
-                        CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
+                if (i < urgentFrameNumDiff) {
+                    urgent_frame_number =
+                            last_urgent_frame_number + 1 - urgentFrameNumDiff + i;
+                    CDBG("%s: inferred urgent frame_number: %d",
+                            __func__, urgent_frame_number);
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
+                } else {
+                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
+                }
             }
 
             /* Infer frame number. Batch metadata contains frame number of the
              * last frame */
             if (frame_number_valid) {
-                frame_number = last_frame_number + 1 - mBatchSize + i;
-                CDBG("%s: last frame_number in batch: %d, "
-                        "inferred frame_number: %d",
-                        __func__, last_frame_number, frame_number);
-                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
-                        CAM_INTF_META_FRAME_NUMBER, frame_number);
+                if (i < frameNumDiff) {
+                    frame_number = last_frame_number + 1 - frameNumDiff + i;
+                    CDBG("%s: inferred frame_number: %d", __func__, frame_number);
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                            CAM_INTF_META_FRAME_NUMBER, frame_number);
+                } else {
+                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
+                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
+                }
             }
 
             //Infer timestamp
             first_frame_capture_time = last_frame_capture_time -
-                    (((mBatchSize - 1) * NSEC_PER_SEC) / mHFRVideoFps);
+                    (((loopCount - 1) * NSEC_PER_SEC) / mHFRVideoFps);
             capture_time =
                     first_frame_capture_time + (i * NSEC_PER_SEC / mHFRVideoFps);
             ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
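For reference, the inference arithmetic used in the loop above can be exercised with the following standalone sketch (illustrative only, not part of this change; the fps, frame-number and timestamp values are hypothetical):

    // Illustrative sketch: infer per-request frame numbers and timestamps from one
    // batch metadata, mirroring the loop above. All constants are hypothetical.
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint32_t last_frame_number = 8;            // frame number carried by the batch metadata
        const uint32_t frameNumDiff      = 4;            // requests 5, 6, 7 and 8 were batched
        const int64_t  NSEC_PER_SEC      = 1000000000LL;
        const int64_t  hfrFps            = 120;          // assumed HFR video fps
        const int64_t  lastCaptureTime   = 2000000000LL; // hypothetical timestamp of frame 8

        // The first frame of the batch is (frameNumDiff - 1) frame intervals earlier.
        int64_t firstCaptureTime = lastCaptureTime -
                (((int64_t)frameNumDiff - 1) * NSEC_PER_SEC) / hfrFps;

        for (uint32_t i = 0; i < frameNumDiff; i++) {
            uint32_t frameNumber = last_frame_number + 1 - frameNumDiff + i; // 5, 6, 7, 8
            int64_t captureTime = firstCaptureTime + (int64_t)i * NSEC_PER_SEC / hfrFps;
            printf("frame %u -> timestamp %lld ns\n", frameNumber, (long long)captureTime);
        }
        return 0;
    }

With these values the sketch yields frames 5 through 8, spaced one 1/120 s frame interval apart and ending at the timestamp reported in the batch metadata.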
@@ -2607,9 +2710,7 @@
         }
 
         //If EIS is enabled, turn it on for video
-        bool setEis = m_bEisEnable && m_bEisSupportedSize &&
-            ((mCaptureIntent ==  CAMERA3_TEMPLATE_VIDEO_RECORD) ||
-             (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_SNAPSHOT));
+        bool setEis = m_bEisEnable && m_bEisSupportedSize;
         int32_t vsMode;
         vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
         if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
@@ -2618,15 +2719,12 @@
 
         //IS type will be 0 unless EIS is supported. If EIS is supported
         //it could either be 1 or 4 depending on the stream and video size
-        if (setEis){
+        if (setEis) {
             if (!m_bEisSupportedSize) {
                 is_type = IS_TYPE_DIS;
             } else {
                 is_type = IS_TYPE_EIS_2_0;
             }
-        }
-
-        if (mCaptureIntent == CAMERA3_TEMPLATE_VIDEO_RECORD) {
             mStreamConfigInfo.is_type = is_type;
         } else {
             mStreamConfigInfo.is_type = IS_TYPE_NONE;
@@ -2642,6 +2740,7 @@
 
         /* Set fps and hfr mode while sending meta stream info so that sensor
          * can configure appropriate streaming mode */
+        mHFRVideoFps = DEFAULT_VIDEO_FPS;
         if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
             rc = setHalFpsRange(meta, mParameters);
             if (rc != NO_ERROR) {
@@ -2701,11 +2800,13 @@
             const camera3_stream_buffer_t& output = request->output_buffers[i];
             QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
             /*for livesnapshot stream is_type will be DIS*/
-            if (setEis && output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
-                rc = channel->registerBuffer(output.buffer, IS_TYPE_DIS);
-            } else {
+            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
+               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
+               setEis)
                 rc = channel->registerBuffer(output.buffer, is_type);
-            }
+            else
+                rc = channel->registerBuffer(output.buffer, IS_TYPE_NONE);
+
             if (rc < 0) {
                 ALOGE("%s: registerBuffer failed",
                         __func__);
@@ -2718,10 +2819,12 @@
         for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
             it != mStreamInfo.end(); it++) {
             QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
-            if (setEis && (*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
-                rc = channel->initialize(IS_TYPE_DIS);
-            } else {
+            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
+               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
+               setEis)
                 rc = channel->initialize(is_type);
+            else {
+                rc = channel->initialize(IS_TYPE_NONE);
             }
             if (NO_ERROR != rc) {
                 ALOGE("%s : Channel initialization failed %d", __func__, rc);
@@ -2731,7 +2834,7 @@
         }
 
         if (mRawDumpChannel) {
-            rc = mRawDumpChannel->initialize(is_type);
+            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
             if (rc != NO_ERROR) {
                 ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
                 pthread_mutex_unlock(&mMutex);
@@ -2739,7 +2842,7 @@
             }
         }
         if (mSupportChannel) {
-            rc = mSupportChannel->initialize(is_type);
+            rc = mSupportChannel->initialize(IS_TYPE_NONE);
             if (rc < 0) {
                 ALOGE("%s: Support channel initialization failed", __func__);
                 pthread_mutex_unlock(&mMutex);
@@ -2747,13 +2850,27 @@
             }
         }
         if (mAnalysisChannel) {
-            rc = mAnalysisChannel->initialize(is_type);
+            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
             if (rc < 0) {
                 ALOGE("%s: Analysis channel initialization failed", __func__);
                 pthread_mutex_unlock(&mMutex);
                 return rc;
             }
         }
+        if (mDummyBatchChannel) {
+            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
+            if (rc < 0) {
+                ALOGE("%s: mDummyBatchChannel setBatchSize failed", __func__);
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+            rc = mDummyBatchChannel->initialize(is_type);
+            if (rc < 0) {
+                ALOGE("%s: mDummyBatchChannel initialization failed", __func__);
+                pthread_mutex_unlock(&mMutex);
+                return rc;
+            }
+        }
 
         //Then start them.
         CDBG_HIGH("%s: Start META Channel", __func__);
@@ -2827,7 +2944,6 @@
         mWokenUpByDaemon = false;
         mPendingRequest = 0;
         mFirstConfiguration = false;
-        mBatchStreamID.num_streams = 0;
     }
 
     uint32_t frameNumber = request->frame_number;
@@ -2917,6 +3033,19 @@
             ALOGE("%s: Failed to set the frame number in the parameters", __func__);
             return BAD_VALUE;
         }
+        if (mNeedSensorRestart) {
+            /* Unlock the mutex as restartSensor waits on the channels to be
+             * stopped, which in turn calls stream callback functions -
+             * handleBufferWithLock and handleMetadataWithLock */
+            pthread_mutex_unlock(&mMutex);
+            rc = dynamicUpdateMetaStreamInfo();
+            if (rc != NO_ERROR) {
+                ALOGE("%s: Restarting the sensor failed", __func__);
+                return BAD_VALUE;
+            }
+            mNeedSensorRestart = false;
+            pthread_mutex_lock(&mMutex);
+        }
     } else {
 
         if (request->input_buffer->acquire_fence != -1) {
@@ -3204,38 +3333,17 @@
 int QCamera3HardwareInterface::flush()
 {
     ATRACE_CALL();
-    unsigned int frameNum = 0;
-    camera3_capture_result_t result;
-    camera3_stream_buffer_t *pStream_Buf = NULL;
-    FlushMap flushMap;
+    int32_t rc = NO_ERROR;
 
     CDBG("%s: Unblocking Process Capture Request", __func__);
     pthread_mutex_lock(&mMutex);
     mFlush = true;
     pthread_mutex_unlock(&mMutex);
 
-    memset(&result, 0, sizeof(camera3_capture_result_t));
-
-    // Stop the Streams/Channels
-    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
-        it != mStreamInfo.end(); it++) {
-        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
-        channel->stop();
-        (*it)->status = INVALID;
-    }
-
-    if (mSupportChannel) {
-        mSupportChannel->stop();
-    }
-    if (mAnalysisChannel) {
-        mAnalysisChannel->stop();
-    }
-    if (mRawDumpChannel) {
-        mRawDumpChannel->stop();
-    }
-    if (mMetadataChannel) {
-        /* If content of mStreamInfo is not 0, there is metadata stream */
-        mMetadataChannel->stop();
+    rc = stopAllChannels();
+    if (rc < 0) {
+        ALOGE("%s: stopAllChannels failed", __func__);
+        return rc;
     }
 
     // Mutex Lock
@@ -3245,197 +3353,21 @@
     mPendingRequest = 0;
     pthread_cond_signal(&mRequestCond);
 
-    pendingRequestIterator i = mPendingRequestsList.begin();
-    frameNum = i->frame_number;
-    CDBG("%s: Oldest frame num on  mPendingRequestsList = %d",
-      __func__, frameNum);
-
-    // Go through the pending buffers and group them depending
-    // on frame number
-    for (List<PendingBufferInfo>::iterator k =
-            mPendingBuffersMap.mPendingBufferList.begin();
-            k != mPendingBuffersMap.mPendingBufferList.end();) {
-
-        if (k->frame_number < frameNum) {
-            ssize_t idx = flushMap.indexOfKey(k->frame_number);
-            if (idx == NAME_NOT_FOUND) {
-                Vector<PendingBufferInfo> pending;
-                pending.add(*k);
-                flushMap.add(k->frame_number, pending);
-            } else {
-                Vector<PendingBufferInfo> &pending =
-                        flushMap.editValueFor(k->frame_number);
-                pending.add(*k);
-            }
-
-            mPendingBuffersMap.num_buffers--;
-            k = mPendingBuffersMap.mPendingBufferList.erase(k);
-        } else {
-            k++;
-        }
+    rc = notifyErrorForPendingRequests();
+    if (rc < 0) {
+        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
+        pthread_mutex_unlock(&mMutex);
+        return rc;
     }
 
-    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
-        uint32_t frame_number = flushMap.keyAt(iFlush);
-        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
-
-        // Send Error notify to frameworks for each buffer for which
-        // metadata buffer is already sent
-        CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
-          __func__, frame_number, pending.size());
-
-        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
-        if (NULL == pStream_Buf) {
-            ALOGE("%s: No memory for pending buffers array", __func__);
-            pthread_mutex_unlock(&mMutex);
-            return NO_MEMORY;
-        }
-        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
-
-        for (size_t j = 0; j < pending.size(); j++) {
-            const PendingBufferInfo &info = pending.itemAt(j);
-            camera3_notify_msg_t notify_msg;
-            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
-            notify_msg.type = CAMERA3_MSG_ERROR;
-            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
-            notify_msg.message.error.error_stream = info.stream;
-            notify_msg.message.error.frame_number = frame_number;
-            pStream_Buf[j].acquire_fence = -1;
-            pStream_Buf[j].release_fence = -1;
-            pStream_Buf[j].buffer = info.buffer;
-            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
-            pStream_Buf[j].stream = info.stream;
-            mCallbackOps->notify(mCallbackOps, &notify_msg);
-            CDBG("%s: notify frame_number = %d stream %p", __func__,
-                    frame_number, info.stream);
-        }
-
-        result.result = NULL;
-        result.frame_number = frame_number;
-        result.num_output_buffers = (uint32_t)pending.size();
-        result.output_buffers = pStream_Buf;
-        mCallbackOps->process_capture_result(mCallbackOps, &result);
-
-        delete [] pStream_Buf;
-    }
-
-    CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
-
-    flushMap.clear();
-    for (List<PendingBufferInfo>::iterator k =
-            mPendingBuffersMap.mPendingBufferList.begin();
-            k != mPendingBuffersMap.mPendingBufferList.end();) {
-        ssize_t idx = flushMap.indexOfKey(k->frame_number);
-        if (idx == NAME_NOT_FOUND) {
-            Vector<PendingBufferInfo> pending;
-            pending.add(*k);
-            flushMap.add(k->frame_number, pending);
-        } else {
-            Vector<PendingBufferInfo> &pending =
-                    flushMap.editValueFor(k->frame_number);
-            pending.add(*k);
-        }
-
-        mPendingBuffersMap.num_buffers--;
-        k = mPendingBuffersMap.mPendingBufferList.erase(k);
-    }
-
-    // Go through the pending requests info and send error request to framework
-    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
-        uint32_t frame_number = flushMap.keyAt(iFlush);
-        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
-        CDBG("%s:Sending ERROR REQUEST for frame %d",
-              __func__, frame_number);
-
-        // Send shutter notify to frameworks
-        camera3_notify_msg_t notify_msg;
-        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
-        notify_msg.type = CAMERA3_MSG_ERROR;
-        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
-        notify_msg.message.error.error_stream = NULL;
-        notify_msg.message.error.frame_number = frame_number;
-        mCallbackOps->notify(mCallbackOps, &notify_msg);
-
-        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
-        if (NULL == pStream_Buf) {
-            ALOGE("%s: No memory for pending buffers array", __func__);
-            pthread_mutex_unlock(&mMutex);
-            return NO_MEMORY;
-        }
-        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
-
-        for (size_t j = 0; j < pending.size(); j++) {
-            const PendingBufferInfo &info = pending.itemAt(j);
-            pStream_Buf[j].acquire_fence = -1;
-            pStream_Buf[j].release_fence = -1;
-            pStream_Buf[j].buffer = info.buffer;
-            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
-            pStream_Buf[j].stream = info.stream;
-        }
-
-        result.num_output_buffers = (uint32_t)pending.size();
-        result.output_buffers = pStream_Buf;
-        result.result = NULL;
-        result.frame_number = frame_number;
-        mCallbackOps->process_capture_result(mCallbackOps, &result);
-        delete [] pStream_Buf;
-    }
-
-    /* Reset pending buffer list and requests list */
-    for (pendingRequestIterator i = mPendingRequestsList.begin();
-            i != mPendingRequestsList.end(); i++) {
-        i = erasePendingRequest(i);
-    }
-    /* Reset pending frame Drop list and requests list */
-    mPendingFrameDropList.clear();
-
-    flushMap.clear();
-    mPendingBuffersMap.num_buffers = 0;
-    mPendingBuffersMap.mPendingBufferList.clear();
-    mPendingReprocessResultList.clear();
-    CDBG("%s: Cleared all the pending buffers ", __func__);
-
     mFlush = false;
 
     // Start the Streams/Channels
-    int rc = NO_ERROR;
-    if (mMetadataChannel) {
-        /* If content of mStreamInfo is not 0, there is metadata stream */
-        rc = mMetadataChannel->start();
-        if (rc < 0) {
-            ALOGE("%s: META channel start failed", __func__);
-            pthread_mutex_unlock(&mMutex);
-            return rc;
-        }
-    }
-    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
-        it != mStreamInfo.end(); it++) {
-        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
-        rc = channel->start();
-        if (rc < 0) {
-            ALOGE("%s: channel start failed", __func__);
-            pthread_mutex_unlock(&mMutex);
-            return rc;
-        }
-    }
-    if (mAnalysisChannel) {
-        mAnalysisChannel->start();
-    }
-    if (mSupportChannel) {
-        rc = mSupportChannel->start();
-        if (rc < 0) {
-            ALOGE("%s: Support channel start failed", __func__);
-            pthread_mutex_unlock(&mMutex);
-            return rc;
-        }
-    }
-    if (mRawDumpChannel) {
-        rc = mRawDumpChannel->start();
-        if (rc < 0) {
-            ALOGE("%s: RAW dump channel start failed", __func__);
-            pthread_mutex_unlock(&mMutex);
-            return rc;
-        }
+    rc = startAllChannels();
+    if (rc < 0) {
+        ALOGE("%s: startAllChannels failed", __func__);
+        pthread_mutex_unlock(&mMutex);
+        return rc;
     }
 
     pthread_mutex_unlock(&mMutex);
@@ -5451,8 +5383,8 @@
             break;
         }
 
-        //TODO: Enable 120 fps once the iommu issues in CPP/ISP are fixed
-        if (fps > 120) {
+        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
+        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
             /* For each HFR frame rate, need to advertise one variable fps range
              * and one fixed fps range. Eg: for 120 FPS, advertise [30, 120] and
              * [120, 120]. While camcorder preview alone is running [30, 120] is
@@ -6968,9 +6900,9 @@
      * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
      *                   |             |             | vid_min/max_fps|
      * ---------------------------------------------------------------|
-     *        NO         |  [ 30, 240] |     30      |  [ 30,  30]    |
+     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
      *                   |-------------|-------------|----------------|
-     *                   |  [240, 240] |     30      |  [ 30,  30]    |
+     *                   |  [240, 240] |     240     |  [240, 240]    |
      * ---------------------------------------------------------------|
      *     Video stream is present in configure_streams               |
      * ---------------------------------------------------------------|
@@ -6986,9 +6918,19 @@
      *                   |-------------|-------------|----------------|
      *                   |  [240, 240] |     240     |  [240, 240]    |
      * ---------------------------------------------------------------|
+     * When Video stream is absent in configure_streams,
+     * preview fps = sensor_fps / batchsize
+     * Eg: for 240fps at batchSize 4, preview = 60fps
+     *     for 120fps at batchSize 4, preview = 30fps
+     *
+     * When a video stream is present in configure_streams, the preview fps
+     * depends on the ratio of preview buffers to video buffers requested in
+     * process_capture_request
      */
     mBatchSize = 0;
     if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
+        fps_range.min_fps = fps_range.video_max_fps;
+        fps_range.video_min_fps = fps_range.video_max_fps;
         int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                 fps_range.max_fps);
         if (NAME_NOT_FOUND != val) {
@@ -6998,6 +6940,12 @@
             }
 
             if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
+                /* If batch mode is currently in progress and the fps changes,
+                 * set the flag to restart the sensor */
+                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
+                        (mHFRVideoFps != fps_range.max_fps)) {
+                    mNeedSensorRestart = true;
+                }
                 mHFRVideoFps = fps_range.max_fps;
                 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
@@ -7006,16 +6954,6 @@
              }
             CDBG("%s: hfrMode: %d batchSize: %d", __func__, hfrMode, mBatchSize);
 
-            if (!m_bIsVideo) {
-                if (fps_range.min_fps > PREVIEW_FPS_FOR_HFR) {
-                    fps_range.min_fps = PREVIEW_FPS_FOR_HFR;
-                }
-                fps_range.max_fps = fps_range.min_fps;
-                fps_range.video_max_fps = fps_range.min_fps;
-            } else {
-                fps_range.min_fps = fps_range.video_max_fps;
-            }
-            fps_range.video_min_fps = fps_range.video_max_fps;
          }
     }
     if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
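As a worked illustration of the batch-size and preview-fps relationship described in the comment above (this sketch is not part of the change; the PREVIEW_FPS_FOR_HFR and MAX_HFR_BATCH_SIZE values are assumptions chosen to match the 240 fps / batchSize 4 example in the comment):

    // Illustrative sketch: mBatchSize and the resulting preview fps in the
    // HFR preview-only case. Constant values are assumptions for this example.
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint32_t PREVIEW_FPS_FOR_HFR = 30; // assumed
        const uint32_t MAX_HFR_BATCH_SIZE  = 4;  // assumed

        const uint32_t sensorFpsModes[] = {120, 240};
        for (uint32_t sensorFps : sensorFpsModes) {
            uint32_t batchSize = sensorFps / PREVIEW_FPS_FOR_HFR;
            if (batchSize > MAX_HFR_BATCH_SIZE) {
                batchSize = MAX_HFR_BATCH_SIZE;
            }
            // One buffer out of each batch is delivered to preview, so the
            // preview rate is the sensor rate divided by the batch size.
            printf("sensor %u fps, batch %u -> preview %u fps\n",
                    sensorFps, batchSize, sensorFps / batchSize);
        }
        return 0;
    }

This reproduces the figures in the comment: 120 fps with a batch of 4 gives a 30 fps preview, and 240 fps (batch capped at 4) gives a 60 fps preview.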
@@ -8176,6 +8114,7 @@
  *
  * PARAMETERS :
  *   @config  : reprocess configuration
+ *   @inputChHandle : pointer to the input (source) channel
  *
  *
  * RETURN     : Ptr to the newly created channel obj. NULL if failed.
@@ -8375,4 +8314,331 @@
                 QCAMERA_MAX_FILEPATH_LENGTH);
     }
 }
+
+/*===========================================================================
+ * FUNCTION   : dynamicUpdateMetaStreamInfo
+ *
+ * DESCRIPTION: This function:
+ *             (1) stops all the channels
+ *             (2) returns error on pending requests and buffers
+ *             (3) sends metastream_info in setparams
+ *             (4) starts all channels
+ *             This is useful when the sensor has to be restarted to apply
+ *             settings, such as a frame rate that requires a different sensor mode
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
+{
+    ATRACE_CALL();
+    int rc = NO_ERROR;
+
+    CDBG("%s: E", __func__);
+
+    rc = stopAllChannels();
+    if (rc < 0) {
+        ALOGE("%s: stopAllChannels failed", __func__);
+        return rc;
+    }
+
+    rc = notifyErrorForPendingRequests();
+    if (rc < 0) {
+        ALOGE("%s: notifyErrorForPendingRequests failed", __func__);
+        return rc;
+    }
+
+    /* Send meta stream info once again so that the ISP can start with new settings */
+    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
+    CDBG("%s: set_parms META_STREAM_INFO with new settings ", __func__ );
+    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+            mParameters);
+    if (rc < 0) {
+        ALOGE("%s: set Metastreaminfo failed. Sensor mode does not change",
+                __func__);
+    }
+
+    rc = startAllChannels();
+    if (rc < 0) {
+        ALOGE("%s: startAllChannels failed", __func__);
+        return rc;
+    }
+
+    CDBG("%s:%d X", __func__, __LINE__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : stopAllChannels
+ *
+ * DESCRIPTION: This function stops (equivalent to stream-off) all channels
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::stopAllChannels()
+{
+    int32_t rc = NO_ERROR;
+
+    // Stop the Streams/Channels
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+        channel->stop();
+        (*it)->status = INVALID;
+    }
+
+    if (mSupportChannel) {
+        mSupportChannel->stop();
+    }
+    if (mAnalysisChannel) {
+        mAnalysisChannel->stop();
+    }
+    if (mRawDumpChannel) {
+        mRawDumpChannel->stop();
+    }
+    if (mMetadataChannel) {
+        /* If content of mStreamInfo is not 0, there is metadata stream */
+        mMetadataChannel->stop();
+    }
+
+    CDBG("%s:%d All channels stopped", __func__, __LINE__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : startAllChannels
+ *
+ * DESCRIPTION: This function starts (equivalent to stream-on) all channels
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::startAllChannels()
+{
+    int32_t rc = NO_ERROR;
+
+    CDBG("%s: Start all channels ", __func__);
+    // Start the Streams/Channels
+    if (mMetadataChannel) {
+        /* If content of mStreamInfo is not 0, there is metadata stream */
+        rc = mMetadataChannel->start();
+        if (rc < 0) {
+            ALOGE("%s: META channel start failed", __func__);
+            return rc;
+        }
+    }
+    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
+        it != mStreamInfo.end(); it++) {
+        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
+        rc = channel->start();
+        if (rc < 0) {
+            ALOGE("%s: channel start failed", __func__);
+            return rc;
+        }
+    }
+    if (mAnalysisChannel) {
+        rc = mAnalysisChannel->start();
+        if (rc < 0) {
+            ALOGE("%s: Analysis channel start failed", __func__);
+            return rc;
+        }
+    }
+    if (mSupportChannel) {
+        rc = mSupportChannel->start();
+        if (rc < 0) {
+            ALOGE("%s: Support channel start failed", __func__);
+            return rc;
+        }
+    }
+    if (mRawDumpChannel) {
+        rc = mRawDumpChannel->start();
+        if (rc < 0) {
+            ALOGE("%s: RAW dump channel start failed", __func__);
+            return rc;
+        }
+    }
+
+    CDBG("%s:%d All channels started", __func__, __LINE__);
+    return rc;
+}
+
+/*===========================================================================
+ * FUNCTION   : notifyErrorForPendingRequests
+ *
+ * DESCRIPTION: This function sends an error notification for all pending
+ *              requests and buffers
+ *
+ * PARAMETERS : None
+ *
+ * RETURN     : NO_ERROR on success
+ *              Error codes on failure
+ *
+ *==========================================================================*/
+int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
+{
+    int32_t rc = NO_ERROR;
+    unsigned int frameNum = 0;
+    camera3_capture_result_t result;
+    camera3_stream_buffer_t *pStream_Buf = NULL;
+    FlushMap flushMap;
+
+    memset(&result, 0, sizeof(camera3_capture_result_t));
+
+    pendingRequestIterator i = mPendingRequestsList.begin();
+    if (i != mPendingRequestsList.end()) {
+        /* Guard against an empty list before dereferencing the iterator */
+        frameNum = i->frame_number;
+    }
+    CDBG("%s: Oldest frame num on mPendingRequestsList = %d",
+      __func__, frameNum);
+
+    // Go through the pending buffers and group them by frame number
+    for (List<PendingBufferInfo>::iterator k =
+            mPendingBuffersMap.mPendingBufferList.begin();
+            k != mPendingBuffersMap.mPendingBufferList.end();) {
+
+        if (k->frame_number < frameNum) {
+            ssize_t idx = flushMap.indexOfKey(k->frame_number);
+            if (idx == NAME_NOT_FOUND) {
+                Vector<PendingBufferInfo> pending;
+                pending.add(*k);
+                flushMap.add(k->frame_number, pending);
+            } else {
+                Vector<PendingBufferInfo> &pending =
+                        flushMap.editValueFor(k->frame_number);
+                pending.add(*k);
+            }
+
+            mPendingBuffersMap.num_buffers--;
+            k = mPendingBuffersMap.mPendingBufferList.erase(k);
+        } else {
+            k++;
+        }
+    }
+
+    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
+        uint32_t frame_number = flushMap.keyAt(iFlush);
+        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
+
+        // Send an error notify to the framework for each buffer whose
+        // result metadata has already been sent
+        CDBG("%s: Sending ERROR BUFFER for frame %d, number of buffers %zu",
+          __func__, frame_number, pending.size());
+
+        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
+        if (NULL == pStream_Buf) {
+            ALOGE("%s: No memory for pending buffers array", __func__);
+            return NO_MEMORY;
+        }
+        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
+
+        for (size_t j = 0; j < pending.size(); j++) {
+            const PendingBufferInfo &info = pending.itemAt(j);
+            camera3_notify_msg_t notify_msg;
+            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+            notify_msg.type = CAMERA3_MSG_ERROR;
+            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+            notify_msg.message.error.error_stream = info.stream;
+            notify_msg.message.error.frame_number = frame_number;
+            pStream_Buf[j].acquire_fence = -1;
+            pStream_Buf[j].release_fence = -1;
+            pStream_Buf[j].buffer = info.buffer;
+            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
+            pStream_Buf[j].stream = info.stream;
+            mCallbackOps->notify(mCallbackOps, &notify_msg);
+            CDBG("%s: notify frame_number = %d stream %p", __func__,
+                    frame_number, info.stream);
+        }
+
+        result.result = NULL;
+        result.frame_number = frame_number;
+        result.num_output_buffers = (uint32_t)pending.size();
+        result.output_buffers = pStream_Buf;
+        mCallbackOps->process_capture_result(mCallbackOps, &result);
+
+        delete [] pStream_Buf;
+    }
+
+    CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
+
+    flushMap.clear();
+    for (List<PendingBufferInfo>::iterator k =
+            mPendingBuffersMap.mPendingBufferList.begin();
+            k != mPendingBuffersMap.mPendingBufferList.end();) {
+        ssize_t idx = flushMap.indexOfKey(k->frame_number);
+        if (idx == NAME_NOT_FOUND) {
+            Vector<PendingBufferInfo> pending;
+            pending.add(*k);
+            flushMap.add(k->frame_number, pending);
+        } else {
+            Vector<PendingBufferInfo> &pending =
+                    flushMap.editValueFor(k->frame_number);
+            pending.add(*k);
+        }
+
+        mPendingBuffersMap.num_buffers--;
+        k = mPendingBuffersMap.mPendingBufferList.erase(k);
+    }
+
+    // Go through the remaining frames and send an ERROR_REQUEST to the framework for each
+    for (size_t iFlush = 0; iFlush < flushMap.size(); iFlush++) {
+        uint32_t frame_number = flushMap.keyAt(iFlush);
+        const Vector<PendingBufferInfo> &pending = flushMap.valueAt(iFlush);
+        CDBG("%s:Sending ERROR REQUEST for frame %d",
+              __func__, frame_number);
+
+        // Send an ERROR_REQUEST notify to the framework
+        camera3_notify_msg_t notify_msg;
+        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
+        notify_msg.type = CAMERA3_MSG_ERROR;
+        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
+        notify_msg.message.error.error_stream = NULL;
+        notify_msg.message.error.frame_number = frame_number;
+        mCallbackOps->notify(mCallbackOps, &notify_msg);
+
+        pStream_Buf = new camera3_stream_buffer_t[pending.size()];
+        if (NULL == pStream_Buf) {
+            ALOGE("%s: No memory for pending buffers array", __func__);
+            return NO_MEMORY;
+        }
+        memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*pending.size());
+
+        for (size_t j = 0; j < pending.size(); j++) {
+            const PendingBufferInfo &info = pending.itemAt(j);
+            pStream_Buf[j].acquire_fence = -1;
+            pStream_Buf[j].release_fence = -1;
+            pStream_Buf[j].buffer = info.buffer;
+            pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
+            pStream_Buf[j].stream = info.stream;
+        }
+
+        result.num_output_buffers = (uint32_t)pending.size();
+        result.output_buffers = pStream_Buf;
+        result.result = NULL;
+        result.frame_number = frame_number;
+        mCallbackOps->process_capture_result(mCallbackOps, &result);
+        delete [] pStream_Buf;
+    }
+
+    /* Reset the pending requests list */
+    for (pendingRequestIterator i = mPendingRequestsList.begin();
+            i != mPendingRequestsList.end();) {
+        i = erasePendingRequest(i);
+    }
+    /* Reset the pending frame drop list */
+    mPendingFrameDropList.clear();
+
+    flushMap.clear();
+    mPendingBuffersMap.num_buffers = 0;
+    mPendingBuffersMap.mPendingBufferList.clear();
+    mPendingReprocessResultList.clear();
+    CDBG("%s: Cleared all the pending buffers ", __func__);
+
+    return rc;
+}
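The group-by-frame-number block above appears twice in this function; purely as a reading aid, the pattern could be factored into a helper along these lines, assuming FlushMap is a KeyedVector<uint32_t, Vector<PendingBufferInfo> > as its use here suggests:

    // Illustrative helper, not part of the patch: group a pending buffer
    // under its frame number in the flush map.
    static void addToFlushMap(FlushMap &flushMap, uint32_t frame_number,
            const PendingBufferInfo &info)
    {
        ssize_t idx = flushMap.indexOfKey(frame_number);
        if (NAME_NOT_FOUND == idx) {
            Vector<PendingBufferInfo> pending;
            pending.add(info);
            flushMap.add(frame_number, pending);
        } else {
            flushMap.editValueFor(frame_number).add(info);
        }
    }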
+
 }; //end namespace qcamera
diff --git a/QCamera2/HAL3/QCamera3HWI.h b/QCamera2/HAL3/QCamera3HWI.h
index 910a18d..792b7c0 100644
--- a/QCamera2/HAL3/QCamera3HWI.h
+++ b/QCamera2/HAL3/QCamera3HWI.h
@@ -244,6 +244,10 @@
 
     void updatePowerHint(bool bWasVideo, bool bIsVideo);
     int32_t getSensorOutputSize(cam_dimension_t &sensor_dim);
+    int32_t dynamicUpdateMetaStreamInfo();
+    int32_t startAllChannels();
+    int32_t stopAllChannels();
+    int32_t notifyErrorForPendingRequests();
 
     camera3_device_t   mCameraDevice;
     uint32_t           mCameraId;
@@ -259,6 +263,7 @@
     QCamera3SupportChannel *mSupportChannel;
     QCamera3SupportChannel *mAnalysisChannel;
     QCamera3RawDumpChannel *mRawDumpChannel;
+    QCamera3RegularChannel *mDummyBatchChannel;
 
     void saveExifParams(metadata_buffer_t *metadata);
     mm_jpeg_exif_params_t mExifParams;
@@ -360,8 +365,11 @@
     uint8_t mToBeQueuedVidBufs;
     // Fixed video fps
     float mHFRVideoFps;
-    cam_stream_ID_t mBatchStreamID;
     uint8_t mOpMode;
+    uint32_t mPrevUrgentFrameNumber;
+    uint32_t mPrevFrameNumber;
+    camera3_stream_t mDummyBatchStream;
+    bool mNeedSensorRestart;
 
     /* sensor output size with current stream configuration */
     QCamera3CropRegionMapper mCropRegionMapper;
diff --git a/QCamera2/HAL3/QCamera3PostProc.cpp b/QCamera2/HAL3/QCamera3PostProc.cpp
index 9bfc034..a9b3a9b 100644
--- a/QCamera2/HAL3/QCamera3PostProc.cpp
+++ b/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -188,7 +188,7 @@
     max_size.w =  max_pic_dim->width;
     max_size.h =  max_pic_dim->height;
 
-    mJpegClientHandle = jpeg_open(&mJpegHandle,max_size);
+    mJpegClientHandle = jpeg_open(&mJpegHandle, max_size);
     if(!mJpegClientHandle) {
         ALOGE("%s : jpeg_open did not work", __func__);
         return UNKNOWN_ERROR;
@@ -204,7 +204,6 @@
  *
  * PARAMETERS :
  *   @config        : reprocess configuration
- *   @metadata      : metadata for the reprocessing
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
@@ -1803,7 +1802,7 @@
  *==========================================================================*/
 int32_t getRational(rat_t *rat, int num, int denom)
 {
-    if ((0 > num) || (0 > denom)) {
+    if ((0 > num) || (0 >= denom)) {
         ALOGE("%s: Negative values", __func__);
         return BAD_VALUE;
     }
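The tightened check above now also rejects a zero denominator, which would otherwise yield an undefined EXIF rational; illustrative calls (rat_t being the EXIF numerator/denominator pair used in this file):

    rat_t r;
    getRational(&r, 30, 1);    // NO_ERROR: stores 30/1
    getRational(&r, 30, 0);    // now rejected with BAD_VALUE instead of storing 30/0
    getRational(&r, -1, 100);  // rejected with BAD_VALUE: negative numerator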
@@ -1978,7 +1977,7 @@
  *
  * PARAMETERS :
  *   @latitude : ptr to rational struct to store latitude info
- *   @ladRef   : charater to indicate latitude reference
+ *   @latRef   : character to indicate latitude reference
  *   @value    : value of the latitude
  *
  * RETURN     : int32_t type of status
@@ -2013,7 +2012,7 @@
  *
  * PARAMETERS :
  *   @longitude : ptr to rational struct to store longitude info
- *   @lonRef    : charater to indicate longitude reference
+ *   @lonRef    : character to indicate longitude reference
  *   @value     : value of the longitude
  *
  * RETURN     : int32_t type of status
@@ -2048,7 +2047,7 @@
  *
  * PARAMETERS :
  *   @altitude : ptr to rational struct to store altitude info
- *   @altRef   : charater to indicate altitude reference
+ *   @altRef   : character to indicate altitude reference
  *   @argValue : altitude value
  *
  * RETURN     : int32_t type of status
@@ -2081,6 +2080,7 @@
  *   @gpsDateStamp : GPS date time stamp string
  *   @bufLen       : length of the string
  *   @gpsTimeStamp : ptr to rational struct to store time stamp info
+ *   @value        : timestamp value
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
@@ -2138,7 +2138,9 @@
  *
  * DESCRIPTION: get exif data to be passed into jpeg encoding
  *
- * PARAMETERS : none
+ * PARAMETERS :
+ *   @metadata      : metadata of the encoding request
+ *   @jpeg_settings : jpeg settings for encoding
  *
  * RETURN     : exif data from user setting and GPS
  *==========================================================================*/