Merge "DO NOT MERGE: Merge Oreo MR1 into master"
diff --git a/msm8998/QCamera2/Android.mk b/msm8998/QCamera2/Android.mk
index 37eeb23..e8a6811 100755
--- a/msm8998/QCamera2/Android.mk
+++ b/msm8998/QCamera2/Android.mk
@@ -118,9 +118,12 @@
     LOCAL_CFLAGS += -DVENUS_PRESENT
 endif
 
+# Disable UBWC for Easel HDR+.
+ifeq ($(TARGET_USES_EASEL), false)
 ifneq (,$(filter msm8996 msmcobalt sdm660 msm8998,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DUBWC_PRESENT
 endif
+endif
 
 ifneq (,$(filter msm8996,$(TARGET_BOARD_PLATFORM)))
     LOCAL_CFLAGS += -DTARGET_MSM8996
@@ -143,6 +146,7 @@
 ifeq ($(TARGET_TS_MAKEUP),true)
 LOCAL_SHARED_LIBRARIES += libts_face_beautify_hal libts_detected_face_hal
 endif
+LOCAL_HEADER_LIBRARIES := libhardware_headers media_plugin_headers
 
 LOCAL_MODULE_RELATIVE_PATH := hw
 LOCAL_MODULE := camera.$(TARGET_BOARD_PLATFORM)
diff --git a/msm8998/QCamera2/HAL/test/qcamera_test.cpp b/msm8998/QCamera2/HAL/test/qcamera_test.cpp
index 932724a..90d4969 100644
--- a/msm8998/QCamera2/HAL/test/qcamera_test.cpp
+++ b/msm8998/QCamera2/HAL/test/qcamera_test.cpp
@@ -369,12 +369,6 @@
         }
         break;
 
-        case kIndex_8_SkColorType:
-        {
-            mfmtMultiplier = 4;
-        }
-        break;
-
         case kAlpha_8_SkColorType:
         {
             mfmtMultiplier = 4;
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
index 0d9d3f6..bb12822 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
@@ -101,6 +101,7 @@
     mNRMode = 0;
 
     mYUVDump = property_get_int32("persist.camera.dumpimg", 0);
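+    // Stream buffers only need a CPU mapping when frames are going to be dumped.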
+    mMapStreamBuffers = mYUVDump;
 }
 
 /*===========================================================================
@@ -185,7 +186,8 @@
                                                m_handle,
                                                m_camOps,
                                                &mPaddingInfo,
-                                               this);
+                                               this,
+                                               mMapStreamBuffers);
     if (pStream == NULL) {
         LOGE("No mem for Stream");
         return NO_MEMORY;
@@ -777,6 +779,7 @@
             mNumBufs(CAM_MAX_NUM_BUFS_PER_STREAM),
             mStreamType(stream_type),
             mPostProcStarted(false),
+            mReprocessType(REPROCESS_TYPE_NONE),
             mInputBufferConfig(false),
             m_pMetaChannel(metadataChannel),
             mMetaFrame(NULL),
@@ -1208,6 +1211,58 @@
     return rc;
 }
 
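+/*===========================================================================
+ * FUNCTION   : registerBufferAndGetBufDef
+ *
+ * DESCRIPTION: Register a buffer with the channel memory if needed and fill
+ *              out its buffer definition.
+ *
+ * PARAMETERS :
+ *   @buffer  : buffer handle to register
+ *   @frame   : [out] buffer definition of the registered buffer
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/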
+int32_t QCamera3ProcessingChannel::registerBufferAndGetBufDef(buffer_handle_t *buffer,
+        mm_camera_buf_def_t *frame)
+{
+    if (buffer == nullptr || frame == nullptr) {
+        ALOGE("%s: buffer and frame cannot be nullptr.", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    status_t rc;
+
+    // Get the buffer index.
+    int index = mMemory.getMatchBufIndex((void*)buffer);
+    if (index < 0) {
+        // Register the buffer if it was not registered.
+        rc = registerBuffer(buffer, mIsType);
+        if (rc != OK) {
+            ALOGE("%s: Registering buffer failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+            return rc;
+        }
+
+        index = mMemory.getMatchBufIndex((void*)buffer);
+        if (index < 0) {
+            ALOGE("%s: Could not find object among registered buffers", __FUNCTION__);
+            return DEAD_OBJECT;
+        }
+    }
+
+    cam_frame_len_offset_t offset = {};
+    mStreams[0]->getFrameOffset(offset);
+
+    // Get the buffer def.
+    rc = mMemory.getBufDef(offset, *frame, index, mMapStreamBuffers);
+    if (rc != 0) {
+        ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
+        return rc;
+    }
+
+    // Set the frame's stream ID because it's not set in getBufDef.
+    frame->stream_id = mStreams[0]->getMyHandle();
+    return 0;
+}
+
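+/*===========================================================================
+ * FUNCTION   : unregisterBuffer
+ *
+ * DESCRIPTION: Unregister a buffer previously registered with
+ *              registerBufferAndGetBufDef.
+ *
+ * PARAMETERS :
+ *   @frame   : buffer definition of the buffer to unregister
+ *
+ * RETURN     : none
+ *==========================================================================*/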
+void QCamera3ProcessingChannel::unregisterBuffer(mm_camera_buf_def_t *frame)
+{
+    if (frame == nullptr) {
+        ALOGE("%s: frame is nullptr", __FUNCTION__);
+        return;
+    }
+
+    mMemory.unregisterBuffer(frame->buf_idx);
+}
+
 /*===========================================================================
  * FUNCTION   : setFwkInputPPData
  *
@@ -1250,7 +1305,7 @@
 
     src_frame->src_frame = *pInputBuffer;
     rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info,
-            src_frame->input_buffer, input_index);
+            src_frame->input_buffer, input_index, mMapStreamBuffers);
     if (rc != 0) {
         return rc;
     }
@@ -1281,7 +1336,7 @@
 
     mm_camera_buf_def_t meta_buf;
     cam_frame_len_offset_t offset = meta_planes.plane_info;
-    rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx);
+    rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx, true /*virtualAddr*/);
     if (NO_ERROR != rc) {
         return rc;
     }
@@ -1439,10 +1494,20 @@
  *==========================================================================*/
 void QCamera3ProcessingChannel::startPostProc(const reprocess_config_t &config)
 {
-    if(!mPostProcStarted) {
-        m_postprocessor.start(config);
-        mPostProcStarted = true;
+    if (mPostProcStarted) {
+        if (config.reprocess_type != mReprocessType) {
+            // If the reprocess type doesn't match, stop and start with the new type
+            m_postprocessor.stop();
+            mPostProcStarted = false;
+        } else {
+            // Return if reprocess type is the same.
+            return;
+        }
     }
+
+    m_postprocessor.start(config);
+    mPostProcStarted = true;
+    mReprocessType = config.reprocess_type;
 }
 
 /*===========================================================================
@@ -2101,6 +2166,7 @@
                                 userData, numBuffers),
                         mMemory(NULL), mDepthDataPresent(false)
 {
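+    // Metadata buffers are always parsed by the HAL on the CPU, so always map them.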
+    mMapStreamBuffers = true;
 }
 
 QCamera3MetadataChannel::~QCamera3MetadataChannel()
@@ -2244,6 +2310,7 @@
     char prop[PROPERTY_VALUE_MAX];
     property_get("persist.camera.raw.debug.dump", prop, "0");
     mRawDump = atoi(prop);
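+    // CPU access is needed both for raw dumps and for the in-place Raw16 conversion.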
+    mMapStreamBuffers = (mRawDump || mIsRaw16);
 }
 
 QCamera3RawChannel::~QCamera3RawChannel()
@@ -2387,6 +2454,8 @@
 
         uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U;
         uint16_t* raw16_buffer = (uint16_t *)frame->buffer;
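+        // Save the first quintuple (5 packed bytes = the first 4 RAW10 pixels); the
+        // in-place conversion below overwrites it before pixels 0 and 1 are decoded.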
+        uint8_t first_quintuple[5];
+        memcpy(first_quintuple, raw16_buffer, sizeof(first_quintuple));
 
         // In-place format conversion.
         // Raw16 format always occupy more memory than opaque raw10.
@@ -2402,13 +2471,19 @@
             for (int32_t xs = dim.width - 1; xs >= 0; xs--) {
                 uint32_t x = (uint32_t)xs;
                 uint8_t upper_8bit = row_start[5*(x/4)+x%4];
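+                // RAW10 packs 4 pixels into 5 bytes; the 5th byte holds the low 2 bits
+                // of all 4 pixels (2 bits each), so pixel x%4 needs a shift of 2*(x%4).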
-                uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> (x%4)) & 0x3);
+                uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> ((x%4) << 1)) & 0x3);
                 uint16_t raw16_pixel =
                         (uint16_t)(((uint16_t)upper_8bit)<<2 |
                         (uint16_t)lower_2bit);
                 raw16_buffer[y*raw16_stride+x] = raw16_pixel;
             }
         }
+
+        // Re-convert the first 2 pixels of the buffer because the loop above messes
+        // them up by reading the first quintuple while modifying it.
+        raw16_buffer[0] = ((uint16_t)first_quintuple[0]<<2) | (first_quintuple[4] & 0x3);
+        raw16_buffer[1] = ((uint16_t)first_quintuple[1]<<2) | ((first_quintuple[4] >> 2) & 0x3);
+
     } else {
         LOGE("Could not find stream");
     }
@@ -3711,7 +3786,7 @@
         Mutex::Autolock lock(mFreeBuffersLock);
         uint32_t bufIdx;
         if (mFreeBufferList.empty()) {
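+            // Allocated uncached, so no CPU cache invalidation is needed before access.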
-            rc = mYuvMemory->allocateOne(mFrameLen);
+            rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
             if (rc < 0) {
                 LOGE("Failed to allocate heap buffer. Fatal");
                 return rc;
@@ -3957,19 +4032,25 @@
     // Image description
     const char *eepromVersion = hal_obj->getEepromVersionInfo();
     const uint32_t *ldafCalib = hal_obj->getLdafCalib();
+    const char *easelFwVersion = hal_obj->getEaselFwVersion();
     if ((eepromVersion && strlen(eepromVersion)) ||
             ldafCalib) {
         int len = 0;
         settings->image_desc_valid = true;
         if (eepromVersion && strlen(eepromVersion)) {
             len = snprintf(settings->image_desc, sizeof(settings->image_desc),
-                    "%s ", eepromVersion);
+                    "%s", eepromVersion);
         }
         if (ldafCalib) {
-            snprintf(settings->image_desc + len,
+            len += snprintf(settings->image_desc + len,
                     sizeof(settings->image_desc) - len, "L:%u-%u",
                     ldafCalib[0], ldafCalib[1]);
         }
+        if (easelFwVersion) {
+            ALOGD("%s: Easel FW version %s", __FUNCTION__, easelFwVersion);
+            len += snprintf(settings->image_desc + len,
+                            sizeof(settings->image_desc) - len, ":%s", easelFwVersion);
+        }
     }
 
     return m_postprocessor.processJpegSettingData(settings);
@@ -4045,7 +4126,7 @@
     // Get an available YUV buffer.
     if (mFreeBufferList.empty()) {
         // Allocate a buffer if no one is available.
-        rc = mYuvMemory->allocateOne(mFrameLen);
+        rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false);
         if (rc < 0) {
             LOGE("Failed to allocate heap buffer. Fatal");
             return rc;
@@ -4064,7 +4145,7 @@
     mStreams[0]->getFrameOffset(offset);
 
     // Get a buffer from YUV memory.
-    rc = mYuvMemory->getBufDef(offset, *frame, bufIdx);
+    rc = mYuvMemory->getBufDef(offset, *frame, bufIdx, mMapStreamBuffers);
     if (rc != 0) {
         ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc);
         return rc;
@@ -4128,9 +4209,6 @@
         return rc;
     }
 
-    // Invalidate YUV buffer cache
-    mYuvMemory->invalidateCache(frame->buf_idx);
-
     // Start postprocessor
     startPostProc(reproc_cfg);
 
@@ -4976,7 +5054,7 @@
         return BAD_VALUE;
     }
 
-    if (NULL == frame->input_buffer.buffer) {
+    if (0 > frame->input_buffer.fd) {
         LOGE("No input buffer available");
         return BAD_VALUE;
     }
@@ -5260,7 +5338,8 @@
             m_handle,
             m_camOps,
             &mPaddingInfo,
-            (QCamera3Channel*)this);
+            (QCamera3Channel*)this,
+            false/*mapStreamBuffers*/);
     if (pStream == NULL) {
         LOGE("No mem for Stream");
         return NO_MEMORY;
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.h b/msm8998/QCamera2/HAL3/QCamera3Channel.h
index 4a8c358..a23acd5 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.h
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.h
@@ -184,6 +184,7 @@
     uint32_t mSkipMode;
     uint32_t mDumpSkipCnt;
     uint8_t mNRMode;
+    bool    mMapStreamBuffers; // Whether to mmap all stream buffers
 };
 
 /* QCamera3ProcessingChannel is used to handle all streams that are directly
@@ -219,7 +220,10 @@
     virtual QCamera3StreamMem *getStreamBufs(uint32_t len);
     virtual void putStreamBufs();
     virtual int32_t registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType);
-
+    // Register a buffer and get the buffer def for the registered buffer.
+    virtual int32_t registerBufferAndGetBufDef(buffer_handle_t *buffer, mm_camera_buf_def_t *frame);
+    // Unregister a buffer.
+    virtual void unregisterBuffer(mm_camera_buf_def_t *frame);
     virtual int32_t stop();
 
     virtual reprocess_type_t getReprocessType() = 0;
@@ -269,6 +273,7 @@
     uint8_t mIntent;
 
     bool mPostProcStarted;
+    reprocess_type_t mReprocessType; // Only valid when mPostProcStarted is true.
     bool mInputBufferConfig;   // Set when the processing channel is configured
                                // for processing input(framework) buffers
 
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
index d8a9a24..fe1fe8d 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
@@ -72,7 +72,9 @@
 #define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
 
 #define EMPTY_PIPELINE_DELAY 2
-#define PARTIAL_RESULT_COUNT 2
+// mm_camera has 2 partial results: 3A and the final result.
+// HDR+ requests have 3 partial results: postview, next request ready, and final result.
+#define PARTIAL_RESULT_COUNT 3
 #define FRAME_SKIP_DELAY     0
 
 #define MAX_VALUE_8BIT ((1<<8)-1)
@@ -132,9 +134,6 @@
 // Max preferred zoom
 #define MAX_PREFERRED_ZOOM_RATIO 7.0
 
-// TODO: Enable HDR+ for front camera after it's supported. b/37100623.
-#define ENABLE_HDRPLUS_FOR_FRONT_CAMERA 0
-
 // Whether to check for the GPU stride padding, or use the default
 //#define CHECK_GPU_PIXEL_ALIGNMENT
 
@@ -150,13 +149,14 @@
 bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
 std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
 bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
+std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
 bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
 bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
 
 // If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
 bool gEaselBypassOnly;
 
-Mutex gHdrPlusClientLock; // Protect above Easel related variables.
+std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
 
 
 const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
@@ -275,7 +275,9 @@
     { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
     { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
     { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON  },
-    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
+    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
+    { (camera_metadata_enum_android_control_ae_mode_t)
+      NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
 };
 
 const QCamera3HardwareInterface::QCameraMap<
@@ -472,6 +474,7 @@
       m_bEisSupportedSize(false),
       m_bEisEnable(false),
       m_bEis3PropertyEnabled(false),
+      m_bAVTimerEnabled(false),
       m_MobicatMask(0),
       mShutterDispatcher(this),
       mOutputBufferDispatcher(this),
@@ -485,7 +488,6 @@
       mCallbacks(callbacks),
       mCaptureIntent(0),
       mCacMode(0),
-      mHybridAeEnable(0),
       /* DevCamDebug metadata internal m control*/
       mDevCamDebugMetaEnable(0),
       /* DevCamDebug metadata end */
@@ -507,6 +509,8 @@
       mInstantAECSettledFrameNumber(0),
       mAecSkipDisplayFrameBound(0),
       mInstantAecFrameIdxCount(0),
+      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
+      mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
       mCurrFeatureState(0),
       mLdafCalibExist(false),
       mLastCustIntentFrmNum(-1),
@@ -519,10 +523,12 @@
       m_pDualCamCmdPtr(NULL),
       mHdrPlusModeEnabled(false),
       mZslEnabled(false),
+      mEaselMipiStarted(false),
       mIsApInputUsedForHdrPlus(false),
       mFirstPreviewIntentSeen(false),
       m_bSensorHDREnabled(false),
-      mAfTrigger()
+      mAfTrigger(),
+      mSceneDistance(-1)
 {
     getLogLevel();
     mCommon.init(gCamCapability[cameraId]);
@@ -564,6 +570,9 @@
     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
     memset(mLdafCalib, 0, sizeof(mLdafCalib));
 
+    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
+    mEaselFwUpdated = false;
+
     memset(prop, 0, sizeof(prop));
     property_get("persist.camera.tnr.preview", prop, "0");
     m_bTnrPreview = (uint8_t)atoi(prop);
@@ -628,6 +637,19 @@
     mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
     mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
 
+    // Close HDR+ client first before destroying HAL.
+    {
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+        finishHdrPlusClientOpeningLocked(l);
+        if (gHdrPlusClient != nullptr) {
+            // Disable HDR+ mode.
+            disableHdrPlusModeLocked();
+            // Disconnect Easel if it's connected.
+            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
+            gHdrPlusClient = nullptr;
+        }
+    }
+
     // unlink of dualcam during close camera
     if (mIsDeviceLinked) {
         cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
@@ -686,9 +708,7 @@
         mMetadataChannel->stop();
     }
     if (mChannelHandle) {
-        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
-                mChannelHandle, /*stop_immediately*/false);
-        LOGD("stopping channel %d", mChannelHandle);
+        stopChannelLocked(/*stop_immediately*/false);
     }
 
     for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
@@ -886,7 +906,7 @@
     }
 
     {
-        Mutex::Autolock l(gHdrPlusClientLock);
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
         if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
             logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
             rc = gEaselManagerClient->resume(this);
@@ -894,6 +914,7 @@
                 ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                 return rc;
             }
+            mEaselFwUpdated = false;
         }
     }
 
@@ -905,7 +926,7 @@
 
         // Suspend Easel because opening camera failed.
         {
-            Mutex::Autolock l(gHdrPlusClientLock);
+            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
             if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                 status_t suspendErr = gEaselManagerClient->suspend();
                 if (suspendErr != 0) {
@@ -1101,21 +1122,8 @@
          mCameraId, rc);
 
     {
-        Mutex::Autolock l(gHdrPlusClientLock);
-        if (gHdrPlusClient != nullptr) {
-            // Disable HDR+ mode.
-            disableHdrPlusModeLocked();
-            // Disconnect Easel if it's connected.
-            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
-            gHdrPlusClient = nullptr;
-        }
-
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
         if (EaselManagerClientOpened) {
-            rc = gEaselManagerClient->stopMipi(mCameraId);
-            if (rc != 0) {
-                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
-            }
-
             rc = gEaselManagerClient->suspend();
             if (rc != 0) {
                 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
@@ -1704,14 +1712,24 @@
 {
     for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
             req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
+        // WAR: also record this timestamp as the AVTimer timestamp of the next request.
+        if (req->frame_number == frameNumber + 1) {
+            req->av_timestamp = timestamp;
+        }
+
         if (req->frame_number != frameNumber)
             continue;
 
         for (auto k = req->mPendingBufferList.begin();
                 k != req->mPendingBufferList.end(); k++ ) {
-            struct private_handle_t *priv_handle =
-                    (struct private_handle_t *) (*(k->buffer));
-            setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
+            // WAR: skip setting the VT timestamp for the AVTimer video usecase; it is
+            // applied later, when the video buffer is returned.
+            QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
+            if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
+                    m_bAVTimerEnabled)) {
+                struct private_handle_t *priv_handle =
+                        (struct private_handle_t *) (*(k->buffer));
+                setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
+            }
         }
     }
     return;
@@ -1791,6 +1809,13 @@
         return rc;
     }
 
+    // Disable HDR+ if it's enabled.
+    {
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+        finishHdrPlusClientOpeningLocked(l);
+        disableHdrPlusModeLocked();
+    }
+
     /* first invalidate all the steams in the mStreamList
      * if they appear again, they will be validated */
     for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
@@ -1825,13 +1850,13 @@
         mMetadataChannel->stop();
     }
     if (mChannelHandle) {
-        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
-                mChannelHandle, /*stop_immediately*/false);
-        LOGD("stopping channel %d", mChannelHandle);
+        stopChannelLocked(/*stop_immediately*/false);
     }
 
     pthread_mutex_lock(&mMutex);
 
+    mPictureChannel = NULL;
+
     // Check state
     switch (mState) {
         case INITIALIZED:
@@ -1895,6 +1920,8 @@
     mCurrFeatureState = 0;
     mStreamConfig = true;
 
+    m_bAVTimerEnabled = false;
+
     memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
 
     size_t count = IS_TYPE_MAX;
@@ -2216,6 +2243,7 @@
             stream_info->stream = newStream;
             stream_info->status = VALID;
             stream_info->channel = NULL;
+            stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
             mStreamInfo.push_back(stream_info);
         }
         /* Covers Opaque ZSL and API1 F/W ZSL */
@@ -3041,12 +3069,6 @@
 
     mFirstPreviewIntentSeen = false;
 
-    // Disable HRD+ if it's enabled;
-    {
-        Mutex::Autolock l(gHdrPlusClientLock);
-        disableHdrPlusModeLocked();
-    }
-
     // Update state
     mState = CONFIGURED;
 
@@ -3549,7 +3571,7 @@
     result.partial_result = requestIter->partial_result_cnt;
 
     {
-        Mutex::Autolock l(gHdrPlusClientLock);
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
             // Notify HDR+ client about the partial metadata.
             gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
@@ -3711,10 +3733,11 @@
             LOGD("Iterator Frame = %d urgent frame = %d",
                  i->frame_number, urgent_frame_number);
 
-            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
-                (i->partial_result_cnt == 0)) {
+            if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
+                    (i->partial_result_cnt == 0)) {
                 LOGE("Error: HAL missed urgent metadata for frame number %d",
                          i->frame_number);
+                i->partialResultDropped = true;
                 i->partial_result_cnt++;
             }
 
@@ -3813,7 +3836,13 @@
 
     for (auto & pendingRequest : mPendingRequestsList) {
         // Find the pending request with the frame number.
-        if (pendingRequest.frame_number == frame_number) {
+        if (pendingRequest.frame_number < frame_number) {
+            // Workaround for case where shutter is missing due to dropped
+            // metadata
+            if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
+                mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
+            }
+        } else if (pendingRequest.frame_number == frame_number) {
             // Update the sensor timestamp.
             pendingRequest.timestamp = capture_time;
 
@@ -4163,6 +4192,25 @@
         }
     }
 
+    // WAR for encoder avtimer timestamp issue
+    QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
+    if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
+        m_bAVTimerEnabled) {
+        for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
+            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
+            if (req->frame_number != frame_number)
+                continue;
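+            // No AVTimer timestamp was recorded for this frame; return the buffer in
+            // error state.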
+            if (req->av_timestamp == 0) {
+                buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
+            } else {
+                struct private_handle_t *priv_handle =
+                    (struct private_handle_t *) (*(buffer->buffer));
+                setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
+            }
+        }
+    }
+
     buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
     LOGH("result frame_number = %d, buffer = %p",
              frame_number, buffer->buffer);
@@ -4185,8 +4233,36 @@
     }
 }
 
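+/*===========================================================================
+ * FUNCTION   : removeUnrequestedMetadata
+ *
+ * DESCRIPTION: Remove the lens shading map and face detect results from the
+ *              result metadata if the request didn't ask for them (they may
+ *              have been force-enabled for Easel HDR+).
+ *
+ * RETURN     : none
+ *==========================================================================*/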
+void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
+        camera_metadata_t *resultMetadata) {
+    CameraMetadata metadata;
+    metadata.acquire(resultMetadata);
+
+    // Remove lens shading map if it's not requested.
+    if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
+            metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
+            metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
+            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
+        metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
+        metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
+            &requestIter->requestedLensShadingMapMode, 1);
+    }
+
+    // Remove face information if it's not requested.
+    if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
+            metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
+            metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
+            ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
+        metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
+        metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
+                &requestIter->requestedFaceDetectMode, 1);
+    }
+
+    requestIter->resultMetadata = metadata.release();
+}
+
 void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
-        const camera_metadata_t *resultMetadata)
+        camera_metadata_t *resultMetadata)
 {
     // Find the pending request for this result metadata.
     auto requestIter = mPendingRequestsList.begin();
@@ -4214,11 +4290,11 @@
         requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
     } else {
         liveRequest = true;
-        requestIter->partial_result_cnt++;
+        requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
         mPendingLiveRequest--;
 
         {
-            Mutex::Autolock l(gHdrPlusClientLock);
+            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
             // For a live request, send the metadata to HDR+ client.
             if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
                 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
@@ -4227,6 +4303,10 @@
         }
     }
 
+    if (requestIter->input_buffer == nullptr) {
+        removeUnrequestedMetadata(requestIter, resultMetadata);
+    }
+
     dispatchResultMetadataWithLock(frameNumber, liveRequest);
 }
 
@@ -4248,6 +4328,7 @@
         }
 
         bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
+        bool errorResult = false;
 
         camera3_capture_result_t result = {};
         result.frame_number = iter->frame_number;
@@ -4264,30 +4345,27 @@
                 iter++;
                 continue;
             }
+            // Notify ERROR_RESULT if partial result was dropped.
+            errorResult = iter->partialResultDropped;
         } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
             // If the result metadata belongs to a live request, notify errors for previous pending
             // live requests.
             mPendingLiveRequest--;
 
-            CameraMetadata dummyMetadata;
-            dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
-            result.result = dummyMetadata.release();
-
-            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
-
-            // partial_result should be PARTIAL_RESULT_CNT in case of
-            // ERROR_RESULT.
-            iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
-            result.partial_result = PARTIAL_RESULT_COUNT;
+            LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
+            errorResult = true;
         } else {
             iter++;
             continue;
         }
 
-        result.output_buffers = nullptr;
-        result.num_output_buffers = 0;
-        orchestrateResult(&result);
-
+        if (errorResult) {
+            notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
+        } else {
+            result.output_buffers = nullptr;
+            result.num_output_buffers = 0;
+            orchestrateResult(&result);
+        }
         // For reprocessing, result metadata is the same as settings so do not free it here to
         // avoid double free.
         if (result.result != iter->settings) {
@@ -4730,8 +4808,8 @@
 }
 
 status_t QCamera3HardwareInterface::fillPbStreamConfig(
-        pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
-        QCamera3Channel *channel, uint32_t streamIndex) {
+        pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
+        uint32_t streamIndex) {
     if (config == nullptr) {
         LOGE("%s: config is null", __FUNCTION__);
         return BAD_VALUE;
@@ -4758,14 +4836,30 @@
     config->image.width = streamInfo->dim.width;
     config->image.height = streamInfo->dim.height;
     config->image.padding = 0;
-    config->image.format = pbStreamFormat;
+
+    int bytesPerPixel = 0;
+
+    switch (streamInfo->fmt) {
+        case CAM_FORMAT_YUV_420_NV21:
+            config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
+            bytesPerPixel = 1;
+            break;
+        case CAM_FORMAT_YUV_420_NV12:
+        case CAM_FORMAT_YUV_420_NV12_VENUS:
+            config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+            bytesPerPixel = 1;
+            break;
+        default:
+            ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
+            return BAD_VALUE;
+    }
 
     uint32_t totalPlaneSize = 0;
 
     // Fill plane information.
     for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
         pbcamera::PlaneConfiguration plane;
-        plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
+        plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
         plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
         config->image.planes.push_back(plane);
 
@@ -4959,11 +5053,13 @@
             if (m_debug_avtimer){
                 LOGI(" Enabling AV timer through setprop");
                 use_av_timer = &m_debug_avtimer;
+                m_bAVTimerEnabled = true;
             }
             else{
                 use_av_timer =
                     meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
                 if (use_av_timer) {
+                    m_bAVTimerEnabled = true;
                     LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
                 }
             }
@@ -5360,11 +5456,25 @@
                 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
     }
 
+    uint8_t requestedLensShadingMapMode;
+    // Get the shading map mode.
+    if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
+        mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
+                meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
+    } else {
+        requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
+    }
+
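+    // Face detect mode is sticky: remember the most recently requested mode for
+    // requests that omit the key.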
+    if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
+        mLastRequestedFaceDetectMode =
+                meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
+    }
+
     bool hdrPlusRequest = false;
     HdrPlusPendingRequest pendingHdrPlusRequest = {};
 
     {
-        Mutex::Autolock l(gHdrPlusClientLock);
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
         // If this request has a still capture intent, try to submit an HDR+ request.
         if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
                 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
@@ -5397,6 +5507,18 @@
                 pthread_mutex_unlock(&mMutex);
                 return rc;
             }
+
+            {
+                // If HDR+ mode is enabled, override the following modes so the necessary metadata
+                // will be included in the result metadata sent to Easel HDR+.
+                std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+                if (mHdrPlusModeEnabled) {
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
+                        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
+                    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
+                        ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
+                }
+            }
         }
         /* For batchMode HFR, setFrameParameters is not called for every
          * request. But only frame number of the latest request is parsed.
@@ -5457,6 +5579,8 @@
     pendingRequest.request_id = request_id;
     pendingRequest.blob_request = blob_request;
     pendingRequest.timestamp = 0;
+    pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
+    pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
     if (request->input_buffer) {
         pendingRequest.input_buffer =
                 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
@@ -5475,12 +5599,10 @@
     pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
     pendingRequest.capture_intent = mCaptureIntent;
     if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
-        mHybridAeEnable =
+        pendingRequest.hybrid_ae_enable =
                 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
     }
 
-    // Enable hybrid AE if it's enabled in metadata or HDR+ mode is enabled.
-    pendingRequest.hybrid_ae_enable = mHybridAeEnable || mHdrPlusModeEnabled;
     /* DevCamDebug metadata processCaptureRequest */
     if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
         mDevCamDebugMetaEnable =
@@ -5513,6 +5635,7 @@
     bufsForCurRequest.frame_number = frameNumber;
     // Mark current timestamp for the new request
     bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
+    bufsForCurRequest.av_timestamp = 0;
     bufsForCurRequest.hdrplus = hdrPlusRequest;
 
     if (hdrPlusRequest) {
@@ -5985,42 +6108,9 @@
                 }
 
                 // Configure modules for stream on.
-                rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
-                        mChannelHandle, /*start_sensor_streaming*/false);
+                rc = startChannelLocked();
                 if (rc != NO_ERROR) {
-                    LOGE("start_channel failed %d", rc);
-                    pthread_mutex_unlock(&mMutex);
-                    return rc;
-                }
-
-                {
-                    // Configure Easel for stream on.
-                    Mutex::Autolock l(gHdrPlusClientLock);
-
-                    // Now that sensor mode should have been selected, get the selected sensor mode
-                    // info.
-                    memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
-                    getCurrentSensorModeInfo(mSensorModeInfo);
-
-                    if (EaselManagerClientOpened) {
-                        logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
-                        rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
-                                /*enableCapture*/true);
-                        if (rc != OK) {
-                            ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
-                                    mCameraId, mSensorModeInfo.op_pixel_clk);
-                            pthread_mutex_unlock(&mMutex);
-                            return rc;
-                        }
-                        logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
-                    }
-                }
-
-                // Start sensor streaming.
-                rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
-                        mChannelHandle);
-                if (rc != NO_ERROR) {
-                    LOGE("start_sensor_stream_on failed %d", rc);
+                    LOGE("startChannelLocked failed %d", rc);
                     pthread_mutex_unlock(&mMutex);
                     return rc;
                 }
@@ -6028,19 +6118,24 @@
         }
     }
 
-    // Enable HDR+ mode for the first PREVIEW_INTENT request.
-    if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || mCameraId == 0) {
-        Mutex::Autolock l(gHdrPlusClientLock);
-        if (gEaselManagerClient != nullptr  && gEaselManagerClient->isEaselPresentOnDevice() &&
+    // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
+    {
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
                 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
                 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
                 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
-                ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
-            rc = enableHdrPlusModeLocked();
-            if (rc != OK) {
-                LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
-                pthread_mutex_unlock(&mMutex);
-                return rc;
+                ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
+                meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
+                meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
+
+            if (isSessionHdrPlusModeCompatible()) {
+                rc = enableHdrPlusModeLocked();
+                if (rc != OK) {
+                    LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
+                    pthread_mutex_unlock(&mMutex);
+                    return rc;
+                }
             }
 
             mFirstPreviewIntentSeen = true;
@@ -6099,6 +6194,70 @@
     return rc;
 }
 
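+/*===========================================================================
+ * FUNCTION   : startChannelLocked
+ *
+ * DESCRIPTION: Start the channel, start Easel MIPI if an Easel client is
+ *              open, then start sensor streaming.
+ *
+ * RETURN     : int32_t type of status
+ *              NO_ERROR  -- success
+ *              none-zero failure code
+ *==========================================================================*/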
+int32_t QCamera3HardwareInterface::startChannelLocked()
+{
+    // Configure modules for stream on.
+    int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
+            mChannelHandle, /*start_sensor_streaming*/false);
+    if (rc != NO_ERROR) {
+        LOGE("start_channel failed %d", rc);
+        return rc;
+    }
+
+    {
+        // Configure Easel for stream on.
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+        if (EaselManagerClientOpened) {
+            // Now that sensor mode should have been selected, get the selected sensor mode
+            // info.
+            memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
+            rc = getCurrentSensorModeInfo(mSensorModeInfo);
+            if (rc != NO_ERROR) {
+                ALOGE("%s: Get current sensor mode failed, bail out: %s (%d).", __FUNCTION__,
+                        strerror(-rc), rc);
+                return rc;
+            }
+            logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
+            rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
+                    /*enableCapture*/true);
+            if (rc != OK) {
+                ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
+                        mCameraId, mSensorModeInfo.op_pixel_clk);
+                return rc;
+            }
+            logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
+            mEaselMipiStarted = true;
+        }
+    }
+
+    // Start sensor streaming.
+    rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
+            mChannelHandle);
+    if (rc != NO_ERROR) {
+        LOGE("start_sensor_stream_on failed %d", rc);
+        return rc;
+    }
+
+    return 0;
+}
+
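+/*===========================================================================
+ * FUNCTION   : stopChannelLocked
+ *
+ * DESCRIPTION: Stop the channel, then stop Easel MIPI if it was started.
+ *
+ * PARAMETERS :
+ *   @stopChannelImmediately : whether to stop the channel immediately
+ *
+ * RETURN     : none
+ *==========================================================================*/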
+void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
+{
+    mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
+            mChannelHandle, stopChannelImmediately);
+
+    {
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+        if (EaselManagerClientOpened && mEaselMipiStarted) {
+            int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
+            if (rc != 0) {
+                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+            }
+            mEaselMipiStarted = false;
+        }
+    }
+}
+
 /*===========================================================================
  * FUNCTION   : dump
  *
@@ -6185,6 +6344,13 @@
     mFlush = true;
     pthread_mutex_unlock(&mMutex);
 
+    // Disable HDR+ if it's enabled.
+    {
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+        finishHdrPlusClientOpeningLocked(l);
+        disableHdrPlusModeLocked();
+    }
+
     rc = stopAllChannels();
     // unlink of dualcam
     if (mIsDeviceLinked) {
@@ -6221,8 +6387,7 @@
         return rc;
     }
     if (mChannelHandle) {
-        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
-                mChannelHandle, stopChannelImmediately);
+        stopChannelLocked(stopChannelImmediately);
     }
 
     // Reset bundle info
@@ -6257,10 +6422,10 @@
             return rc;
         }
         if (mChannelHandle) {
-            mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
-                        mChannelHandle, /*start_sensor_streaming*/true);
+            // Configure modules for stream on.
+            rc = startChannelLocked();
             if (rc < 0) {
-                LOGE("start_channel failed");
+                LOGE("startChannelLocked failed");
                 pthread_mutex_unlock(&mMutex);
                 return rc;
             }
@@ -6667,12 +6832,12 @@
             camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
         }
         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
-                CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
+                CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
             int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
             camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
         }
         IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
-                CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
+                CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
             int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
             camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
         }
@@ -7230,12 +7395,11 @@
                         faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
                         // Adjust crop region from sensor output coordinate system to active
                         // array coordinate system.
-                        cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
+                        cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
                         mCropRegionMapper.toActiveArray(rect.left, rect.top,
                                 rect.width, rect.height);
 
-                        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
-                                faceRectangles+j, -1);
+                        convertToRegions(rect, faceRectangles+j, -1);
 
                         LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
                                 "bottom-right (%d, %d)",
@@ -7262,19 +7426,20 @@
                                 CAM_INTF_META_FACE_LANDMARK, metadata) {
 
                             for (size_t i = 0; i < numFaces; i++) {
+                                cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
                                 // Map the co-ordinate sensor output coordinate system to active
                                 // array coordinate system.
                                 mCropRegionMapper.toActiveArray(
-                                        landmarks->face_landmarks[i].left_eye_center.x,
-                                        landmarks->face_landmarks[i].left_eye_center.y);
+                                        face_landmarks.left_eye_center.x,
+                                        face_landmarks.left_eye_center.y);
                                 mCropRegionMapper.toActiveArray(
-                                        landmarks->face_landmarks[i].right_eye_center.x,
-                                        landmarks->face_landmarks[i].right_eye_center.y);
+                                        face_landmarks.right_eye_center.x,
+                                        face_landmarks.right_eye_center.y);
                                 mCropRegionMapper.toActiveArray(
-                                        landmarks->face_landmarks[i].mouth_center.x,
-                                        landmarks->face_landmarks[i].mouth_center.y);
+                                        face_landmarks.mouth_center.x,
+                                        face_landmarks.mouth_center.y);
 
-                                convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
+                                convertLandmarks(face_landmarks, faceLandmarks+k);
 
                                 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
                                         "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
@@ -7705,16 +7870,17 @@
         int32_t aeRegions[REGIONS_TUPLE_COUNT];
         // Adjust crop region from sensor output coordinate system to active
         // array coordinate system.
-        mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
-                hAeRegions->rect.width, hAeRegions->rect.height);
+        cam_rect_t hAeRect = hAeRegions->rect;
+        mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
+                hAeRect.width, hAeRect.height);
 
-        convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
+        convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
         camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
                 REGIONS_TUPLE_COUNT);
         LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                  aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
-                hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
-                hAeRegions->rect.height);
+                hAeRect.left, hAeRect.top, hAeRect.width,
+                hAeRect.height);
     }
 
     if (!pendingRequest.focusStateSent) {
@@ -8074,16 +8240,14 @@
 
     // OIS Data
     IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
-        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
-            &(frame_ois_data->frame_sof_timestamp_vsync), 1);
         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
             &(frame_ois_data->frame_sof_timestamp_boottime), 1);
         camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
             frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
-        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
-            frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
-        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
-            frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
+        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
+            frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
+        camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
+            frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
     }
 
     resultMetadata = camMetadata.release();
@@ -8281,19 +8445,20 @@
 
     IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
         /*af regions*/
+        cam_rect_t hAfRect = hAfRegions->rect;
         int32_t afRegions[REGIONS_TUPLE_COUNT];
         // Adjust crop region from sensor output coordinate system to active
         // array coordinate system.
-        mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
-                hAfRegions->rect.width, hAfRegions->rect.height);
+        mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
+                hAfRect.width, hAfRect.height);
 
-        convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
+        convertToRegions(hAfRect, afRegions, hAfRegions->weight);
         camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
                 REGIONS_TUPLE_COUNT);
         LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
                  afRegions[0], afRegions[1], afRegions[2], afRegions[3],
-                hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
-                hAfRegions->rect.height);
+                hAfRect.left, hAfRect.top, hAfRect.width,
+                hAfRect.height);
     }
 
     // AF region confidence
@@ -8370,6 +8535,24 @@
             }
         }
     }
+
+    IF_META_AVAILABLE(int32_t, af_tof_confidence,
+            CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
+        IF_META_AVAILABLE(int32_t, af_tof_distance,
+                CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
+            int32_t fwk_af_tof_confidence = *af_tof_confidence;
+            int32_t fwk_af_tof_distance = *af_tof_distance;
+            if (fwk_af_tof_confidence == 1) {
+                mSceneDistance = fwk_af_tof_distance;
+            } else {
+                mSceneDistance = -1;
+            }
+            LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
+                     fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
+        }
+    }
+    camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
+
     resultMetadata = camMetadata.release();
     return resultMetadata;
 }
@@ -8529,6 +8712,13 @@
     }
 
     mStreamInfo = newStreamInfo;
+
+    // Make sure that stream IDs are unique.
+    uint32_t id = 0;
+    for (auto streamInfo : mStreamInfo) {
+        streamInfo->id = id++;
+    }
+
 }
 
 /*===========================================================================
@@ -9213,6 +9403,7 @@
     bool limitedDevice = false;
     char prop[PROPERTY_VALUE_MAX];
     bool supportBurst = false;
+    Vector<int32_t> available_characteristics_keys;
 
     supportBurst = supportBurstCapture(cameraId);
 
@@ -9365,6 +9556,9 @@
     }
     staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
             histBins.data(), histBins.size());
+    if (!histBins.empty()) {
+        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS);
+    }
 
     int32_t sharpness_map_size[] = {
             gCamCapability[cameraId]->sharpness_map_size.width,
@@ -9418,6 +9612,23 @@
         int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
         staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
                 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
+        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS);
+
+        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
+                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
+                sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
+        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS);
+
+        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
+                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
+                sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
+        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS);
+
+        staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
+                reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
+                sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
+        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
+
     }
 
     int32_t scalar_formats[] = {
@@ -10158,6 +10369,23 @@
        ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
        ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
        ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
+       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
+       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
+       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TEMPORAL_DENOISE_ENABLE,
+       QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, QCAMERA3_USE_ISO_EXP_PRIORITY,
+       QCAMERA3_SELECT_PRIORITY, QCAMERA3_USE_SATURATION,
+       QCAMERA3_EXPOSURE_METER, QCAMERA3_USE_AV_TIMER,
+       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
+       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
+       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
+       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
+       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
+       QCAMERA3_JPEG_ENCODE_CROP_ENABLE, QCAMERA3_JPEG_ENCODE_CROP_RECT,
+       QCAMERA3_JPEG_ENCODE_CROP_ROI, QCAMERA3_VIDEO_HDR_MODE,
+       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
+       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
+       QCAMERA3_SHARPNESS_STRENGTH, QCAMERA3_HISTOGRAM_MODE,
+       QCAMERA3_BINNING_CORRECTION_MODE,
        /* DevCamDebug metadata request_keys_basic */
        DEVCAMDEBUG_META_ENABLE,
        /* DevCamDebug metadata end */
@@ -10166,6 +10394,7 @@
        TANGO_MODE_DATA_SENSOR_FULLFOV,
        NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
        NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
+       NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE
        };
 
     size_t request_keys_cnt =
@@ -10177,9 +10406,10 @@
     }
 
     if (gExposeEnableZslKey) {
-        if (ENABLE_HDRPLUS_FOR_FRONT_CAMERA || cameraId == 0) {
-            available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
-        }
+        available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
+        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
+        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
+        available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
     }
 
     staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
@@ -10212,6 +10442,24 @@
 #endif
        NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
        NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
+       QCAMERA3_PRIVATEDATA_REPROCESS, QCAMERA3_CDS_MODE, QCAMERA3_CDS_INFO,
+       QCAMERA3_CROP_COUNT_REPROCESS, QCAMERA3_CROP_REPROCESS,
+       QCAMERA3_CROP_ROI_MAP_REPROCESS, QCAMERA3_TUNING_META_DATA_BLOB,
+       QCAMERA3_TEMPORAL_DENOISE_ENABLE, QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE,
+       QCAMERA3_EXPOSURE_METER, QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN,
+       QCAMERA3_DUALCAM_LINK_ENABLE, QCAMERA3_DUALCAM_LINK_IS_MAIN,
+       QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID,
+       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
+       QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
+       QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB, QCAMERA3_VIDEO_HDR_MODE,
+       QCAMERA3_IR_MODE, QCAMERA3_AEC_CONVERGENCE_SPEED,
+       QCAMERA3_AWB_CONVERGENCE_SPEED, QCAMERA3_INSTANT_AEC_MODE,
+       QCAMERA3_HISTOGRAM_MODE, QCAMERA3_BINNING_CORRECTION_MODE,
+       QCAMERA3_STATS_IS_HDR_SCENE, QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
+       QCAMERA3_STATS_BLINK_DETECTED, QCAMERA3_STATS_BLINK_DEGREE,
+       QCAMERA3_STATS_SMILE_DEGREE, QCAMERA3_STATS_SMILE_CONFIDENCE,
+       QCAMERA3_STATS_GAZE_ANGLE, QCAMERA3_STATS_GAZE_DIRECTION,
+       QCAMERA3_STATS_GAZE_DEGREE,
        // DevCamDebug metadata result_keys_basic
        DEVCAMDEBUG_META_ENABLE,
        // DevCamDebug metadata result_keys AF
@@ -10280,7 +10528,16 @@
        NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
        NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
        NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
+       NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
        NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
+       NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
+       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
+       NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
+       NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
+       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
+       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
+       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
+       NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y
        };
 
     size_t result_keys_cnt =
@@ -10304,7 +10561,7 @@
         available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
     }
 #ifndef USE_HAL_3_3
-    if (hasBlackRegions) {
+    {
         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
         available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
     }
@@ -10312,6 +10569,9 @@
 
     if (gExposeEnableZslKey) {
         available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
+        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
+        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
+        available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
     }
 
     staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
@@ -10375,9 +10635,14 @@
        ANDROID_SENSOR_OPAQUE_RAW_SIZE,
        ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
 #endif
+       QCAMERA3_OPAQUE_RAW_FORMAT, QCAMERA3_EXP_TIME_RANGE,
+       QCAMERA3_SATURATION_RANGE, QCAMERA3_SENSOR_IS_MONO_ONLY,
+       QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
+       QCAMERA3_SHARPNESS_RANGE,
+       QCAMERA3_HISTOGRAM_BUCKETS, QCAMERA3_HISTOGRAM_MAX_COUNT,
+       QCAMERA3_STATS_BSGC_AVAILABLE
        };
 
-    Vector<int32_t> available_characteristics_keys;
     available_characteristics_keys.appendArray(characteristics_keys_basic,
             sizeof(characteristics_keys_basic)/sizeof(int32_t));
 #ifndef USE_HAL_3_3
@@ -10398,10 +10663,6 @@
                 sizeof(depthKeys) / sizeof(depthKeys[0]));
     }
 
-    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
-                      available_characteristics_keys.array(),
-                      available_characteristics_keys.size());
-
     /*available stall durations depend on the hw + sw and will be different for different devices */
     /*have to add for raw after implementation*/
     int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
@@ -10470,8 +10731,12 @@
             &gCamCapability[cameraId]->padding_info, &buf_planes);
         strides.add(buf_planes.plane_info.mp[0].stride);
     }
-    staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
-            strides.size());
+
+    if (!strides.isEmpty()) {
+        staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
+                strides.size());
+        available_characteristics_keys.add(QCAMERA3_OPAQUE_RAW_STRIDES);
+    }
 
     //TBD: remove the following line once backend advertises zzHDR in feature mask
     gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
@@ -10486,6 +10751,7 @@
         size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
         staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
                     vhdr_mode, vhdr_mode_count);
+        available_characteristics_keys.add(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES);
     }
 
     staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
@@ -10541,6 +10807,7 @@
         }
         staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
                 avail_ir_modes, size);
+        available_characteristics_keys.add(QCAMERA3_IR_AVAILABLE_MODES);
     }
 
     if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
@@ -10558,6 +10825,7 @@
         }
         staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
                 available_instant_aec_modes, size);
+        available_characteristics_keys.add(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES);
     }
 
     int32_t sharpness_range[] = {
@@ -10581,6 +10849,7 @@
         }
         staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
                 avail_binning_modes, size);
+        available_characteristics_keys.add(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES);
     }
 
     if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
@@ -10595,6 +10864,7 @@
         }
         staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
                 available_aec_modes, size);
+        available_characteristics_keys.add(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES);
     }
 
     if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
@@ -10609,6 +10879,7 @@
         }
         staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
                 available_iso_modes, size);
+        available_characteristics_keys.add(QCAMERA3_ISO_AVAILABLE_MODES);
     }
 
     int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
@@ -10647,13 +10918,18 @@
         if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
             eepromLength += sizeof(easelInfo);
             strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
-                    gEaselManagerClient->isEaselPresentOnDevice()) ? ",E:Y" : ",E:N"),
+                    gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
                     MAX_EEPROM_VERSION_INFO_LEN);
         }
         staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
                 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
+        available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
     }
 
+    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+                      available_characteristics_keys.array(),
+                      available_characteristics_keys.size());
+
     gStaticMetadata[cameraId] = staticInfo.release();
     return rc;
 }
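
A sketch (not part of the patch) of the accumulate-then-publish pattern the hunks
above converge on: deferring the ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS
update to the end of the function lets every conditionally published vendor tag
pair its staticInfo.update() with an available_characteristics_keys.add(), so the
advertised key list always matches what was actually published. basicKeys,
VENDOR_TAG, and modes are hypothetical stand-ins.

    Vector<int32_t> keys;                          // android::Vector, as above
    keys.appendArray(basicKeys, basicKeyCount);    // unconditional tags
    if (modeCount > 0) {                           // publish an optional tag...
        staticInfo.update(VENDOR_TAG, modes, modeCount);
        keys.add(VENDOR_TAG);                      // ...and advertise it together
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
            keys.array(), keys.size());            // single, final publication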
@@ -10889,7 +11165,8 @@
         // If Easel is present, power on Easel and suspend it immediately.
         status_t res = gEaselManagerClient->open();
         if (res != OK) {
-            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+            ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
+                    res);
             return res;
         }
 
@@ -10932,7 +11209,7 @@
     pthread_mutex_lock(&gCamLock);
 
     {
-        Mutex::Autolock l(gHdrPlusClientLock);
+        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
         rc = initHdrPlusClientLocked();
         if (rc != OK) {
             ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
@@ -11051,7 +11328,7 @@
     char hybrid_ae_prop[PROPERTY_VALUE_MAX];
     memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
     property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
-    const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
+    uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
 
     uint8_t controlIntent = 0;
     uint8_t focusMode;
@@ -11448,12 +11725,27 @@
     int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
     settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
 
-    /* hybrid ae */
-    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
-
     if (gExposeEnableZslKey) {
         settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
+        int32_t postview = 0;
+        settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
+        int32_t continuousZslCapture = 0;
+        settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
+        // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
+        // CAMERA3_TEMPLATE_PREVIEW.
+        int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
+                                  type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
+        settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
+
+        // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
+        // hybrid ae is enabled for 3rd party app HDR+.
+        if (type == CAMERA3_TEMPLATE_PREVIEW ||
+                type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
+            hybrid_ae = 1;
+        }
     }
+    /* hybrid ae */
+    settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
 
     mDefaultMetadata[type] = settings.release();
 
@@ -12180,6 +12472,8 @@
                     rc = BAD_VALUE;
                 }
             }
+        } else {
+            LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
         }
     } else {
         uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
@@ -13038,9 +13332,7 @@
     if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
         uint8_t *hybrid_ae = (uint8_t *)
                 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
-
-        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
-                CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
+        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
             rc = BAD_VALUE;
         }
     }
@@ -13074,6 +13366,23 @@
         }
     }
 
+    // Makernote
+    camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
+    if (entry.count != 0) {
+        if (entry.count <= MAX_MAKERNOTE_LENGTH) {
+            cam_makernote_t makernote;
+            makernote.length = entry.count;
+            memcpy(makernote.data, entry.data.u8, makernote.length);
+            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
+                rc = BAD_VALUE;
+            }
+        } else {
+            ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
+                    MAX_MAKERNOTE_LENGTH);
+            rc = BAD_VALUE;
+        }
+    }
+
     return rc;
 }
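
The makernote block above is a bounds-checked copy from a variable-length
metadata entry into a fixed-size HAL struct. A self-contained sketch of the same
guard; FixedBlob and kMaxBlob are hypothetical names, and only the size check
and memcpy mirror the patch.

    #include <cstdint>
    #include <cstring>

    constexpr size_t kMaxBlob = 64 * 1024;         // hypothetical capacity
    struct FixedBlob { size_t length; uint8_t data[kMaxBlob]; };

    bool copyEntryToBlob(const uint8_t *src, size_t count, FixedBlob *out) {
        if (count > sizeof(out->data)) {
            return false;                          // reject oversize input outright
        }
        out->length = count;                       // record the payload size
        std::memcpy(out->data, src, count);        // copy at most kMaxBlob bytes
        return true;
    }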
 
@@ -13953,6 +14262,25 @@
 }
 
 /*===========================================================================
+* FUNCTION   : getEaselFwVersion
+*
+* DESCRIPTION: Retrieve Easel firmware version
+*
+* PARAMETERS : None
+*
+* RETURN     : string describing the Easel firmware version
+*              NULL if the firmware version is not available
+*==========================================================================*/
+const char *QCamera3HardwareInterface::getEaselFwVersion()
+{
+    if (mEaselFwUpdated) {
+        return (const char *)&mEaselFwVersion[0];
+    } else {
+        return NULL;
+    }
+}
+
+/*===========================================================================
  * FUNCTION   : dynamicUpdateMetaStreamInfo
  *
  * DESCRIPTION: This function:
@@ -14584,29 +14912,41 @@
 
     IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
         resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
     }
 
     IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
         String8 str((const char *)gps_methods);
         resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
     }
 
     IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
         resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
     }
 
     IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
         resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
     }
 
     IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
         uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
         resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_QUALITY);
     }
 
     IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
         uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
         resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
     }
 
     IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
@@ -14614,79 +14954,232 @@
         fwk_thumb_size[0] = thumb_size->width;
         fwk_thumb_size[1] = thumb_size->height;
         resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
+    } else {
+        resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
     }
 
     IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
         uint8_t fwk_intent = intent[0];
         resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
+    } else {
+        resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
     }
 }
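
The IF_META_AVAILABLE/else pairs above implement an update-or-erase rule: a JPEG
tag absent from this request's settings must also be absent from its result,
rather than keeping a stale value left in resultMetadata by an earlier request.
Reduced to its core (TAG and value are placeholders):

    if (settingsHaveValue) {
        resultMetadata.update(TAG, &value, 1);     // reflect this request
    } else {
        resultMetadata.erase(TAG);                 // drop any carried-over value
    }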
 
+bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
+        const camera3_capture_request_t &request, const CameraMetadata &metadata) {
+    if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
+            metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
+        ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
+         metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
+            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
+        ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
+                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_EDGE_MODE) ||
+            metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
+        ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
+            metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
+                    ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
+        ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
+            (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
+             metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
+                    ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
+        ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
+            metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
+        ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
+            metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
+                    ANDROID_CONTROL_EFFECT_MODE_OFF) {
+        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_CONTROL_MODE) ||
+            (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
+             metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
+                    ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
+        ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
+        return false;
+    }
+
+    // TODO (b/66500626): support AE compensation.
+    if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
+            metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
+        ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
+        return false;
+    }
+
+    // TODO (b/32585046): support non-ZSL.
+    if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
+         metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
+        ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
+        return false;
+    }
+
+    // TODO (b/32586081): support flash.
+    if (!metadata.exists(ANDROID_FLASH_MODE) ||
+         metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
+        ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
+        return false;
+    }
+
+    if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
+         metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
+        ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
+        return false;
+    }
+
+    switch (request.output_buffers[0].stream->format) {
+        case HAL_PIXEL_FORMAT_BLOB:
+        case HAL_PIXEL_FORMAT_YCbCr_420_888:
+        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            break;
+        default:
+            ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
+            for (uint32_t i = 0; i < request.num_output_buffers; i++) {
+                ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
+                        request.output_buffers[0].stream->width,
+                        request.output_buffers[0].stream->height,
+                        request.output_buffers[0].stream->format);
+            }
+            return false;
+    }
+
+    return true;
+}
+
+void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
+    if (hdrPlusRequest == nullptr) return;
+
+    for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
+        // Find the stream for this buffer.
+        for (auto streamInfo : mStreamInfo) {
+            if (streamInfo->id == outputBufferIter.first) {
+                if (streamInfo->channel == mPictureChannel) {
+                    // For the picture channel, this buffer was allocated internally,
+                    // so return it to the picture channel.
+                    mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
+                } else {
+                    // Unregister this buffer for other channels.
+                    streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
+                }
+                break;
+            }
+        }
+    }
+
+    hdrPlusRequest->outputBuffers.clear();
+    hdrPlusRequest->frameworkOutputBuffers.clear();
+}
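
abortPendingHdrplusRequest() encodes a per-buffer ownership rule that recurs in
onCaptureResult() and onFailedCaptureResult(). A sketch of that rule as a
standalone helper (hypothetical; the patch inlines it at each site):

    void releaseHdrPlusOutputBuffer(stream_info_t *streamInfo,
            mm_camera_buf_def_t *buf, QCamera3PicChannel *picChannel) {
        if (streamInfo->channel == picChannel) {
            picChannel->returnYuvBuffer(buf);      // pool buffer: back to pic channel
        } else {
            streamInfo->channel->unregisterBuffer(buf);  // framework buffer: unmap only
        }
    }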
+
 bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
         HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
         const CameraMetadata &metadata)
 {
     if (hdrPlusRequest == nullptr) return false;
+    if (!isRequestHdrPlusCompatible(request, metadata)) return false;
 
-    // Check noise reduction mode is high quality.
-    if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
-         metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
-            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
-        ALOGD("%s: Not an HDR+ request: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
-                metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
-        return false;
-    }
-
-    // Check edge mode is high quality.
-    if (!metadata.exists(ANDROID_EDGE_MODE) ||
-         metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
-        ALOGD("%s: Not an HDR+ request: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
-        return false;
-    }
-
-    if (request.num_output_buffers != 1 ||
-            request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
-        ALOGD("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
-        for (uint32_t i = 0; i < request.num_output_buffers; i++) {
-            ALOGD("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
-                    request.output_buffers[0].stream->width,
-                    request.output_buffers[0].stream->height,
-                    request.output_buffers[0].stream->format);
-        }
-        return false;
-    }
-
-    // Get a YUV buffer from pic channel.
-    QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
-    auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
-    status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
-    if (res != OK) {
-        ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-        return false;
-    }
-
-    pbcamera::StreamBuffer buffer;
-    buffer.streamId = kPbYuvOutputStreamId;
-    buffer.dmaBufFd = yuvBuffer->fd;
-    buffer.data = yuvBuffer->buffer;
-    buffer.dataSize = yuvBuffer->frame_len;
-
+    status_t res = OK;
     pbcamera::CaptureRequest pbRequest;
     pbRequest.id = request.frame_number;
-    pbRequest.outputBuffers.push_back(buffer);
+    // Iterate through all requested output buffers and add them to an HDR+ request.
+    for (uint32_t i = 0; i < request.num_output_buffers; i++) {
+        // Find the index of the stream in mStreamInfo.
+        uint32_t pbStreamId = 0;
+        bool found = false;
+        for (auto streamInfo : mStreamInfo) {
+            if (streamInfo->stream == request.output_buffers[i].stream) {
+                pbStreamId = streamInfo->id;
+                found = true;
+                break;
+            }
+        }
+
+        if (!found) {
+            ALOGE("%s: requested stream was not configured.", __FUNCTION__);
+            abortPendingHdrplusRequest(hdrPlusRequest);
+            return false;
+        }
+        auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
+        switch (request.output_buffers[i].stream->format) {
+            case HAL_PIXEL_FORMAT_BLOB:
+            {
+                // For jpeg output, get a YUV buffer from pic channel.
+                QCamera3PicChannel *picChannel =
+                        (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
+                res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
+                if (res != OK) {
+                    ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
+                    abortPendingHdrplusRequest(hdrPlusRequest);
+                    return false;
+                }
+                break;
+            }
+            case HAL_PIXEL_FORMAT_YCbCr_420_888:
+            case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+            {
+                // For YUV output, register the buffer and get the buffer def from the channel.
+                QCamera3ProcessingChannel *channel =
+                        (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
+                res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
+                        outBuffer.get());
+                if (res != OK) {
+                    ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
+                            strerror(-res), res);
+                    abortPendingHdrplusRequest(hdrPlusRequest);
+                    return false;
+                }
+                break;
+            }
+            default:
+                abortPendingHdrplusRequest(hdrPlusRequest);
+                return false;
+        }
+
+        pbcamera::StreamBuffer buffer;
+        buffer.streamId = pbStreamId;
+        buffer.dmaBufFd = outBuffer->fd;
+        buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
+        buffer.dataSize = outBuffer->frame_len;
+
+        pbRequest.outputBuffers.push_back(buffer);
+
+        hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
+        hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
+    }
 
     // Submit an HDR+ capture request to HDR+ service.
-    res = gHdrPlusClient->submitCaptureRequest(&pbRequest);
+    res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
     if (res != OK) {
         ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
                 strerror(-res), res);
+        abortPendingHdrplusRequest(hdrPlusRequest);
         return false;
     }
 
-    hdrPlusRequest->yuvBuffer = yuvBuffer;
-    hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
-
     return true;
 }
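
The StreamBuffer filled above follows an fd-or-pointer convention: dma-buf
backed buffers cross to Easel by file descriptor, so the CPU pointer is
deliberately nulled, and only fd-less buffers pass data. A sketch, with buf
standing in for the mm_camera_buf_def_t obtained from the channel:

    pbcamera::StreamBuffer b;
    b.streamId = pbStreamId;                           // id assigned in mStreamInfo
    b.dmaBufFd = buf->fd;                              // -1 when not dma-buf backed
    b.data = (buf->fd == -1) ? buf->buffer : nullptr;  // pointer only without an fd
    b.dataSize = buf->frame_len;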
 
@@ -14754,6 +15247,13 @@
     return OK;
 }
 
+void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
+{
+    if (gHdrPlusClientOpening) {
+        gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
+    }
+}
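
finishHdrPlusClientOpeningLocked() is the waiting half of a standard
condition-variable handshake; onOpened() and onOpenFailed() both clear the flag
and notify under the same mutex. Stripped of the HAL types, the pattern is:

    #include <condition_variable>
    #include <mutex>

    std::mutex m;
    std::condition_variable cv;
    bool opening = false;

    void waitForOpenToFinish(std::unique_lock<std::mutex> &lock) {
        // The predicate form re-checks 'opening' on every wakeup, so spurious
        // wakeups and notifications that race the wait are both harmless.
        cv.wait(lock, [] { return !opening; });
    }

    void openFinished() {                  // success and failure paths alike
        std::lock_guard<std::mutex> l(m);
        opening = false;
        cv.notify_one();
    }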
+
 void QCamera3HardwareInterface::disableHdrPlusModeLocked()
 {
     // Disable HDR+ mode.
@@ -14769,69 +15269,79 @@
     }
 
     mHdrPlusModeEnabled = false;
-    gHdrPlusClientOpening = false;
     ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
 }
 
+bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
+{
+    // Check that at least one YUV or one JPEG output is configured.
+    // TODO: Support RAW (b/36690506)
+    for (auto streamInfo : mStreamInfo) {
+        if (streamInfo != nullptr && streamInfo->stream != nullptr) {
+            if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
+                    (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
+                     streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
+                     streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
+                return true;
+            }
+        }
+    }
+
+    return false;
+}
+
 status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
 {
     pbcamera::InputConfiguration inputConfig;
     std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
     status_t res = OK;
 
-    // Configure HDR+ client streams.
-    // Get input config.
-    if (mHdrPlusRawSrcChannel) {
-        // HDR+ input buffers will be provided by HAL.
-        res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
-                HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
-        if (res != OK) {
-            LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
-            return res;
-        }
+    // Sensor MIPI will send data to Easel.
+    inputConfig.isSensorInput = true;
+    inputConfig.sensorMode.cameraId = mCameraId;
+    inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
+    inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
+    inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
+    inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
+    inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
+    inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
+    inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
 
-        inputConfig.isSensorInput = false;
-    } else {
-        // Sensor MIPI will send data to Easel.
-        inputConfig.isSensorInput = true;
-        inputConfig.sensorMode.cameraId = mCameraId;
-        inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
-        inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
-        inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
-        inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
-        inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
-        inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
-        if (mSensorModeInfo.num_raw_bits != 10) {
-            ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
-                    mSensorModeInfo.num_raw_bits);
-            return BAD_VALUE;
-        }
-
-        inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
+    if (mSensorModeInfo.num_raw_bits != 10) {
+        ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
+                mSensorModeInfo.num_raw_bits);
+        return BAD_VALUE;
     }
 
-    // Get output configurations.
-    // Easel may need to output RAW16 buffers if mRawChannel was created.
-    // TODO: handle RAW16 outputs.
+    inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
 
-    // Easel may need to output YUV output buffers if mPictureChannel was created.
-    pbcamera::StreamConfiguration yuvOutputConfig;
-    if (mPictureChannel != nullptr) {
-        res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
-                HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
-        if (res != OK) {
-            LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
-                __FUNCTION__, strerror(-res), res);
+    // Iterate through configured output streams in HAL and configure those streams in HDR+
+    // service.
+    for (auto streamInfo : mStreamInfo) {
+        pbcamera::StreamConfiguration outputConfig;
+        if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
+            switch (streamInfo->stream->format) {
+                case HAL_PIXEL_FORMAT_BLOB:
+                case HAL_PIXEL_FORMAT_YCbCr_420_888:
+                case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+                    res = fillPbStreamConfig(&outputConfig, streamInfo->id,
+                            streamInfo->channel, /*stream index*/0);
+                    if (res != OK) {
+                        LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
+                            __FUNCTION__, strerror(-res), res);
 
-            return res;
+                        return res;
+                    }
+
+                    outputStreamConfigs.push_back(outputConfig);
+                    break;
+                default:
+                    // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
+                    break;
+            }
         }
-
-        outputStreamConfigs.push_back(yuvOutputConfig);
     }
 
-    // TODO: consider other channels for YUV output buffers.
-
     res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
     if (res != OK) {
         LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
@@ -14842,10 +15352,8 @@
     return OK;
 }
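
With sensor-MIPI input fixed, output configuration reduces to translating each
configured HAL output stream into a pbcamera::StreamConfiguration keyed by the
id this patch adds to stream_info_t. A condensed sketch of that loop (the patch
returns the error where this sketch merely skips the stream):

    std::vector<pbcamera::StreamConfiguration> outs;
    for (auto streamInfo : mStreamInfo) {
        pbcamera::StreamConfiguration cfg;
        if (fillPbStreamConfig(&cfg, streamInfo->id, streamInfo->channel,
                /*stream index*/0) == OK) {
            outs.push_back(cfg);           // keep streams that translate cleanly
        }
    }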
 
-void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
+void QCamera3HardwareInterface::handleEaselFatalError()
 {
-    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
-    // Set HAL state to error.
     pthread_mutex_lock(&mMutex);
     mState = ERROR;
     pthread_mutex_unlock(&mMutex);
@@ -14853,8 +15361,28 @@
     handleCameraDeviceError(/*stopChannelImmediately*/true);
 }
 
+void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
+{
+    if (mEaselErrorFuture.valid()) {
+        // A fatal-error handler has already been launched; only handle the first error.
+        return;
+    }
+
+    // Launch a future to handle the fatal error.
+    mEaselErrorFuture = std::async(std::launch::async,
+            &QCamera3HardwareInterface::handleEaselFatalError, this);
+}
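
handleEaselFatalErrorAsync() uses a default-constructed std::future as a
launch-once latch: valid() is false until the first error arms it. A
self-contained sketch; note that if callers were not serialized, two concurrent
first errors could race the valid() check, which would call for std::call_once
(an observation about the pattern, not something the patch addresses):

    #include <future>

    std::future<void> gErrorFuture;        // valid() == false until first use

    void handleErrorAsync() {
        if (gErrorFuture.valid()) {
            return;                        // recovery already in flight or done
        }
        gErrorFuture = std::async(std::launch::async, [] {
            // heavyweight recovery runs off the callback thread
        });
    }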
+
+void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
+{
+    ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
+    handleEaselFatalErrorAsync();
+}
+
 void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
 {
+    int rc = NO_ERROR;
+
     if (client == nullptr) {
         ALOGE("%s: Opened client is null.", __FUNCTION__);
         return;
@@ -14863,7 +15391,7 @@
     logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
     ALOGI("%s: HDR+ client opened.", __FUNCTION__);
 
-    Mutex::Autolock l(gHdrPlusClientLock);
+    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
     if (!gHdrPlusClientOpening) {
         ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
         return;
@@ -14871,6 +15399,7 @@
 
     gHdrPlusClient = std::move(client);
     gHdrPlusClientOpening = false;
+    gHdrPlusClientOpenCond.notify_one();
 
     // Set static metadata.
     status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
@@ -14887,132 +15416,266 @@
     if (res != OK) {
         LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
     }
+
+    // Get Easel firmware version
+    if (EaselManagerClientOpened) {
+        rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
+        if (rc != OK) {
+            ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
+        } else {
+            mEaselFwUpdated = true;
+        }
+    }
 }
 
 void QCamera3HardwareInterface::onOpenFailed(status_t err)
 {
     ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
-    Mutex::Autolock l(gHdrPlusClientLock);
+    std::unique_lock<std::mutex> l(gHdrPlusClientLock);
     gHdrPlusClientOpening = false;
+    gHdrPlusClientOpenCond.notify_one();
 }
 
 void QCamera3HardwareInterface::onFatalError()
 {
-    ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
+    ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
+    handleEaselFatalErrorAsync();
+}
 
-    // Set HAL state to error.
+void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
+{
+    ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
+            __LINE__, requestId, apSensorTimestampNs);
+
+    mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
+}
+
+void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
+{
     pthread_mutex_lock(&mMutex);
-    mState = ERROR;
-    pthread_mutex_unlock(&mMutex);
 
-    handleCameraDeviceError(/*stopChannelImmediately*/true);
+    // Find the pending request for this result metadata.
+    auto requestIter = mPendingRequestsList.begin();
+    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
+        requestIter++;
+    }
+
+    if (requestIter == mPendingRequestsList.end()) {
+        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
+        pthread_mutex_unlock(&mMutex);
+        return;
+    }
+
+    requestIter->partial_result_cnt++;
+
+    CameraMetadata metadata;
+    uint8_t ready = true;
+    metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
+
+    // Send it to framework.
+    camera3_capture_result_t result = {};
+
+    result.result = metadata.getAndLock();
+    // Populate metadata result
+    result.frame_number = requestId;
+    result.num_output_buffers = 0;
+    result.output_buffers = NULL;
+    result.partial_result = requestIter->partial_result_cnt;
+
+    orchestrateResult(&result);
+    metadata.unlock(result.result);
+
+    pthread_mutex_unlock(&mMutex);
+}
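
onNextCaptureReady() above and onPostview() below both emit metadata-only
partial results: no output buffers, just the new tag plus the request's
incremented partial_result count. The skeleton they share, with partialCount
standing in for requestIter->partial_result_cnt:

    CameraMetadata meta;
    uint8_t ready = 1;
    meta.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);

    camera3_capture_result_t result = {};
    result.frame_number = requestId;       // HDR+ request id == frame number
    result.result = meta.getAndLock();     // locked while the framework reads it
    result.partial_result = partialCount;  // must increase per frame
    orchestrateResult(&result);
    meta.unlock(result.result);            // release after the callback returns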
+
+void QCamera3HardwareInterface::onPostview(uint32_t requestId,
+        std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
+        uint32_t stride, int32_t format)
+{
+    if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
+        ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
+                __LINE__, width, height, requestId);
+        char buf[FILENAME_MAX] = {};
+        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
+                requestId, width, height);
+
+        pbcamera::StreamConfiguration config = {};
+        config.image.width = width;
+        config.image.height = height;
+        config.image.format = format;
+
+        pbcamera::PlaneConfiguration plane = {};
+        plane.stride = stride;
+        plane.scanline = height;
+
+        config.image.planes.push_back(plane);
+
+        pbcamera::StreamBuffer buffer = {};
+        buffer.streamId = 0;
+        buffer.dmaBufFd = -1;
+        buffer.data = postview->data();
+        buffer.dataSize = postview->size();
+
+        hdrplus_client_utils::writePpm(buf, config, buffer);
+    }
+
+    pthread_mutex_lock(&mMutex);
+
+    // Find the pending request for this result metadata.
+    auto requestIter = mPendingRequestsList.begin();
+    while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
+        requestIter++;
+    }
+
+    if (requestIter == mPendingRequestsList.end()) {
+        ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
+        pthread_mutex_unlock(&mMutex);
+        return;
+    }
+
+    requestIter->partial_result_cnt++;
+
+    CameraMetadata metadata;
+    int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
+            static_cast<int32_t>(stride)};
+    metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
+    metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
+
+    // Send it to framework.
+    camera3_capture_result_t result = {};
+
+    result.result = metadata.getAndLock();
+    // Populate metadata result
+    result.frame_number = requestId;
+    result.num_output_buffers = 0;
+    result.output_buffers = NULL;
+    result.partial_result = requestIter->partial_result_cnt;
+
+    orchestrateResult(&result);
+    metadata.unlock(result.result);
+
+    pthread_mutex_unlock(&mMutex);
 }
 
 void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
         const camera_metadata_t &resultMetadata)
 {
-    if (result != nullptr) {
-        if (result->outputBuffers.size() != 1) {
-            ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
-                result->outputBuffers.size());
-            return;
+    if (result == nullptr) {
+        ALOGE("%s: result is nullptr.", __FUNCTION__);
+        return;
+    }
+
+    // Find the pending HDR+ request.
+    HdrPlusPendingRequest pendingRequest;
+    {
+        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+        auto req = mHdrPlusPendingRequests.find(result->requestId);
+        if (req == mHdrPlusPendingRequests.end()) {
+            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, result->requestId);
+            return;
+        }
+        pendingRequest = req->second;
+    }
+
+    // Update the result metadata with the settings of the HDR+ still capture request because
+    // the result metadata belongs to a ZSL buffer.
+    CameraMetadata metadata;
+    metadata = &resultMetadata;
+    updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
+    camera_metadata_t* updatedResultMetadata = metadata.release();
+
+    uint32_t halSnapshotStreamId = 0;
+    if (mPictureChannel != nullptr) {
+        halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
+    }
+
+    auto halMetadata = std::make_shared<metadata_buffer_t>();
+    clear_metadata_buffer(halMetadata.get());
+
+    // Convert updated result metadata to HAL metadata.
+    status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
+            halSnapshotStreamId, /*minFrameDuration*/0);
+    if (res != 0) {
+        ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
+    }
+
+    for (auto &outputBuffer : result->outputBuffers) {
+        uint32_t streamId = outputBuffer.streamId;
+
+        // Find the framework output buffer in the pending request.
+        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
+        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
+            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
+                    streamId);
+            continue;
         }
 
-        if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
-            ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
-                result->outputBuffers[0].streamId);
-            return;
+        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
+
+        // Find the channel for the output buffer.
+        QCamera3ProcessingChannel *channel =
+                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
+
+        // Find the output buffer def.
+        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
+        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
+            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
+            continue;
         }
 
-        // Find the pending HDR+ request.
-        HdrPlusPendingRequest pendingRequest;
-        {
-            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
-            auto req = mHdrPlusPendingRequests.find(result->requestId);
-            pendingRequest = req->second;
-        }
+        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
 
-        // Update the result metadata with the settings of the HDR+ still capture request because
-        // the result metadata belongs to a ZSL buffer.
-        CameraMetadata metadata;
-        metadata = &resultMetadata;
-        updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
-        camera_metadata_t* updatedResultMetadata = metadata.release();
+        // Check whether to dump the buffer.
+        if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
+                frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
+            // If the stream format is YUV or jpeg, check if dumping HDR+ YUV output is enabled.
+            char prop[PROPERTY_VALUE_MAX];
+            property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
+            bool dumpYuvOutput = atoi(prop);
 
-        QCamera3PicChannel *picChannel =
-            (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
+            if (dumpYuvOutput) {
+                // Dump yuv buffer to a ppm file.
+                pbcamera::StreamConfiguration outputConfig;
+                status_t rc = fillPbStreamConfig(&outputConfig, streamId,
+                        channel, /*stream index*/0);
+                if (rc == OK) {
+                    char buf[FILENAME_MAX] = {};
+                    snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
+                            result->requestId, streamId,
+                            outputConfig.image.width, outputConfig.image.height);
 
-        // Check if dumping HDR+ YUV output is enabled.
-        char prop[PROPERTY_VALUE_MAX];
-        property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
-        bool dumpYuvOutput = atoi(prop);
-
-        if (dumpYuvOutput) {
-            // Dump yuv buffer to a ppm file.
-            pbcamera::StreamConfiguration outputConfig;
-            status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
-                    HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
-            if (rc == OK) {
-                char buf[FILENAME_MAX] = {};
-                snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
-                        result->requestId, result->outputBuffers[0].streamId,
-                        outputConfig.image.width, outputConfig.image.height);
-
-                hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
-            } else {
-                LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
-                        __FUNCTION__, strerror(-rc), rc);
+                    hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
+                } else {
+                    LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
+                            "%s (%d).", __FUNCTION__, strerror(-rc), rc);
+                }
             }
         }
 
-        uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
-        auto halMetadata = std::make_shared<metadata_buffer_t>();
-        clear_metadata_buffer(halMetadata.get());
-
-        // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
-        // encoding.
-        status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
-                halStreamId, /*minFrameDuration*/0);
-        if (res == OK) {
+        if (channel == mPictureChannel) {
             // Return the buffer to pic channel for encoding.
-            picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
-                    pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
+            mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
+                    frameworkOutputBuffer->buffer, result->requestId,
                     halMetadata);
         } else {
-            // Return the buffer without encoding.
-            // TODO: This should not happen but we may want to report an error buffer to camera
-            // service.
-            picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
-            ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
-                    strerror(-res), res);
-        }
-
-        // Find the timestamp
-        camera_metadata_ro_entry_t entry;
-        res = find_camera_metadata_ro_entry(updatedResultMetadata,
-                ANDROID_SENSOR_TIMESTAMP, &entry);
-        if (res != OK) {
-            ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
-                    __FUNCTION__, result->requestId, strerror(-res), res);
-        } else {
-            mShutterDispatcher.markShutterReady(result->requestId, entry.data.i64[0]);
-        }
-
-        // Send HDR+ metadata to framework.
-        {
+            // Return the buffer to camera framework.
             pthread_mutex_lock(&mMutex);
-
-            // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
-            handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
+            handleBufferWithLock(frameworkOutputBuffer, result->requestId);
+            channel->unregisterBuffer(outputBufferDef.get());
             pthread_mutex_unlock(&mMutex);
         }
+    }
 
-        // Remove the HDR+ pending request.
-        {
-            Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
-            auto req = mHdrPlusPendingRequests.find(result->requestId);
-            mHdrPlusPendingRequests.erase(req);
-        }
+    // Send HDR+ metadata to framework.
+    {
+        pthread_mutex_lock(&mMutex);
+
+        // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
+        handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
+        pthread_mutex_unlock(&mMutex);
+    }
+
+    // Remove the HDR+ pending request.
+    {
+        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+        auto req = mHdrPlusPendingRequests.find(result->requestId);
+        mHdrPlusPendingRequests.erase(req);
     }
 }
 
@@ -15025,17 +15688,58 @@
 
     ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
 
-    // Remove the pending HDR+ request.
+    // Find the pending HDR+ request.
+    HdrPlusPendingRequest pendingRequest;
     {
         Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
-        auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
+        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
+        if (req == mHdrPlusPendingRequests.end()) {
+            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
+            return;
+        }
+        pendingRequest = req->second;
+    }
 
-        // Return the buffer to pic channel.
-        QCamera3PicChannel *picChannel =
-                (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
-        picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
+    for (auto &outputBuffer : failedResult->outputBuffers) {
+        uint32_t streamId = outputBuffer.streamId;
 
-        mHdrPlusPendingRequests.erase(pendingRequest);
+        // Find the framework output buffer in the pending request.
+        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
+        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
+            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
+                    streamId);
+            continue;
+        }
+
+        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
+
+        // Find the channel for the output buffer.
+        QCamera3ProcessingChannel *channel =
+                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;
+
+        // Find the output buffer def.
+        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
+        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
+            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
+            continue;
+        }
+
+        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;
+
+        if (channel == mPictureChannel) {
+            // Return the buffer to pic channel.
+            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
+        } else {
+            channel->unregisterBuffer(outputBufferDef.get());
+        }
+    }
+
+    // Remove the HDR+ pending request.
+    {
+        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
+        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
+        mHdrPlusPendingRequests.erase(req);
     }
 
     pthread_mutex_lock(&mMutex);
@@ -15132,6 +15836,11 @@
         shutters = &mShutters;
     }
 
+    if (shutter->second.ready) {
+        // If shutter is already ready, don't update timestamp again.
+        return;
+    }
+
     // Make this frame's shutter ready.
     shutter->second.ready = true;
     shutter->second.timestamp = timestamp;
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.h b/msm8998/QCamera2/HAL3/QCamera3HWI.h
index 9ef806f..4eaf8a8 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.h
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.h
@@ -108,6 +108,7 @@
     stream_status_t status;
     int registered;
     QCamera3ProcessingChannel *channel;
+    uint32_t id; // unique ID
 } stream_info_t;
 
 typedef struct {
@@ -124,6 +125,7 @@
     uint32_t frame_number;
     // Time when request queued into system
     nsecs_t timestamp;
+    nsecs_t av_timestamp;
     List<PendingBufferInfo> mPendingBufferList;
     bool hdrplus;
 } PendingBuffersInRequest;
@@ -343,6 +345,7 @@
             char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH]);
     const char *getEepromVersionInfo();
     const uint32_t *getLdafCalib();
+    const char *getEaselFwVersion();
     void get3AVersion(cam_q3a_version_t &swVersion);
     static void setBufferErrorStatus(QCamera3Channel*, uint32_t frameNumber,
             camera3_buffer_status_t err, void *userdata);
@@ -430,7 +433,7 @@
     // Handle pending results when a new result metadata of a frame is received.
     // metadata callbacks are invoked in the order of frame number.
     void handlePendingResultMetadataWithLock(uint32_t frameNumber,
-            const camera_metadata_t *resultMetadata);
+            camera_metadata_t *resultMetadata);
     // Go through the pending request list and send out result metadata for requests
     // that are ready.
     // frameNumber is the latest frame whose result metadata is ready.
@@ -503,6 +506,9 @@
             T output[BLACK_LEVEL_PATTERN_CNT],
             cam_color_filter_arrangement_t color_arrangement);
 
+    int32_t startChannelLocked();
+    void stopChannelLocked(bool stopChannelImmediately);
+
     camera3_device_t   mCameraDevice;
     uint32_t           mCameraId;
     mm_camera_vtbl_t  *mCameraHandle;
@@ -543,6 +549,7 @@
     bool m_bEisEnable;
     bool m_bEis3PropertyEnabled;
     bool m_bEisSupported;
+    bool m_bAVTimerEnabled;
     typedef struct {
         cam_dimension_t dim;
         int format;
@@ -599,6 +606,9 @@
 
         bool enableZsl; // If ZSL is enabled.
         bool hdrplus; // If this is an HDR+ request.
+        uint8_t requestedLensShadingMapMode; // Lens shading map mode for this request.
+        uint8_t requestedFaceDetectMode; // Face detect mode for this request.
+        bool partialResultDropped; // Whether partial metadata is dropped.
     } PendingRequestInfo;
     typedef struct {
         uint32_t frame_number;
@@ -650,7 +660,6 @@
 
     uint8_t mCaptureIntent;
     uint8_t mCacMode;
-    uint8_t mHybridAeEnable;
     // DevCamDebug metadata internal variable
     uint8_t mDevCamDebugMetaEnable;
     /* DevCamDebug metadata end */
@@ -687,13 +696,19 @@
     uint8_t mInstantAecFrameIdxCount;
     /* sensor output size with current stream configuration */
     QCamera3CropRegionMapper mCropRegionMapper;
+    // Last lens shading map mode the framework requested.
+    uint8_t mLastRequestedLensShadingMapMode;
+    // Last face detect mode the framework requested.
+    uint8_t mLastRequestedFaceDetectMode;
 
     cam_feature_mask_t mCurrFeatureState;
     /* Ldaf calibration data */
     bool mLdafCalibExist;
     uint32_t mLdafCalib[2];
     int32_t mLastCustIntentFrmNum;
-
+    // Easel firmware version
+    char mEaselFwVersion[FW_VER_SIZE];
+    bool mEaselFwUpdated;
     static const QCameraMap<camera_metadata_enum_android_control_effect_mode_t,
             cam_effect_mode_type> EFFECT_MODES_MAP[];
     static const QCameraMap<camera_metadata_enum_android_control_awb_mode_t,
@@ -737,6 +752,11 @@
     static const QCameraPropMap CDS_MAP[];
 
     pendingRequestIterator erasePendingRequest(pendingRequestIterator i);
+
+    // Remove unrequested metadata due to Easel HDR+.
+    void removeUnrequestedMetadata(pendingRequestIterator requestIter,
+            camera_metadata_t *resultMetadata);
+
     //GPU library to read buffer padding details.
     void *lib_surface_utils;
     int (*LINK_get_surface_pixel_alignment)();
@@ -765,23 +785,17 @@
     Mutex mFlushLock;
     bool m60HzZone;
 
-    // Stream IDs used in stream configuration with HDR+ client.
-    const static uint32_t kPbRaw10InputStreamId = 0;
-    const static uint32_t kPbYuvOutputStreamId = 1;
-    const static uint32_t kPbRaw16OutputStreamId = 2;
-
     // Issue an additional RAW for every 10 requests to control RAW capture rate. Requesting RAW
     // too often will cause frame drops due to latency of sending RAW to HDR+ service.
     const static uint32_t kHdrPlusRawPeriod = 10;
 
     // Define a pending HDR+ request submitted to HDR+ service and not yet received by HAL.
     struct HdrPlusPendingRequest {
-        // YUV buffer from QCamera3PicChannel to be filled by HDR+ client with an HDR+ processed
-        // frame.
-        std::shared_ptr<mm_camera_buf_def_t> yuvBuffer;
+        // HDR+ stream ID -> output buffer to be filled by HDR+ client with an HDR+ processed frame.
+        std::map<uint32_t, std::shared_ptr<mm_camera_buf_def_t>> outputBuffers;
 
-        // Output buffers in camera framework's request.
-        std::vector<camera3_stream_buffer_t> frameworkOutputBuffers;
+        // HDR+ stream ID -> output buffers in camera framework's request.
+        std::map<uint32_t, camera3_stream_buffer_t> frameworkOutputBuffers;
 
         // Settings in camera framework's request.
         std::shared_ptr<metadata_buffer_t> settings;
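With the per-stream maps above, one pending HDR+ request can carry several output streams; a hedged sketch of how a result handler might pair them by stream ID (names hypothetical, not from this patch):

// Hypothetical sketch: match each framework output buffer with the
// HDR+ output buffer registered for the same stream ID.
for (auto &entry : pendingRequest.frameworkOutputBuffers) {
    uint32_t streamId = entry.first;
    camera3_stream_buffer_t &fwBuffer = entry.second;
    auto it = pendingRequest.outputBuffers.find(streamId);
    if (it != pendingRequest.outputBuffers.end()) {
        std::shared_ptr<mm_camera_buf_def_t> &hdrPlusBuffer = it->second;
        // ... return hdrPlusBuffer's contents through fwBuffer ...
    }
}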
@@ -789,7 +803,7 @@
 
     // Fill pbcamera::StreamConfiguration based on the channel stream.
     status_t fillPbStreamConfig(pbcamera::StreamConfiguration *config, uint32_t pbStreamId,
-            int pbStreamFormat, QCamera3Channel *channel, uint32_t streamIndex);
+            QCamera3Channel *channel, uint32_t streamIndex);
 
     // Open HDR+ client asynchronously.
     status_t openHdrPlusClientAsyncLocked();
@@ -800,6 +814,13 @@
     // Disable HDR+ mode. Easel will stop capturing ZSL buffers.
     void disableHdrPlusModeLocked();
 
+    // Return whether the current session's configured streams are compatible with HDR+ mode.
+    bool isSessionHdrPlusModeCompatible();
+
+    // Return whether the request is compatible with HDR+.
+    bool isRequestHdrPlusCompatible(
+            const camera3_capture_request_t &request, const CameraMetadata &metadata);
+
     // Configure streams for HDR+.
     status_t configureHdrPlusStreamsLocked();
 
@@ -809,10 +830,23 @@
     bool trySubmittingHdrPlusRequestLocked(HdrPlusPendingRequest *hdrPlusRequest,
         const camera3_capture_request_t &request, const CameraMetadata &metadata);
 
+    // Abort an HDR+ request that was not submitted successfully in
+    // trySubmittingHdrPlusRequestLocked.
+    void abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest);
+
     // Update HDR+ result metadata with the still capture's request settings.
     void updateHdrPlusResultMetadata(CameraMetadata &resultMetadata,
             std::shared_ptr<metadata_buffer_t> settings);
 
+    // Wait until opening the HDR+ client completes, if it is being opened.
+    void finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock);
+
+    // Handle Easel fatal error asynchronously in another thread.
+    void handleEaselFatalErrorAsync();
+
+    // Handle Easel fatal error.
+    void handleEaselFatalError();
+
     // Easel manager client callbacks.
     void onEaselFatalError(std::string errMsg);
 
@@ -823,6 +857,10 @@
     void onCaptureResult(pbcamera::CaptureResult *result,
             const camera_metadata_t &resultMetadata) override;
     void onFailedCaptureResult(pbcamera::CaptureResult *failedResult) override;
+    void onShutter(uint32_t requestId, int64_t apSensorTimestampNs) override;
+    void onNextCaptureReady(uint32_t requestId) override;
+    void onPostview(uint32_t requestId, std::unique_ptr<std::vector<uint8_t>> postview,
+            uint32_t width, uint32_t height, uint32_t stride, int32_t format) override;
 
     nsecs_t calculateMaxExpectedDuration(const camera_metadata_t *request);
     void getExpectedFrameDuration(const camera_metadata_t *request, nsecs_t *frameDuration);
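The three overrides added above extend the HDR+ client listener; a minimal skeleton of one of them, assuming the real body (not shown in this hunk, it lives in QCamera3HWI.cpp) forwards the timestamp into the result path:

// Hypothetical skeleton only, not the actual implementation.
void QCamera3HardwareInterface::onShutter(uint32_t requestId,
        int64_t apSensorTimestampNs) {
    LOGD("HDR+ request %u: shutter at %lld ns",
            requestId, (long long)apSensorTimestampNs);
    // e.g. issue a shutter notify for the matching pending request.
}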
@@ -837,6 +875,9 @@
     // If ZSL is enabled (android.control.enableZsl).
     bool mZslEnabled;
 
+    // If Easel MIPI has been started.
+    bool mEaselMipiStarted;
+
     // If HAL provides RAW input buffers to Easel. This is just for prototyping.
     bool mIsApInputUsedForHdrPlus;
 
@@ -849,6 +890,10 @@
     bool m_bSensorHDREnabled;
 
     cam_trigger_t mAfTrigger;
+
+    int32_t mSceneDistance;
+
+    std::future<void> mEaselErrorFuture;
 };
 
 }; // namespace qcamera
diff --git a/msm8998/QCamera2/HAL3/QCamera3Mem.cpp b/msm8998/QCamera2/HAL3/QCamera3Mem.cpp
index c7b39d0..9cccf32 100755
--- a/msm8998/QCamera2/HAL3/QCamera3Mem.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Mem.cpp
@@ -218,13 +218,14 @@
  *   @offset  : [input] frame buffer offset
  *   @bufDef  : [output] reference to struct to store buffer definition
  *   @index   : [input] index of the buffer
+ *   @virtualAddr : [input] whether to fill out the virtual address
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
 *              non-zero failure code
  *==========================================================================*/
 int32_t QCamera3Memory::getBufDef(const cam_frame_len_offset_t &offset,
-        mm_camera_buf_def_t &bufDef, uint32_t index)
+        mm_camera_buf_def_t &bufDef, uint32_t index, bool virtualAddr)
 {
     Mutex::Autolock lock(mLock);
 
@@ -236,7 +237,7 @@
     bufDef.fd = mMemInfo[index].fd;
     bufDef.frame_len = mMemInfo[index].size;
     bufDef.mem_info = (void *)this;
-    bufDef.buffer = getPtrLocked(index);
+    bufDef.buffer = virtualAddr ? getPtrLocked(index) : nullptr;
     bufDef.planes_buf.num_planes = (int8_t)offset.num_planes;
     bufDef.buf_idx = (uint8_t)index;
 
@@ -302,7 +303,7 @@
 *              non-zero failure code
  *==========================================================================*/
 int QCamera3HeapMemory::allocOneBuffer(QCamera3MemInfo &memInfo,
-        unsigned int heap_id, size_t size)
+        unsigned int heap_id, size_t size, bool isCached)
 {
     int rc = OK;
     struct ion_handle_data handle_data;
@@ -319,7 +320,9 @@
     /* to make it page size aligned */
     allocData.len = (allocData.len + 4095U) & (~4095U);
     allocData.align = 4096;
-    allocData.flags = ION_FLAG_CACHED;
+    if (isCached) {
+        allocData.flags = ION_FLAG_CACHED;
+    }
     allocData.heap_id_mask = heap_id;
     rc = ioctl(main_ion_fd, ION_IOC_ALLOC, &allocData);
     if (rc < 0) {
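The length round-up in this hunk aligns the allocation to a 4 KiB page; a worked example of the arithmetic:

// (len + 4095U) & ~4095U rounds up to the next multiple of 4096:
//   len = 100  -> 4096
//   len = 4096 -> 4096
//   len = 4097 -> 8192
size_t pageAlign(size_t len) {
    return (len + 4095U) & ~(size_t)4095U;
}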
@@ -616,7 +619,7 @@
  *              NO_ERROR  -- success
 *              non-zero failure code
  *==========================================================================*/
-int QCamera3HeapMemory::allocateOne(size_t size)
+int QCamera3HeapMemory::allocateOne(size_t size, bool isCached)
 {
     unsigned int heap_id_mask = 0x1 << ION_IOMMU_HEAP_ID;
     int rc = NO_ERROR;
@@ -630,7 +633,7 @@
         return BAD_INDEX;
     }
 
-    rc = allocOneBuffer(mMemInfo[mBufferCount], heap_id_mask, size);
+    rc = allocOneBuffer(mMemInfo[mBufferCount], heap_id_mask, size, isCached);
     if (rc < 0) {
         LOGE("AllocateIonMemory failed");
         return NO_MEMORY;
@@ -745,6 +748,7 @@
     for (int i = 0; i < MM_CAMERA_MAX_NUM_FRAMES; i ++) {
         mBufferHandle[i] = NULL;
         mPrivateHandle[i] = NULL;
+        mPtr[i] = nullptr;
     }
 }
 
@@ -779,7 +783,6 @@
 {
     status_t ret = NO_ERROR;
     struct ion_fd_data ion_info_fd;
-    void *vaddr = NULL;
     int32_t colorSpace = ITU_R_601_FR;
     int32_t idx = -1;
 
@@ -833,18 +836,7 @@
             mPrivateHandle[idx]->size;
     mMemInfo[idx].handle = ion_info_fd.handle;
 
-    vaddr = mmap(NULL,
-            mMemInfo[idx].size,
-            PROT_READ | PROT_WRITE,
-            MAP_SHARED,
-            mMemInfo[idx].fd, 0);
-    if (vaddr == MAP_FAILED) {
-        mMemInfo[idx].handle = 0;
-        ret = NO_MEMORY;
-    } else {
-        mPtr[idx] = vaddr;
-        mBufferCount++;
-    }
+    mBufferCount++;
 
 end:
     LOGD("X ");
@@ -865,8 +857,10 @@
  *==========================================================================*/
 int32_t QCamera3GrallocMemory::unregisterBufferLocked(size_t idx)
 {
-    munmap(mPtr[idx], mMemInfo[idx].size);
-    mPtr[idx] = NULL;
+    if (mPtr[idx] != nullptr) {
+        munmap(mPtr[idx], mMemInfo[idx].size);
+        mPtr[idx] = nullptr;
+    }
 
     struct ion_handle_data ion_handle;
     memset(&ion_handle, 0, sizeof(ion_handle));
@@ -1230,6 +1224,23 @@
         return NULL;
     }
 
+    if (mPtr[index] == nullptr) {
+        void *vaddr = NULL;
+        vaddr = mmap(NULL,
+                mMemInfo[index].size,
+                PROT_READ | PROT_WRITE,
+                MAP_SHARED,
+                mMemInfo[index].fd, 0);
+
+        if (vaddr == MAP_FAILED) {
+            LOGE("mmap failed for buffer index %d, size %d: %s(%d)",
+                    index, mMemInfo[index].size, strerror(errno), errno);
+            return NULL;
+        } else {
+            mPtr[index] = vaddr;
+        }
+    }
+
     return mPtr[index];
 }
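Together with the lazy mmap above, the null check in unregisterBufferLocked keeps map and unmap balanced; a hedged illustration of the resulting invariant (getPtr() as declared in QCamera3StreamMem.h; the unregister path is paraphrased):

// mPtr[index] stays nullptr until the first getPtr(); repeated calls
// reuse the cached mapping, and unregistering only unmaps what exists.
void *p0 = memory.getPtr(index);  // first call performs the mmap lazily
void *p1 = memory.getPtr(index);  // later calls return the cached mapping
// p0 == p1; unregistering the buffer munmaps only if mPtr[index] is set.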
 
diff --git a/msm8998/QCamera2/HAL3/QCamera3Mem.h b/msm8998/QCamera2/HAL3/QCamera3Mem.h
index 00a4ee3..eb1ac6f 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Mem.h
+++ b/msm8998/QCamera2/HAL3/QCamera3Mem.h
@@ -78,7 +78,7 @@
     virtual ~QCamera3Memory();
 
     int32_t getBufDef(const cam_frame_len_offset_t &offset,
-            mm_camera_buf_def_t &bufDef, uint32_t index);
+            mm_camera_buf_def_t &bufDef, uint32_t index, bool virtualAddr);
 
 protected:
     struct QCamera3MemInfo {
@@ -107,7 +107,7 @@
     virtual ~QCamera3HeapMemory();
 
     int allocate(size_t size);
-    int allocateOne(size_t size);
+    int allocateOne(size_t size, bool isCached = true);
     void deallocate();
 
     virtual int cacheOps(uint32_t index, unsigned int cmd);
@@ -123,7 +123,7 @@
     virtual void *getPtrLocked(uint32_t index);
 private:
     int allocOneBuffer(struct QCamera3MemInfo &memInfo,
-            unsigned int heap_id, size_t size);
+            unsigned int heap_id, size_t size, bool isCached = true);
     void deallocOneBuffer(struct QCamera3MemInfo &memInfo);
     uint32_t mMaxCnt;
 };
diff --git a/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp b/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp
index 4c31bba..82925aa 100644
--- a/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -1774,6 +1774,19 @@
     //TBD_later - Zoom event removed in stream
     //main_stream->getCropInfo(crop);
 
+    // Make sure the crop region has the same aspect ratio as dst_dim
+    if (src_dim.width * dst_dim.height > src_dim.height * dst_dim.width) {
+        crop.height = src_dim.height;
+        crop.width = crop.height * dst_dim.width / dst_dim.height;
+        crop.left = (src_dim.width - crop.width) / 2;
+        crop.top = 0;
+    } else {
+        crop.width = src_dim.width;
+        crop.height = crop.width * dst_dim.height / dst_dim.width;
+        crop.left = 0;
+        crop.top = (src_dim.height - crop.height) / 2;
+    }
+
     // Set main dim job parameters and handle rotation
     if (!needJpegExifRotation && (jpeg_settings->jpeg_orientation == 90 ||
             jpeg_settings->jpeg_orientation == 270)) {
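A worked instance of the aspect-ratio branch above, assuming a 4032x3024 source reprocessed to a 1920x1080 destination:

// src 4032x3024, dst 1920x1080:
//   src.width*dst.height = 4,354,560 <= src.height*dst.width = 5,806,080,
//   so the else branch runs:
//   crop.width  = 4032
//   crop.height = 4032 * 1080 / 1920 = 2268   (16:9)
//   crop.left   = 0
//   crop.top    = (3024 - 2268) / 2 = 378     (centered vertically)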
diff --git a/msm8998/QCamera2/HAL3/QCamera3Stream.cpp b/msm8998/QCamera2/HAL3/QCamera3Stream.cpp
index 3cb2298..85887fd 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Stream.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Stream.cpp
@@ -278,7 +278,8 @@
                              uint32_t chId,
                              mm_camera_ops_t *camOps,
                              cam_padding_info_t *paddingInfo,
-                             QCamera3Channel *channel) :
+                             QCamera3Channel *channel,
+                             bool mapStreamBuffers) :
         mCamHandle(camHandle),
         mChannelHandle(chId),
         mHandle(0),
@@ -300,7 +301,8 @@
         mCurrentBatchBufDef(NULL),
         mBufsStaged(0),
         mFreeBatchBufQ(NULL, this),
-        mNRMode(0)
+        mNRMode(0),
+        mMapStreamBuffers(mapStreamBuffers)
 {
     mMemVtbl.user_data = this;
     mMemVtbl.get_bufs = get_bufs;
@@ -712,10 +714,10 @@
             break;
         case CAMERA_CMD_TYPE_EXIT:
             LOGH("Exit");
+            pme->flushFreeBatchBufQ();
             /* flush data buf queue */
             pme->mDataQ.flush();
             pme->mTimeoutFrameQ.clear();
-            pme->flushFreeBatchBufQ();
             running = 0;
             break;
         default:
@@ -780,15 +782,17 @@
 
         if (BAD_INDEX != bufSize) {
             LOGD("Map streamBufIdx: %d", index);
+            void* buffer = (mMapStreamBuffers ?
+                            mStreamBufs->getPtr(index) : NULL);
             rc = mMemOps->map_ops(index, -1, mStreamBufs->getFd(index),
-                    (size_t)bufSize, mStreamBufs->getPtr(index),
+                    (size_t)bufSize, buffer,
                     CAM_MAPPING_BUF_TYPE_STREAM_BUF, mMemOps->userdata);
             if (rc < 0) {
                 LOGE("Failed to map camera buffer %d", index);
                 return rc;
             }
 
-            rc = mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[index], index);
+            rc = mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[index], index, mMapStreamBuffers);
             if (NO_ERROR != rc) {
                 LOGE("Couldn't find camera buffer definition");
                 mMemOps->unmap_ops(index, -1, CAM_MAPPING_BUF_TYPE_STREAM_BUF, mMemOps->userdata);
@@ -947,8 +951,10 @@
         if (mStreamBufs->valid(i)) {
             ssize_t bufSize = mStreamBufs->getSize(i);
             if (BAD_INDEX != bufSize) {
+                void* buffer = (mMapStreamBuffers ?
+                        mStreamBufs->getPtr(i) : NULL);
                 rc = ops_tbl->map_ops(i, -1, mStreamBufs->getFd(i),
-                        (size_t)bufSize, mStreamBufs->getPtr(i),
+                        (size_t)bufSize, buffer,
                         CAM_MAPPING_BUF_TYPE_STREAM_BUF,
                         ops_tbl->userdata);
                 if (rc < 0) {
@@ -999,7 +1005,7 @@
     memset(mBufDefs, 0, mNumBufs * sizeof(mm_camera_buf_def_t));
     for (uint32_t i = 0; i < mNumBufs; i++) {
         if (mStreamBufs->valid(i)) {
-            mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i);
+            mStreamBufs->getBufDef(mFrameLenOffset, mBufDefs[i], i, mMapStreamBuffers);
         }
     }
 
@@ -1389,8 +1395,9 @@
         if (mNumBatchBufs) {
             // For USER_BUF, size = number of container bufs instead of the total
             // buf size
+            void* buffer = (mMapStreamBuffers ? mStreamBufs->getPtr(i) : NULL);
             rc = ops_tbl->map_ops(i, -1, mStreamBatchBufs->getFd(i),
-                    (size_t)mNumBatchBufs, mStreamBatchBufs->getPtr(i),
+                    (size_t)mNumBatchBufs, buffer,
                     CAM_MAPPING_BUF_TYPE_STREAM_USER_BUF,
                     ops_tbl->userdata);
             if (rc < 0) {
diff --git a/msm8998/QCamera2/HAL3/QCamera3Stream.h b/msm8998/QCamera2/HAL3/QCamera3Stream.h
index e61f842..2fbe444 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Stream.h
+++ b/msm8998/QCamera2/HAL3/QCamera3Stream.h
@@ -59,7 +59,8 @@
                   uint32_t chId,
                   mm_camera_ops_t *camOps,
                   cam_padding_info_t *paddingInfo,
-                  QCamera3Channel *channel);
+                  QCamera3Channel *channel,
+                  bool mapStreamBuffers);
     virtual ~QCamera3Stream();
     virtual int32_t init(cam_stream_type_t streamType,
                          cam_format_t streamFormat,
@@ -137,6 +138,8 @@
     QCameraQueue mFreeBatchBufQ; //Buffer queue containing empty batch buffers
     uint8_t mNRMode; // Initial noise reduction mode
 
+    bool mMapStreamBuffers; // Whether to mmap every stream buffer
+
     static int32_t get_bufs(
                      cam_frame_len_offset_t *offset,
                      uint8_t *num_bufs,
diff --git a/msm8998/QCamera2/HAL3/QCamera3StreamMem.cpp b/msm8998/QCamera2/HAL3/QCamera3StreamMem.cpp
index 7c83c68..e307539 100644
--- a/msm8998/QCamera2/HAL3/QCamera3StreamMem.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3StreamMem.cpp
@@ -226,20 +226,21 @@
  *   @offset  : [input] frame buffer offset
  *   @bufDef  : [output] reference to struct to store buffer definition
  *   @index   : [input] index of the buffer
+ *   @virtualAddr : [input] whether to fill out virtual address
  *
  * RETURN     : int32_t type of status
  *              NO_ERROR  -- success
 *              non-zero failure code
  *==========================================================================*/
 int32_t QCamera3StreamMem::getBufDef(const cam_frame_len_offset_t &offset,
-        mm_camera_buf_def_t &bufDef, uint32_t index)
+        mm_camera_buf_def_t &bufDef, uint32_t index, bool virtualAddr)
 {
     int32_t ret = NO_ERROR;
 
     if (index < mMaxHeapBuffers)
-        ret = mHeapMem.getBufDef(offset, bufDef, index);
+        ret = mHeapMem.getBufDef(offset, bufDef, index, virtualAddr);
     else
-        ret = mGrallocMem.getBufDef(offset, bufDef, index);
+        ret = mGrallocMem.getBufDef(offset, bufDef, index, virtualAddr);
 
     bufDef.mem_info = (void *)this;
 
@@ -394,10 +395,10 @@
     return mHeapMem.allocate(size);
 }
 
-int QCamera3StreamMem::allocateOne(size_t size)
+int QCamera3StreamMem::allocateOne(size_t size, bool isCached)
 {
     Mutex::Autolock lock(mLock);
-    return mHeapMem.allocateOne(size);
+    return mHeapMem.allocateOne(size, isCached);
 }
 
 /*===========================================================================
diff --git a/msm8998/QCamera2/HAL3/QCamera3StreamMem.h b/msm8998/QCamera2/HAL3/QCamera3StreamMem.h
index ae1898c..263d798 100644
--- a/msm8998/QCamera2/HAL3/QCamera3StreamMem.h
+++ b/msm8998/QCamera2/HAL3/QCamera3StreamMem.h
@@ -59,7 +59,8 @@
     int cleanInvalidateCache(uint32_t index);
     int cleanCache(uint32_t index);
     int32_t getBufDef(const cam_frame_len_offset_t &offset,
-            mm_camera_buf_def_t &bufDef, uint32_t index);
+            mm_camera_buf_def_t &bufDef, uint32_t index,
+            bool virtualAddr);
     void *getPtr(uint32_t index);
 
     bool valid(uint32_t index);
@@ -73,7 +74,7 @@
 
     // Heap buffer related functions
     int allocateAll(size_t size);
-    int allocateOne(size_t size);
+    int allocateOne(size_t size, bool isCached = true);
     void deallocate(); //TODO: replace with unified clear() function?
 
     // Clear function: unregister for gralloc buffer, and deallocate for heap buffer
diff --git a/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp b/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp
index cd54f89..6db2cbe 100644
--- a/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp
@@ -350,6 +350,9 @@
     { "stats.histogramBucketCount", TYPE_INT32 },
     { "stats.histogram", TYPE_INT32 },
     { "sensorEepromInfo", TYPE_BYTE },
+    { "sensorEepromPDAFRightGains", TYPE_BYTE },
+    { "sensorEepromPDAFLeftGains", TYPE_BYTE },
+    { "sensorEepromPDAFConvCoeff", TYPE_BYTE },
     { "control.tracking_af_trigger", TYPE_BYTE },
     { "control.af_regions_confidence", TYPE_INT32 },
     { "stats.ois_frame_timestamp_vsync", TYPE_INT64 },
@@ -357,9 +360,19 @@
     { "stats.ois_timestamps_boottime", TYPE_INT64 },
     { "stats.ois_shift_x", TYPE_INT32 },
     { "stats.ois_shift_y", TYPE_INT32 },
+    { "stats.ois_shift_pixel_x", TYPE_FLOAT },
+    { "stats.ois_shift_pixel_y", TYPE_FLOAT },
     { "sensor.pd_data_dimensions", TYPE_INT32},
     { "sensor.pd_data_enable", TYPE_BYTE},
     { "control.exposure_time_boost", TYPE_FLOAT},
+    { "request.makernote", TYPE_BYTE },
+    { "request.next_still_intent_request_ready", TYPE_BYTE },
+    { "request.postview", TYPE_INT32},
+    { "request.postview_config", TYPE_INT32},
+    { "request.postview_data", TYPE_BYTE},
+    { "request.continuous_zsl_capture", TYPE_INT32},
+    { "request.disable_hdrplus", TYPE_INT32},
+    { "control.scene_distance", TYPE_INT32},
 };
 
 vendor_tag_info_t tango_mode_data[TANGO_MODE_DATA_END -
@@ -586,6 +599,9 @@
     (uint32_t)NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
@@ -593,9 +609,19 @@
     (uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
     (uint32_t)NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_POSTVIEW,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS,
+    (uint32_t)NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
 
     //TANGO_MODE
     (uint32_t)TANGO_MODE_DATA_SENSOR_FULLFOV,
diff --git a/msm8998/QCamera2/HAL3/QCamera3VendorTags.h b/msm8998/QCamera2/HAL3/QCamera3VendorTags.h
index 087eeeb..e940ff3 100644
--- a/msm8998/QCamera2/HAL3/QCamera3VendorTags.h
+++ b/msm8998/QCamera2/HAL3/QCamera3VendorTags.h
@@ -32,6 +32,7 @@
 
 // Camera dependencies
 #include "system/camera_metadata.h"
+#include "system/camera_vendor_tags.h"
 
 namespace qcamera {
 
@@ -427,6 +428,11 @@
     /* EEPROM Version Information */
     NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
 
+    /* EEPROM PDAF calibration data */
+    NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
+    NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
+    NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
+
     /* Tracking AF */
     NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
     NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
@@ -436,11 +442,23 @@
     NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
     NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
     NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
+    NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
+    NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
     /* PD plumbing */
     NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
     NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
     /* Exposure time boost */
     NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
+    /* EASEL HDR+ */
+    NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
+    NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY,
+    NEXUS_EXPERIMENTAL_2017_POSTVIEW,
+    NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG,
+    NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA,
+    NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE,
+    NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS,
+    NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
+
     NEXUS_EXPERIMENTAL_2017_END,
 
     /* Select sensor mode for tango */
diff --git a/msm8998/QCamera2/stack/common/cam_intf.h b/msm8998/QCamera2/stack/common/cam_intf.h
index 9f07f31..f722915 100644
--- a/msm8998/QCamera2/stack/common/cam_intf.h
+++ b/msm8998/QCamera2/stack/common/cam_intf.h
@@ -651,6 +651,9 @@
 
     /* Whether camera timestamp is calibrated with sensor */
     uint8_t timestamp_calibrated;
+
+    /* PDAF calibration data */
+    cam_pd_calibration_t pdaf_cal;
 } cam_capability_t;
 
 typedef enum {
@@ -1148,8 +1151,8 @@
     INCLUDE(CAM_INTF_META_DEV_CAM_ENABLE,               uint8_t,                     1);
     /* DevCamDebug metadata CAM_INTF.H AF */
     INCLUDE(CAM_INTF_META_DEV_CAM_AF_LENS_POSITION,     int32_t,                     1);
-    INCLUDE(CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE,    int32_t,                     1);
-    INCLUDE(CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE,      int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AF_TOF_CONFIDENCE,            int32_t,                     1);
+    INCLUDE(CAM_INTF_META_AF_TOF_DISTANCE,              int32_t,                     1);
     INCLUDE(CAM_INTF_META_DEV_CAM_AF_LUMA,                    int32_t,               1);
     INCLUDE(CAM_INTF_META_DEV_CAM_AF_HAF_STATE,               int32_t,               1);
     INCLUDE(CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, int32_t,               1);
@@ -1222,7 +1225,7 @@
     INCLUDE(CAM_INTF_META_SENSOR_MODE_FULLFOV,          int32_t,                     1);
     INCLUDE(CAM_INTF_META_EARLY_AF_STATE,               uint32_t,                    1);
     INCLUDE(CAM_INTF_META_EXP_TIME_BOOST,               float,                       1);
-
+    INCLUDE(CAM_INTF_META_MAKERNOTE,                    cam_makernote_t,             1);
 } metadata_data_t;
 
 /* Update clear_metadata_buffer() function when a new is_xxx_valid is added to
diff --git a/msm8998/QCamera2/stack/common/cam_types.h b/msm8998/QCamera2/stack/common/cam_types.h
index 74d4a5e..82c5f2c 100644
--- a/msm8998/QCamera2/stack/common/cam_types.h
+++ b/msm8998/QCamera2/stack/common/cam_types.h
@@ -45,10 +45,14 @@
 #define BHIST_STATS_DEBUG_DATA_SIZE       (70000)
 #define TUNING_INFO_DEBUG_DATA_SIZE       (4)
 #define OIS_DATA_MAX_SIZE                 (32)
-#define MAX_OIS_SAMPLE_NUM_PER_FRAME      (10)
+#define MAX_OIS_SAMPLE_NUM_PER_FRAME      (20)
+#define MAX_MAKERNOTE_LENGTH              (65535)
 
 #define PD_DATA_SIZE                      (4032*2*758)
 
+#define MAX_PDAF_CALIB_GAINS              (25*19)
+#define MAX_PDAF_CALIB_COEFF              (200)
+
 #define CEILING64(X) (((X) + 0x0003F) & 0xFFFFFFC0)
 #define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
 #define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
@@ -615,9 +619,16 @@
     uint32_t op_pixel_clk;             // Sensor output rate.
     uint32_t num_raw_bits;             // Number of bits for RAW. 0 if not RAW.
     int64_t  timestamp_offset;         // Timestamp offset with gyro sensor. 0 if uncalibrated.
+    int64_t  timestamp_crop_offset;    // Timestamp offset due to crop on top of active array.
 } cam_sensor_mode_info_t;
 
 typedef struct {
+    uint16_t left_gain_map[MAX_PDAF_CALIB_GAINS];
+    uint16_t right_gain_map[MAX_PDAF_CALIB_GAINS];
+    int16_t conversion_coeff[MAX_PDAF_CALIB_COEFF];
+} cam_pd_calibration_t;
+
+typedef struct {
     cam_frame_len_offset_t plane_info;
 } cam_stream_buf_plane_info_t;
 
@@ -988,12 +999,11 @@
 } cam_ois_data_t;
 
 typedef struct {
-    int64_t frame_sof_timestamp_vsync;
     int64_t frame_sof_timestamp_boottime;
     int32_t num_ois_sample;
     int64_t ois_sample_timestamp_boottime[MAX_OIS_SAMPLE_NUM_PER_FRAME];
-    int32_t ois_sample_shift_x[MAX_OIS_SAMPLE_NUM_PER_FRAME];
-    int32_t ois_sample_shift_y[MAX_OIS_SAMPLE_NUM_PER_FRAME];
+    float ois_sample_shift_pixel_x[MAX_OIS_SAMPLE_NUM_PER_FRAME];
+    float ois_sample_shift_pixel_y[MAX_OIS_SAMPLE_NUM_PER_FRAME];
 } cam_frame_ois_info_t;
 
 typedef struct  {
@@ -2458,8 +2468,8 @@
     CAM_INTF_META_DEV_CAM_ENABLE,
     /* DevCamDebug metadata CAM_TYPES.h AF */
     CAM_INTF_META_DEV_CAM_AF_LENS_POSITION,
-    CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE,
-    CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE,
+    CAM_INTF_META_AF_TOF_CONFIDENCE,
+    CAM_INTF_META_AF_TOF_DISTANCE,
     CAM_INTF_META_DEV_CAM_AF_LUMA,
     CAM_INTF_META_DEV_CAM_AF_HAF_STATE,
     CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS,
@@ -2549,6 +2559,8 @@
     CAM_INTF_META_EARLY_AF_STATE,
     /* Exposure time boost */
     CAM_INTF_META_EXP_TIME_BOOST,
+    /* Easel HDR+ makernote */
+    CAM_INTF_META_MAKERNOTE,
     CAM_INTF_PARM_MAX
 } cam_intf_parm_type_t;
 
@@ -3173,4 +3185,11 @@
     CAM_STREAM_ON_TYPE_START_SENSOR_STREAMING, // Start sensor streaming.
 } cam_stream_on_type_t;
 
+
+// Used with CAM_INTF_META_MAKERNOTE.
+typedef struct {
+    char data[MAX_MAKERNOTE_LENGTH];
+    uint32_t length;
+} cam_makernote_t;
+
 #endif /* __QCAMERA_TYPES_H__ */
diff --git a/msm8998/QCamera2/stack/mm-camera-interface/Android.mk b/msm8998/QCamera2/stack/mm-camera-interface/Android.mk
index 708b94b..a7cfbd4 100644
--- a/msm8998/QCamera2/stack/mm-camera-interface/Android.mk
+++ b/msm8998/QCamera2/stack/mm-camera-interface/Android.mk
@@ -61,6 +61,7 @@
 
 LOCAL_MODULE           := libmmcamera_interface
 LOCAL_SHARED_LIBRARIES := libdl libcutils liblog libutils
+LOCAL_HEADER_LIBRARIES := libhardware_headers
 LOCAL_MODULE_TAGS := optional
 LOCAL_VENDOR_MODULE := true
 
diff --git a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c
index 03b6055..59e0ae4 100644
--- a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c
+++ b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera.c
@@ -36,6 +36,8 @@
 #include <fcntl.h>
 #include <stdlib.h>
 #include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
 #include <dlfcn.h>
 #define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
 #include IOCTL_H
diff --git a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
index 2b6e7ce..2c97a56 100644
--- a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
+++ b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -38,6 +38,7 @@
 #include <linux/media.h>
 #include <media/msm_cam_sensor.h>
 #include <dlfcn.h>
+#include <unistd.h>
 
 #define IOCTL_H <SYSTEM_HEADER_PREFIX/ioctl.h>
 #include IOCTL_H
diff --git a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
index 85a5d3b..3ff18bd 100644
--- a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
+++ b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -32,6 +32,7 @@
 #include <stdlib.h>
 #include <errno.h>
 #include <string.h>
+#include <unistd.h>
 
 // Camera dependencies
 #include "mm_camera_dbg.h"
diff --git a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
index a49fab1..652238b 100644
--- a/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
+++ b/msm8998/QCamera2/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -34,6 +34,7 @@
 #include <sys/types.h>
 #include <sys/stat.h>
 #include <sys/prctl.h>
+#include <unistd.h>
 #include <fcntl.h>
 #include <poll.h>
 #include <cam_semaphore.h>
diff --git a/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
index eb3a3c7..6f82ff5 100644
--- a/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
+++ b/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -658,6 +658,14 @@
     if (rc) {
       LOGE(": Error adding ASD Exif Entry");
     }
+
+    IF_META_AVAILABLE(cam_makernote_t, makernote, CAM_INTF_META_MAKERNOTE, p_meta) {
+      rc = addExifEntry(exif_info, EXIFTAGID_EXIF_MAKER_NOTE, EXIF_UNDEFINED, makernote->length,
+          makernote->data);
+      if (rc) {
+        LOGE(": Error adding makernote");
+      }
+    }
   } else {
     LOGE(": Error adding ASD Exif Entry, no meta");
   }
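On the producer side, something needs to populate CAM_INTF_META_MAKERNOTE before JPEG encoding; a hedged sketch using the cam_makernote_t layout defined in cam_types.h above (blob source hypothetical):

// Hypothetical sketch: copy an HDR+-provided makernote blob into the
// fixed-size metadata struct, clamping to MAX_MAKERNOTE_LENGTH.
cam_makernote_t makernote;
memset(&makernote, 0, sizeof(makernote));
makernote.length = (blobSize > MAX_MAKERNOTE_LENGTH) ?
        MAX_MAKERNOTE_LENGTH : (uint32_t)blobSize;
memcpy(makernote.data, blob, makernote.length);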
diff --git a/msm8998/QCamera2/util/QCameraTrace.cpp b/msm8998/QCamera2/util/QCameraTrace.cpp
index 1aac6a0..3d5f4a3 100644
--- a/msm8998/QCamera2/util/QCameraTrace.cpp
+++ b/msm8998/QCamera2/util/QCameraTrace.cpp
@@ -29,6 +29,8 @@
 
 // Camera dependencies
 #include <stdlib.h>
+#include <pthread.h>
+
 #include "QCameraTrace.h"
 
 #define CAMSCOPE_MEMSTORE_SIZE 0x00100000 // 1MB