Merge "Merge Android Pie into master"
diff --git a/msm8998/QCamera2/Android.mk b/msm8998/QCamera2/Android.mk
index e8a6811..f2d8d49 100755
--- a/msm8998/QCamera2/Android.mk
+++ b/msm8998/QCamera2/Android.mk
@@ -30,6 +30,7 @@
#HAL 3.0 source
LOCAL_SRC_FILES += \
+ HAL3/QCamera3HdrPlusListenerThread.cpp \
HAL3/QCamera3HWI.cpp \
HAL3/QCamera3Mem.cpp \
HAL3/QCamera3Stream.cpp \
@@ -142,7 +143,8 @@
LOCAL_SHARED_LIBRARIES += libmmcamera_interface libmmjpeg_interface libui libcamera_metadata
LOCAL_SHARED_LIBRARIES += libqdMetaData libqservice libbinder
LOCAL_SHARED_LIBRARIES += libbase libcutils libdl libhdrplusclient
-LOCAL_SHARED_LIBRARIES += libhidlbase libhwbinder libutils android.hardware.power@1.1
+LOCAL_SHARED_LIBRARIES += libhidlbase libhwbinder libutils android.hardware.power@1.2
+LOCAL_SHARED_LIBRARIES += libtinyxml2
ifeq ($(TARGET_TS_MAKEUP),true)
LOCAL_SHARED_LIBRARIES += libts_face_beautify_hal libts_detected_face_hal
endif
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
index bb12822..0e3b4af 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.cpp
@@ -1041,6 +1041,41 @@
}
/*===========================================================================
+ * FUNCTION : postprocFail
+ *
+ * DESCRIPTION: notify clients about failing post-process requests.
+ *
+ * PARAMETERS :
+ * @ppBuffer : pointer to the pp buffer.
+ *
+ * RETURN : 0 on success
+ * -EINVAL on invalid input
+ *==========================================================================*/
+int32_t QCamera3ProcessingChannel::postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer) {
+ if (ppBuffer == nullptr) {
+ return BAD_VALUE;
+ }
+
+ if (ppBuffer->output == nullptr) {
+ return BAD_VALUE;
+ }
+
+ camera3_stream_buffer_t result = {};
+ result.buffer = ppBuffer->output;
+
+ LOGE("Input frame number: %d dropped!", ppBuffer->frameNumber);
+ result.stream = mCamera3Stream;
+ result.status = CAMERA3_BUFFER_STATUS_ERROR;
+ result.acquire_fence = -1;
+ result.release_fence = -1;
+ if (mChannelCB) {
+ mChannelCB(NULL, &result, ppBuffer->frameNumber, false, mUserData);
+ }
+
+ return OK;
+}
+
+/*===========================================================================
* FUNCTION : request
*
* DESCRIPTION: handle the request - either with an input buffer or a direct
@@ -3037,6 +3072,55 @@
}
/*===========================================================================
+ * FUNCTION : postprocFail
+ *
+ * DESCRIPTION: notify clients about failing post-process requests.
+ *
+ * PARAMETERS :
+ * @ppBuffer : pointer to the pp buffer.
+ *
+ * RETURN : 0 on success
+ * -EINVAL on invalid input
+ *==========================================================================*/
+int32_t QCamera3YUVChannel::postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer) {
+ if (ppBuffer == nullptr) {
+ return BAD_VALUE;
+ }
+
+ {
+ List<PpInfo>::iterator ppInfo;
+
+ Mutex::Autolock lock(mOfflinePpLock);
+ for (ppInfo = mOfflinePpInfoList.begin();
+ ppInfo != mOfflinePpInfoList.end(); ppInfo++) {
+ if (ppInfo->frameNumber == ppBuffer->frameNumber) {
+ break;
+ }
+ }
+
+ if (ppInfo == mOfflinePpInfoList.end()) {
+ LOGE("Offline reprocess info for frame number: %d not found!", ppBuffer->frameNumber);
+ return BAD_VALUE;
+ }
+
+ LOGE("Failed YUV post-process on frame number: %d removing from offline queue!",
+ ppBuffer->frameNumber);
+ mOfflinePpInfoList.erase(ppInfo);
+ }
+
+ int32_t bufferIndex = mMemory.getHeapBufferIndex(ppBuffer->frameNumber);
+ if (bufferIndex < 0) {
+ LOGE("Fatal %d: no buffer index for frame number %d", bufferIndex, ppBuffer->frameNumber);
+ return BAD_VALUE;
+ } else {
+ mMemory.markFrameNumber(bufferIndex, -1);
+ mFreeHeapBufferList.push_back(bufferIndex);
+ }
+
+ return QCamera3ProcessingChannel::postprocFail(ppBuffer);
+}
+
+/*===========================================================================
* FUNCTION : streamCbRoutine
*
* DESCRIPTION:
@@ -3053,6 +3137,7 @@
ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_STRM_CB);
uint8_t frameIndex;
int32_t resultFrameNumber;
+ bool droppedInputPPBuffer = false;
if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
LOGE("Error with the stream callback");
@@ -3088,22 +3173,28 @@
}
if (ppInfo->offlinePpFlag) {
- mm_camera_super_buf_t *frame =
+ if (ppInfo != mOfflinePpInfoList.begin() &&
+ IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
+ droppedInputPPBuffer = true;
+ mOfflinePpInfoList.erase(ppInfo);
+ } else {
+ mm_camera_super_buf_t *frame =
(mm_camera_super_buf_t *)malloc(sizeof(
- mm_camera_super_buf_t));
- if (frame == NULL) {
- LOGE("Error allocating memory to save received_frame structure.");
- if(stream) {
- stream->bufDone(frameIndex);
+ mm_camera_super_buf_t));
+ if (frame == NULL) {
+ LOGE("Error allocating memory to save received_frame structure.");
+ if(stream) {
+ stream->bufDone(frameIndex);
+ }
+ return;
}
+
+ *frame = *super_frame;
+ m_postprocessor.processData(frame, ppInfo->output,
+ resultFrameNumber);
+ free(super_frame);
return;
}
-
- *frame = *super_frame;
- m_postprocessor.processData(frame, ppInfo->output,
- resultFrameNumber);
- free(super_frame);
- return;
} else {
if (ppInfo != mOfflinePpInfoList.begin()) {
// There is pending reprocess buffer, cache current buffer
@@ -3121,6 +3212,31 @@
if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
mChannelCbBufErr(this, resultFrameNumber,
CAMERA3_BUFFER_STATUS_ERROR, mUserData);
+ if (droppedInputPPBuffer) {
+ camera3_stream_buffer_t result = {};
+ result.buffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
+ int32_t bufferIndex =
+ mMemory.getHeapBufferIndex(resultFrameNumber);
+ if (bufferIndex < 0) {
+ LOGE("Fatal %d: no buffer index for frame number %d",
+ bufferIndex, resultFrameNumber);
+ } else {
+ mMemory.markFrameNumber(bufferIndex, -1);
+ mFreeHeapBufferList.push_back(bufferIndex);
+ }
+
+ LOGE("Input frame number: %d dropped!", resultFrameNumber);
+ result.stream = mCamera3Stream;
+ result.status = CAMERA3_BUFFER_STATUS_ERROR;
+ result.acquire_fence = -1;
+ result.release_fence = -1;
+ if (mChannelCB) {
+ mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
+ }
+ free(super_frame);
+
+ return;
+ }
}
}
@@ -4034,8 +4150,8 @@
const uint32_t *ldafCalib = hal_obj->getLdafCalib();
const char *easelFwVersion = hal_obj->getEaselFwVersion();
if ((eepromVersion && strlen(eepromVersion)) ||
- ldafCalib) {
- int len = 0;
+ ldafCalib || easelFwVersion) {
+ uint32_t len = 0;
settings->image_desc_valid = true;
if (eepromVersion && strlen(eepromVersion)) {
len = snprintf(settings->image_desc, sizeof(settings->image_desc),
@@ -4048,8 +4164,12 @@
}
if (easelFwVersion) {
ALOGD("%s: Easel FW version %s", __FUNCTION__, easelFwVersion);
+ if (len > 0 && len < sizeof(settings->image_desc)) {
+ settings->image_desc[len] = ',';
+ len++;
+ }
len += snprintf(settings->image_desc + len,
- sizeof(settings->image_desc) - len, ":%s", easelFwVersion);
+ sizeof(settings->image_desc) - len, "E-ver:%s", easelFwVersion);
}
}
diff --git a/msm8998/QCamera2/HAL3/QCamera3Channel.h b/msm8998/QCamera2/HAL3/QCamera3Channel.h
index a23acd5..11eb3d1 100644
--- a/msm8998/QCamera2/HAL3/QCamera3Channel.h
+++ b/msm8998/QCamera2/HAL3/QCamera3Channel.h
@@ -250,6 +250,7 @@
QCamera3Stream *stream);
int32_t getStreamSize(cam_dimension_t &dim);
virtual int32_t timeoutFrame(uint32_t frameNumber);
+ virtual int32_t postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer);
QCamera3PostProcessor m_postprocessor; // post processor
void showDebugFPS(int32_t streamType);
@@ -497,6 +498,7 @@
virtual void putStreamBufs();
virtual void reprocessCbRoutine(buffer_handle_t *resultBuffer,
uint32_t resultFrameNumber);
+ virtual int32_t postprocFail(qcamera_hal3_pp_buffer_t *ppBuffer);
private:
typedef struct {
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
index 9f4c118..d7a432c 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.cpp
@@ -57,6 +57,9 @@
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"
+// XML parsing
+#include "tinyxml2.h"
+
#include "HdrPlusClientUtils.h"
extern "C" {
@@ -98,7 +101,7 @@
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Set a threshold for detection of missing buffers //seconds
-#define MISSING_REQUEST_BUF_TIMEOUT 5
+#define MISSING_REQUEST_BUF_TIMEOUT 10
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
@@ -147,6 +150,7 @@
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
+int32_t gActiveEaselClient = 0; // The number of active cameras on Easel.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
@@ -276,8 +280,7 @@
{ ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
{ ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
{ ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
- { (camera_metadata_enum_android_control_ae_mode_t)
- NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
+ { ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};
const QCamera3HardwareInterface::QCameraMap<
@@ -469,6 +472,7 @@
mParamHeap(NULL),
mParameters(NULL),
mPrevParameters(NULL),
+ m_ISTypeVideo(IS_TYPE_NONE),
m_bIsVideo(false),
m_bIs4KVideo(false),
m_bEisSupportedSize(false),
@@ -511,6 +515,7 @@
mInstantAecFrameIdxCount(0),
mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
+ mLastRequestedOisDataMode(ANDROID_STATISTICS_OIS_DATA_MODE_OFF),
mCurrFeatureState(0),
mLdafCalibExist(false),
mLastCustIntentFrmNum(-1),
@@ -528,13 +533,14 @@
mFirstPreviewIntentSeen(false),
m_bSensorHDREnabled(false),
mAfTrigger(),
- mSceneDistance(-1)
+ mSceneDistance(-1),
+ mLastFocusDistance(0.0)
{
getLogLevel();
mCommon.init(gCamCapability[cameraId]);
mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
- mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
+ mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_5;
#else
mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
@@ -633,6 +639,9 @@
int32_t rc = 0;
+ // Clean up Easel error future first to avoid Easel error happens during destructor.
+ cleanupEaselErrorFuture();
+
// Disable power hint and enable the perf lock for close camera
mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
@@ -909,12 +918,23 @@
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
- rc = gEaselManagerClient->resume(this);
- if (rc != 0) {
- ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+ if (gActiveEaselClient == 0) {
+ rc = gEaselManagerClient->resume(this);
+ if (rc != 0) {
+ ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+ return rc;
+ }
+ mEaselFwUpdated = false;
+ }
+ gActiveEaselClient++;
+
+ mQCamera3HdrPlusListenerThread = new QCamera3HdrPlusListenerThread(this);
+ rc = mQCamera3HdrPlusListenerThread->run("QCamera3HdrPlusListenerThread");
+ if (rc != OK) {
+ ALOGE("%s: Starting HDR+ client listener thread failed: %s (%d)", __FUNCTION__,
+ strerror(-rc), rc);
return rc;
}
- mEaselFwUpdated = false;
}
}
@@ -928,11 +948,20 @@
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
- status_t suspendErr = gEaselManagerClient->suspend();
- if (suspendErr != 0) {
- ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
- strerror(-suspendErr), suspendErr);
+ if (gActiveEaselClient == 1) {
+ status_t suspendErr = gEaselManagerClient->suspend();
+ if (suspendErr != 0) {
+ ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
+ strerror(-suspendErr), suspendErr);
+ }
}
+ gActiveEaselClient--;
+ }
+
+ if (mQCamera3HdrPlusListenerThread != nullptr) {
+ mQCamera3HdrPlusListenerThread->requestExit();
+ mQCamera3HdrPlusListenerThread->join();
+ mQCamera3HdrPlusListenerThread = nullptr;
}
}
}
@@ -1124,10 +1153,19 @@
{
std::unique_lock<std::mutex> l(gHdrPlusClientLock);
if (EaselManagerClientOpened) {
- rc = gEaselManagerClient->suspend();
- if (rc != 0) {
- ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+ if (gActiveEaselClient == 1) {
+ rc = gEaselManagerClient->suspend();
+ if (rc != 0) {
+ ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
+ }
}
+ gActiveEaselClient--;
+ }
+
+ if (mQCamera3HdrPlusListenerThread != nullptr) {
+ mQCamera3HdrPlusListenerThread->requestExit();
+ mQCamera3HdrPlusListenerThread->join();
+ mQCamera3HdrPlusListenerThread = nullptr;
}
}
@@ -1873,7 +1911,7 @@
/* Check whether we have video stream */
m_bIs4KVideo = false;
m_bIsVideo = false;
- m_bEisSupportedSize = false;
+ m_bEisSupportedSize = true;
m_bTnrEnabled = false;
m_bVideoHdrEnabled = false;
bool isZsl = false;
@@ -1946,7 +1984,9 @@
eis_prop_set = (uint8_t)atoi(eis_prop);
m_bEisEnable = eis_prop_set && m_bEisSupported &&
- (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
+ (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
+ (gCamCapability[mCameraId]->position == CAM_POSITION_BACK ||
+ gCamCapability[mCameraId]->position == CAM_POSITION_BACK_AUX);
LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
m_bEisEnable, eis_prop_set, m_bEisSupported);
@@ -1983,23 +2023,23 @@
}
if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
- (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
- m_bIsVideo = true;
- // In HAL3 we can have multiple different video streams.
- // The variables video width and height are used below as
- // dimensions of the biggest of them
- if (videoWidth < newStream->width ||
- videoHeight < newStream->height) {
- videoWidth = newStream->width;
- videoHeight = newStream->height;
+ (IS_USAGE_PREVIEW(newStream->usage) || IS_USAGE_VIDEO(newStream->usage))) {
+ if (IS_USAGE_VIDEO(newStream->usage)) {
+ m_bIsVideo = true;
+ // In HAL3 we can have multiple different video streams.
+ // The variables video width and height are used below as
+ // dimensions of the biggest of them
+ if (videoWidth < newStream->width || videoHeight < newStream->height) {
+ videoWidth = newStream->width;
+ videoHeight = newStream->height;
+ }
+ if ((VIDEO_4K_WIDTH <= newStream->width) &&
+ (VIDEO_4K_HEIGHT <= newStream->height)) {
+ m_bIs4KVideo = true;
+ }
}
- if ((VIDEO_4K_WIDTH <= newStream->width) &&
- (VIDEO_4K_HEIGHT <= newStream->height)) {
- m_bIs4KVideo = true;
- }
- m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
+ m_bEisSupportedSize &= (newStream->width <= maxEisWidth) &&
(newStream->height <= maxEisHeight);
-
}
if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
@@ -2077,12 +2117,6 @@
}
}
- if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
- gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
- !m_bIsVideo) {
- m_bEisEnable = false;
- }
-
if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
pthread_mutex_unlock(&mMutex);
return -EINVAL;
@@ -3075,12 +3109,241 @@
mFirstMetadataCallback = true;
+ if (streamList->session_parameters != nullptr) {
+ CameraMetadata meta;
+ meta = streamList->session_parameters;
+
+ // send an unconfigure to the backend so that the isp
+ // resources are deallocated
+ if (!mFirstConfiguration) {
+ cam_stream_size_info_t stream_config_info;
+ int32_t hal_version = CAM_HAL_V3;
+ memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
+ stream_config_info.buffer_info.min_buffers =
+ MIN_INFLIGHT_REQUESTS;
+ stream_config_info.buffer_info.max_buffers =
+ m_bIs4KVideo ? 0 :
+ m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
+ clear_metadata_buffer(mParameters);
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+ CAM_INTF_PARM_HAL_VERSION, hal_version);
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+ CAM_INTF_META_STREAM_INFO, stream_config_info);
+ rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+ mParameters);
+ if (rc < 0) {
+ LOGE("set_parms for unconfigure failed");
+ pthread_mutex_unlock(&mMutex);
+ return rc;
+ }
+
+ }
+ /* get eis information for stream configuration */
+ cam_is_type_t isTypePreview, is_type=IS_TYPE_NONE;
+ char is_type_value[PROPERTY_VALUE_MAX];
+ property_get("persist.camera.is_type", is_type_value, "4");
+ m_ISTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
+ // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
+ property_get("persist.camera.is_type_preview", is_type_value, "4");
+ isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
+ LOGD("isTypeVideo: %d isTypePreview: %d", m_ISTypeVideo, isTypePreview);
+
+ int32_t hal_version = CAM_HAL_V3;
+ clear_metadata_buffer(mParameters);
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, mCaptureIntent);
+
+ if (mFirstConfiguration) {
+ // configure instant AEC
+ // Instant AEC is a session based parameter and it is needed only
+ // once per complete session after open camera.
+ // i.e. This is set only once for the first capture request, after open camera.
+ setInstantAEC(meta);
+ }
+
+ bool setEis = isEISEnabled(meta);
+ int32_t vsMode;
+ vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
+ rc = BAD_VALUE;
+ }
+ LOGD("setEis %d", setEis);
+ bool eis3Supported = false;
+ size_t count = IS_TYPE_MAX;
+ count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
+ for (size_t i = 0; i < count; i++) {
+ if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
+ eis3Supported = true;
+ break;
+ }
+ }
+
+ //IS type will be 0 unless EIS is supported. If EIS is supported
+ //it could either be 4 or 5 depending on the stream and video size
+ for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
+ if (setEis) {
+ if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
+ is_type = isTypePreview;
+ } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
+ if ( (m_ISTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
+ LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
+ is_type = IS_TYPE_EIS_2_0;
+ } else {
+ is_type = m_ISTypeVideo;
+ }
+ } else {
+ is_type = IS_TYPE_NONE;
+ }
+ mStreamConfigInfo.is_type[i] = is_type;
+ } else {
+ mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
+ }
+ }
+
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+ CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
+
+ char prop[PROPERTY_VALUE_MAX];
+ //Disable tintless only if the property is set to 0
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.tintless.enable", prop, "1");
+ int32_t tintless_value = atoi(prop);
+
+ ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+ CAM_INTF_PARM_TINTLESS, tintless_value);
+
+ //Disable CDS for HFR mode or if DIS/EIS is on.
+ //CDS is a session parameter in the backend/ISP, so need to be set/reset
+ //after every configure_stream
+ if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
+ (m_bIsVideo)) {
+ int32_t cds = CAM_CDS_MODE_OFF;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
+ CAM_INTF_PARM_CDS_MODE, cds))
+ LOGE("Failed to disable CDS for HFR mode");
+
+ }
+
+ if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
+ uint8_t* use_av_timer = NULL;
+
+ if (m_debug_avtimer){
+ LOGI(" Enabling AV timer through setprop");
+ use_av_timer = &m_debug_avtimer;
+ m_bAVTimerEnabled = true;
+ }
+ else{
+ use_av_timer =
+ meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
+ if (use_av_timer) {
+ m_bAVTimerEnabled = true;
+ LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
+ }
+ }
+
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
+ rc = BAD_VALUE;
+ }
+ }
+
+ setMobicat();
+
+ /* Set fps and hfr mode while sending meta stream info so that sensor
+ * can configure appropriate streaming mode */
+ mHFRVideoFps = DEFAULT_VIDEO_FPS;
+ mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
+ mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
+ if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
+ rc = setHalFpsRange(meta, mParameters);
+ if (rc == NO_ERROR) {
+ int32_t max_fps =
+ (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
+ if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
+ mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
+ }
+ /* For HFR, more buffers are dequeued upfront to improve the performance */
+ if (mBatchSize) {
+ mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
+ mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
+ }
+ }
+ else {
+ LOGE("setHalFpsRange failed");
+ }
+ }
+ memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
+
+ if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
+ cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
+ meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
+ rc = setVideoHdrMode(mParameters, vhdr);
+ if (rc != NO_ERROR) {
+ LOGE("setVideoHDR is failed");
+ }
+ }
+
+ if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
+ uint8_t sensorModeFullFov =
+ meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
+ LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
+ sensorModeFullFov)) {
+ rc = BAD_VALUE;
+ }
+ }
+ //TODO: validate the arguments, HSV scenemode should have only the
+ //advertised fps ranges
+
+ /*set the capture intent, hal version, tintless, stream info,
+ *and disenable parameters to the backend*/
+ LOGD("set_parms META_STREAM_INFO " );
+ for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
+ LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
+ ", Format:%d is_type: %d",
+ mStreamConfigInfo.type[i],
+ mStreamConfigInfo.stream_sizes[i].width,
+ mStreamConfigInfo.stream_sizes[i].height,
+ mStreamConfigInfo.postprocess_mask[i],
+ mStreamConfigInfo.format[i],
+ mStreamConfigInfo.is_type[i]);
+ }
+
+ rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
+ mParameters);
+ if (rc < 0) {
+ LOGE("set_parms failed for hal version, stream info");
+ }
+
+ }
+
pthread_mutex_unlock(&mMutex);
return rc;
}
/*===========================================================================
+ * FUNCTION : isEISEnabled
+ *
+ * DESCRIPTION: Decide whether EIS should get enabled or not.
+ *
+ * PARAMETERS :
+ * @meta : request from framework to process
+ *
+ * RETURN : true/false Whether EIS should be enabled
+ *
+ *==========================================================================*/
+bool QCamera3HardwareInterface::isEISEnabled(const CameraMetadata& meta) {
+ uint8_t fwkVideoStabMode = 0;
+ if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
+ fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
+ }
+
+ // If EIS setprop is enabled then only turn it on for video/preview
+ return m_bEisEnable && (m_bIsVideo || fwkVideoStabMode) && m_bEisSupportedSize &&
+ (m_ISTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
+}
+
+/*===========================================================================
* FUNCTION : validateCaptureRequest
*
* DESCRIPTION: validate a capture request from camera service
@@ -3685,12 +3948,13 @@
// HDR+ request is done. So allow a longer timeout.
timeout = (mHdrPlusPendingRequests.size() > 0) ?
MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
+ timeout = s2ns(timeout);
if (timeout < mExpectedInflightDuration) {
timeout = mExpectedInflightDuration;
}
}
- if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
+ if ((currentSysTime - req.timestamp) > timeout) {
for (auto &missed : req.mPendingBufferList) {
assert(missed.stream->priv);
if (missed.stream->priv) {
@@ -3868,6 +4132,7 @@
if(p_is_metabuf_queued != NULL) {
*p_is_metabuf_queued = true;
}
+ iter->need_metadata = false;
break;
}
}
@@ -4242,8 +4507,8 @@
// Remove len shading map if it's not requested.
if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
- metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF).data.u8[0] !=
- ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
+ metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
&requestIter->requestedLensShadingMapMode, 1);
@@ -4291,6 +4556,10 @@
requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
} else {
liveRequest = true;
+ if ((requestIter->partial_result_cnt == 0) && !requestIter->partialResultDropped) {
+ LOGE("Urgent metadata for frame number: %d didn't arrive!", frameNumber);
+ requestIter->partialResultDropped = true;
+ }
requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
mPendingLiveRequest--;
@@ -4361,6 +4630,24 @@
}
if (errorResult) {
+ // Check for any buffers that might be stuck in the post-process input queue
+ // awaiting metadata and queue an empty meta buffer. The invalid data should
+ // fail the offline post-process pass and return any buffers that otherwise
+ // will become lost.
+ for (auto it = iter->buffers.begin(); it != iter->buffers.end(); it++) {
+ if (it->need_metadata) {
+ QCamera3ProcessingChannel *channel =
+ reinterpret_cast<QCamera3ProcessingChannel *> (it->stream->priv);
+ if (channel != nullptr) {
+ LOGE("Dropped result: %d Unblocking any pending pp buffers!",
+ iter->frame_number);
+ channel->queueReprocMetadata(nullptr);
+ }
+ it->need_metadata = false;
+ break;
+ }
+ }
+
notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
} else {
result.output_buffers = nullptr;
@@ -4892,7 +5179,6 @@
CameraMetadata meta;
bool isVidBufRequested = false;
camera3_stream_buffer_t *pInputBuffer = NULL;
- char prop[PROPERTY_VALUE_MAX];
pthread_mutex_lock(&mMutex);
@@ -4923,233 +5209,32 @@
meta = request->settings;
- // For first capture request, send capture intent, and
- // stream on all streams
if (mState == CONFIGURED) {
logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
- // send an unconfigure to the backend so that the isp
- // resources are deallocated
- if (!mFirstConfiguration) {
- cam_stream_size_info_t stream_config_info;
- int32_t hal_version = CAM_HAL_V3;
- memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
- stream_config_info.buffer_info.min_buffers =
- MIN_INFLIGHT_REQUESTS;
- stream_config_info.buffer_info.max_buffers =
- m_bIs4KVideo ? 0 :
- m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
- clear_metadata_buffer(mParameters);
- ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
- CAM_INTF_PARM_HAL_VERSION, hal_version);
- ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
- CAM_INTF_META_STREAM_INFO, stream_config_info);
- rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
- mParameters);
- if (rc < 0) {
- LOGE("set_parms for unconfigure failed");
- pthread_mutex_unlock(&mMutex);
- return rc;
- }
- }
- mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
- /* get eis information for stream configuration */
- cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
- char is_type_value[PROPERTY_VALUE_MAX];
- property_get("persist.camera.is_type", is_type_value, "4");
- isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
- // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
- property_get("persist.camera.is_type_preview", is_type_value, "4");
- isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
- LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
-
- if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
+ // For HFR first capture request, send capture intent, and
+ // stream on all streams
+ if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) && mBatchSize) {
int32_t hal_version = CAM_HAL_V3;
- uint8_t captureIntent =
- meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
- mCaptureIntent = captureIntent;
+ uint8_t captureIntent = meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
clear_metadata_buffer(mParameters);
ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
- }
- if (mFirstConfiguration) {
- // configure instant AEC
- // Instant AEC is a session based parameter and it is needed only
- // once per complete session after open camera.
- // i.e. This is set only once for the first capture request, after open camera.
- setInstantAEC(meta);
- }
- uint8_t fwkVideoStabMode=0;
- if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
- fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
- }
-
- // If EIS setprop is enabled then only turn it on for video/preview
- bool setEis = m_bEisEnable && m_bEisSupportedSize &&
- (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
- int32_t vsMode;
- vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
- if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
- rc = BAD_VALUE;
- }
- LOGD("setEis %d", setEis);
- bool eis3Supported = false;
- size_t count = IS_TYPE_MAX;
- count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
- for (size_t i = 0; i < count; i++) {
- if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
- eis3Supported = true;
- break;
+ rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
+ if (rc < 0) {
+ LOGE("set_parms for for capture intent failed");
+ pthread_mutex_unlock(&mMutex);
+ return rc;
}
}
- //IS type will be 0 unless EIS is supported. If EIS is supported
- //it could either be 4 or 5 depending on the stream and video size
- for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
- if (setEis) {
- if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
- is_type = isTypePreview;
- } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
- if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
- LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
- is_type = IS_TYPE_EIS_2_0;
- } else {
- is_type = isTypeVideo;
- }
- } else {
- is_type = IS_TYPE_NONE;
- }
- mStreamConfigInfo.is_type[i] = is_type;
- } else {
- mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
- }
- }
-
- ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
- CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
-
- //Disable tintless only if the property is set to 0
- memset(prop, 0, sizeof(prop));
- property_get("persist.camera.tintless.enable", prop, "1");
- int32_t tintless_value = atoi(prop);
-
- ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
- CAM_INTF_PARM_TINTLESS, tintless_value);
-
- //Disable CDS for HFR mode or if DIS/EIS is on.
- //CDS is a session parameter in the backend/ISP, so need to be set/reset
- //after every configure_stream
- if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
- (m_bIsVideo)) {
- int32_t cds = CAM_CDS_MODE_OFF;
- if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
- CAM_INTF_PARM_CDS_MODE, cds))
- LOGE("Failed to disable CDS for HFR mode");
-
- }
-
- if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
- uint8_t* use_av_timer = NULL;
-
- if (m_debug_avtimer){
- LOGI(" Enabling AV timer through setprop");
- use_av_timer = &m_debug_avtimer;
- m_bAVTimerEnabled = true;
- }
- else{
- use_av_timer =
- meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
- if (use_av_timer) {
- m_bAVTimerEnabled = true;
- LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
- }
- }
-
- if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
- rc = BAD_VALUE;
- }
- }
-
- setMobicat();
-
uint8_t nrMode = 0;
if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
}
- /* Set fps and hfr mode while sending meta stream info so that sensor
- * can configure appropriate streaming mode */
- mHFRVideoFps = DEFAULT_VIDEO_FPS;
- mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
- mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
- if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
- rc = setHalFpsRange(meta, mParameters);
- if (rc == NO_ERROR) {
- int32_t max_fps =
- (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
- if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
- mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
- }
- /* For HFR, more buffers are dequeued upfront to improve the performance */
- if (mBatchSize) {
- mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
- mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
- }
- }
- else {
- LOGE("setHalFpsRange failed");
- }
- }
- if (meta.exists(ANDROID_CONTROL_MODE)) {
- uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
- rc = extractSceneMode(meta, metaMode, mParameters);
- if (rc != NO_ERROR) {
- LOGE("extractSceneMode failed");
- }
- }
- memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
-
- if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
- cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
- meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
- rc = setVideoHdrMode(mParameters, vhdr);
- if (rc != NO_ERROR) {
- LOGE("setVideoHDR is failed");
- }
- }
-
- if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
- uint8_t sensorModeFullFov =
- meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
- LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
- if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
- sensorModeFullFov)) {
- rc = BAD_VALUE;
- }
- }
- //TODO: validate the arguments, HSV scenemode should have only the
- //advertised fps ranges
-
- /*set the capture intent, hal version, tintless, stream info,
- *and disenable parameters to the backend*/
- LOGD("set_parms META_STREAM_INFO " );
- for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
- LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
- ", Format:%d is_type: %d",
- mStreamConfigInfo.type[i],
- mStreamConfigInfo.stream_sizes[i].width,
- mStreamConfigInfo.stream_sizes[i].height,
- mStreamConfigInfo.postprocess_mask[i],
- mStreamConfigInfo.format[i],
- mStreamConfigInfo.is_type[i]);
- }
-
- rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
- mParameters);
- if (rc < 0) {
- LOGE("set_parms failed for hal version, stream info");
- }
-
+ cam_is_type_t is_type = IS_TYPE_NONE;
+ bool setEis = isEISEnabled(meta);
cam_sensor_mode_info_t sensorModeInfo = {};
rc = getSensorModeInfo(sensorModeInfo);
if (rc != NO_ERROR) {
@@ -5471,6 +5556,11 @@
meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
}
+ if (meta.exists(ANDROID_STATISTICS_OIS_DATA_MODE)) {
+ mLastRequestedOisDataMode =
+ meta.find(ANDROID_STATISTICS_OIS_DATA_MODE).data.u8[0];
+ }
+
bool hdrPlusRequest = false;
HdrPlusPendingRequest pendingHdrPlusRequest = {};
@@ -5582,6 +5672,7 @@
pendingRequest.timestamp = 0;
pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
+ pendingRequest.requestedOisDataMode = mLastRequestedOisDataMode;
if (request->input_buffer) {
pendingRequest.input_buffer =
(camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
@@ -5604,6 +5695,11 @@
meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
}
+ if (meta.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
+ pendingRequest.motion_detection_enable =
+ meta.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8[0];
+ }
+
/* DevCamDebug metadata processCaptureRequest */
if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
mDevCamDebugMetaEnable =
@@ -5619,8 +5715,10 @@
}
pendingRequest.fwkCacMode = mCacMode;
pendingRequest.hdrplus = hdrPlusRequest;
- pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
- mExpectedInflightDuration += mExpectedFrameDuration;
+ // We need to account for several dropped frames initially on sensor side.
+ pendingRequest.expectedFrameDuration = (mState == CONFIGURED) ? (4 * mExpectedFrameDuration) :
+ mExpectedFrameDuration;
+ mExpectedInflightDuration += pendingRequest.expectedFrameDuration;
// extract enableZsl info
if (gExposeEnableZslKey) {
@@ -6431,6 +6529,7 @@
return rc;
}
}
+ mFirstPreviewIntentSeen = false;
}
pthread_mutex_unlock(&mMutex);
@@ -6818,6 +6917,7 @@
camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
+ camMetadata.update(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE, &pendingRequest.motion_detection_enable, 1);
if (mBatchSize == 0) {
// DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
@@ -7899,6 +7999,11 @@
IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
+ mLastFocusDistance = *focusDistance;
+ } else {
+ LOGE("Missing LENS_FOCUS_DISTANCE metadata. Use last known distance of %f",
+ mLastFocusDistance);
+ camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , &mLastFocusDistance, 1);
}
IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
@@ -8228,6 +8333,7 @@
// AF scene change
IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
+ camMetadata.update(ANDROID_CONTROL_AF_SCENE_CHANGE, afSceneChange, 1);
}
// Enable ZSL
@@ -8237,6 +8343,8 @@
camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
}
+ camMetadata.update(ANDROID_STATISTICS_OIS_DATA_MODE, &pendingRequest.requestedOisDataMode, 1);
+
// OIS Data
IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
@@ -8247,6 +8355,99 @@
frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
+
+ if (pendingRequest.requestedOisDataMode == ANDROID_STATISTICS_OIS_DATA_MODE_ON) {
+ int64_t timeDiff = pendingRequest.timestamp -
+ frame_ois_data->frame_sof_timestamp_boottime;
+
+ std::vector<int64_t> oisTimestamps;
+
+ for (int32_t i = 0; i < frame_ois_data->num_ois_sample; i++) {
+ oisTimestamps.push_back(
+ frame_ois_data->ois_sample_timestamp_boottime[i] + timeDiff);
+ }
+
+ camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
+ oisTimestamps.data(), frame_ois_data->num_ois_sample);
+ camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
+ frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
+ camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
+ frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
+ } else {
+ // If OIS data mode is OFF, add NULL for OIS keys.
+ camMetadata.update(ANDROID_STATISTICS_OIS_TIMESTAMPS,
+ frame_ois_data->ois_sample_timestamp_boottime, 0);
+ camMetadata.update(ANDROID_STATISTICS_OIS_X_SHIFTS,
+ frame_ois_data->ois_sample_shift_pixel_x, 0);
+ camMetadata.update(ANDROID_STATISTICS_OIS_Y_SHIFTS,
+ frame_ois_data->ois_sample_shift_pixel_y, 0);
+ }
+ }
+
+ // DevCamDebug metadata translateFromHalMetadata AEC MOTION
+ IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
+ CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
+ float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
+ camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
+ &fwk_DevCamDebug_aec_camera_motion_dx, 1);
+ }
+ IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
+ CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
+ float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
+ camMetadata.update(NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
+ &fwk_DevCamDebug_aec_camera_motion_dy, 1);
+ }
+ IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
+ CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
+ float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
+ camMetadata.update(NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
+ &fwk_DevCamDebug_aec_subject_motion, 1);
+ }
+
+ // Camera lens calibration dynamic fields, for back camera. Same values as for static metadata.
+ if (mCameraId == 0) {
+ const camera_metadata_t *staticInfo = gStaticMetadata[mCameraId];
+ camera_metadata_ro_entry_t rotation, translation, intrinsics, distortion, reference;
+ int res;
+ bool fail = false;
+ res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_ROTATION,
+ &rotation);
+ if (res != 0) {
+ fail = true;
+ }
+ res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_TRANSLATION,
+ &translation);
+ if (res != 0) {
+ fail = true;
+ }
+ res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_INTRINSIC_CALIBRATION,
+ &intrinsics);
+ if (res != 0) {
+ fail = true;
+ }
+ res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_DISTORTION,
+ &distortion);
+ if (res != 0) {
+ fail = true;
+ }
+ res = find_camera_metadata_ro_entry(staticInfo, ANDROID_LENS_POSE_REFERENCE,
+ &reference);
+ if (res != 0) {
+ fail = true;
+ }
+
+ if (!fail) {
+ camMetadata.update(ANDROID_LENS_POSE_ROTATION,
+ rotation.data.f, rotation.count);
+ camMetadata.update(ANDROID_LENS_POSE_TRANSLATION,
+ translation.data.f, translation.count);
+ camMetadata.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+ intrinsics.data.f, intrinsics.count);
+ camMetadata.update(ANDROID_LENS_DISTORTION,
+ distortion.data.f, distortion.count);
+ camMetadata.update(ANDROID_LENS_POSE_REFERENCE,
+ reference.data.u8, reference.count);
+ }
}
resultMetadata = camMetadata.release();
@@ -8510,7 +8711,7 @@
fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
} else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
- fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
+ fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
} else {
LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
@@ -9468,6 +9669,34 @@
lens_shading_map_size,
sizeof(lens_shading_map_size)/sizeof(int32_t));
+ // Lens calibration for MOTION_TRACKING, back camera only
+ if (cameraId == 0) {
+
+ float poseRotation[4] = {1.0f, 0.f, 0.f, 0.f}; // quaternion rotation
+ float poseTranslation[3] = {0.0f, 0.f, 0.f}; // xyz translation, meters
+ uint8_t poseReference = ANDROID_LENS_POSE_REFERENCE_GYROSCOPE;
+ // TODO: b/70565622 - these should have better identity values as a fallback
+ float cameraIntrinsics[5] = {100.f, 100.f, 1000.f, 1000.f, 0.f}; // fx,fy,cx,cy,s
+ float radialDistortion[5] = {0.f, 0.f, 0.f, 0.f, 0.f}; // identity
+
+ bool success = readSensorCalibration(
+ gCamCapability[cameraId]->active_array_size.width,
+ poseRotation, poseTranslation, cameraIntrinsics, radialDistortion);
+ if (!success) {
+ ALOGE("Using identity lens calibration values");
+ }
+ staticInfo.update(ANDROID_LENS_POSE_ROTATION,
+ poseRotation, sizeof(poseRotation)/sizeof(float));
+ staticInfo.update(ANDROID_LENS_POSE_TRANSLATION,
+ poseTranslation, sizeof(poseTranslation)/sizeof(float));
+ staticInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
+ cameraIntrinsics, sizeof(cameraIntrinsics)/sizeof(float));
+ staticInfo.update(ANDROID_LENS_DISTORTION,
+ radialDistortion, sizeof(radialDistortion)/sizeof(float));
+ staticInfo.update(ANDROID_LENS_POSE_REFERENCE,
+ &poseReference, sizeof(poseReference));
+ }
+
staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
@@ -9627,9 +9856,26 @@
reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF);
-
}
+
+ staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
+ &(gCamCapability[cameraId]->wb_cal.num_lights), 1);
+ available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS);
+
+ const int32_t num_lights = gCamCapability[cameraId]->wb_cal.num_lights;
+ staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
+ gCamCapability[cameraId]->wb_cal.r_over_g, num_lights);
+ available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS);
+
+ staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
+ gCamCapability[cameraId]->wb_cal.b_over_g, num_lights);
+ available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS);
+
+ staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
+ &(gCamCapability[cameraId]->wb_cal.gr_over_gb), 1);
+ available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO);
+
int32_t scalar_formats[] = {
ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
@@ -10100,7 +10346,7 @@
for (size_t i = 0; i < count; i++) {
uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
- aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
+ aeMode = ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH;
}
avail_ae_modes.add(aeMode);
}
@@ -10196,6 +10442,11 @@
if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
}
+ // Only back camera supports MOTION_TRACKING
+ if (cameraId == 0) {
+ available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING);
+ }
+
staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
available_capabilities.array(),
available_capabilities.size());
@@ -10331,6 +10582,16 @@
(void *)gCamCapability[cameraId]->calibration_transform2,
CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
+#ifndef USE_HAL_3_3
+
+ int32_t session_keys[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE, QCAMERA3_INSTANT_AEC_MODE, QCAMERA3_USE_AV_TIMER,
+ QCAMERA3_VIDEO_HDR_MODE, TANGO_MODE_DATA_SENSOR_FULLFOV};
+ staticInfo.update(ANDROID_REQUEST_AVAILABLE_SESSION_KEYS, session_keys,
+ sizeof(session_keys) / sizeof(session_keys[0]));
+
+#endif
+
int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
@@ -10359,7 +10620,7 @@
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
ANDROID_STATISTICS_FACE_DETECT_MODE,
- ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
+ ANDROID_STATISTICS_SHARPNESS_MAP_MODE, ANDROID_STATISTICS_OIS_DATA_MODE,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
@@ -10388,7 +10649,8 @@
TANGO_MODE_DATA_SENSOR_FULLFOV,
NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
- NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE
+ NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE,
+ NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
};
size_t request_keys_cnt =
@@ -10413,6 +10675,7 @@
ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
+ ANDROID_CONTROL_AF_SCENE_CHANGE,
ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
@@ -10430,7 +10693,9 @@
ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
- ANDROID_STATISTICS_FACE_SCORES,
+ ANDROID_STATISTICS_FACE_SCORES, ANDROID_STATISTICS_OIS_DATA_MODE,
+ ANDROID_STATISTICS_OIS_TIMESTAMPS, ANDROID_STATISTICS_OIS_X_SHIFTS,
+ ANDROID_STATISTICS_OIS_Y_SHIFTS,
#ifndef USE_HAL_3_3
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
@@ -10531,7 +10796,11 @@
NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
- NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y
+ NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
+ NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
+ NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
+ NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
+ NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION
};
size_t result_keys_cnt =
@@ -10645,6 +10914,18 @@
}
#endif
+ if (cameraId == 0) {
+ int32_t lensCalibrationKeys[] = {
+ ANDROID_LENS_POSE_ROTATION,
+ ANDROID_LENS_POSE_TRANSLATION,
+ ANDROID_LENS_POSE_REFERENCE,
+ ANDROID_LENS_INTRINSIC_CALIBRATION,
+ ANDROID_LENS_DISTORTION,
+ };
+ available_characteristics_keys.appendArray(lensCalibrationKeys,
+ sizeof(lensCalibrationKeys) / sizeof(lensCalibrationKeys[0]));
+ }
+
if (0 <= indexPD) {
int32_t depthKeys[] = {
ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
@@ -10805,7 +11086,7 @@
}
if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
- int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
+ uint8_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
size = 0;
count = CAM_AEC_CONVERGENCE_MAX;
count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
@@ -10813,7 +11094,7 @@
int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
gCamCapability[cameraId]->supported_instant_aec_modes[i]);
if (NAME_NOT_FOUND != val) {
- available_instant_aec_modes[size] = (int32_t)val;
+ available_instant_aec_modes[size] = (uint8_t)val;
size++;
}
}
@@ -10912,18 +11193,32 @@
if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
eepromLength += sizeof(easelInfo);
strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
- gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
+ gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-Y" : ",E:N"),
MAX_EEPROM_VERSION_INFO_LEN);
}
staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
gCamCapability[cameraId]->eeprom_version_info, eepromLength);
available_characteristics_keys.add(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO);
+
+ staticInfo.update(ANDROID_INFO_VERSION,
+ gCamCapability[cameraId]->eeprom_version_info, eepromLength);
+ available_characteristics_keys.add(ANDROID_INFO_VERSION);
}
staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
available_characteristics_keys.array(),
available_characteristics_keys.size());
+ std::vector<uint8_t> availableOisModes;
+ availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_OFF);
+ if (cameraId == 0) {
+ availableOisModes.push_back(ANDROID_STATISTICS_OIS_DATA_MODE_ON);
+ }
+
+ staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
+ availableOisModes.data(),
+ availableOisModes.size());
+
gStaticMetadata[cameraId] = staticInfo.release();
return rc;
}
@@ -11171,7 +11466,7 @@
ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
}
- gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
+ gEaselBypassOnly = property_get_bool("persist.camera.hdrplus.disable", false);
gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
// Expose enableZsl key only when HDR+ mode is enabled.
@@ -11249,7 +11544,7 @@
info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
#ifndef USE_HAL_3_3
- info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
+ info->device_version = CAMERA_DEVICE_API_VERSION_3_5;
#else
info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
@@ -11735,9 +12030,15 @@
settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
// Set instant AEC to normal convergence by default
- int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
+ uint8_t instant_aec_mode = (uint8_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
+ uint8_t oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_OFF;
+ if (mCameraId == 0) {
+ oisDataMode = ANDROID_STATISTICS_OIS_DATA_MODE_ON;
+ }
+ settings.update(ANDROID_STATISTICS_OIS_DATA_MODE, &oisDataMode, 1);
+
if (gExposeEnableZslKey) {
settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
int32_t postview = 0;
@@ -11760,6 +12061,9 @@
/* hybrid ae */
settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
+ int32_t fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
+ settings.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
+
mDefaultMetadata[type] = settings.release();
return mDefaultMetadata[type];
@@ -12389,7 +12693,7 @@
if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
aeMode = CAM_AE_MODE_OFF;
- } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
+ } else if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_EXTERNAL_FLASH) {
aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
} else {
aeMode = CAM_AE_MODE_ON;
@@ -12675,6 +12979,11 @@
}
if (frame_settings.exists(ANDROID_FLASH_MODE)) {
+ uint32_t flashMode = (uint32_t)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0];
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_MODE, flashMode)) {
+ rc = BAD_VALUE;
+ }
+
int32_t respectFlashMode = 1;
if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
uint8_t fwk_aeMode =
@@ -12692,14 +13001,21 @@
LOGH("flash mode after mapping %d", val);
// To check: CAM_INTF_META_FLASH_MODE usage
if (NAME_NOT_FOUND != val) {
- uint8_t flashMode = (uint8_t)val;
- if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
+ uint8_t ledMode = (uint8_t)val;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, ledMode)) {
rc = BAD_VALUE;
}
}
}
}
+ if (frame_settings.exists(ANDROID_FLASH_STATE)) {
+ int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.i32[0];
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_STATE, flashState)) {
+ rc = BAD_VALUE;
+ }
+ }
+
if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
@@ -13345,11 +13661,24 @@
if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
uint8_t *hybrid_ae = (uint8_t *)
frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
+ // Motion tracking intent isn't compatible with hybrid ae.
+ if (mCaptureIntent == CAM_INTENT_MOTION_TRACKING) {
+ *hybrid_ae = 0;
+ }
if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
rc = BAD_VALUE;
}
}
+ // Motion Detection
+ if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE)) {
+ uint8_t *motion_detection = (uint8_t *)
+ frame_settings.find(NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE).data.u8;
+ if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MOTION_DETECTION_ENABLE, *motion_detection)) {
+ rc = BAD_VALUE;
+ }
+ }
+
// Histogram
if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
uint8_t histogramMode =
@@ -14658,7 +14987,8 @@
// First try to configure instant AEC from framework metadata
if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
- val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
+ val = meta.find(QCAMERA3_INSTANT_AEC_MODE).data.u8[0];
+ LOGE("Instant AEC mode set: %d", val);
}
// If framework did not set this value, try to read from set prop.
@@ -15037,13 +15367,6 @@
return false;
}
- // TODO (b/66500626): support AE compensation.
- if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
- metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
- ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
- return false;
- }
-
// TODO (b/32585046): support non-ZSL.
if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
@@ -15202,7 +15525,7 @@
return OK;
}
- status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
+ status_t res = gEaselManagerClient->openHdrPlusClientAsync(mQCamera3HdrPlusListenerThread.get());
if (res != OK) {
ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
strerror(-res), res);
@@ -15367,6 +15690,13 @@
void QCamera3HardwareInterface::handleEaselFatalError()
{
+ {
+ std::unique_lock<std::mutex> l(gHdrPlusClientLock);
+ if (gHdrPlusClient != nullptr) {
+ gHdrPlusClient->nofityEaselFatalError();
+ }
+ }
+
pthread_mutex_lock(&mMutex);
mState = ERROR;
pthread_mutex_unlock(&mMutex);
@@ -15374,8 +15704,23 @@
handleCameraDeviceError(/*stopChannelImmediately*/true);
}
+void QCamera3HardwareInterface::cleanupEaselErrorFuture()
+{
+ {
+ std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
+ if (!mEaselErrorFuture.valid()) {
+ // If there is no Easel error, construct a dummy future to wait for.
+ mEaselErrorFuture = std::async([]() { return; });
+ }
+ }
+
+ mEaselErrorFuture.wait();
+}
+
void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
{
+ std::lock_guard<std::mutex> lock(mEaselErrorFutureLock);
+
if (mEaselErrorFuture.valid()) {
// The error future has been invoked.
return;
@@ -15766,7 +16111,7 @@
pendingBuffers++;
}
- // Send out buffer errors for the pending buffers.
+ // Send out request errors for the pending buffers.
if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
std::vector<camera3_stream_buffer_t> streamBuffers;
for (auto &buffer : pendingBuffers->mPendingBufferList) {
@@ -15778,25 +16123,20 @@
streamBuffer.acquire_fence = -1;
streamBuffer.release_fence = -1;
- streamBuffers.push_back(streamBuffer);
-
- // Send out error buffer event.
+ // Send out request error event.
camera3_notify_msg_t notify_msg = {};
notify_msg.type = CAMERA3_MSG_ERROR;
notify_msg.message.error.frame_number = pendingBuffers->frame_number;
- notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
+ notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
notify_msg.message.error.error_stream = buffer.stream;
orchestrateNotify(¬ify_msg);
+ mOutputBufferDispatcher.markBufferReady(pendingBuffers->frame_number, streamBuffer);
}
- camera3_capture_result_t result = {};
- result.frame_number = pendingBuffers->frame_number;
- result.num_output_buffers = streamBuffers.size();
- result.output_buffers = &streamBuffers[0];
+ mShutterDispatcher.clear(pendingBuffers->frame_number);
- // Send out result with buffer errors.
- orchestrateResult(&result);
+
// Remove pending buffers.
mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
@@ -15815,6 +16155,189 @@
pthread_mutex_unlock(&mMutex);
}
+bool QCamera3HardwareInterface::readSensorCalibration(
+ int activeArrayWidth,
+ float poseRotation[4], float poseTranslation[3],
+ float cameraIntrinsics[5], float radialDistortion[5]) {
+
+ const char* calibrationPath = "/persist/sensors/calibration/calibration.xml";
+
+ using namespace tinyxml2;
+
+ XMLDocument calibrationXml;
+ XMLError err = calibrationXml.LoadFile(calibrationPath);
+ if (err != XML_SUCCESS) {
+ ALOGE("Unable to load calibration file '%s'. Error: %s",
+ calibrationPath, XMLDocument::ErrorIDToName(err));
+ return false;
+ }
+ XMLElement *rig = calibrationXml.FirstChildElement("rig");
+ if (rig == nullptr) {
+ ALOGE("No 'rig' in calibration file");
+ return false;
+ }
+ XMLElement *cam = rig->FirstChildElement("camera");
+ XMLElement *camModel = nullptr;
+ while (cam != nullptr) {
+ camModel = cam->FirstChildElement("camera_model");
+ if (camModel == nullptr) {
+ ALOGE("No 'camera_model' in calibration file");
+ return false;
+ }
+ int modelIndex = camModel->IntAttribute("index", -1);
+ // Model index "0" has the calibration we need
+ if (modelIndex == 0) {
+ break;
+ }
+ cam = cam->NextSiblingElement("camera");
+ }
+ if (cam == nullptr) {
+ ALOGE("No 'camera' in calibration file");
+ return false;
+ }
+ const char *modelType = camModel->Attribute("type");
+ if (modelType == nullptr || strcmp(modelType,"calibu_fu_fv_u0_v0_k1_k2_k3")) {
+ ALOGE("Camera model is unknown type %s",
+ modelType ? modelType : "NULL");
+ return false;
+ }
+ XMLElement *modelWidth = camModel->FirstChildElement("width");
+ if (modelWidth == nullptr || modelWidth->GetText() == nullptr) {
+ ALOGE("No camera model width in calibration file");
+ return false;
+ }
+ int width = atoi(modelWidth->GetText());
+ XMLElement *modelHeight = camModel->FirstChildElement("height");
+ if (modelHeight == nullptr || modelHeight->GetText() == nullptr) {
+ ALOGE("No camera model height in calibration file");
+ return false;
+ }
+ int height = atoi(modelHeight->GetText());
+ if (width <= 0 || height <= 0) {
+ ALOGE("Bad model width or height in calibration file: %d x %d", width, height);
+ return false;
+ }
+ ALOGI("Width: %d, Height: %d", width, height);
+
+ XMLElement *modelParams = camModel->FirstChildElement("params");
+ if (modelParams == nullptr) {
+ ALOGE("No camera model params in calibration file");
+ return false;
+ }
+ const char* paramText = modelParams->GetText();
+ if (paramText == nullptr) {
+ ALOGE("No parameters in params element in calibration file");
+ return false;
+ }
+ ALOGI("Parameters: %s", paramText);
+
+ // Parameter string is of the form "[ float; float; float ...]"
+ float params[7];
+ bool success = parseStringArray(paramText, params, 7);
+ if (!success) {
+ ALOGE("Malformed camera parameter string in calibration file");
+ return false;
+ }
+
+ XMLElement *extCalib = rig->FirstChildElement("extrinsic_calibration");
+ while (extCalib != nullptr) {
+ int id = extCalib->IntAttribute("frame_B_id", -1);
+ if (id == 0) {
+ break;
+ }
+ extCalib = extCalib->NextSiblingElement("extrinsic_calibration");
+ }
+ if (extCalib == nullptr) {
+ ALOGE("No 'extrinsic_calibration' in calibration file");
+ return false;
+ }
+
+ XMLElement *q = extCalib->FirstChildElement("A_q_B");
+ if (q == nullptr || q->GetText() == nullptr) {
+ ALOGE("No extrinsic quaternion in calibration file");
+ return false;
+ }
+ float rotation[4];
+ success = parseStringArray(q->GetText(), rotation, 4);
+ if (!success) {
+ ALOGE("Malformed extrinsic quaternion string in calibration file");
+ return false;
+ }
+
+ XMLElement *p = extCalib->FirstChildElement("A_p_B");
+ if (p == nullptr || p->GetText() == nullptr) {
+ ALOGE("No extrinsic translation in calibration file");
+ return false;
+ }
+ float position[3];
+ success = parseStringArray(p->GetText(), position, 3);
+ if (!success) {
+ ALOGE("Malformed extrinsic position string in calibration file");
+ return false;
+ }
+
+ // Map from width x height to active array
+ float scaleFactor = static_cast<float>(activeArrayWidth) / width;
+
+ cameraIntrinsics[0] = params[0] * scaleFactor; // fu -> f_x
+ cameraIntrinsics[1] = params[1] * scaleFactor; // fv -> f_y
+ cameraIntrinsics[2] = params[2] * scaleFactor; // u0 -> c_x
+ cameraIntrinsics[3] = params[3] * scaleFactor; // v0 -> c_y
+ cameraIntrinsics[4] = 0; // s = 0
+
+ radialDistortion[0] = params[4]; // k1 -> k_1
+ radialDistortion[1] = params[5]; // k2 -> k_2
+ radialDistortion[2] = params[6]; // k3 -> k_3
+ radialDistortion[3] = 0; // k_4 = 0
+ radialDistortion[4] = 0; // k_5 = 0
+
+ for (int i = 0; i < 4; i++) {
+ poseRotation[i] = rotation[i];
+ }
+ for (int i = 0; i < 3; i++) {
+ poseTranslation[i] = position[i];
+ }
+
+ ALOGI("Intrinsics: %f, %f, %f, %f, %f", cameraIntrinsics[0],
+ cameraIntrinsics[1], cameraIntrinsics[2],
+ cameraIntrinsics[3], cameraIntrinsics[4]);
+ ALOGI("Distortion: %f, %f, %f, %f, %f",
+ radialDistortion[0], radialDistortion[1], radialDistortion[2], radialDistortion[3],
+ radialDistortion[4]);
+ ALOGI("Pose rotation: %f, %f, %f, %f",
+ poseRotation[0], poseRotation[1], poseRotation[2], poseRotation[3]);
+ ALOGI("Pose translation: %f, %f, %f",
+ poseTranslation[0], poseTranslation[1], poseTranslation[2]);
+
+ return true;
+}
+
+bool QCamera3HardwareInterface::parseStringArray(const char *str, float *dest, int count) {
+ size_t idx = 0;
+ size_t len = strlen(str);
+ for (; idx < len; idx++) {
+ if (str[idx] == '[') break;
+ }
+ const char *startParam = str + idx + 1;
+ if (startParam >= str + len) {
+ ALOGE("Malformed array: %s", str);
+ return false;
+ }
+ char *endParam = nullptr;
+ for (int i = 0; i < count; i++) {
+ dest[i] = strtod(startParam, &endParam);
+ if (startParam == endParam) {
+ ALOGE("Malformed array, index %d: %s", i, str);
+ return false;
+ }
+ startParam = endParam + 1;
+ if (startParam >= str + len) {
+ ALOGE("Malformed array, index %d: %s", i, str);
+ return false;
+ }
+ }
+ return true;
+}
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
mParent(parent) {}
diff --git a/msm8998/QCamera2/HAL3/QCamera3HWI.h b/msm8998/QCamera2/HAL3/QCamera3HWI.h
index 4eaf8a8..d5cbfbf 100644
--- a/msm8998/QCamera2/HAL3/QCamera3HWI.h
+++ b/msm8998/QCamera2/HAL3/QCamera3HWI.h
@@ -47,6 +47,7 @@
#include "QCameraCommon.h"
#include "QCamera3VendorTags.h"
#include "QCameraDualCamSettings.h"
+#include "QCamera3HdrPlusListenerThread.h"
#include "EaselManagerClient.h"
#include "HdrPlusClient.h"
@@ -484,6 +485,7 @@
int32_t getReprocessibleOutputStreamId(uint32_t &id);
int32_t handleCameraDeviceError(bool stopChannelImmediately = false);
+ bool isEISEnabled(const CameraMetadata& meta);
bool isOnEncoder(const cam_dimension_t max_viewfinder_size,
uint32_t width, uint32_t height);
void hdrPlusPerfLock(mm_camera_super_buf_t *metadata_buf);
@@ -543,6 +545,7 @@
metadata_buffer_t* mParameters;
metadata_buffer_t* mPrevParameters;
CameraMetadata mCurJpegMeta;
+ cam_is_type_t m_ISTypeVideo;
bool m_bIsVideo;
bool m_bIs4KVideo;
bool m_bEisSupportedSize;
@@ -596,6 +599,7 @@
uint8_t capture_intent;
uint8_t fwkCacMode;
uint8_t hybrid_ae_enable;
+ uint8_t motion_detection_enable;
/* DevCamDebug metadata PendingRequestInfo */
uint8_t DevCamDebug_meta_enable;
/* DevCamDebug metadata end */
@@ -609,6 +613,7 @@
uint8_t requestedLensShadingMapMode; // Lens shading map mode for this request.
uint8_t requestedFaceDetectMode; // Face detect mode for this request.
bool partialResultDropped; // Whether partial metadata is dropped.
+ uint8_t requestedOisDataMode; // OIS data mode for this request.
} PendingRequestInfo;
typedef struct {
uint32_t frame_number;
@@ -700,6 +705,8 @@
uint8_t mLastRequestedLensShadingMapMode;
// Last face detect mode framework requsted.
uint8_t mLastRequestedFaceDetectMode;
+ // Last OIS data mode framework requested.
+ uint8_t mLastRequestedOisDataMode;
cam_feature_mask_t mCurrFeatureState;
/* Ldaf calibration data */
@@ -850,6 +857,9 @@
// Easel manager client callbacks.
void onEaselFatalError(std::string errMsg);
+ // Clean up and wait for Easel error future.
+ void cleanupEaselErrorFuture();
+
// HDR+ client callbacks.
void onOpened(std::unique_ptr<HdrPlusClient> client) override;
void onOpenFailed(status_t err) override;
@@ -893,7 +903,23 @@
int32_t mSceneDistance;
+ std::mutex mEaselErrorFutureLock;
std::future<void> mEaselErrorFuture;
+
+ // Thread to handle callbacks from HDR+ client. Protected by gHdrPlusClientLock.
+ sp<QCamera3HdrPlusListenerThread> mQCamera3HdrPlusListenerThread;
+
+ // Read sensor calibration XML file for lens calibration fields. On failure to read
+ // the file, leaves passed-in values unchanged and returns false.
+ static bool readSensorCalibration(int activeArrayWidth,
+ float poseRotation[4], float poseTranslation[3],
+ float cameraIntrinsics[5], float radialDistortion[6]);
+
+ // Parse a string of form " [ x; y; z ...]" into a floating-point array.
+ // Returns false on parse error
+ static bool parseStringArray(const char *str, float *dest, int count);
+
+ float mLastFocusDistance;
};
}; // namespace qcamera
diff --git a/msm8998/QCamera2/HAL3/QCamera3HdrPlusListenerThread.cpp b/msm8998/QCamera2/HAL3/QCamera3HdrPlusListenerThread.cpp
new file mode 100644
index 0000000..355d970
--- /dev/null
+++ b/msm8998/QCamera2/HAL3/QCamera3HdrPlusListenerThread.cpp
@@ -0,0 +1,327 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "QCamera3HdrPlusListenerThread.h"
+
+
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using namespace android;
+
+namespace qcamera {
+
+QCamera3HdrPlusListenerThread::QCamera3HdrPlusListenerThread(
+ HdrPlusClientListener *listener) : mListener(listener), mExitRequested(false),
+ mFatalError(false)
+{
+}
+
+QCamera3HdrPlusListenerThread::~QCamera3HdrPlusListenerThread()
+{
+ requestExit();
+}
+
+void QCamera3HdrPlusListenerThread::onOpened(std::unique_ptr<HdrPlusClient> client)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mClient != nullptr) {
+ ALOGW("%s: An old client exists and will be destroyed.", __FUNCTION__);
+ }
+ mClient = std::move(client);
+ mPendingCallbacks.push(CALLBACK_TYPE_OPENED);
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::onOpenFailed(status_t err)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mOpenError != OK) {
+ ALOGW("%s: An old open failure exists and will be ignored: %s (%d)", __FUNCTION__,
+ strerror(-mOpenError), mOpenError);
+ }
+ mOpenError = err;
+ mPendingCallbacks.push(CALLBACK_TYPE_OPENFAILED);
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::onFatalError()
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mFatalError) {
+ ALOGW("%s: An old fatal failure exists.", __FUNCTION__);
+ }
+ mFatalError = true;
+ mPendingCallbacks.push(CALLBACK_TYPE_FATAL_ERROR);
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::onCaptureResult(pbcamera::CaptureResult *result,
+ const camera_metadata_t &resultMetadata)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+
+ PendingResult pendingResult = {};
+ pendingResult.result = *result;
+ pendingResult.metadata = clone_camera_metadata(&resultMetadata);
+ pendingResult.isFailed = false;
+ mResults.push(pendingResult);
+
+ mPendingCallbacks.push(CALLBACK_TYPE_CAPTURE_RESULT);
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+
+ PendingResult result = {};
+ result.result = *failedResult;
+ result.metadata = nullptr;
+ result.isFailed = true;
+ mResults.push(result);
+
+ mPendingCallbacks.push(CALLBACK_TYPE_CAPTURE_RESULT);
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+
+ std::pair<uint32_t, int64_t> shutter(requestId, apSensorTimestampNs);
+ mShutters.push(shutter);
+
+ mPendingCallbacks.push(CALLBACK_TYPE_SHUTTER);
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::onNextCaptureReady(uint32_t requestId)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ mNextCaptureReadyIds.push(requestId);
+
+ mPendingCallbacks.push(CALLBACK_TYPE_NEXT_CAPTURE_READY);
+ mCallbackCond.notify_one();
+
+}
+
+void QCamera3HdrPlusListenerThread::onPostview(uint32_t requestId,
+ std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
+ uint32_t stride, int32_t format)
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+
+ PendingPostview pendingPostview = {};
+ pendingPostview.requestId = requestId;
+ pendingPostview.postview = std::move(postview);
+ pendingPostview.width = width;
+ pendingPostview.height = height;
+ pendingPostview.stride = stride;
+ pendingPostview.format = format;
+ mPostviews.push(std::move(pendingPostview));
+
+ mPendingCallbacks.push(CALLBACK_TYPE_POSTVIEW);
+ mCallbackCond.notify_one();
+
+}
+
+void QCamera3HdrPlusListenerThread::requestExit()
+{
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ mExitRequested = true;
+ mCallbackCond.notify_one();
+}
+
+void QCamera3HdrPlusListenerThread::handleFatalError()
+{
+    {
+        std::unique_lock<std::mutex> lock(mCallbackLock);
+        if (!mFatalError) {
+            ALOGW("%s: There is no fatal error.", __FUNCTION__);
+            return;
+        }
+    }
+
+    // mFatalError stays latched so repeated fatal errors are logged by
+    // onFatalError(); notify the listener outside the lock.
+
+ mListener->onFatalError();
+}
+
+void QCamera3HdrPlusListenerThread::handlePendingClient()
+{
+ std::unique_ptr<HdrPlusClient> client;
+ {
+ std::unique_lock<std::mutex> lock(mCallbackLock);
+ if (mClient == nullptr) {
+ ALOGW("%s: There is no pending client.", __FUNCTION__);
+ return;
+ }
+
+ client = std::move(mClient);
+ }
+
+ mListener->onOpened(std::move(client));
+}
+
+void QCamera3HdrPlusListenerThread::handleOpenError()
+{
+ status_t err = OK;
+ {
+ std::unique_lock<std::mutex> lock(mCallbackLock);
+ if (mOpenError == OK) {
+ ALOGW("%s: There is no pending open failure.", __FUNCTION__);
+ return;
+ }
+
+ err = mOpenError;
+ mOpenError = OK;
+ }
+
+ mListener->onOpenFailed(err);
+}
+
+void QCamera3HdrPlusListenerThread::handleNextCaptureReady()
+{
+ uint32_t requestId = 0;
+ {
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mNextCaptureReadyIds.size() == 0) {
+ ALOGW("%s: There is no NextCaptureReady.", __FUNCTION__);
+ return;
+ }
+ requestId = mNextCaptureReadyIds.front();
+ mNextCaptureReadyIds.pop();
+ }
+ mListener->onNextCaptureReady(requestId);
+}
+
+void QCamera3HdrPlusListenerThread::handleCaptureResult()
+{
+ PendingResult result = {};
+ {
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mResults.size() == 0) {
+ ALOGW("%s: There is no capture result.", __FUNCTION__);
+ return;
+ }
+ result = mResults.front();
+ mResults.pop();
+ }
+
+ if (result.isFailed) {
+ mListener->onFailedCaptureResult(&result.result);
+ } else {
+ mListener->onCaptureResult(&result.result, *result.metadata);
+ }
+}
+
+void QCamera3HdrPlusListenerThread::handleShutter()
+{
+ uint32_t requestId;
+ int64_t apSensorTimestampNs;
+
+ {
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mShutters.size() == 0) {
+ ALOGW("%s: There is no shutter.", __FUNCTION__);
+            return;
+ }
+
+ auto shutter = mShutters.front();
+ requestId = shutter.first;
+ apSensorTimestampNs = shutter.second;
+ mShutters.pop();
+ }
+
+ mListener->onShutter(requestId, apSensorTimestampNs);
+}
+
+void QCamera3HdrPlusListenerThread::handlePostview()
+{
+ PendingPostview postview = {};
+
+ {
+ std::unique_lock<std::mutex> l(mCallbackLock);
+ if (mPostviews.size() == 0) {
+ ALOGW("%s: There is no postview.", __FUNCTION__);
+            return;
+ }
+
+ postview = std::move(mPostviews.front());
+ mPostviews.pop();
+ }
+
+ mListener->onPostview(postview.requestId, std::move(postview.postview), postview.width,
+ postview.height, postview.stride, postview.format);
+}
+
+bool QCamera3HdrPlusListenerThread::threadLoop()
+{
+ if (mListener == nullptr) {
+ ALOGE("%s: mListener is nullptr.", __FUNCTION__);
+ return false;
+ }
+
+ while (1) {
+ CallbackType nextCallback;
+
+ {
+ std::unique_lock<std::mutex> lock(mCallbackLock);
+ if (!mExitRequested && mPendingCallbacks.size() == 0) {
+ mCallbackCond.wait(lock,
+ [&] { return mExitRequested || mPendingCallbacks.size() > 0; });
+ }
+
+ if (mExitRequested) {
+ return false;
+ } else {
+ nextCallback = mPendingCallbacks.front();
+ mPendingCallbacks.pop();
+ }
+ }
+
+ switch (nextCallback) {
+ case CALLBACK_TYPE_OPENED:
+ handlePendingClient();
+ break;
+ case CALLBACK_TYPE_OPENFAILED:
+ handleOpenError();
+ break;
+ case CALLBACK_TYPE_FATAL_ERROR:
+ handleFatalError();
+ break;
+ case CALLBACK_TYPE_CAPTURE_RESULT:
+ handleCaptureResult();
+ break;
+ case CALLBACK_TYPE_SHUTTER:
+ handleShutter();
+ break;
+ case CALLBACK_TYPE_NEXT_CAPTURE_READY:
+ handleNextCaptureReady();
+ break;
+ case CALLBACK_TYPE_POSTVIEW:
+ handlePostview();
+ break;
+ default:
+ ALOGE("%s: Unknown callback type %d", __FUNCTION__, nextCallback);
+ break;
+ }
+ }
+
+ return false;
+}
+
+}; // namespace qcamera
diff --git a/msm8998/QCamera2/HAL3/QCamera3HdrPlusListenerThread.h b/msm8998/QCamera2/HAL3/QCamera3HdrPlusListenerThread.h
new file mode 100644
index 0000000..e64de36
--- /dev/null
+++ b/msm8998/QCamera2/HAL3/QCamera3HdrPlusListenerThread.h
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef __HDRPLUSCLIENTLISTENERHANDLERTHREAD__
+#define __HDRPLUSCLIENTLISTENERHANDLERTHREAD__
+
+// System dependencies
+#include <utils/Thread.h>
+#include <queue>
+
+#include "EaselManagerClient.h"
+#include "HdrPlusClient.h"
+
+using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
+using namespace android;
+
+namespace qcamera {
+
+/*
+ * A thread to handle callbacks from HDR+ client. When a callback from HDR+ client is invoked,
+ * HDR+ client callback thread will return and the threadloop of QCamera3HdrPlusListenerThread
+ * will call the callback handlers in QCamera3HWI, to avoid deadlock in HDR+ client callback thread.
+ */
+class QCamera3HdrPlusListenerThread : public HdrPlusClientListener, public Thread
+{
+public:
+ // listener is an HdrPlusClientListener to forward the callbacks in the thread loop.
+ QCamera3HdrPlusListenerThread(HdrPlusClientListener *listener);
+ virtual ~QCamera3HdrPlusListenerThread();
+
+ // Request the thread to exit.
+ void requestExit() override;
+
+private:
+ // HDR+ client callbacks.
+ void onOpened(std::unique_ptr<HdrPlusClient> client) override;
+ void onOpenFailed(status_t err) override;
+ void onFatalError() override;
+ void onCaptureResult(pbcamera::CaptureResult *result,
+ const camera_metadata_t &resultMetadata) override;
+ void onFailedCaptureResult(pbcamera::CaptureResult *failedResult) override;
+ void onShutter(uint32_t requestId, int64_t apSensorTimestampNs) override;
+ void onNextCaptureReady(uint32_t requestId) override;
+ void onPostview(uint32_t requestId, std::unique_ptr<std::vector<uint8_t>> postview,
+ uint32_t width, uint32_t height, uint32_t stride, int32_t format) override;
+
+ bool threadLoop() override;
+
+ // The following functions handle the pending callbacks by calling the callback handlers
+ // in QCamera3HWI.
+ bool hasPendingEventsLocked();
+ void handlePendingClient();
+ void handleNextCaptureReady();
+ void handleCaptureResult();
+ void handleFatalError();
+ void handleOpenError();
+ void handleShutter();
+ void handlePostview();
+
+ struct PendingResult {
+ pbcamera::CaptureResult result;
+ camera_metadata_t *metadata;
+ bool isFailed;
+ };
+
+ struct PendingPostview {
+ uint32_t requestId;
+ std::unique_ptr<std::vector<uint8_t>> postview;
+ uint32_t width;
+ uint32_t height;
+ uint32_t stride;
+ int32_t format;
+ };
+
+ enum CallbackType {
+ CALLBACK_TYPE_OPENED = 0,
+ CALLBACK_TYPE_OPENFAILED,
+ CALLBACK_TYPE_FATAL_ERROR,
+ CALLBACK_TYPE_CAPTURE_RESULT,
+ CALLBACK_TYPE_SHUTTER,
+ CALLBACK_TYPE_NEXT_CAPTURE_READY,
+ CALLBACK_TYPE_POSTVIEW,
+ };
+
+ HdrPlusClientListener *mListener;
+
+ std::mutex mCallbackLock;
+
+ // Condition for a new callback. Protected by mCallbackLock.
+ std::condition_variable mCallbackCond;
+ // If exit has been requested. Protected by mCallbackLock.
+ bool mExitRequested;
+
+ // The following variables store pending callbacks. Protected by mCallbackLock.
+ std::unique_ptr<HdrPlusClient> mClient;
+ std::queue<uint32_t> mNextCaptureReadyIds;
+ std::queue<PendingResult> mResults;
+ bool mFatalError;
+ status_t mOpenError;
+ std::queue<std::pair<uint32_t, int64_t>> mShutters;
+ std::queue<PendingPostview> mPostviews;
+
+ // A queue of pending callback types, in the same order as invoked by HDR+ client.
+ // Protected by mCallbackLock.
+ std::queue<CallbackType> mPendingCallbacks;
+};
+
+}; // namespace qcamera
+
+#endif /* __HDRPLUSCLIENTLISTENERHANDLERTHREAD__ */
\ No newline at end of file
diff --git a/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp b/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp
index 82925aa..fc0d8fb 100644
--- a/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3PostProc.cpp
@@ -2173,6 +2173,7 @@
LOGE("no mem for qcamera_hal3_pp_data_t");
ret = -1;
} else if (meta_buffer == NULL) {
+ pme->m_parent->postprocFail(pp_buffer);
LOGE("failed to dequeue from m_inputMetaQ");
ret = -1;
} else if (pp_buffer == NULL) {
diff --git a/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp b/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp
index 6db2cbe..45902ad 100644
--- a/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp
+++ b/msm8998/QCamera2/HAL3/QCamera3VendorTags.cpp
@@ -234,8 +234,8 @@
vendor_tag_info_t
qcamera3_instant_aec[QCAMERA3_INSTANT_AEC_END -
QCAMERA3_INSTANT_AEC_START] = {
- { "instant_aec_mode", TYPE_INT32 },
- { "instant_aec_available_modes", TYPE_INT32 }
+ { "instant_aec_mode", TYPE_BYTE },
+ { "instant_aec_available_modes", TYPE_BYTE }
};
vendor_tag_info_t nexus_experimental_2016[NEXUS_EXPERIMENTAL_2016_END -
@@ -353,6 +353,10 @@
{ "sensorEepromPDAFRightGains", TYPE_BYTE },
{ "sensorEepromPDAFLeftGains", TYPE_BYTE },
{ "sensorEepromPDAFConvCoeff", TYPE_BYTE },
+ { "sensorEepromWbNumLights", TYPE_INT32 },
+ { "sensorEepromWbRGRatios", TYPE_FLOAT },
+ { "sensorEepromWbBGRatios", TYPE_FLOAT },
+ { "sensorEepromWbGrGbRatio", TYPE_FLOAT },
{ "control.tracking_af_trigger", TYPE_BYTE },
{ "control.af_regions_confidence", TYPE_INT32 },
{ "stats.ois_frame_timestamp_vsync", TYPE_INT64 },
@@ -373,6 +377,10 @@
{ "request.continuous_zsl_capture", TYPE_INT32},
{ "request.disable_hdrplus", TYPE_INT32},
{ "control.scene_distance", TYPE_INT32},
+ { "stats.motion_detection_enable", TYPE_BYTE},
+ { "stats.camera_motion_x", TYPE_FLOAT},
+ { "stats.camera_motion_y", TYPE_FLOAT},
+ { "stats.subject_motion", TYPE_FLOAT},
};
vendor_tag_info_t tango_mode_data[TANGO_MODE_DATA_END -
@@ -602,6 +610,10 @@
(uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
(uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
(uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
(uint32_t)NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
(uint32_t)NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
(uint32_t)NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
@@ -622,7 +634,10 @@
(uint32_t)NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE,
(uint32_t)NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS,
(uint32_t)NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
-
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
+ (uint32_t)NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
//TANGO_MODE
(uint32_t)TANGO_MODE_DATA_SENSOR_FULLFOV,
};
diff --git a/msm8998/QCamera2/HAL3/QCamera3VendorTags.h b/msm8998/QCamera2/HAL3/QCamera3VendorTags.h
index e940ff3..c547774 100644
--- a/msm8998/QCamera2/HAL3/QCamera3VendorTags.h
+++ b/msm8998/QCamera2/HAL3/QCamera3VendorTags.h
@@ -433,6 +433,12 @@
NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
+ /* EEPROM WB calibration data */
+ NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_NUM_LIGHTS,
+ NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_R_OVER_G_RATIOS,
+ NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_B_OVER_G_RATIOS,
+ NEXUS_EXPERIMENTAL_2017_EEPROM_WB_CALIB_GR_OVER_GB_RATIO,
+
/* Tracking AF */
NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
@@ -459,6 +465,12 @@
NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS,
NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
+ /* Motion detection */
+ NEXUS_EXPERIMENTAL_2017_MOTION_DETECTION_ENABLE,
+ NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_X,
+ NEXUS_EXPERIMENTAL_2017_CAMERA_MOTION_Y,
+ NEXUS_EXPERIMENTAL_2017_SUBJECT_MOTION,
+
NEXUS_EXPERIMENTAL_2017_END,
/* Select sensor mode for tango */
diff --git a/msm8998/QCamera2/stack/common/cam_intf.h b/msm8998/QCamera2/stack/common/cam_intf.h
index f722915..bd685a5 100644
--- a/msm8998/QCamera2/stack/common/cam_intf.h
+++ b/msm8998/QCamera2/stack/common/cam_intf.h
@@ -654,6 +654,10 @@
/*PDAF calibration data*/
cam_pd_calibration_t pdaf_cal;
+
+ /*White balance calibration data*/
+ cam_wb_calibration_t wb_cal;
+
} cam_capability_t;
typedef enum {
@@ -1146,6 +1150,7 @@
INCLUDE(CAM_INTF_PARM_FOV_COMP_ENABLE, int32_t, 1);
INCLUDE(CAM_INTF_META_LED_CALIB_RESULT, int32_t, 1);
INCLUDE(CAM_INTF_META_HYBRID_AE, uint8_t, 1);
+ INCLUDE(CAM_INTF_META_MOTION_DETECTION_ENABLE, uint8_t, 1);
INCLUDE(CAM_INTF_META_AF_SCENE_CHANGE, uint8_t, 1);
/* DevCamDebug metadata CAM_INTF.H */
INCLUDE(CAM_INTF_META_DEV_CAM_ENABLE, uint8_t, 1);
diff --git a/msm8998/QCamera2/stack/common/cam_types.h b/msm8998/QCamera2/stack/common/cam_types.h
index 82c5f2c..ecdf509 100644
--- a/msm8998/QCamera2/stack/common/cam_types.h
+++ b/msm8998/QCamera2/stack/common/cam_types.h
@@ -53,6 +53,8 @@
#define MAX_PDAF_CALIB_GAINS (25*19)
#define MAX_PDAF_CALIB_COEFF (200)
+#define MAX_WB_CALIB_LIGHTS (16)
+
#define CEILING64(X) (((X) + 0x0003F) & 0xFFFFFFC0)
#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
@@ -629,6 +631,13 @@
} cam_pd_calibration_t;
typedef struct {
+ int32_t num_lights;
+ float r_over_g[MAX_WB_CALIB_LIGHTS];
+ float b_over_g[MAX_WB_CALIB_LIGHTS];
+ float gr_over_gb;
+} cam_wb_calibration_t;
+
+typedef struct {
cam_frame_len_offset_t plane_info;
} cam_stream_buf_plane_info_t;
@@ -2464,6 +2473,8 @@
CAM_INTF_META_LED_CALIB_RESULT,
/* Whether to enable hybrid ae mode */
CAM_INTF_META_HYBRID_AE,
+ /* Whether to enable motion detection */
+ CAM_INTF_META_MOTION_DETECTION_ENABLE,
/* DevCamDebug metadata CAM_TYPES.h */
CAM_INTF_META_DEV_CAM_ENABLE,
/* DevCamDebug metadata CAM_TYPES.h AF */
@@ -2623,6 +2634,8 @@
CAM_INTENT_VIDEO_RECORD,
CAM_INTENT_VIDEO_SNAPSHOT,
CAM_INTENT_ZERO_SHUTTER_LAG,
+ CAM_INTENT_MANUAL,
+ CAM_INTENT_MOTION_TRACKING,
CAM_INTENT_MAX,
} cam_intent_t;
diff --git a/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c b/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
index 6f82ff5..deebb25 100644
--- a/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
+++ b/msm8998/QCamera2/stack/mm-jpeg-interface/src/mm_jpeg_exif.c
@@ -572,18 +572,20 @@
LOGE("Cannot extract SENSOR_SENSITIVITY value");
}
+ int32_t ispSensitivity = 100;
IF_META_AVAILABLE(int32_t, isp_iso, CAM_INTF_META_ISP_SENSITIVITY, p_meta) {
- p_3a_params.iso_value= p_3a_params.iso_value * (*isp_iso) / 100;
+ ispSensitivity = *isp_iso;
} else {
LOGE("Cannot extract ISP_SENSITIVITY value");
}
IF_META_AVAILABLE(float, post_stats_iso, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, p_meta) {
- p_3a_params.iso_value= p_3a_params.iso_value * (*post_stats_iso);
+ ispSensitivity *= *post_stats_iso;
} else {
/* CAM_INTF_META_ISP_POST_STATS_SENSITIVITY is optional */
LOGD("Cannot extract ISP_POST_STATS_SENSITIVITY value");
}
+ p_3a_params.iso_value= p_3a_params.iso_value * ispSensitivity / 100;
IF_META_AVAILABLE(int64_t, sensor_exposure_time,
CAM_INTF_META_SENSOR_EXPOSURE_TIME, p_meta) {
diff --git a/msm8998/QCamera2/util/QCameraPerf.cpp b/msm8998/QCamera2/util/QCameraPerf.cpp
index 615af8f..fbdac42 100644
--- a/msm8998/QCamera2/util/QCameraPerf.cpp
+++ b/msm8998/QCamera2/util/QCameraPerf.cpp
@@ -49,6 +49,55 @@
namespace qcamera {
+using android::hidl::base::V1_0::IBase;
+using android::hardware::hidl_death_recipient;
+
+static std::mutex gPowerHalMutex;
+static sp<IPower> gPowerHal = nullptr;
+static void getPowerHalLocked();
+
+// struct PowerHalDeathRecipient;
+struct PowerHalDeathRecipient : virtual public hidl_death_recipient {
+ // hidl_death_recipient interface
+ virtual void serviceDied(uint64_t, const wp<IBase>&) override {
+ std::lock_guard<std::mutex> lock(gPowerHalMutex);
+ ALOGE("PowerHAL just died");
+ gPowerHal = nullptr;
+ getPowerHalLocked();
+ }
+};
+
+sp<PowerHalDeathRecipient> gPowerHalDeathRecipient = nullptr;
+
+// The caller must be holding gPowerHalMutex.
+static void getPowerHalLocked() {
+ if (gPowerHal != nullptr) {
+ return;
+ }
+
+ gPowerHal = IPower::getService();
+
+ if (gPowerHal == nullptr) {
+ ALOGE("Unable to get Power service.");
+ } else {
+ if (gPowerHalDeathRecipient == nullptr) {
+ gPowerHalDeathRecipient = new PowerHalDeathRecipient();
+ }
+ hardware::Return<bool> linked = gPowerHal->linkToDeath(
+ gPowerHalDeathRecipient, 0x451F /* cookie */);
+ if (!linked.isOk()) {
+ ALOGE("Transaction error in linking to PowerHAL death: %s",
+ linked.description().c_str());
+ gPowerHal = nullptr;
+ } else if (!linked) {
+ ALOGW("Unable to link to PowerHal death notifications");
+ gPowerHal = nullptr;
+ } else {
+ ALOGD("Link to death notification successful");
+ }
+ }
+}
+
typedef enum {
MPCTLV3_MIN_FREQ_CLUSTER_BIG_CORE_0 = 0x40800000,
MPCTLV3_MIN_FREQ_CLUSTER_BIG_CORE_1 = 0x40800010,
@@ -281,11 +330,11 @@
void QCameraPerfLockMgr::powerHintInternal(
PerfLockEnum perfLockType,
PowerHint powerHint,
- bool enable)
+ int32_t time_out)
{
if ((mState == LOCK_MGR_STATE_READY) &&
isValidPerfLockEnum(perfLockType)) {
- mPerfLock[perfLockType]->powerHintInternal(powerHint, enable);
+ mPerfLock[perfLockType]->powerHintInternal(powerHint, time_out);
}
}
@@ -421,10 +470,24 @@
bool ret = true;
Mutex::Autolock lock(mMutex);
- if ((mPerfLockType == PERF_LOCK_POWERHINT_PREVIEW) ||
- (mPerfLockType == PERF_LOCK_POWERHINT_ENCODE)) {
- powerHintInternal(PowerHint::VIDEO_ENCODE, true);
- return true;
+ switch (mPerfLockType) {
+ case PERF_LOCK_POWERHINT_PREVIEW:
+ case PERF_LOCK_POWERHINT_ENCODE:
+ powerHintInternal(PowerHint::VIDEO_ENCODE, true);
+ return true;
+ case PERF_LOCK_OPEN_CAMERA:
+ case PERF_LOCK_CLOSE_CAMERA:
+ powerHintInternal(PowerHint::CAMERA_LAUNCH, timer);
+ return true;
+ case PERF_LOCK_START_PREVIEW:
+ powerHintInternal(PowerHint::CAMERA_STREAMING, timer);
+ return true;
+ case PERF_LOCK_TAKE_SNAPSHOT:
+ powerHintInternal(PowerHint::CAMERA_SHOT, timer);
+ return true;
+ default:
+ LOGE("Unknown powerhint %d",(int)mPerfLockType);
+ return false;
}
if (!mIsPerfdEnabled) return ret;
@@ -473,10 +536,24 @@
bool ret = true;
Mutex::Autolock lock(mMutex);
- if ((mPerfLockType == PERF_LOCK_POWERHINT_PREVIEW) ||
- (mPerfLockType == PERF_LOCK_POWERHINT_ENCODE)) {
- powerHintInternal(PowerHint::VIDEO_ENCODE, false);
- return true;
+ switch (mPerfLockType) {
+ case PERF_LOCK_POWERHINT_PREVIEW:
+ case PERF_LOCK_POWERHINT_ENCODE:
+ powerHintInternal(PowerHint::VIDEO_ENCODE, false);
+ return true;
+ case PERF_LOCK_OPEN_CAMERA:
+ case PERF_LOCK_CLOSE_CAMERA:
+ powerHintInternal(PowerHint::CAMERA_LAUNCH, false);
+ return true;
+ case PERF_LOCK_START_PREVIEW:
+ powerHintInternal(PowerHint::CAMERA_STREAMING, false);
+ return true;
+ case PERF_LOCK_TAKE_SNAPSHOT:
+ powerHintInternal(PowerHint::CAMERA_SHOT, false);
+ return true;
+ default:
+ LOGE("Unknown powerhint %d",(int)mPerfLockType);
+ return false;
}
if (!mIsPerfdEnabled) return ret;
@@ -519,10 +596,10 @@
*==========================================================================*/
void QCameraPerfLock::powerHintInternal(
PowerHint powerHint,
- bool enable)
+ int32_t time_out)
{
#ifdef HAS_MULTIMEDIA_HINTS
- if (!mPerfLockIntf->powerHint(powerHint, enable)) {
+ if (!mPerfLockIntf->powerHint(powerHint, time_out)) {
LOGE("Send powerhint to PowerHal failed");
}
#endif
@@ -559,8 +636,9 @@
mInstance = new QCameraPerfLockIntf();
if (mInstance) {
#ifdef HAS_MULTIMEDIA_HINTS
- mInstance->mPowerHal = IPower::getService();
- if (mInstance->mPowerHal == nullptr) {
+ std::lock_guard<std::mutex> lock(gPowerHalMutex);
+ getPowerHalLocked();
+ if (gPowerHal == nullptr) {
ALOGE("Couldn't load PowerHAL module");
}
else
@@ -581,6 +659,13 @@
error = false;
} else {
LOGE("Failed to link the symbols- perf_lock_acq, perf_lock_rel");
+ bool IsPerfdEnabled = android::base::GetBoolProperty("persist.camera.perfd.enable", false);
+ if (!IsPerfdEnabled) {
+ mInstance->mDlHandle = nullptr;
+ mInstance->mPerfLockAcq = nullptr;
+ mInstance->mPerfLockRel = nullptr;
+ error = false;
+ }
}
} else {
LOGE("Unable to load lib: %s", value);
@@ -640,4 +725,18 @@
}
}
+bool QCameraPerfLockIntf::powerHint(PowerHint hint, int32_t data) {
+ std::lock_guard<std::mutex> lock(gPowerHalMutex);
+ getPowerHalLocked();
+ if (gPowerHal == nullptr) {
+ ALOGE("Couldn't do powerHint because of HAL error.");
+ return false;
+ }
+ auto ret = gPowerHal->powerHintAsync_1_2(hint, data);
+ if (!ret.isOk()) {
+ ALOGE("powerHint failed error: %s", ret.description().c_str());
+ }
+ return ret.isOk();
+}
+
}; // namespace qcamera
diff --git a/msm8998/QCamera2/util/QCameraPerf.h b/msm8998/QCamera2/util/QCameraPerf.h
index d5b2cc2..7385196 100644
--- a/msm8998/QCamera2/util/QCameraPerf.h
+++ b/msm8998/QCamera2/util/QCameraPerf.h
@@ -34,11 +34,11 @@
#include <utils/Mutex.h>
// Camera dependencies
-#include <android/hardware/power/1.1/IPower.h>
+#include <android/hardware/power/1.2/IPower.h>
using namespace android;
-using android::hardware::power::V1_1::IPower;
-using android::hardware::power::V1_0::PowerHint;
+using android::hardware::power::V1_2::IPower;
+using android::hardware::power::V1_2::PowerHint;
using ::android::hardware::Return;
using ::android::hardware::Void;
@@ -86,7 +86,7 @@
bool releasePerfLock();
bool acquirePerfLock(bool forceReacquirePerfLock,
uint32_t timer = DEFAULT_PERF_LOCK_TIMEOUT_MS);
- void powerHintInternal(PowerHint powerHint, bool enable);
+ void powerHintInternal(PowerHint powerHint, int32_t time_out);
protected:
QCameraPerfLock(PerfLockEnum perfLockType, QCameraPerfLockIntf *perfLockIntf);
@@ -115,7 +115,6 @@
uint32_t mRefCount;
perfLockAcquire mPerfLockAcq;
perfLockRelease mPerfLockRel;
- sp<IPower> mPowerHal;
void *mDlHandle;
protected:
@@ -128,9 +127,7 @@
inline perfLockAcquire perfLockAcq() { return mPerfLockAcq; }
inline perfLockRelease perfLockRel() { return mPerfLockRel; }
- inline bool powerHint(PowerHint hint, int32_t data) {
- return ((mPowerHal == nullptr) ? false : mPowerHal->powerHintAsync(hint, data).isOk());
- }
+ bool powerHint(PowerHint hint, int32_t data);
};
@@ -144,7 +141,7 @@
bool acquirePerfLockIfExpired(PerfLockEnum perfLockRnum,
uint32_t timer = DEFAULT_PERF_LOCK_TIMEOUT_MS);
- void powerHintInternal(PerfLockEnum perfLockType, PowerHint powerHint, bool enable);
+ void powerHintInternal(PerfLockEnum perfLockType, PowerHint powerHint, int32_t time_out);
private:
PerfLockMgrStateEnum mState;