blob: 6a37c650ac4c5148e91d27289c2b883da7c0d009 [file] [log] [blame]
/* Copyright (c) 2012-2013, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0
#define __STDC_LIMIT_MACROS
#include <cutils/properties.h>
#include <hardware/camera3.h>
#include <camera/CameraMetadata.h>
#include <stdio.h>
#include <stdlib.h>
#include <fcntl.h>
#include <stdint.h>
#include <utils/Log.h>
#include <utils/Errors.h>
#include <ui/Fence.h>
#include <gralloc_priv.h>
#include "QCamera3HWI.h"
#include "QCamera3Mem.h"
#include "QCamera3Channel.h"
#include "QCamera3PostProc.h"
#include "QCamera3VendorTags.h"
using namespace android;
namespace qcamera {
/* Generic max helper (classic macro form; arguments are evaluated twice). */
#define MAX(a, b) ((a) > (b) ? (a) : (b))
/* Shorthand for fetching the INDEX-th buffer pointer from a QCamera3Mem object. */
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
/* Number of frames the pipeline needs before the first result can be produced
 * when no reprocessing is involved (reported to the framework). */
#define EMPTY_PIPELINE_DELAY 2
/* Number of partial metadata results delivered per capture request. */
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 1
/* 4K UHD dimensions; used to detect 4K video streams and cap JPEG size. */
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160
/* Per-configuration stream count limits enforced in configureStreams(). */
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Per-sensor capability tables, populated before any HAL instance is
 * constructed (the constructor dereferences gCamCapability[cameraId]). */
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
/* Cached static metadata handed to the framework, one entry per sensor. */
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
/* Runtime-adjustable log verbosity (see getLogLevel()). */
volatile uint32_t gCamHal3LogLevel = 1;
/* Maps the persist-property CDS (Chroma Denoise/Suppression) string values to
 * backend enums. */
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
{"On", CAM_CDS_MODE_ON},
{"Off", CAM_CDS_MODE_OFF},
{"Auto",CAM_CDS_MODE_AUTO}
};
/* The QCameraMap tables below translate between Android metadata enums and
 * the mm-camera backend enums, in both directions. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
{ ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
{ ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
{ ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
{ ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
{ ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
{ ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
{ ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
{ ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
{ ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
{ ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
{ ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
{ ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
{ ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
{ ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
{ ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
{ ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
{ ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
{ ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
{ ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
{ ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
{ ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
{ ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
{ ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
{ ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
{ ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
{ ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
{ ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
{ ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
{ ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
{ ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
{ ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
{ ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
{ ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
{ ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE}
};
/* NOTE: ANDROID_CONTROL_AF_MODE_OFF appears twice on purpose — both
 * CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED map back to it; HAL-to-Android
 * lookups take the first match, so the OFF entry must come first. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
{ ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
{ ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
{ ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
{ ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
{ ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
{ ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
{ ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
{ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
CAM_COLOR_CORRECTION_ABERRATION_OFF },
{ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
CAM_COLOR_CORRECTION_ABERRATION_FAST },
{ ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
{ ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
/* AE mode controls flash decisions: plain ON and OFF both mean "no flash";
 * both AUTO_FLASH variants map to the backend's auto flash. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
{ ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
{ ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
{ ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
{ ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
{ ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
{ ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
{ ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
{ ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};
/* Only OFF/FULL are supported; SIMPLE face detect mode is intentionally
 * absent from this table. */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
{ ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
{ ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
{ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
CAM_FOCUS_UNCALIBRATED },
{ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
CAM_FOCUS_APPROXIMATE },
{ ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
CAM_FOCUS_CALIBRATED }
};
/* Advertised JPEG thumbnail sizes as (width, height) pairs; the leading
 * (0, 0) entry means "no thumbnail". */
const int32_t available_thumbnail_sizes[] = {0, 0,
176, 144,
320, 240,
432, 288,
480, 288,
512, 288,
512, 384};
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
{ ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
{ ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
};
/* Since there is no mapping for all the options, some Android enums are not
 * listed. Also, the order in this list is important: when mapping from HAL to
 * Android the lookup traverses from lower to higher index, so for HAL values
 * that map to multiple Android values the first entry found wins.
 */
const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
{ ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
/* HAL3 device ops vtable handed to the framework via camera3_device_t::ops.
 * register_stream_buffers and get_metadata_vendor_tag_ops are NULL because
 * both are deprecated as of CAMERA_DEVICE_API_VERSION_3_2 (the version this
 * device advertises in its constructor). Uses GNU designated-initializer
 * syntax (field: value). */
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
initialize: QCamera3HardwareInterface::initialize,
configure_streams: QCamera3HardwareInterface::configure_streams,
register_stream_buffers: NULL,
construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings,
process_capture_request: QCamera3HardwareInterface::process_capture_request,
get_metadata_vendor_tag_ops: NULL,
dump: QCamera3HardwareInterface::dump,
flush: QCamera3HardwareInterface::flush,
reserved: {0},
};
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, wires up the camera3_device_t that
 *              the framework calls through, and creates the mutex/condvar
 *              used to serialize capture requests. Assumes
 *              gCamCapability[cameraId] has already been populated.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID (index into the gCamCapability table)
 *   @callbacks : module-level callbacks from the camera service
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId,
const camera_module_callbacks_t *callbacks)
: mCameraId(cameraId),
mCameraHandle(NULL),
mCameraOpened(false),
mCameraInitialized(false),
mCallbackOps(NULL),
mInputStream(NULL),
mMetadataChannel(NULL),
mPictureChannel(NULL),
mRawChannel(NULL),
mSupportChannel(NULL),
mRawDumpChannel(NULL),
mFirstRequest(false),
mFlush(false),
mParamHeap(NULL),
mParameters(NULL),
m_bIsVideo(false),
m_bIs4KVideo(false),
mEisEnable(0),
mLoopBackResult(NULL),
mMinProcessedFrameDuration(0),
mMinJpegFrameDuration(0),
mMinRawFrameDuration(0),
m_pPowerModule(NULL),
mMetaFrameCount(0),
mCallbacks(callbacks),
mCaptureIntent(0)
{
// Read the persisted log-level property before anything else logs.
getLogLevel();
// Fill in the camera3_device_t the framework will use to reach this HAL.
mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_2;
mCameraDevice.common.close = close_camera_device;
mCameraDevice.ops = &mCameraOps;
mCameraDevice.priv = this;
// NOTE(review): dereferences the global capability entry with no NULL
// check — callers must guarantee the table is populated for this id.
gCamCapability[cameraId]->version = CAM_HAL_V3;
// TODO: hardcode for now until mctl add support for min_num_pp_bufs
//TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
gCamCapability[cameraId]->min_num_pp_bufs = 3;
pthread_cond_init(&mRequestCond, NULL);
mPendingRequest = 0;
mCurrentRequestId = -1;
pthread_mutex_init(&mMutex, NULL);
for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
mDefaultMetadata[i] = NULL;
#ifdef HAS_MULTIMEDIA_HINTS
// Power module is optional; failure only disables power hints.
if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
}
#endif
// Debug aid: dump raw frames when persist.camera.raw.dump is set.
char prop[PROPERTY_VALUE_MAX];
property_get("persist.camera.raw.dump", prop, "0");
mEnableRawDump = atoi(prop);
if (mEnableRawDump)
CDBG("%s: Raw dump from Camera HAL enabled", __func__);
}
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Tears down in a
 *              strict order: stop every stream first, then delete channels,
 *              then deinit parameters and close the camera, and finally
 *              release framework bookkeeping and synchronization primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
CDBG("%s: E", __func__);
/* We need to stop all streams before deleting any stream */
if (mRawDumpChannel) {
mRawDumpChannel->stop();
}
// NOTE: 'camera3_stream_t *' objects are already freed at
// this stage by the framework
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel = (*it)->channel;
if (channel) {
channel->stop();
}
}
if (mSupportChannel)
mSupportChannel->stop();
/* Second pass: with everything stopped, it is now safe to delete the
 * channels and the malloc'ed stream_info records. */
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel = (*it)->channel;
if (channel)
delete channel;
free (*it);
}
if (mSupportChannel) {
delete mSupportChannel;
mSupportChannel = NULL;
}
if (mRawDumpChannel) {
delete mRawDumpChannel;
mRawDumpChannel = NULL;
}
// mPictureChannel is owned via mStreamInfo above; just clear the alias.
mPictureChannel = NULL;
/* Clean up all channels */
if (mCameraInitialized) {
if (mMetadataChannel) {
mMetadataChannel->stop();
delete mMetadataChannel;
mMetadataChannel = NULL;
}
// Parameters were only allocated if initialize() succeeded.
deinitParameters();
}
if (mCameraOpened)
closeCamera();
mPendingBuffersMap.mPendingBufferList.clear();
mPendingRequestsList.clear();
mPendingReprocessResultList.clear();
// Release any default request templates built for the framework.
for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
if (mDefaultMetadata[i])
free_camera_metadata(mDefaultMetadata[i]);
pthread_cond_destroy(&mRequestCond);
pthread_mutex_destroy(&mMutex);
CDBG("%s: X", __func__);
}
/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Event callback registered with mm-camera-interface; forwards
 *              fatal daemon-death events to the framework as a device error
 *              and logs anything else.
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle (unused)
 *   @evt           : ptr to event
 *   @user_data     : opaque ptr to the owning QCamera3HardwareInterface
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
                                             mm_camera_event_t *evt,
                                             void *user_data)
{
    QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)user_data;
    if (hw == NULL || evt == NULL) {
        ALOGE("%s: NULL user_data/evt", __func__);
        return;
    }

    if (evt->server_event_type == CAM_EVENT_TYPE_DAEMON_DIED) {
        // The backend daemon is gone; notify the framework so it can tear
        // the device down.
        ALOGE("%s: Fatal, camera daemon died", __func__);
        camera3_notify_msg_t notify_msg;
        memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
        notify_msg.type = CAMERA3_MSG_ERROR;
        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
        notify_msg.message.error.error_stream = NULL;
        notify_msg.message.error.frame_number = 0;
        hw->mCallbackOps->notify(hw->mCallbackOps, &notify_msg);
    } else {
        CDBG_HIGH("%s: Warning: Unhandled event %d", __func__,
                  evt->server_event_type);
    }
}
/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera and hand the framework the hw_device_t handle;
 *              on success also raises the video-encode power hint.
 *
 * PARAMETERS :
 *   @hw_device : double ptr for camera device struct; set to NULL on failure
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    if (mCameraOpened) {
        // A second open on an already-open device is rejected.
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    int rc = openCamera();
    *hw_device = (rc == 0) ? &mCameraDevice.common : NULL;

#ifdef HAS_MULTIMEDIA_HINTS
    // Take the encode power hint; released again in closeCamera().
    if (rc == 0 && m_pPowerModule != NULL && m_pPowerModule->powerHint != NULL) {
        m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                (void *)"state=1");
    }
#endif
    return rc;
}
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* none-zero failure code
*==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
int rc = 0;
if (mCameraHandle) {
ALOGE("Failure: Camera already opened");
return ALREADY_EXISTS;
}
mCameraHandle = camera_open(mCameraId);
if (!mCameraHandle) {
ALOGE("camera_open failed.");
return UNKNOWN_ERROR;
}
mCameraOpened = true;
rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle, (void *)this);
if (rc < 0) {
ALOGE("%s: Error, failed to register event callback", __func__);
/* Not closing camera here since it is already handled in destructor */
return FAILED_TRANSACTION;
}
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : closeCamera
*
* DESCRIPTION: close camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* none-zero failure code
*==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
int rc = NO_ERROR;
rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
mCameraHandle = NULL;
mCameraOpened = false;
#ifdef HAS_MULTIMEDIA_HINTS
if (rc == NO_ERROR) {
if (m_pPowerModule) {
if (m_pPowerModule->powerHint) {
m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
(void *)"state=0");
}
}
}
#endif
return rc;
}
/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions and the HAL
 *              parameter heap. Holds mMutex while mutating member state.
 *
 * PARAMETERS :
 *   @callback_ops : callback function table provided by the framework
 *
 * RETURN     : 0 on success, negative error code from initParameters()
 *              otherwise
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    pthread_mutex_lock(&mMutex);

    int rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
        pthread_mutex_unlock(&mMutex);
        return rc;
    }
    mCallbackOps = callback_ops;

    pthread_mutex_unlock(&mMutex);
    // Flag consulted by the destructor to decide whether parameters and the
    // metadata channel need tearing down.
    mCameraInitialized = true;
    return 0;
}
/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check that every requested stream size matches one the HAL
 *              advertises for that stream's format (raw table, generated
 *              JPEG table, or processed picture-size table).
 *
 * PARAMETERS :
 *   @streamList : streams to be configured
 *
 * RETURN     : NO_ERROR if all sizes are supported, -EINVAL otherwise
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
camera3_stream_configuration_t *streamList)
{
int rc = NO_ERROR;
// Scratch tables of (width, height) pairs, hence the * 2.
int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2];
uint8_t jpeg_sizes_cnt = 0;
/*
* Loop through all streams requested in configuration
* Check if unsupported sizes have been requested on any of them
*/
for (size_t j = 0; j < streamList->num_streams; j++){
bool sizeFound = false;
camera3_stream_t *newStream = streamList->streams[j];
/*
* Sizes are different for each type of stream format check against
* appropriate table.
*/
switch (newStream->format) {
case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW10:
// Raw streams must exactly match one of the sensor's raw dims.
for (int i = 0;
i < gCamCapability[mCameraId]->supported_raw_dim_cnt; i++){
if (gCamCapability[mCameraId]->raw_dim[i].width
== (int32_t) newStream->width
&& gCamCapability[mCameraId]->raw_dim[i].height
== (int32_t) newStream->height) {
sizeFound = true;
break;
}
}
break;
case HAL_PIXEL_FORMAT_BLOB:
/* Generate JPEG sizes table */
makeTable(gCamCapability[mCameraId]->picture_sizes_tbl,
gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
available_processed_sizes);
// Filter the picture sizes down to those reachable from the active
// array within the maximum downscale factor.
jpeg_sizes_cnt = filterJpegSizes(
available_jpeg_sizes,
available_processed_sizes,
(gCamCapability[mCameraId]->picture_sizes_tbl_cnt) * 2,
MAX_SIZES_CNT * 2,
gCamCapability[mCameraId]->active_array_size,
gCamCapability[mCameraId]->max_downscale_factor);
/* Verify set size against generated sizes table */
for (int i = 0;i < jpeg_sizes_cnt/2; i++) {
if ((int32_t)(newStream->width) == available_jpeg_sizes[i*2] &&
(int32_t)(newStream->height) == available_jpeg_sizes[i*2+1]) {
sizeFound = true;
break;
}
}
break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
default:
/* ZSL stream will be full active array size validate that*/
if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
if ((int32_t)(newStream->width) ==
gCamCapability[mCameraId]->active_array_size.width
&& (int32_t)(newStream->height) ==
gCamCapability[mCameraId]->active_array_size.height) {
sizeFound = true;
}
/* We could potentially break here to enforce ZSL stream
* set from frameworks always has full active array size
* but it is not clear from spec if framework will always
* follow that, also we have logic to override to full array
* size, so keeping this logic lenient at the moment.
*/
}
/* Non ZSL stream still need to conform to advertised sizes*/
for (int i = 0;
i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt;i++){
if ((int32_t)(newStream->width) ==
gCamCapability[mCameraId]->picture_sizes_tbl[i].width
&& (int32_t)(newStream->height) ==
gCamCapability[mCameraId]->picture_sizes_tbl[i].height){
sizeFound = true;
break;
}
}
break;
} /* End of switch(newStream->format) */
/* We error out even if a single stream has unsupported size set */
if (!sizeFound) {
ALOGE("%s: Error: Unsupported size of %d x %d requested for stream"
"type:%d", __func__, newStream->width, newStream->height,
newStream->format);
rc = -EINVAL;
break;
}
} /* End of for each stream */
return rc;
}
/*===========================================================================
* FUNCTION : configureStreams
*
* DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
* and output streams.
*
* PARAMETERS :
* @stream_list : streams to be configured
*
* RETURN :
*
*==========================================================================*/
int QCamera3HardwareInterface::configureStreams(
camera3_stream_configuration_t *streamList)
{
int rc = 0;
// Sanity check stream_list
if (streamList == NULL) {
ALOGE("%s: NULL stream configuration", __func__);
return BAD_VALUE;
}
if (streamList->streams == NULL) {
ALOGE("%s: NULL stream list", __func__);
return BAD_VALUE;
}
if (streamList->num_streams < 1) {
ALOGE("%s: Bad number of streams requested: %d", __func__,
streamList->num_streams);
return BAD_VALUE;
}
/* first invalidate all the steams in the mStreamList
* if they appear again, they will be validated */
for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
channel->stop();
(*it)->status = INVALID;
}
if (mRawDumpChannel) {
mRawDumpChannel->stop();
delete mRawDumpChannel;
mRawDumpChannel = NULL;
}
if (mSupportChannel)
mSupportChannel->stop();
if (mMetadataChannel) {
/* If content of mStreamInfo is not 0, there is metadata stream */
mMetadataChannel->stop();
}
pthread_mutex_lock(&mMutex);
/* Check whether we have video stream */
m_bIs4KVideo = false;
m_bIsVideo = false;
bool isZsl = false;
size_t videoWidth = 0;
size_t videoHeight = 0;
size_t rawStreamCnt = 0;
size_t stallStreamCnt = 0;
size_t processedStreamCnt = 0;
// Number of streams on ISP encoder path
size_t numStreamsOnEncoder = 0;
cam_dimension_t maxViewfinderSize;
bool bJpegExceeds4K = false;
maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
CDBG_HIGH("%s: stream[%d] type = %d, format = %d, width = %d, height = %d",
__func__, i, newStream->stream_type, newStream->format,
newStream->width, newStream->height);
if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
isZsl = true;
}
if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
if (newStream->width > VIDEO_4K_WIDTH ||
newStream->height > VIDEO_4K_HEIGHT)
bJpegExceeds4K = true;
}
if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
(newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
m_bIsVideo = true;
if ((VIDEO_4K_WIDTH <= newStream->width) &&
(VIDEO_4K_HEIGHT <= newStream->height)) {
videoWidth = newStream->width;
videoHeight = newStream->height;
m_bIs4KVideo = true;
}
}
if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
switch (newStream->format) {
case HAL_PIXEL_FORMAT_BLOB:
stallStreamCnt++;
if (newStream->width > (uint32_t)maxViewfinderSize.width ||
newStream->height > (uint32_t)maxViewfinderSize.height)
numStreamsOnEncoder++;
break;
case HAL_PIXEL_FORMAT_RAW10:
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW16:
rawStreamCnt++;
break;
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
case HAL_PIXEL_FORMAT_YCbCr_420_888:
default:
processedStreamCnt++;
if (newStream->width > (uint32_t)maxViewfinderSize.width ||
newStream->height > (uint32_t)maxViewfinderSize.height)
numStreamsOnEncoder++;
break;
}
}
}
/* Check if num_streams is sane */
if (stallStreamCnt > MAX_STALLING_STREAMS ||
rawStreamCnt > MAX_RAW_STREAMS ||
processedStreamCnt > MAX_PROCESSED_STREAMS) {
ALOGE("%s: Invalid stream configu: stall: %d, raw: %d, processed %d",
__func__, stallStreamCnt, rawStreamCnt, processedStreamCnt);
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
/* Check whether we have zsl stream or 4k video case */
if (isZsl && m_bIsVideo) {
ALOGE("%s: Currently invalid configuration ZSL&Video!", __func__);
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
/* Check if stream sizes are sane */
if (numStreamsOnEncoder > 2) {
ALOGE("%s: Number of streams on ISP encoder path exceeds limits of 2",
__func__);
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
/* Check if BLOB size is greater than 4k in 4k recording case */
if (m_bIs4KVideo && bJpegExceeds4K) {
ALOGE("%s: HAL doesn't support Blob size greater than 4k in 4k recording",
__func__);
pthread_mutex_unlock(&mMutex);
return -EINVAL;
}
rc = validateStreamDimensions(streamList);
if (rc != NO_ERROR) {
ALOGE("%s: Invalid stream configuration requested!", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
camera3_stream_t *inputStream = NULL;
camera3_stream_t *jpegStream = NULL;
cam_stream_size_info_t stream_config_info;
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
CDBG_HIGH("%s: newStream type = %d, stream format = %d stream size : %d x %d",
__func__, newStream->stream_type, newStream->format,
newStream->width, newStream->height);
//if the stream is in the mStreamList validate it
bool stream_exists = false;
for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
if ((*it)->stream == newStream) {
QCamera3Channel *channel =
(QCamera3Channel*)(*it)->stream->priv;
stream_exists = true;
delete channel;
(*it)->status = VALID;
(*it)->stream->priv = NULL;
(*it)->channel = NULL;
}
}
if (!stream_exists) {
//new stream
stream_info_t* stream_info;
stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
stream_info->stream = newStream;
stream_info->status = VALID;
stream_info->channel = NULL;
mStreamInfo.push_back(stream_info);
}
if (newStream->stream_type == CAMERA3_STREAM_INPUT
|| newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
if (inputStream != NULL) {
ALOGE("%s: Multiple input streams requested!", __func__);
pthread_mutex_unlock(&mMutex);
return BAD_VALUE;
}
inputStream = newStream;
}
if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
jpegStream = newStream;
}
}
mInputStream = inputStream;
cleanAndSortStreamInfo();
if (mMetadataChannel) {
delete mMetadataChannel;
mMetadataChannel = NULL;
}
if (mSupportChannel) {
delete mSupportChannel;
mSupportChannel = NULL;
}
/* get eis information for stream configuration */
cam_is_type_t is_type;
char is_type_value[PROPERTY_VALUE_MAX];
property_get("camera.is_type", is_type_value, "4");
is_type = static_cast<cam_is_type_t>(atoi(is_type_value));
//for camera use case, front camcorder and 4k video, no eis
if (gCamCapability[mCameraId]->position != CAM_POSITION_BACK ||
!m_bIsVideo || m_bIs4KVideo) {
is_type = IS_TYPE_NONE;
}
//Create metadata channel and initialize it
mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
mCameraHandle->ops, captureResultCb,
&gCamCapability[mCameraId]->padding_info, CAM_QCOM_FEATURE_NONE, is_type, this);
if (mMetadataChannel == NULL) {
ALOGE("%s: failed to allocate metadata channel", __func__);
rc = -ENOMEM;
pthread_mutex_unlock(&mMutex);
return rc;
}
rc = mMetadataChannel->initialize();
if (rc < 0) {
ALOGE("%s: metadata channel initialization failed", __func__);
delete mMetadataChannel;
mMetadataChannel = NULL;
pthread_mutex_unlock(&mMutex);
return rc;
}
/* Create dummy stream if there is one single raw or jpeg stream */
if (streamList->num_streams == 1 &&
(streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW10 ||
streamList->streams[0]->format == HAL_PIXEL_FORMAT_RAW16 ||
streamList->streams[0]->format == HAL_PIXEL_FORMAT_BLOB)) {
mSupportChannel = new QCamera3SupportChannel(
mCameraHandle->camera_handle,
mCameraHandle->ops,
&gCamCapability[mCameraId]->padding_info,
CAM_QCOM_FEATURE_NONE,
is_type,
this);
if (!mSupportChannel) {
ALOGE("%s: dummy channel cannot be created", __func__);
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
}
bool isRawStreamRequested = false;
/* Allocate channel objects for the requested streams */
for (size_t i = 0; i < streamList->num_streams; i++) {
camera3_stream_t *newStream = streamList->streams[i];
uint32_t stream_usage = newStream->usage;
stream_config_info.stream_sizes[i].width = newStream->width;
stream_config_info.stream_sizes[i].height = newStream->height;
if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED && jpegStream){
//for zsl stream the size is active array size
stream_config_info.stream_sizes[i].width =
gCamCapability[mCameraId]->active_array_size.width;
stream_config_info.stream_sizes[i].height =
gCamCapability[mCameraId]->active_array_size.height;
stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
} else {
//for non zsl streams find out the format
switch (newStream->format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
{
if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
stream_config_info.type[i] = CAM_STREAM_TYPE_VIDEO;
} else {
stream_config_info.type[i] = CAM_STREAM_TYPE_PREVIEW;
}
stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
}
break;
case HAL_PIXEL_FORMAT_YCbCr_420_888:
stream_config_info.type[i] = CAM_STREAM_TYPE_CALLBACK;
stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
break;
case HAL_PIXEL_FORMAT_BLOB:
stream_config_info.type[i] = CAM_STREAM_TYPE_SNAPSHOT;
if (m_bIsVideo && !isZsl) {
stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_PP_SUPERSET;
} else {
stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
}
if (isZsl) {
stream_config_info.stream_sizes[i].width =
gCamCapability[mCameraId]->active_array_size.width;
stream_config_info.stream_sizes[i].height =
gCamCapability[mCameraId]->active_array_size.height;
} else if (m_bIs4KVideo) {
stream_config_info.stream_sizes[i].width = videoWidth;
stream_config_info.stream_sizes[i].height = videoHeight;
}
break;
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_RAW10:
stream_config_info.type[i] = CAM_STREAM_TYPE_RAW;
isRawStreamRequested = true;
break;
default:
stream_config_info.type[i] = CAM_STREAM_TYPE_DEFAULT;
stream_config_info.postprocess_mask[i] = CAM_QCOM_FEATURE_NONE;
break;
}
}
if (newStream->priv == NULL) {
//New stream, construct channel
switch (newStream->stream_type) {
case CAMERA3_STREAM_INPUT:
newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ;
break;
case CAMERA3_STREAM_BIDIRECTIONAL:
newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ |
GRALLOC_USAGE_HW_CAMERA_WRITE;
break;
case CAMERA3_STREAM_OUTPUT:
/* For video encoding stream, set read/write rarely
* flag so that they may be set to un-cached */
if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
newStream->usage =
(GRALLOC_USAGE_SW_READ_RARELY |
GRALLOC_USAGE_SW_WRITE_RARELY |
GRALLOC_USAGE_HW_CAMERA_WRITE);
else
newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE;
break;
default:
ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type);
break;
}
if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
QCamera3Channel *channel = NULL;
switch (newStream->format) {
case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
case HAL_PIXEL_FORMAT_YCbCr_420_888:
newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers;
channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
mCameraHandle->ops, captureResultCb,
&gCamCapability[mCameraId]->padding_info,
this,
newStream,
(cam_stream_type_t) stream_config_info.type[i],
stream_config_info.postprocess_mask[i],
is_type);
if (channel == NULL) {
ALOGE("%s: allocation of channel failed", __func__);
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->priv = channel;
break;
case HAL_PIXEL_FORMAT_RAW_OPAQUE:
case HAL_PIXEL_FORMAT_RAW16:
case HAL_PIXEL_FORMAT_RAW10:
newStream->max_buffers = QCamera3RawChannel::kMaxBuffers;
mRawChannel = new QCamera3RawChannel(
mCameraHandle->camera_handle,
mCameraHandle->ops, captureResultCb,
&gCamCapability[mCameraId]->padding_info,
this, newStream, CAM_QCOM_FEATURE_NONE,
is_type,
(newStream->format == HAL_PIXEL_FORMAT_RAW16));
if (mRawChannel == NULL) {
ALOGE("%s: allocation of raw channel failed", __func__);
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->priv = (QCamera3Channel*)mRawChannel;
break;
case HAL_PIXEL_FORMAT_BLOB:
newStream->max_buffers = QCamera3PicChannel::kMaxBuffers;
mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle,
mCameraHandle->ops, captureResultCb,
&gCamCapability[mCameraId]->padding_info, this, newStream,
stream_config_info.postprocess_mask[i],
m_bIs4KVideo, is_type, mMetadataChannel);
if (mPictureChannel == NULL) {
ALOGE("%s: allocation of channel failed", __func__);
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
newStream->priv = (QCamera3Channel*)mPictureChannel;
break;
default:
ALOGE("%s: not a supported format 0x%x", __func__, newStream->format);
break;
}
}
for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
if ((*it)->stream == newStream) {
(*it)->channel = (QCamera3Channel*) newStream->priv;
break;
}
}
} else {
// Channel already exists for this stream
// Do nothing for now
}
}
if (isZsl) {
mPictureChannel->overrideYuvSize(
gCamCapability[mCameraId]->active_array_size.width,
gCamCapability[mCameraId]->active_array_size.height);
} else if (mPictureChannel && m_bIs4KVideo) {
mPictureChannel->overrideYuvSize(videoWidth, videoHeight);
}
//RAW DUMP channel
if (mEnableRawDump && isRawStreamRequested == false){
cam_dimension_t rawDumpSize;
rawDumpSize = getMaxRawSize(mCameraId);
mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
mCameraHandle->ops,
rawDumpSize,
&gCamCapability[mCameraId]->padding_info,
this, CAM_QCOM_FEATURE_NONE, is_type);
if (!mRawDumpChannel) {
ALOGE("%s: Raw Dump channel cannot be created", __func__);
pthread_mutex_unlock(&mMutex);
return -ENOMEM;
}
}
stream_config_info.num_streams = streamList->num_streams;
if (mSupportChannel) {
stream_config_info.stream_sizes[stream_config_info.num_streams] =
QCamera3SupportChannel::kDim;
stream_config_info.type[stream_config_info.num_streams] =
CAM_STREAM_TYPE_CALLBACK;
stream_config_info.num_streams++;
}
if (mRawDumpChannel) {
cam_dimension_t rawSize;
rawSize = getMaxRawSize(mCameraId);
stream_config_info.stream_sizes[stream_config_info.num_streams] =
rawSize;
stream_config_info.type[stream_config_info.num_streams] =
CAM_STREAM_TYPE_RAW;
stream_config_info.num_streams++;
}
// settings/parameters don't carry over for new configureStreams
int32_t hal_version = CAM_HAL_V3;
memset(mParameters, 0, sizeof(metadata_buffer_t));
AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
sizeof(hal_version), &hal_version);
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_STREAM_INFO,
sizeof(cam_stream_size_info_t), &stream_config_info);
int32_t tintless_value = 1;
AddSetParmEntryToBatch(mParameters,CAM_INTF_PARM_TINTLESS,
sizeof(tintless_value), &tintless_value);
//If EIS is enabled, turn it on for video
int32_t vsMode;
if (gCamCapability[mCameraId]->position == CAM_POSITION_BACK &&
mEisEnable && m_bIsVideo && !m_bIs4KVideo){
vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON;
} else {
vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
}
rc = AddSetParmEntryToBatch(mParameters,
CAM_INTF_PARM_DIS_ENABLE,
sizeof(vsMode), &vsMode);
mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
/* Initialize mPendingRequestInfo and mPendnigBuffersMap */
mPendingRequestsList.clear();
mPendingFrameDropList.clear();
// Initialize/Reset the pending buffers list
mPendingBuffersMap.num_buffers = 0;
mPendingBuffersMap.mPendingBufferList.clear();
mPendingReprocessResultList.clear();
mFirstRequest = true;
//Get min frame duration for this streams configuration
deriveMinFrameDuration();
pthread_mutex_unlock(&mMutex);
return rc;
}
/*===========================================================================
* FUNCTION : validateCaptureRequest
*
* DESCRIPTION: validate a capture request from camera service
*
* PARAMETERS :
* @request : request from framework to process
*
* RETURN :
*
*==========================================================================*/
int QCamera3HardwareInterface::validateCaptureRequest(
                    camera3_capture_request_t *request)
{
    ssize_t idx = 0;
    const camera3_stream_buffer_t *b;
    CameraMetadata meta;

    // A NULL request can never be serviced.
    if (request == NULL) {
        ALOGE("%s: NULL capture request", __func__);
        return BAD_VALUE;
    }

    // The very first request after configuration must carry settings.
    if (request->settings == NULL && mFirstRequest) {
        /*settings cannot be null for the first request*/
        return BAD_VALUE;
    }

    uint32_t frameNumber = request->frame_number;

    // An input buffer, when present, must come from the configured
    // input stream.
    if (request->input_buffer != NULL &&
            request->input_buffer->stream != mInputStream) {
        ALOGE("%s: Request %d: Input buffer not from input stream!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }

    // At least one output buffer is mandatory.
    if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
        ALOGE("%s: Request %d: No output buffers provided!",
                __FUNCTION__, frameNumber);
        return BAD_VALUE;
    }

    // Validate the optional input buffer: it must belong to a configured
    // stream, be in OK status, carry no release fence, and have a handle.
    if (request->input_buffer != NULL) {
        b = request->input_buffer;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
    }

    // Validate every output buffer with the same four checks.
    for (idx = 0; idx < (ssize_t)request->num_output_buffers; idx++) {
        b = request->output_buffers + idx;
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: Unconfigured stream!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %ld: Status not OK!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %ld: Has a release fence!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %ld: NULL buffer handle!",
                    __func__, frameNumber, (long)idx);
            return BAD_VALUE;
        }
    }
    return NO_ERROR;
}
/*===========================================================================
* FUNCTION : deriveMinFrameDuration
*
* DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
* on currently configured streams.
*
* PARAMETERS : NONE
*
* RETURN : NONE
*
*==========================================================================*/
void QCamera3HardwareInterface::deriveMinFrameDuration()
{
    // Largest pixel areas seen among the configured streams, grouped
    // by stream class (jpeg / raw / processed).
    int32_t largestJpegArea = 0;
    int32_t largestProcessedArea = 0;
    int32_t largestRawArea = 0;

    // Pass 1: classify each configured stream by format and record the
    // largest area in each class.
    for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        // Input stream doesn't have valid stream_type
        if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
            continue;
        int32_t area = (*it)->stream->width * (*it)->stream->height;
        switch ((*it)->stream->format) {
        case HAL_PIXEL_FORMAT_BLOB:
            if (area > largestJpegArea)
                largestJpegArea = area;
            break;
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
        case HAL_PIXEL_FORMAT_RAW16:
            if (area > largestRawArea)
                largestRawArea = area;
            break;
        default:
            if (area > largestProcessedArea)
                largestProcessedArea = area;
            break;
        }
    }

    // Assume all jpeg dimensions are in processed dimensions.
    if (largestJpegArea > largestProcessedArea)
        largestProcessedArea = largestJpegArea;

    // Find the smallest supported raw dimension that is greater than or
    // equal to the (jpeg-inclusive) processed dimension.
    if (largestProcessedArea > largestRawArea) {
        largestRawArea = INT32_MAX;
        for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
                i++) {
            int32_t area =
                gCamCapability[mCameraId]->raw_dim[i].width *
                gCamCapability[mCameraId]->raw_dim[i].height;
            if (area >= largestProcessedArea && area < largestRawArea)
                largestRawArea = area;
        }
    }

    // Pass 2: translate the chosen raw dimension into its minimum
    // frame duration.
    for (int i = 0; i < gCamCapability[mCameraId]->supported_raw_dim_cnt;
            i++) {
        if (largestRawArea == gCamCapability[mCameraId]->raw_dim[i].width *
                gCamCapability[mCameraId]->raw_dim[i].height) {
            mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
            break;
        }
    }

    // Pass 3: translate the processed dimension into the processed and
    // jpeg minimum frame durations (both come from the picture table).
    for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
        if (largestProcessedArea ==
                gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
                gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
            mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
            mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
            break;
        }
    }
}
/*===========================================================================
* FUNCTION : getMinFrameDuration
*
* DESCRIPTION: get minimum frame duration based on the current maximum frame durations
* and current request configuration.
*
* PARAMETERS : @request: request sent by the frameworks
*
* RETURN : min frame duration for a particular request
*
*==========================================================================*/
int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
{
    // Fix: the original also computed a `hasRawStream` flag from the RAW
    // formats, but the flag was never read — that dead scan is removed.
    //
    // A JPEG (BLOB) output forces the JPEG minimum duration to be taken
    // into account; the raw and processed minimums always apply.
    bool hasJpegStream = false;
    for (uint32_t i = 0; i < request->num_output_buffers; i++) {
        if (request->output_buffers[i].stream->format == HAL_PIXEL_FORMAT_BLOB) {
            hasJpegStream = true;
            break;
        }
    }

    // Base duration: slower of the raw and processed minimums
    // (as derived by deriveMinFrameDuration()).
    int64_t minDuration = MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
    if (hasJpegStream)
        minDuration = MAX(minDuration, mMinJpegFrameDuration);
    return minDuration;
}
/*===========================================================================
* FUNCTION : handlePendingReprocResults
*
* DESCRIPTION: check and notify on any pending reprocess results
*
* PARAMETERS :
* @frame_number : Pending request frame number
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    // Locate the cached reprocess result for this frame, if any.
    List<PendingReprocessResult>::iterator pendingResult =
        mPendingReprocessResultList.begin();
    while (pendingResult != mPendingReprocessResultList.end() &&
            pendingResult->frame_number != frame_number) {
        pendingResult++;
    }
    if (pendingResult == mPendingReprocessResultList.end()) {
        // Nothing cached for this frame number.
        return NO_ERROR;
    }

    // Deliver the deferred shutter notification first.
    mCallbackOps->notify(mCallbackOps, &pendingResult->notify_msg);
    CDBG("%s: Delayed reprocess notify %d", __func__,
            frame_number);

    // Pair it with the matching pending request and send the capture result.
    for (List<PendingRequestInfo>::iterator req = mPendingRequestsList.begin();
            req != mPendingRequestsList.end(); req++) {
        if (req->frame_number != pendingResult->frame_number)
            continue;

        CDBG("%s: Found reprocess frame number %d in pending reprocess List "
                "Take it out!!", __func__,
                req->frame_number);

        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingResult->buffer;
        result.input_buffer = req->input_buffer;
        result.result = req->settings;
        result.partial_result = PARTIAL_RESULT_COUNT;
        mCallbackOps->process_capture_result(mCallbackOps, &result);

        mPendingRequestsList.erase(req);
        mPendingRequest--;
        break;
    }

    mPendingReprocessResultList.erase(pendingResult);
    return NO_ERROR;
}
/*===========================================================================
* FUNCTION : handleMetadataWithLock
*
* DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
*
* PARAMETERS : @metadata_buf: metadata buffer
*
* RETURN :
*
*==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf)
{
    // Caller holds mMutex (see function header).
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    // Frame bookkeeping fields published by the backend inside the
    // metadata buffer.
    int32_t frame_number_valid = *(int32_t *)
        POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t frame_number = *(uint32_t *)
        POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    nsecs_t capture_time = *(int64_t *)
        POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    cam_frame_dropped_t cam_frame_drop = *(cam_frame_dropped_t *)
        POINTER_OF_META(CAM_INTF_META_FRAME_DROPPED, metadata);
    camera3_notify_msg_t notify_msg;
    // "Urgent" frame numbers carry early (partial) 3A results ahead of
    // the full metadata for that frame.
    int32_t urgent_frame_number_valid = *(int32_t *)
        POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t urgent_frame_number = *(uint32_t *)
        POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    if (urgent_frame_number_valid) {
        CDBG("%s: valid urgent frame_number = %d, capture_time = %lld",
            __func__, urgent_frame_number, capture_time);
        //Received an urgent Frame Number, handle it
        //using partial results
        for (List<PendingRequestInfo>::iterator i =
            mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            CDBG("%s: Iterator Frame = %d urgent frame = %d",
                __func__, i->frame_number, urgent_frame_number);
            // Older pending requests that never got a partial result mean
            // the HAL missed their urgent metadata — log but carry on.
            if (i->frame_number < urgent_frame_number &&
                i->partial_result_cnt == 0) {
                ALOGE("%s: Error: HAL missed urgent metadata for frame number %d",
                    __func__, i->frame_number);
            }
            // Deliver the partial (3A) result once per frame.
            if (i->frame_number == urgent_frame_number &&
                i->bUrgentReceived == 0) {
                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));
                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: urgent frame_number = %d, capture_time = %lld",
                    __func__, result.frame_number, capture_time);
                // The translated metadata was allocated by the translate
                // call; release it after the framework has consumed it.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }
    // A metadata buffer without a valid frame number only marks
    // start-of-frame; return the buffer and skip result delivery.
    if (!frame_number_valid) {
        CDBG("%s: Not a valid normal frame number, used as SOF only", __func__);
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
        goto done_metadata;
    }
    CDBG("%s: valid frame_number = %d, capture_time = %lld", __func__,
        frame_number, capture_time);
    // Go through the pending requests info and send shutter/results to frameworks.
    // Note: the loop has no increment clause — the iterator is advanced by
    // the erase() at the bottom of the loop body.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        CDBG("%s: frame_number in the list is %d", __func__, i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;
        // Flush out all entries with less or equal frame numbers.
        mPendingRequest--;
        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        if (cam_frame_drop.frame_dropped) {
            // NOTE(review): this declaration shadows the notify_msg at
            // function scope.
            camera3_notify_msg_t notify_msg;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < cam_frame_drop.cam_stream_ID.num_streams; k++) {
                    if (streamID == cam_frame_drop.cam_stream_ID.streamID[k]) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        CDBG("%s: Start of reporting error frame#=%d, streamID=%d",
                            __func__, i->frame_number, streamID);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        CDBG("%s: End of reporting error frame#=%d, streamID=%d",
                            __func__, i->frame_number, streamID);
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        // so the buffer path can flag the buffer as ERROR.
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                    }
                }
            }
        }
        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        if (i->frame_number < frame_number) {
            // This request's own metadata never arrived: synthesize a
            // shutter with a timestamp extrapolated backwards from the
            // current capture time (NSEC_PER_33MSEC per missed frame,
            // i.e. assuming roughly 30fps — see constant definition).
            camera3_notify_msg_t notify_msg;
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = capture_time -
                (urgent_frame_number - i->frame_number) * NSEC_PER_33MSEC;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = notify_msg.message.shutter.timestamp;
            CDBG("%s: Support notification !!!! notify frame_number = %d, capture_time = %lld",
                __func__, i->frame_number, notify_msg.message.shutter.timestamp);
            // Minimal stand-in metadata: just timestamp and request id.
            CameraMetadata dummyMetadata;
            dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                &i->timestamp, 1);
            dummyMetadata.update(ANDROID_REQUEST_ID,
                &(i->request_id), 1);
            result.result = dummyMetadata.release();
        } else {
            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->timestamp = capture_time;
            result.result = translateFromHalMetadata(metadata,
                i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                i->capture_intent);
            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                            mMetaFrameCount,
                            enabled,
                            "Snapshot",
                            frame_number);
                    }
                }
                // Snapshot in flight: the picture channel takes ownership
                // of the metadata buffer for reprocessing.
                mPictureChannel->queueReprocMetadata(metadata_buf);
            } else {
                // Return metadata buffer
                mMetadataChannel->bufDone(metadata_buf);
                free(metadata_buf);
            }
        }
        if (!result.result) {
            ALOGE("%s: metadata is NULL", __func__);
        }
        result.frame_number = i->frame_number;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers already cached for this request by the
        // buffer callback path (handleBufferWithLock).
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
            j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }
        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            // NOTE(review): operator new throws on failure by default, so
            // this NULL check likely never fires unless a non-throwing
            // allocator is configured — confirm build settings.
            if (!result_buffers) {
                ALOGE("%s: Fatal error: out of memory", __func__);
            }
            size_t result_buffers_idx = 0;
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->buffer) {
                    // Mark the buffer as ERROR if a frame-drop entry was
                    // recorded for this stream/frame pair above.
                    for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                        m != mPendingFrameDropList.end(); m++) {
                        QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                        uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                        if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                            j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                            CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                                __func__, frame_number, streamID);
                            m = mPendingFrameDropList.erase(m);
                            break;
                        }
                    }
                    // Retire the buffer from the global pending-buffers map.
                    for (List<PendingBufferInfo>::iterator k =
                        mPendingBuffersMap.mPendingBufferList.begin();
                        k != mPendingBuffersMap.mPendingBufferList.end(); k++) {
                        if (k->buffer == j->buffer->buffer) {
                            CDBG("%s: Found buffer %p in pending buffer List "
                                "for frame %d, Take it out!!", __func__,
                                k->buffer, k->frame_number);
                            mPendingBuffersMap.num_buffers--;
                            k = mPendingBuffersMap.mPendingBufferList.erase(k);
                            break;
                        }
                    }
                    // Hand the cached buffer copy to the result and free
                    // the heap copy made by handleBufferWithLock.
                    result_buffers[result_buffers_idx++] = *(j->buffer);
                    free(j->buffer);
                    j->buffer = NULL;
                }
            }
            result.output_buffers = result_buffers;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s: meta frame_number = %d, capture_time = %lld",
                __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
            delete[] result_buffers;
        } else {
            // Metadata-only result; buffers will follow later via
            // handleBufferWithLock.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            CDBG("%s: meta frame_number = %d, capture_time = %lld",
                __func__, result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }
        // erase the element from the list (this also advances the iterator
        // for the enclosing for loop).
        i = mPendingRequestsList.erase(i);
        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }
done_metadata:
    // Every still-pending request has now seen one more pipeline stage.
    for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    unblockRequestIfNecessary();
}
/*===========================================================================
* FUNCTION : handleBufferWithLock
*
* DESCRIPTION: Handles image buffer callback with mMutex lock held.
*
* PARAMETERS : @buffer: image buffer for the callback
* @frame_number: frame number of the image buffer
*
* RETURN :
*
*==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // The metadata for this frame was already delivered, so this
        // buffer can be sent to the framework as a standalone result.
        // Verify all pending requests frame_numbers are greater
        for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
            j != mPendingRequestsList.end(); j++) {
            if (j->frame_number < frame_number) {
                ALOGE("%s: Error: pending frame number %d is smaller than %d",
                    __func__, j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // Flag the buffer as ERROR if a frame drop was recorded for this
        // stream/frame pair by handleMetadataWithLock.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
            m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                CDBG("%s: Stream STATUS_ERROR frame_number=%d, streamID=%d",
                    __func__, frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        CDBG("%s: result frame_number = %d, buffer = %p",
            __func__, frame_number, buffer->buffer);
        // Retire the buffer from the global pending-buffers map.
        for (List<PendingBufferInfo>::iterator k =
            mPendingBuffersMap.mPendingBufferList.begin();
            k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer->buffer) {
                CDBG("%s: Found Frame buffer, take it out from list",
                    __func__);
                mPendingBuffersMap.num_buffers--;
                k = mPendingBuffersMap.mPendingBufferList.erase(k);
                break;
            }
        }
        CDBG("%s: mPendingBuffersMap.num_buffers = %d",
            __func__, mPendingBuffersMap.num_buffers);
        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: synthesize the shutter timestamp from
            // the input settings when available, else use current time.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    ALOGE("%s: No timestamp in input settings! Using current one.",
                        __func__);
                }
            } else {
                ALOGE("%s: Input settings missing!", __func__);
            }
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = capture_time;
            // Wait for the input buffer's release fence before returning it.
            sp<Fence> releaseFence = new Fence(i->input_buffer->release_fence);
            int32_t rc = releaseFence->wait(Fence::TIMEOUT_NEVER);
            if (rc != OK) {
                ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
            }
            // Retire the output buffer from the global pending-buffers map.
            for (List<PendingBufferInfo>::iterator k =
                mPendingBuffersMap.mPendingBufferList.begin();
                k != mPendingBuffersMap.mPendingBufferList.end(); k++ ) {
                if (k->buffer == buffer->buffer) {
                    CDBG("%s: Found Frame buffer, take it out from list",
                        __func__);
                    mPendingBuffersMap.num_buffers--;
                    k = mPendingBuffersMap.mPendingBufferList.erase(k);
                    break;
                }
            }
            CDBG("%s: mPendingBuffersMap.num_buffers = %d",
                __func__, mPendingBuffersMap.num_buffers);
            // Results must be returned in frame-number order: only notify
            // now if no older request is still pending.
            bool notifyNow = true;
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    notifyNow = false;
                    break;
                }
            }
            if (notifyNow) {
                camera3_capture_result result;
                memset(&result, 0, sizeof(camera3_capture_result));
                result.frame_number = frame_number;
                result.result = i->settings;
                result.input_buffer = i->input_buffer;
                result.num_output_buffers = 1;
                result.output_buffers = buffer;
                result.partial_result = PARTIAL_RESULT_COUNT;
                mCallbackOps->notify(mCallbackOps, &notify_msg);
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                CDBG("%s: Notify reprocess now %d!", __func__, frame_number);
                i = mPendingRequestsList.erase(i);
                mPendingRequest--;
            } else {
                // Cache reprocess result for later delivery by
                // handlePendingReprocResults.
                PendingReprocessResult pendingResult;
                memset(&pendingResult, 0, sizeof(PendingReprocessResult));
                pendingResult.notify_msg = notify_msg;
                pendingResult.buffer = *buffer;
                pendingResult.frame_number = frame_number;
                mPendingReprocessResultList.push_back(pendingResult);
                CDBG("%s: Cache reprocess result %d!", __func__, frame_number);
            }
        } else {
            // Metadata not delivered yet: cache a heap copy of the buffer
            // on the request entry; handleMetadataWithLock will attach it
            // to the metadata result and free it.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        CDBG("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
}
/*===========================================================================
* FUNCTION : unblockRequestIfNecessary
*
* DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
* that mMutex is held when this function is called.
*
* PARAMETERS :
*
* RETURN :
*
*==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request: wake any thread waiting on
    // mRequestCond. Per the function header, mMutex is already held by
    // the caller when this is invoked.
    pthread_cond_signal(&mRequestCond);
}
/*===========================================================================
* FUNCTION : processCaptureRequest
*
* DESCRIPTION: process a capture request from camera service
*
* PARAMETERS :
* @request : request from framework to process
*
* RETURN :
*
*==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
camera3_capture_request_t *request)
{
int rc = NO_ERROR;
int32_t request_id;
CameraMetadata meta;
pthread_mutex_lock(&mMutex);
rc = validateCaptureRequest(request);
if (rc != NO_ERROR) {
ALOGE("%s: incoming request is not valid", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
meta = request->settings;
// For first capture request, send capture intent, and
// stream on all streams
if (mFirstRequest) {
for (size_t i = 0; i < request->num_output_buffers; i++) {
const camera3_stream_buffer_t& output = request->output_buffers[i];
QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
rc = channel->registerBuffer(output.buffer);
if (rc < 0) {
ALOGE("%s: registerBuffer failed",
__func__);
pthread_mutex_unlock(&mMutex);
return -ENODEV;
}
}
if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
int32_t hal_version = CAM_HAL_V3;
uint8_t captureIntent =
meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
mCaptureIntent = captureIntent;
memset(mParameters, 0, sizeof(parm_buffer_t));
AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION,
sizeof(hal_version), &hal_version);
AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT,
sizeof(captureIntent), &captureIntent);
mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
mParameters);
}
//First initialize all streams
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
rc = channel->initialize();
if (NO_ERROR != rc) {
ALOGE("%s : Channel initialization failed %d", __func__, rc);
pthread_mutex_unlock(&mMutex);
return rc;
}
}
if (mRawDumpChannel) {
rc = mRawDumpChannel->initialize();
if (rc != NO_ERROR) {
ALOGE("%s: Error: Raw Dump Channel init failed", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
}
if (mSupportChannel) {
rc = mSupportChannel->initialize();
if (rc < 0) {
ALOGE("%s: Support channel initialization failed", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
}
//Then start them.
CDBG_HIGH("%s: Start META Channel", __func__);
mMetadataChannel->start();
if (mSupportChannel) {
rc = mSupportChannel->start();
if (rc < 0) {
ALOGE("%s: Support channel start failed", __func__);
mMetadataChannel->stop();
pthread_mutex_unlock(&mMutex);
return rc;
}
}
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
CDBG_HIGH("%s: Start Regular Channel mask=%d", __func__, channel->getStreamTypeMask());
channel->start();
}
if (mRawDumpChannel) {
CDBG("%s: Starting raw dump stream",__func__);
rc = mRawDumpChannel->start();
if (rc != NO_ERROR) {
ALOGE("%s: Error Starting Raw Dump Channel", __func__);
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel =
(QCamera3Channel *)(*it)->stream->priv;
ALOGE("%s: Stopping Regular Channel mask=%d", __func__,
channel->getStreamTypeMask());
channel->stop();
}
if (mSupportChannel)
mSupportChannel->stop();
mMetadataChannel->stop();
pthread_mutex_unlock(&mMutex);
return rc;
}
}
}
uint32_t frameNumber = request->frame_number;
cam_stream_ID_t streamID;
if (meta.exists(ANDROID_REQUEST_ID)) {
request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
mCurrentRequestId = request_id;
CDBG("%s: Received request with id: %d",__func__, request_id);
} else if (mFirstRequest || mCurrentRequestId == -1){
ALOGE("%s: Unable to find request id field, \
& no previous id available", __func__);
return NAME_NOT_FOUND;
} else {
CDBG("%s: Re-using old request id", __func__);
request_id = mCurrentRequestId;
}
CDBG("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d",
__func__, __LINE__,
request->num_output_buffers,
request->input_buffer,
frameNumber);
// Acquire all request buffers first
streamID.num_streams = 0;
int blob_request = 0;
uint32_t snapshotStreamId = 0;
for (size_t i = 0; i < request->num_output_buffers; i++) {
const camera3_stream_buffer_t& output = request->output_buffers[i];
QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
sp<Fence> acquireFence = new Fence(output.acquire_fence);
if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
//Call function to store local copy of jpeg data for encode params.
blob_request = 1;
snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
}
rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
if (rc != OK) {
ALOGE("%s: fence wait failed %d", __func__, rc);
pthread_mutex_unlock(&mMutex);
return rc;
}
streamID.streamID[streamID.num_streams] =
channel->getStreamID(channel->getStreamTypeMask());
streamID.num_streams++;
}
if (blob_request && mRawDumpChannel) {
CDBG("%s: Trigger Raw based on blob request if Raw dump is enabled", __func__);
streamID.streamID[streamID.num_streams] =
mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
streamID.num_streams++;
}
if(request->input_buffer == NULL) {
rc = setFrameParameters(request, streamID, snapshotStreamId);
if (rc < 0) {
ALOGE("%s: fail to set frame parameters", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
} else {
sp<Fence> acquireFence = new Fence(request->input_buffer->acquire_fence);
rc = acquireFence->wait(Fence::TIMEOUT_NEVER);
if (rc != OK) {
ALOGE("%s: input buffer fence wait failed %d", __func__, rc);
pthread_mutex_unlock(&mMutex);
return rc;
}
}
/* Update pending request list and pending buffers map */
PendingRequestInfo pendingRequest;
pendingRequest.frame_number = frameNumber;
pendingRequest.num_buffers = request->num_output_buffers;
pendingRequest.request_id = request_id;
pendingRequest.blob_request = blob_request;
pendingRequest.bUrgentReceived = 0;
pendingRequest.input_buffer = request->input_buffer;
pendingRequest.settings = request->settings;
pendingRequest.pipeline_depth = 0;
pendingRequest.partial_result_cnt = 0;
extractJpegMetadata(pendingRequest.jpegMetadata, request);
//extract capture intent
if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
mCaptureIntent =
meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
}
pendingRequest.capture_intent = mCaptureIntent;
for (size_t i = 0; i < request->num_output_buffers; i++) {
RequestedBufferInfo requestedBuf;
requestedBuf.stream = request->output_buffers[i].stream;
requestedBuf.buffer = NULL;
pendingRequest.buffers.push_back(requestedBuf);
// Add to buffer handle the pending buffers list
PendingBufferInfo bufferInfo;
bufferInfo.frame_number = frameNumber;
bufferInfo.buffer = request->output_buffers[i].buffer;
bufferInfo.stream = request->output_buffers[i].stream;
mPendingBuffersMap.mPendingBufferList.push_back(bufferInfo);
mPendingBuffersMap.num_buffers++;
CDBG("%s: frame = %d, buffer = %p, stream = %p, stream format = %d",
__func__, frameNumber, bufferInfo.buffer, bufferInfo.stream,
bufferInfo.stream->format);
}
CDBG("%s: mPendingBuffersMap.num_buffers = %d",
__func__, mPendingBuffersMap.num_buffers);
mPendingRequestsList.push_back(pendingRequest);
if(mFlush) {
pthread_mutex_unlock(&mMutex);
return NO_ERROR;
}
// Notify metadata channel we receive a request
mMetadataChannel->request(NULL, frameNumber);
// Call request on other streams
for (size_t i = 0; i < request->num_output_buffers; i++) {
const camera3_stream_buffer_t& output = request->output_buffers[i];
QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
if (channel == NULL) {
ALOGE("%s: invalid channel pointer for stream", __func__);
continue;
}
if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
QCamera3RegularChannel* inputChannel = NULL;
if(request->input_buffer != NULL){
//Try to get the internal format
inputChannel = (QCamera3RegularChannel*)
request->input_buffer->stream->priv;
if(inputChannel == NULL ){
ALOGE("%s: failed to get input channel handle", __func__);
pthread_mutex_unlock(&mMutex);
return NO_INIT;
}
metadata_buffer_t reproc_meta;
rc = setReprocParameters(request, &reproc_meta, snapshotStreamId);
if (NO_ERROR == rc) {
rc = channel->request(output.buffer, frameNumber,
request->input_buffer, &reproc_meta);
if (rc < 0) {
ALOGE("%s: Fail to request on picture channel", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
} else {
ALOGE("%s: fail to set reproc parameters", __func__);
pthread_mutex_unlock(&mMutex);
return rc;
}
} else
rc = channel->request(output.buffer, frameNumber,
NULL, mParameters);
} else {
CDBG("%s: %d, request with buffer %p, frame_number %d", __func__,
__LINE__, output.buffer, frameNumber);
rc = channel->request(output.buffer, frameNumber);
}
if (rc < 0)
ALOGE("%s: request failed", __func__);
}
if(request->input_buffer == NULL) {
/*set the parameters to backend*/
mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
}
mFirstRequest = false;
// Added a timed condition wait
struct timespec ts;
uint8_t isValidTimeout = 1;
rc = clock_gettime(CLOCK_REALTIME, &ts);
if (rc < 0) {
isValidTimeout = 0;
ALOGE("%s: Error reading the real time clock!!", __func__);
}
else {
// Make timeout as 5 sec for request to be honored
ts.tv_sec += 5;
}
//Block on conditional variable
mPendingRequest++;
while (mPendingRequest >= MAX_INFLIGHT_REQUESTS) {
if (!isValidTimeout) {
CDBG("%s: Blocking on conditional wait", __func__);
pthread_cond_wait(&mRequestCond, &mMutex);
}
else {
CDBG("%s: Blocking on timed conditional wait", __func__);
rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
if (rc == ETIMEDOUT) {
rc = -ENODEV;
ALOGE("%s: Unblocked on timeout!!!!", __func__);
break;
}
}
CDBG("%s: Unblocked", __func__);
}
pthread_mutex_unlock(&mMutex);
return rc;
}
/*===========================================================================
* FUNCTION : dump
*
* DESCRIPTION: Dump HAL state to the given file descriptor: the pending
* request list, the pending buffer map, and the pending
* frame-drop list, each as a formatted table.
*
* PARAMETERS :
* @fd : file descriptor to write the dump output to
*
* RETURN : none
*==========================================================================*/
/**
 * Dump HAL state (pending requests, pending buffers, pending frame drops)
 * to the supplied file descriptor as formatted tables.
 *
 * Holds mMutex for the duration so the lists are not mutated mid-dump.
 *
 * Fix: List::size() returns size_t, which must not be passed to a %d
 * conversion (undefined behavior on LP64 targets where size_t is 64-bit
 * but int is 32-bit). Cast the sizes explicitly to int for printing.
 */
void QCamera3HardwareInterface::dump(int fd)
{
    pthread_mutex_lock(&mMutex);
    dprintf(fd, "\n Camera HAL3 information Begin \n");

    dprintf(fd, "\nNumber of pending requests: %d \n",
        (int)mPendingRequestsList.size());
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    dprintf(fd, " Frame | Number of Buffers |   Req Id:   | Blob Req | Input buffer present\n");
    dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
    for(List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        i != mPendingRequestsList.end(); i++) {
        dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
        i->frame_number, i->num_buffers, i->request_id, i->blob_request,
        i->input_buffer);
    }

    dprintf(fd, "\nPending buffer map: Number of buffers: %d\n",
        (int)mPendingBuffersMap.num_buffers);
    dprintf(fd, "-------+-------------\n");
    dprintf(fd, " Frame | Stream type \n");
    dprintf(fd, "-------+-------------\n");
    for(List<PendingBufferInfo>::iterator i =
        mPendingBuffersMap.mPendingBufferList.begin();
        i != mPendingBuffersMap.mPendingBufferList.end(); i++) {
        dprintf(fd, " %5d | %11d \n",
            i->frame_number, i->stream->stream_type);
    }
    dprintf(fd, "-------+-------------\n");

    dprintf(fd, "\nPending frame drop list: %d\n",
        (int)mPendingFrameDropList.size());
    dprintf(fd, "-------+-----------\n");
    dprintf(fd, " Frame | Stream ID \n");
    dprintf(fd, "-------+-----------\n");
    for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
        i != mPendingFrameDropList.end(); i++) {
        dprintf(fd, " %5d | %9d \n",
            i->frame_number, i->stream_ID);
    }
    dprintf(fd, "-------+-----------\n");

    dprintf(fd, "\n Camera HAL3 information End \n");
    pthread_mutex_unlock(&mMutex);
    return;
}
/*===========================================================================
* FUNCTION : flush
*
* DESCRIPTION: Flush all in-flight work: stop every channel, unblock any
* caller waiting in process_capture_request, and return every
* pending buffer/request to the framework with an error
* notification (ERROR_BUFFER for frames whose metadata was
* already delivered, ERROR_REQUEST for the rest).
*
* PARAMETERS : none
*
* RETURN : 0 on success, NO_MEMORY on allocation failure
*==========================================================================*/
int QCamera3HardwareInterface::flush()
{
unsigned int frameNum = 0;
camera3_notify_msg_t notify_msg;
camera3_capture_result_t result;
camera3_stream_buffer_t *pStream_Buf = NULL;
FlushMap flushMap;
CDBG("%s: Unblocking Process Capture Request", __func__);
pthread_mutex_lock(&mMutex);
mFlush = true;
pthread_mutex_unlock(&mMutex);
memset(&result, 0, sizeof(camera3_capture_result_t));
// Stop the Streams/Channels
for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
it != mStreamInfo.end(); it++) {
QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
channel->stop();
(*it)->status = INVALID;
}
if (mSupportChannel) {
mSupportChannel->stop();
}
if (mRawDumpChannel) {
mRawDumpChannel->stop();
}
if (mMetadataChannel) {
/* If content of mStreamInfo is not 0, there is metadata stream */
mMetadataChannel->stop();
}
// Mutex Lock
pthread_mutex_lock(&mMutex);
// Unblock process_capture_request
mPendingRequest = 0;
pthread_cond_signal(&mRequestCond);
List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
frameNum = i->frame_number;
CDBG("%s: Oldest frame num on mPendingRequestsList = %d",
__func__, frameNum);
// Go through the pending buffers and group them depending
// on frame number
for (List<PendingBufferInfo>::iterator k =
mPendingBuffersMap.mPendingBufferList.begin();
k != mPendingBuffersMap.mPendingBufferList.end();) {
if (k->frame_number < frameNum) {
ssize_t idx = flushMap.indexOfKey(k->frame_number);
if (idx == NAME_NOT_FOUND) {
Vector<PendingBufferInfo> pending;
pending.add(*k);
flushMap.add(k->frame_number, pending);
} else {
Vector<PendingBufferInfo> &pending =
flushMap.editValueFor(k->frame_number);
pending.add(*k);
}
mPendingBuffersMap.num_buffers--;
k = mPendingBuffersMap.mPendingBufferList.erase(k);
} else {
k++;
}
}
for (size_t i = 0; i < flushMap.size(); i++) {
uint32_t frame_number = flushMap.keyAt(i);
const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
// Send Error notify to frameworks for each buffer for which
// metadata buffer is already sent
CDBG("%s: Sending ERROR BUFFER for frame %d number of buffer %d",
__func__, frame_number, pending.size());
pStream_Buf = new camera3_stream_buffer_t[pending.size()];
if (NULL == pStream_Buf) {
ALOGE("%s: No memory for pending buffers array", __func__);
pthread_mutex_unlock(&mMutex);
return NO_MEMORY;
}
for (size_t j = 0; j < pending.size(); j++) {
const PendingBufferInfo &info = pending.itemAt(j);
notify_msg.type = CAMERA3_MSG_ERROR;
notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
notify_msg.message.error.error_stream = info.stream;
notify_msg.message.error.frame_number = frame_number;
pStream_Buf[j].acquire_fence = -1;
pStream_Buf[j].release_fence = -1;
pStream_Buf[j].buffer = info.buffer;
pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
pStream_Buf[j].stream = info.stream;
mCallbackOps->notify(mCallbackOps, &notify_msg);
CDBG("%s: notify frame_number = %d stream %p", __func__,
frame_number, info.stream);
}
result.result = NULL;
result.frame_number = frame_number;
result.num_output_buffers = pending.size();
result.output_buffers = pStream_Buf;
mCallbackOps->process_capture_result(mCallbackOps, &result);
delete [] pStream_Buf;
}
CDBG("%s:Sending ERROR REQUEST for all pending requests", __func__);
flushMap.clear();
for (List<PendingBufferInfo>::iterator k =
mPendingBuffersMap.mPendingBufferList.begin();
k != mPendingBuffersMap.mPendingBufferList.end();) {
ssize_t idx = flushMap.indexOfKey(k->frame_number);
if (idx == NAME_NOT_FOUND) {
Vector<PendingBufferInfo> pending;
pending.add(*k);
flushMap.add(k->frame_number, pending);
} else {
Vector<PendingBufferInfo> &pending =
flushMap.editValueFor(k->frame_number);
pending.add(*k);
}
mPendingBuffersMap.num_buffers--;
k = mPendingBuffersMap.mPendingBufferList.erase(k);
}
// Go through the pending requests info and send error request to framework
for (size_t i = 0; i < flushMap.size(); i++) {
uint32_t frame_number = flushMap.keyAt(i);
const Vector<PendingBufferInfo> &pending = flushMap.valueAt(i);
CDBG("%s:Sending ERROR REQUEST for frame %d",
__func__, frame_number);
// Send shutter notify to frameworks
notify_msg.type = CAMERA3_MSG_ERROR;
notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
notify_msg.message.error.error_stream = NULL;
notify_msg.message.error.frame_number = frame_number;
mCallbackOps->notify(mCallbackOps, &notify_msg);
pStream_Buf = new camera3_stream_buffer_t[pending.size()];
if (NULL == pStream_Buf) {
ALOGE("%s: No memory for pending buffers array", __func__);
pthread_mutex_unlock(&mMutex);
return NO_MEMORY;
}
for (size_t j = 0; j < pending.size(); j++) {
const PendingBufferInfo &info = pending.itemAt(j);
pStream_Buf[j].acquire_fence = -1;
pStream_Buf[j].release_fence = -1;
pStream_Buf[j].buffer = info.buffer;
pStream_Buf[j].status = CAMERA3_BUFFER_STATUS_ERROR;
pStream_Buf[j].stream = info.stream;
}
result.num_output_buffers = pending.size();
result.output_buffers = pStream_Buf;
result.result = NULL;
result.frame_number = frame_number;
mCallbackOps->process_capture_result(mCallbackOps, &result);
delete [] pStream_Buf;
}
/* Reset pending buffer list and requests list */
mPendingRequestsList.clear();
/* Reset pending frame Drop list and requests list */
mPendingFrameDropList.clear();
flushMap.clear();
mPendingBuffersMap.num_buffers = 0;
mPendingBuffersMap.mPendingBufferList.clear();
mPendingReprocessResultList.clear();
CDBG("%s: Cleared all the pending buffers ", __func__);
mFlush = false;
mFirstRequest = true;
pthread_mutex_unlock(&mMutex);
return 0;
}
/*===========================================================================
* FUNCTION : captureResultCb
*
* DESCRIPTION: Callback handler for all capture result
* (streams, as well as metadata)
*
* PARAMETERS :
* @metadata_buf : metadata information; NULL if this is a buffer callback
* @buffer : actual gralloc buffer to be returned to frameworks.
* NULL if metadata.
* @frame_number : frame number the buffer belongs to
*
* RETURN : none
*==========================================================================*/
/**
 * Unified callback for capture results: dispatches a metadata callback to
 * handleMetadataWithLock(), otherwise treats it as a stream-buffer callback
 * and dispatches to handleBufferWithLock(). Runs entirely under mMutex.
 */
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    /* Assume flush() is called before any reprocessing. If a loopback
     * result was stashed, deliver its shutter notify and capture result
     * to the framework immediately upon receipt of any callback, then
     * release its storage. */
    if (mLoopBackResult != NULL) {
        camera3_notify_msg_t shutter_msg;
        shutter_msg.type = CAMERA3_MSG_SHUTTER;
        shutter_msg.message.shutter.frame_number = mLoopBackResult->frame_number;
        shutter_msg.message.shutter.timestamp = mLoopBackTimestamp;
        mCallbackOps->notify(mCallbackOps, &shutter_msg);

        mCallbackOps->process_capture_result(mCallbackOps, mLoopBackResult);
        free_camera_metadata((camera_metadata_t *)mLoopBackResult->result);
        free(mLoopBackResult);
        mLoopBackResult = NULL;
    }

    if (metadata_buf != NULL) {
        handleMetadataWithLock(metadata_buf);
    } else {
        handleBufferWithLock(buffer, frame_number);
    }

    pthread_mutex_unlock(&mMutex);
}
/*===========================================================================
* FUNCTION : translateFromHalMetadata
*
* DESCRIPTION: Translate HAL metadata entries into a framework
* camera_metadata_t result, merging in the per-request jpeg
* metadata and bookkeeping fields.
*
* PARAMETERS :
* @metadata : metadata information from callback
* @timestamp: metadata buffer timestamp
* @request_id: request id
* @jpegMetadata: additional jpeg metadata
* @pipeline_depth: pipeline depth to report for this result
* @capture_intent: capture intent to report for this result
*
* RETURN : camera_metadata_t*
* metadata in a format specified by fwk
*==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateFromHalMetadata(
metadata_buffer_t *metadata,
nsecs_t timestamp,
int32_t request_id,
const CameraMetadata& jpegMetadata,
uint8_t pipeline_depth,
uint8_t capture_intent)
{
CameraMetadata camMetadata;
camera_metadata_t* resultMetadata;
if (jpegMetadata.entryCount())
camMetadata.append(jpegMetadata);
camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
if (IS_META_AVAILABLE(CAM_INTF_META_FRAME_NUMBER, metadata)) {
int64_t frame_number = *(uint32_t *) POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &frame_number, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_PARM_FPS_RANGE, metadata)) {
int32_t fps_range[2];
cam_fps_range_t * float_range =
(cam_fps_range_t *)POINTER_OF_PARAM(CAM_INTF_PARM_FPS_RANGE, metadata);
fps_range[0] = (int32_t)float_range->min_fps;
fps_range[1] = (int32_t)float_range->max_fps;
camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
fps_range, 2);
CDBG("%s: urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
__func__, fps_range[0], fps_range[1]);
}
if (IS_META_AVAILABLE(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata)) {
int32_t *expCompensation =
(int32_t *)POINTER_OF_META(CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata);
camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
expCompensation, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_PARM_BESTSHOT_MODE, metadata)) {
uint8_t sceneMode =
*((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_BESTSHOT_MODE, metadata));
uint8_t fwkSceneMode =
(uint8_t)lookupFwkName(SCENE_MODES_MAP,
sizeof(SCENE_MODES_MAP)/
sizeof(SCENE_MODES_MAP[0]), sceneMode);
camMetadata.update(ANDROID_CONTROL_SCENE_MODE,
&fwkSceneMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_PARM_AEC_LOCK, metadata)) {
uint8_t ae_lock =
*((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AEC_LOCK, metadata));
camMetadata.update(ANDROID_CONTROL_AE_LOCK,
&ae_lock, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_PARM_AWB_LOCK, metadata)) {
uint8_t awb_lock =
*((uint32_t *)POINTER_OF_META(CAM_INTF_PARM_AWB_LOCK, metadata));
camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &awb_lock, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_FACE_DETECTION, metadata)){
cam_face_detection_data_t *faceDetectionInfo =
(cam_face_detection_data_t *)POINTER_OF_META(CAM_INTF_META_FACE_DETECTION, metadata);
uint8_t numFaces = faceDetectionInfo->num_faces_detected;
int32_t faceIds[MAX_ROI];
uint8_t faceScores[MAX_ROI];
int32_t faceRectangles[MAX_ROI * 4];
int32_t faceLandmarks[MAX_ROI * 6];
int j = 0, k = 0;
for (int i = 0; i < numFaces; i++) {
faceIds[i] = faceDetectionInfo->faces[i].face_id;
faceScores[i] = faceDetectionInfo->faces[i].score;
convertToRegions(faceDetectionInfo->faces[i].face_boundary,
faceRectangles+j, -1);
convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
j+= 4;
k+= 6;
}
if (numFaces <= 0) {
memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
}
camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
faceRectangles, numFaces*4);
camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
faceLandmarks, numFaces*6);
}
if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_MODE, metadata)){
uint8_t *color_correct_mode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_MODE, metadata);
camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_EDGE_MODE, metadata)) {
cam_edge_application_t *edgeApplication =
(cam_edge_application_t *)POINTER_OF_META(CAM_INTF_META_EDGE_MODE, metadata);
uint8_t edgeStrength = (uint8_t)edgeApplication->sharpness;
camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
camMetadata.update(ANDROID_EDGE_STRENGTH, &edgeStrength, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_POWER, metadata)) {
uint8_t *flashPower =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_POWER, metadata);
camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_FIRING_TIME, metadata)) {
int64_t *flashFiringTime =
(int64_t *)POINTER_OF_META(CAM_INTF_META_FLASH_FIRING_TIME, metadata);
camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_STATE, metadata)) {
uint8_t flashState =
*((uint8_t *)POINTER_OF_META(CAM_INTF_META_FLASH_STATE, metadata));
if (!gCamCapability[mCameraId]->flash_available) {
flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
}
camMetadata.update(ANDROID_FLASH_STATE, &flashState, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_FLASH_MODE, metadata)){
uint8_t flashMode = *((uint8_t*)
POINTER_OF_META(CAM_INTF_META_FLASH_MODE, metadata));
uint8_t fwk_flashMode = lookupFwkName(FLASH_MODES_MAP,
sizeof(FLASH_MODES_MAP), flashMode);
camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_HOTPIXEL_MODE, metadata)) {
uint8_t *hotPixelMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_HOTPIXEL_MODE, metadata);
camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_LENS_APERTURE, metadata)){
float *lensAperture =
(float *)POINTER_OF_META(CAM_INTF_META_LENS_APERTURE, metadata);
camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FILTERDENSITY, metadata)) {
float *filterDensity =
(float *)POINTER_OF_META(CAM_INTF_META_LENS_FILTERDENSITY, metadata);
camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata)){
float *focalLength =
(float *)POINTER_OF_META(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata);
camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata)) {
uint8_t *opticalStab =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata);
camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_PARM_DIS_ENABLE, metadata)) {
uint8_t *vsMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_PARM_DIS_ENABLE, metadata);
camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, vsMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata)) {
uint8_t *noiseRedMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata);
camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata)) {
uint8_t *noiseRedStrength =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_NOISE_REDUCTION_STRENGTH, metadata);
camMetadata.update(ANDROID_NOISE_REDUCTION_STRENGTH, noiseRedStrength, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_SCALER_CROP_REGION, metadata)) {
cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *)
POINTER_OF_META(CAM_INTF_META_SCALER_CROP_REGION, metadata);
int32_t scalerCropRegion[4];
scalerCropRegion[0] = hScalerCropRegion->left;
scalerCropRegion[1] = hScalerCropRegion->top;
scalerCropRegion[2] = hScalerCropRegion->width;
scalerCropRegion[3] = hScalerCropRegion->height;
camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
}
if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata)){
int64_t *sensorExpTime =
(int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata);
CDBG("%s: sensorExpTime = %lld", __func__, *sensorExpTime);
camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata)){
int64_t *sensorFameDuration =
(int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata);
CDBG("%s: sensorFameDuration = %lld", __func__, *sensorFameDuration);
camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata)){
int64_t *sensorRollingShutterSkew =
(int64_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW,
metadata);
CDBG("%s: sensorRollingShutterSkew = %lld", __func__, *sensorRollingShutterSkew);
camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
sensorRollingShutterSkew, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_SENSOR_SENSITIVITY, metadata)){
int32_t sensorSensitivity =
*((int32_t *)POINTER_OF_META(CAM_INTF_META_SENSOR_SENSITIVITY, metadata));
CDBG("%s: sensorSensitivity = %d", __func__, sensorSensitivity);
camMetadata.update(ANDROID_SENSOR_SENSITIVITY, &sensorSensitivity, 1);
//calculate the noise profile based on sensitivity
double noise_profile_S = computeNoiseModelEntryS(sensorSensitivity);
double noise_profile_O = computeNoiseModelEntryO(sensorSensitivity);
double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i +=2) {
noise_profile[i] = noise_profile_S;
noise_profile[i+1] = noise_profile_O;
}
CDBG("%s: noise model entry (S, O) is (%f, %f)", __func__,
noise_profile_S, noise_profile_O);
camMetadata.update( ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
2 * gCamCapability[mCameraId]->num_color_channels);
}
if (IS_META_AVAILABLE(CAM_INTF_META_SHADING_MODE, metadata)) {
uint8_t *shadingMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_SHADING_MODE, metadata);
camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata)) {
uint8_t *faceDetectMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata);
uint8_t fwk_faceDetectMode = (uint8_t)lookupFwkName(FACEDETECT_MODES_MAP,
sizeof(FACEDETECT_MODES_MAP)/sizeof(FACEDETECT_MODES_MAP[0]), *faceDetectMode);
camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata)) {
uint8_t *histogramMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata);
camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata)){
uint8_t *sharpnessMapMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata);
camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
sharpnessMapMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata)){
cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *)
POINTER_OF_META(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata);
camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP,
(int32_t*)sharpnessMap->sharpness,
CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT);
}
if (IS_META_AVAILABLE(CAM_INTF_META_LENS_SHADING_MAP, metadata)) {
cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *)
POINTER_OF_META(CAM_INTF_META_LENS_SHADING_MAP, metadata);
int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height;
int map_width = gCamCapability[mCameraId]->lens_shading_map_size.width;
camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
(float*)lensShadingMap->lens_shading,
4*map_width*map_height);
}
if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_MODE, metadata)) {
uint8_t *toneMapMode =
(uint8_t *)POINTER_OF_META(CAM_INTF_META_TONEMAP_MODE, metadata);
camMetadata.update(ANDROID_TONEMAP_MODE, toneMapMode, 1);
}
if (IS_META_AVAILABLE(CAM_INTF_META_TONEMAP_CURVES, metadata)){
//Populate CAM_INTF_META_TONEMAP_CURVES
/* ch0 = G, ch 1 = B, ch 2 = R*/
cam_rgb_tonemap_curves *tonemap = (cam_rgb_tonemap_curves *)
POINTER_OF_META(CAM_INTF_META_TONEMAP_CURVES, metadata);
if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
ALOGE("%s: Fatal: tonemap_points_cnt %d exceeds max value of %d",
__func__, tonemap->tonemap_points_cnt,
CAM_MAX_TONEMAP_CURVE_SIZE);
tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
}
camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
(float*)tonemap->curves[0].tonemap_points,
tonemap->tonemap_points_cnt * 2);
camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
(float*)tonemap->curves[1].tonemap_points,
tonemap->tonemap_points_cnt * 2);
camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
(float*)tonemap->curves[2].tonemap_points,
tonemap->tonemap_points_cnt * 2);
}
if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata)){
cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*)
POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata);
camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4);
}
if (IS_META_AVAILABLE(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata)){
cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*)
POINTER_OF_META(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata);
camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
(camera_metadata_rational_t*)colorCorrectionMatrix->trans