/* Copyright (c) 2012-2014, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
* * Neither the name of The Linux Foundation nor the names of its
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#define LOG_TAG "QCamera2HWI"
#include <cutils/properties.h>
#include <hardware/camera.h>
#include <stdio.h>
#include <stdlib.h>
#include <utils/Errors.h>
#include <gralloc_priv.h>
#include <gui/Surface.h>
#include "QCamera2HWI.h"
#include "QCameraMem.h"
#define MAP_TO_DRIVER_COORDINATE(val, base, scale, offset) (val * scale / base + offset)
#define CAMERA_MIN_STREAMING_BUFFERS 3
#define EXTRA_ZSL_PREVIEW_STREAM_BUF 2
#define CAMERA_MIN_JPEG_ENCODING_BUFFERS 2
#define CAMERA_MIN_VIDEO_BUFFERS 9
#define CAMERA_LONGSHOT_STAGES 4
//This multiplier signifies extra buffers that we need to allocate
//for the output of pproc
#define CAMERA_PPROC_OUT_BUFFER_MULTIPLIER 2
#define HDR_CONFIDENCE_THRESHOLD 0.4
namespace qcamera {
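// Per-camera capability tables, filled once by initCapabilities(); g_camlock
// guards access to the static camera info shared across HAL instances.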
cam_capability_t *gCamCaps[MM_CAMERA_MAX_NUM_SENSORS];
static pthread_mutex_t g_camlock = PTHREAD_MUTEX_INITIALIZER;
volatile uint32_t gCamHalLogLevel = 1;
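// HAL1 entry points exposed to the camera service. Each static wrapper below
// recovers the QCamera2HardwareInterface instance from device->priv and forwards
// the call to the state machine via processAPI()/waitAPIResult().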
camera_device_ops_t QCamera2HardwareInterface::mCameraOps = {
set_preview_window: QCamera2HardwareInterface::set_preview_window,
set_callbacks: QCamera2HardwareInterface::set_CallBacks,
enable_msg_type: QCamera2HardwareInterface::enable_msg_type,
disable_msg_type: QCamera2HardwareInterface::disable_msg_type,
msg_type_enabled: QCamera2HardwareInterface::msg_type_enabled,
start_preview: QCamera2HardwareInterface::start_preview,
stop_preview: QCamera2HardwareInterface::stop_preview,
preview_enabled: QCamera2HardwareInterface::preview_enabled,
store_meta_data_in_buffers: QCamera2HardwareInterface::store_meta_data_in_buffers,
start_recording: QCamera2HardwareInterface::start_recording,
stop_recording: QCamera2HardwareInterface::stop_recording,
recording_enabled: QCamera2HardwareInterface::recording_enabled,
release_recording_frame: QCamera2HardwareInterface::release_recording_frame,
auto_focus: QCamera2HardwareInterface::auto_focus,
cancel_auto_focus: QCamera2HardwareInterface::cancel_auto_focus,
take_picture: QCamera2HardwareInterface::take_picture,
cancel_picture: QCamera2HardwareInterface::cancel_picture,
set_parameters: QCamera2HardwareInterface::set_parameters,
get_parameters: QCamera2HardwareInterface::get_parameters,
put_parameters: QCamera2HardwareInterface::put_parameters,
send_command: QCamera2HardwareInterface::send_command,
release: QCamera2HardwareInterface::release,
dump: QCamera2HardwareInterface::dump,
};
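/*===========================================================================
* FUNCTION : getEffectValue
*
* DESCRIPTION: translate an effect name string into the corresponding value
* from QCameraParameters::EFFECT_MODES_MAP
*
* PARAMETERS :
* @effect : effect name string
*
* RETURN : effect value on match; 0 if the name is not found
*==========================================================================*/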
int32_t QCamera2HardwareInterface::getEffectValue(const char *effect)
{
uint32_t cnt = 0;
while(NULL != QCameraParameters::EFFECT_MODES_MAP[cnt].desc) {
if(!strcmp(QCameraParameters::EFFECT_MODES_MAP[cnt].desc, effect)) {
return QCameraParameters::EFFECT_MODES_MAP[cnt].val;
}
cnt++;
}
return 0;
}
/*===========================================================================
* FUNCTION : set_preview_window
*
* DESCRIPTION: set preview window.
*
* PARAMETERS :
* @device : ptr to camera device struct
* @window : window ops table
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
struct preview_stream_ops *window)
{
int rc = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("%s: NULL camera device", __func__);
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
rc = hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
if (rc == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, &apiResult);
rc = apiResult.status;
}
hw->unlockAPI();
return rc;
}
/*===========================================================================
* FUNCTION : set_CallBacks
*
* DESCRIPTION: set callbacks for notify and data
*
* PARAMETERS :
* @device : ptr to camera device struct
* @notify_cb : notify cb
* @data_cb : data cb
* @data_cb_timestamp : video data cb with timestamp
* @get_memory : ops table for requesting gralloc memory
* @user : user data ptr
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::set_CallBacks(struct camera_device *device,
camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void *user)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
qcamera_sm_evt_setcb_payload_t payload;
payload.notify_cb = notify_cb;
payload.data_cb = data_cb;
payload.data_cb_timestamp = data_cb_timestamp;
payload.get_memory = get_memory;
payload.user = user;
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t rc = hw->processAPI(QCAMERA_SM_EVT_SET_CALLBACKS, (void *)&payload);
if (rc == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_SET_CALLBACKS, &apiResult);
}
hw->unlockAPI();
}
/*===========================================================================
* FUNCTION : enable_msg_type
*
* DESCRIPTION: enable certain msg type
*
* PARAMETERS :
* @device : ptr to camera device struct
* @msg_type : msg type mask
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::enable_msg_type(struct camera_device *device, int32_t msg_type)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t rc = hw->processAPI(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, (void *)msg_type);
if (rc == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_ENABLE_MSG_TYPE, &apiResult);
}
hw->unlockAPI();
}
/*===========================================================================
* FUNCTION : disable_msg_type
*
* DESCRIPTION: disable certain msg type
*
* PARAMETERS :
* @device : ptr to camera device struct
* @msg_type : msg type mask
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::disable_msg_type(struct camera_device *device, int32_t msg_type)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t rc = hw->processAPI(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, (void *)msg_type);
if (rc == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_DISABLE_MSG_TYPE, &apiResult);
}
hw->unlockAPI();
}
/*===========================================================================
* FUNCTION : msg_type_enabled
*
* DESCRIPTION: if certain msg type is enabled
*
* PARAMETERS :
* @device : ptr to camera device struct
* @msg_type : msg type mask
*
* RETURN : 1 -- enabled
* 0 -- not enabled
*==========================================================================*/
int QCamera2HardwareInterface::msg_type_enabled(struct camera_device *device, int32_t msg_type)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, (void *)msg_type);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_MSG_TYPE_ENABLED, &apiResult);
ret = apiResult.enabled;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : start_preview
*
* DESCRIPTION: start preview
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::start_preview(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_START_PREVIEW", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
qcamera_sm_evt_enum_t evt = QCAMERA_SM_EVT_START_PREVIEW;
if (hw->isNoDisplayMode()) {
evt = QCAMERA_SM_EVT_START_NODISPLAY_PREVIEW;
}
ret = hw->processAPI(evt, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(evt, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
hw->m_bPreviewStarted = true;
CDBG_HIGH("[KPI Perf] %s: X", __func__);
return ret;
}
/*===========================================================================
* FUNCTION : stop_preview
*
* DESCRIPTION: stop preview
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::stop_preview(struct camera_device *device)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_STOP_PREVIEW", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_PREVIEW, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_STOP_PREVIEW, &apiResult);
}
hw->unlockAPI();
CDBG_HIGH("[KPI Perf] %s: X", __func__);
}
/*===========================================================================
* FUNCTION : preview_enabled
*
* DESCRIPTION: if preview is running
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : 1 -- running
* 0 -- not running
*==========================================================================*/
int QCamera2HardwareInterface::preview_enabled(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_PREVIEW_ENABLED, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_PREVIEW_ENABLED, &apiResult);
ret = apiResult.enabled;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : store_meta_data_in_buffers
*
* DESCRIPTION: whether to store meta data in buffers for video frames
*
* PARAMETERS :
* @device : ptr to camera device struct
* @enable : flag to enable/disable
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::store_meta_data_in_buffers(
struct camera_device *device, int enable)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, (void *)enable);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_STORE_METADATA_IN_BUFS, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : start_recording
*
* DESCRIPTION: start recording
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::start_recording(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_START_RECORDING", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_START_RECORDING, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_START_RECORDING, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
hw->m_bRecordStarted = true;
CDBG_HIGH("[KPI Perf] %s: X", __func__);
return ret;
}
/*===========================================================================
* FUNCTION : stop_recording
*
* DESCRIPTION: stop recording
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::stop_recording(struct camera_device *device)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_STOP_RECORDING", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t ret = hw->processAPI(QCAMERA_SM_EVT_STOP_RECORDING, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_STOP_RECORDING, &apiResult);
}
hw->unlockAPI();
CDBG_HIGH("[KPI Perf] %s: X", __func__);
}
/*===========================================================================
* FUNCTION : recording_enabled
*
* DESCRIPTION: if recording is running
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : 1 -- running
* 0 -- not running
*==========================================================================*/
int QCamera2HardwareInterface::recording_enabled(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_RECORDING_ENABLED, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_RECORDING_ENABLED, &apiResult);
ret = apiResult.enabled;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : release_recording_frame
*
* DESCRIPTION: return recording frame back
*
* PARAMETERS :
* @device : ptr to camera device struct
* @opaque : ptr to frame to be returned
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::release_recording_frame(
struct camera_device *device, const void *opaque)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
CDBG_HIGH("%s: E", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, (void *)opaque);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE_RECORIDNG_FRAME, &apiResult);
}
hw->unlockAPI();
CDBG_HIGH("%s: X", __func__);
}
/*===========================================================================
* FUNCTION : auto_focus
*
* DESCRIPTION: start auto focus
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::auto_focus(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
CDBG_HIGH("[KPI Perf] %s : E PROFILE_AUTO_FOCUS", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_START_AUTO_FOCUS, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_START_AUTO_FOCUS, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
CDBG_HIGH("[KPI Perf] %s : X", __func__);
return ret;
}
/*===========================================================================
* FUNCTION : cancel_auto_focus
*
* DESCRIPTION: cancel auto focus
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::cancel_auto_focus(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
ALOGE("[KPI Perf] %s : E PROFILE_CANCEL_AUTO_FOCUS", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_STOP_AUTO_FOCUS, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
CDBG_HIGH("[KPI Perf] %s : X", __func__);
return ret;
}
/*===========================================================================
* FUNCTION : take_picture
*
* DESCRIPTION: take picture
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::take_picture(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_TAKE_PICTURE", __func__);
hw->lockAPI();
qcamera_api_result_t apiResult;
/** Added support for Retro-active Frames:
* takePicture() is called before preparing Snapshot to indicate the
* mm-camera-channel to pick up legacy frames even
* before LED estimation is triggered.
*/
CDBG_HIGH("%s: [ZSL Retro]: numRetroSnap %d, isLiveSnap %d, isZSL %d, isHDR %d",
__func__, hw->mParameters.getNumOfRetroSnapshots(),
hw->isLiveSnapshot(), hw->isZSLMode(), hw->isHDRMode());
// Check for Retro-active Frames
if ((hw->mParameters.getNumOfRetroSnapshots() > 0) &&
!hw->isLiveSnapshot() && hw->isZSLMode() &&
!hw->isHDRMode() && !hw->isLongshotEnabled()) {
// Set Retro Picture Mode
hw->setRetroPicture(1);
hw->m_bLedAfAecLock = 0;
CDBG_HIGH("%s: [ZSL Retro] mode", __func__);
/* Call take Picture for total number of snapshots required.
This includes the number of retro frames and normal frames */
ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
if (ret == NO_ERROR) {
// Wait for retro frames, before calling prepare snapshot
CDBG_HIGH("%s:[ZSL Retro] Wait for Retro frames to be done", __func__);
hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
ret = apiResult.status;
}
// Start Preparing for normal Frames
CDBG_HIGH("%s: [ZSL Retro] Start Prepare Snapshot", __func__);
/* Prepare snapshot in case LED needs to be flashed */
ret = hw->processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
ret = apiResult.status;
CDBG_HIGH("%s: [ZSL Retro] Prep Snapshot done", __func__);
}
}
else {
hw->setRetroPicture(0);
CDBG_HIGH("%s: [ZSL Retro] Normal Pic Taking Mode", __func__);
CDBG_HIGH("%s: [ZSL Retro] Start Prepare Snapshot", __func__);
/* Prepare snapshot in case LED needs to be flashed */
if (hw->mFlashNeeded == 1 || hw->mParameters.isChromaFlashEnabled()) {
// Start Preparing for normal Frames
CDBG_HIGH("%s: [ZSL Retro] Start Prepare Snapshot", __func__);
/* Prepare snapshot in case LED needs to be flashed */
ret = hw->processAPI(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_PREPARE_SNAPSHOT, &apiResult);
ret = apiResult.status;
CDBG_HIGH("%s: [ZSL Retro] Prep Snapshot done", __func__);
}
}
/* Regardless what the result value for prepare_snapshot,
* go ahead with capture anyway. Just like the way autofocus
* is handled in capture case. */
/* capture */
CDBG_HIGH("%s: [ZSL Retro] Capturing normal frames", __func__);
ret = hw->processAPI(QCAMERA_SM_EVT_TAKE_PICTURE, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_TAKE_PICTURE, &apiResult);
ret = apiResult.status;
}
}
hw->unlockAPI();
CDBG_HIGH("[KPI Perf] %s: X", __func__);
return ret;
}
/*===========================================================================
* FUNCTION : cancel_picture
*
* DESCRIPTION: cancel current take picture request
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::cancel_picture(struct camera_device *device)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_CANCEL_PICTURE, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_CANCEL_PICTURE, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : set_parameters
*
* DESCRIPTION: set camera parameters
*
* PARAMETERS :
* @device : ptr to camera device struct
* @parms : string of packed parameters
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::set_parameters(struct camera_device *device,
const char *parms)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_SET_PARAMS, (void *)parms);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_SET_PARAMS, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : get_parameters
*
* DESCRIPTION: query camera parameters
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : packed parameters in a string
*==========================================================================*/
char* QCamera2HardwareInterface::get_parameters(struct camera_device *device)
{
char *ret = NULL;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return NULL;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t rc = hw->processAPI(QCAMERA_SM_EVT_GET_PARAMS, NULL);
if (rc == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_GET_PARAMS, &apiResult);
ret = apiResult.params;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : put_parameters
*
* DESCRIPTION: return camera parameters string back to HAL
*
* PARAMETERS :
* @device : ptr to camera device struct
* @parm : ptr to parameter string to be returned
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::put_parameters(struct camera_device *device,
char *parm)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t ret = hw->processAPI(QCAMERA_SM_EVT_PUT_PARAMS, (void *)parm);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_PUT_PARAMS, &apiResult);
}
hw->unlockAPI();
}
/*===========================================================================
* FUNCTION : send_command
*
* DESCRIPTION: command to be executed
*
* PARAMETERS :
* @device : ptr to camera device struct
* @cmd : cmd to be executed
* @arg1 : ptr to optional argument1
* @arg2 : ptr to optional argument2
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::send_command(struct camera_device *device,
int32_t cmd,
int32_t arg1,
int32_t arg2)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
qcamera_sm_evt_command_payload_t payload;
memset(&payload, 0, sizeof(qcamera_sm_evt_command_payload_t));
payload.cmd = cmd;
payload.arg1 = arg1;
payload.arg2 = arg2;
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_SEND_COMMAND, (void *)&payload);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_SEND_COMMAND, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : release
*
* DESCRIPTION: release camera resource
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : none
*==========================================================================*/
void QCamera2HardwareInterface::release(struct camera_device *device)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
int32_t ret = hw->processAPI(QCAMERA_SM_EVT_RELEASE, NULL);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_RELEASE, &apiResult);
}
hw->unlockAPI();
}
/*===========================================================================
* FUNCTION : dump
*
* DESCRIPTION: dump camera status
*
* PARAMETERS :
* @device : ptr to camera device struct
* @fd : fd for status to be dumped to
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::dump(struct camera_device *device, int fd)
{
int ret = NO_ERROR;
//Log level property is read when "adb shell dumpsys media.camera" is
//called so that the log level can be controlled without restarting
//media server
getLogLevel();
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_DUMP, (void *)fd);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_DUMP, &apiResult);
ret = apiResult.status;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : close_camera_device
*
* DESCRIPTION: close camera device
*
* PARAMETERS :
* @device : ptr to camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::close_camera_device(hw_device_t *hw_dev)
{
int ret = NO_ERROR;
CDBG_HIGH("[KPI Perf] %s: E",__func__);
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(
reinterpret_cast<camera_device_t *>(hw_dev)->priv);
if (!hw) {
ALOGE("%s: NULL camera device", __func__);
return BAD_VALUE;
}
delete hw;
CDBG_HIGH("[KPI Perf] %s: X",__func__);
return ret;
}
/*===========================================================================
* FUNCTION : register_face_image
*
* DESCRIPTION: register a face image into imaging lib for face authentication /
* face recognition
*
* PARAMETERS :
* @device : ptr to camera device struct
* @img_ptr : ptr to image buffer
* @config : ptr to config about input image, i.e., format, dimension, etc.
*
* RETURN : >=0 unique ID of face registered.
* <0 failure.
*==========================================================================*/
int QCamera2HardwareInterface::register_face_image(struct camera_device *device,
void *img_ptr,
cam_pp_offline_src_config_t *config)
{
int ret = NO_ERROR;
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
if (!hw) {
ALOGE("NULL camera device");
return BAD_VALUE;
}
qcamera_sm_evt_reg_face_payload_t payload;
memset(&payload, 0, sizeof(qcamera_sm_evt_reg_face_payload_t));
payload.img_ptr = img_ptr;
payload.config = config;
hw->lockAPI();
qcamera_api_result_t apiResult;
ret = hw->processAPI(QCAMERA_SM_EVT_REG_FACE_IMAGE, (void *)&payload);
if (ret == NO_ERROR) {
hw->waitAPIResult(QCAMERA_SM_EVT_REG_FACE_IMAGE, &apiResult);
ret = apiResult.handle;
}
hw->unlockAPI();
return ret;
}
/*===========================================================================
* FUNCTION : QCamera2HardwareInterface
*
* DESCRIPTION: constructor of QCamera2HardwareInterface
*
* PARAMETERS :
* @cameraId : camera ID
*
* RETURN : none
*==========================================================================*/
QCamera2HardwareInterface::QCamera2HardwareInterface(int cameraId)
: mCameraId(cameraId),
mCameraHandle(NULL),
mCameraOpened(false),
mPreviewWindow(NULL),
mMsgEnabled(0),
mStoreMetaDataInFrame(0),
m_stateMachine(this),
m_postprocessor(this),
m_thermalAdapter(QCameraThermalAdapter::getInstance()),
m_cbNotifier(this),
m_bShutterSoundPlayed(false),
m_bPreviewStarted(false),
m_bRecordStarted(false),
m_currentFocusState(CAM_AF_NOT_FOCUSED),
m_pPowerModule(NULL),
mDumpFrmCnt(0),
mDumpSkipCnt(0),
mThermalLevel(QCAMERA_THERMAL_NO_ADJUSTMENT),
m_HDRSceneEnabled(false),
mLongshotEnabled(false),
m_max_pic_width(0),
m_max_pic_height(0),
mLiveSnapshotThread(0),
mFlashNeeded(false),
mCaptureRotation(0),
mIs3ALocked(false),
mZoomLevel(0),
mSnapshotJob(-1),
mPostviewJob(-1),
mMetadataJob(-1),
mReprocJob(-1),
mRawdataJob(-1)
{
getLogLevel();
mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
mCameraDevice.common.version = HARDWARE_DEVICE_API_VERSION(1, 0);
mCameraDevice.common.close = close_camera_device;
mCameraDevice.ops = &mCameraOps;
mCameraDevice.priv = this;
pthread_mutex_init(&m_lock, NULL);
pthread_cond_init(&m_cond, NULL);
m_apiResultList = NULL;
pthread_mutex_init(&m_evtLock, NULL);
pthread_cond_init(&m_evtCond, NULL);
memset(&m_evtResult, 0, sizeof(qcamera_api_result_t));
pthread_mutex_init(&m_parm_lock, NULL);
memset(m_channels, 0, sizeof(m_channels));
#ifdef HAS_MULTIMEDIA_HINTS
if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) {
ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID);
}
#endif
memset(mDeffOngoingJobs, 0, sizeof(mDeffOngoingJobs));
mDefferedWorkThread.launch(defferedWorkRoutine, this);
mDefferedWorkThread.sendCmd(CAMERA_CMD_TYPE_START_DATA_PROC, FALSE, FALSE);
}
/*===========================================================================
* FUNCTION : ~QCamera2HardwareInterface
*
* DESCRIPTION: destructor of QCamera2HardwareInterface
*
* PARAMETERS : none
*
* RETURN : none
*==========================================================================*/
QCamera2HardwareInterface::~QCamera2HardwareInterface()
{
mDefferedWorkThread.sendCmd(CAMERA_CMD_TYPE_STOP_DATA_PROC, TRUE, TRUE);
mDefferedWorkThread.exit();
closeCamera();
pthread_mutex_destroy(&m_lock);
pthread_cond_destroy(&m_cond);
pthread_mutex_destroy(&m_evtLock);
pthread_cond_destroy(&m_evtCond);
pthread_mutex_destroy(&m_parm_lock);
}
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera
*
* PARAMETERS :
* @hw_device : double ptr for camera device struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
int rc = NO_ERROR;
if (mCameraOpened) {
*hw_device = NULL;
return PERMISSION_DENIED;
}
CDBG_HIGH("[KPI Perf] %s: E PROFILE_OPEN_CAMERA camera id %d",
__func__,mCameraId);
rc = openCamera();
if (rc == NO_ERROR){
*hw_device = &mCameraDevice.common;
if (m_thermalAdapter.init(this) != 0) {
ALOGE("Init thermal adapter failed");
}
}
else
*hw_device = NULL;
return rc;
}
/*===========================================================================
* FUNCTION : openCamera
*
* DESCRIPTION: open camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::openCamera()
{
int32_t l_curr_width = 0;
int32_t l_curr_height = 0;
m_max_pic_width = 0;
m_max_pic_height = 0;
int i;
if (mCameraHandle) {
ALOGE("Failure: Camera already opened");
return ALREADY_EXISTS;
}
mCameraHandle = camera_open(mCameraId);
if (!mCameraHandle) {
ALOGE("camera_open failed.");
return UNKNOWN_ERROR;
}
if (NULL == gCamCaps[mCameraId])
initCapabilities(mCameraId,mCameraHandle);
mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
camEvtHandle,
(void *) this);
/* get max pic size for jpeg work buf calculation*/
for(i = 0; i < gCamCaps[mCameraId]->picture_sizes_tbl_cnt - 1; i++)
{
l_curr_width = gCamCaps[mCameraId]->picture_sizes_tbl[i].width;
l_curr_height = gCamCaps[mCameraId]->picture_sizes_tbl[i].height;
if ((l_curr_width * l_curr_height) >
(m_max_pic_width * m_max_pic_height)) {
m_max_pic_width = l_curr_width;
m_max_pic_height = l_curr_height;
}
}
int32_t rc = m_postprocessor.init(jpegEvtHandle, this);
if (rc != 0) {
ALOGE("Init Postprocessor failed");
mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
mCameraHandle = NULL;
return UNKNOWN_ERROR;
}
// update padding info from jpeg
cam_padding_info_t padding_info;
m_postprocessor.getJpegPaddingReq(padding_info);
if (gCamCaps[mCameraId]->padding_info.width_padding < padding_info.width_padding) {
gCamCaps[mCameraId]->padding_info.width_padding = padding_info.width_padding;
}
if (gCamCaps[mCameraId]->padding_info.height_padding < padding_info.height_padding) {
gCamCaps[mCameraId]->padding_info.height_padding = padding_info.height_padding;
}
if (gCamCaps[mCameraId]->padding_info.plane_padding < padding_info.plane_padding) {
gCamCaps[mCameraId]->padding_info.plane_padding = padding_info.plane_padding;
}
mParameters.init(gCamCaps[mCameraId], mCameraHandle, this, this);
mCameraOpened = true;
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : closeCamera
*
* DESCRIPTION: close camera
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::closeCamera()
{
int rc = NO_ERROR;
int i;
if (!mCameraOpened) {
return NO_ERROR;
}
pthread_mutex_lock(&m_parm_lock);
// set open flag to false
mCameraOpened = false;
// deinit Parameters
mParameters.deinit();
pthread_mutex_unlock(&m_parm_lock);
// exit notifier
m_cbNotifier.exit();
// stop and deinit postprocessor
m_postprocessor.stop();
m_postprocessor.deinit();
//free all pending api results here
if(m_apiResultList != NULL) {
api_result_list *apiResultList = m_apiResultList;
api_result_list *apiResultListNext;
while (apiResultList != NULL) {
apiResultListNext = apiResultList->next;
free(apiResultList);
apiResultList = apiResultListNext;
}
}
m_thermalAdapter.deinit();
// delete all channels if not already deleted
for (i = 0; i < QCAMERA_CH_TYPE_MAX; i++) {
if (m_channels[i] != NULL) {
m_channels[i]->stop();
delete m_channels[i];
m_channels[i] = NULL;
}
}
rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
mCameraHandle = NULL;
return rc;
}
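// Shorthand for the virtual address of buffer INDEX within a QCameraMemory object.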
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
/*===========================================================================
* FUNCTION : initCapabilities
*
* DESCRIPTION: initialize camera capabilities in static data struct
*
* PARAMETERS :
* @cameraId : camera Id
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::initCapabilities(int cameraId,mm_camera_vtbl_t *cameraHandle)
{
int rc = NO_ERROR;
QCameraHeapMemory *capabilityHeap = NULL;
/* Allocate memory for capability buffer */
capabilityHeap = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), NON_SECURE);
if(rc != OK) {
ALOGE("%s: No memory for cappability", __func__);
goto allocate_failed;
}
/* Map memory for capability buffer */
memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
CAM_MAPPING_BUF_TYPE_CAPABILITY,
capabilityHeap->getFd(0),
sizeof(cam_capability_t));
if(rc < 0) {
ALOGE("%s: failed to map capability buffer", __func__);
goto map_failed;
}
/* Query Capability */
rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
if(rc < 0) {
ALOGE("%s: failed to query capability",__func__);
goto query_failed;
}
gCamCaps[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
if (!gCamCaps[cameraId]) {
ALOGE("%s: out of memory", __func__);
goto query_failed;
}
memcpy(gCamCaps[cameraId], DATA_PTR(capabilityHeap,0),
sizeof(cam_capability_t));
rc = NO_ERROR;
query_failed:
cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
capabilityHeap->deallocate();
delete capabilityHeap;
allocate_failed:
return rc;
}
/*===========================================================================
* FUNCTION : getCapabilities
*
* DESCRIPTION: query camera capabilities
*
* PARAMETERS :
* @cameraId : camera Id
* @info : camera info struct to be filled in with camera capabilities
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::getCapabilities(int cameraId,
struct camera_info *info)
{
int rc = NO_ERROR;
struct camera_info *p_info;
pthread_mutex_lock(&g_camlock);
p_info = get_cam_info(cameraId);
p_info->device_version = CAMERA_DEVICE_API_VERSION_1_0;
p_info->static_camera_characteristics = NULL;
memcpy(info, p_info, sizeof (struct camera_info));
pthread_mutex_unlock(&g_camlock);
return rc;
}
/*===========================================================================
* FUNCTION : prepareTorchCamera
*
* DESCRIPTION: initializes the camera ( if needed )
* so torch can be configured.
*
* PARAMETERS :
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::prepareTorchCamera()
{
int rc = NO_ERROR;
if ( ( !m_stateMachine.isPreviewRunning() ) &&
!m_stateMachine.isPreviewReady() &&
( m_channels[QCAMERA_CH_TYPE_PREVIEW] == NULL ) ) {
rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
}
return rc;
}
/*===========================================================================
* FUNCTION : releaseTorchCamera
*
* DESCRIPTION: releases all previously acquired camera resources ( if any )
* needed for torch configuration.
*
* PARAMETERS :
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::releaseTorchCamera()
{
if ( !m_stateMachine.isPreviewRunning() &&
!m_stateMachine.isPreviewReady() &&
( m_channels[QCAMERA_CH_TYPE_PREVIEW] != NULL ) ) {
delete m_channels[QCAMERA_CH_TYPE_PREVIEW];
m_channels[QCAMERA_CH_TYPE_PREVIEW] = NULL;
}
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : getBufNumRequired
*
* DESCRIPTION: return number of stream buffers needed for given stream type
*
* PARAMETERS :
* @stream_type : type of stream
*
* RETURN : number of buffers needed
*==========================================================================*/
uint8_t QCamera2HardwareInterface::getBufNumRequired(cam_stream_type_t stream_type)
{
int bufferCnt = 0;
int minCaptureBuffers = mParameters.getNumOfSnapshots();
char value[PROPERTY_VALUE_MAX];
bool raw_yuv = false;
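// Derived counts used by the per-stream cases below: the ZSL queue depth, a
// circular-buffer minimum (unmatched frames plus JPEG encoding buffers) and an
// upper bound applied to capture streams.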
int zslQBuffers = mParameters.getZSLQueueDepth();
int minCircularBufNum = mParameters.getMaxUnmatchedFramesInQueue() +
CAMERA_MIN_JPEG_ENCODING_BUFFERS;
int maxStreamBuf = minCaptureBuffers + mParameters.getMaxUnmatchedFramesInQueue() +
mParameters.getNumOfExtraHDRInBufsIfNeeded() -
mParameters.getNumOfExtraHDROutBufsIfNeeded() +
mParameters.getNumOfExtraBuffersForImageProc() +
EXTRA_ZSL_PREVIEW_STREAM_BUF;
int minUndequeCount = 0;
if (!isNoDisplayMode()) {
if(mPreviewWindow != NULL) {
if (mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow,&minUndequeCount)
!= 0) {
ALOGE("get_min_undequeued_buffer_count failed");
//TODO: hardcoded because MIN_UNDEQUEUED_BUFFERS not defined
//minUndequeCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS;
minUndequeCount = 2;
}
} else {
//preview window might not be set at this point. So, query directly
//from BufferQueue implementation of gralloc buffers.
//minUndequeCount = BufferQueue::MIN_UNDEQUEUED_BUFFERS;
//hardcoded because MIN_UNDEQUEUED_BUFFERS not defined. REVISIT
minUndequeCount = 2;
}
}
// Get buffer count for the particular stream type
switch (stream_type) {
case CAM_STREAM_TYPE_PREVIEW:
{
if (mParameters.isZSLMode()) {
// We need two extra streaming buffers to add flexibility in forming a
// matched super buf in the ZSL queue. With only 'zslQBuffers + minCircularBufNum'
// buffers, preview frames sometimes get dropped at CPP and no super buf is
// formed in the ZSL queue for a long time.
bufferCnt = zslQBuffers + minCircularBufNum +
mParameters.getNumOfExtraBuffersForImageProc() +
EXTRA_ZSL_PREVIEW_STREAM_BUF;
} else {
bufferCnt = CAMERA_MIN_STREAMING_BUFFERS +
mParameters.getMaxUnmatchedFramesInQueue();
}
bufferCnt += minUndequeCount;
}
break;
case CAM_STREAM_TYPE_POSTVIEW:
{
bufferCnt = minCaptureBuffers*CAMERA_PPROC_OUT_BUFFER_MULTIPLIER +
mParameters.getNumOfExtraHDRInBufsIfNeeded() -
mParameters.getNumOfExtraHDROutBufsIfNeeded() +
mParameters.getNumOfExtraBuffersForImageProc();
if (bufferCnt > maxStreamBuf) {
bufferCnt = maxStreamBuf;
}
bufferCnt += minUndequeCount;
}
break;
case CAM_STREAM_TYPE_SNAPSHOT:
{
if (mParameters.isZSLMode() || mLongshotEnabled) {
if (minCaptureBuffers == 1 && !mLongshotEnabled) {
// Single ZSL snapshot case
bufferCnt = zslQBuffers + CAMERA_MIN_STREAMING_BUFFERS +
mParameters.getNumOfExtraBuffersForImageProc();
}
else {
// ZSL Burst or Longshot case
bufferCnt = zslQBuffers + minCircularBufNum +
mParameters.getNumOfExtraBuffersForImageProc();
}
} else {
bufferCnt = minCaptureBuffers*CAMERA_PPROC_OUT_BUFFER_MULTIPLIER +
mParameters.getNumOfExtraHDRInBufsIfNeeded() -
mParameters.getNumOfExtraHDROutBufsIfNeeded() +
mParameters.getNumOfExtraBuffersForImageProc();
if (bufferCnt > maxStreamBuf) {
bufferCnt = maxStreamBuf;
}
}
}
break;
case CAM_STREAM_TYPE_RAW:
property_get("persist.camera.raw_yuv", value, "0");
raw_yuv = atoi(value) > 0 ? true : false;
if (isRdiMode() || raw_yuv) {
CDBG_HIGH("RDI_DEBUG %s[%d]: CAM_STREAM_TYPE_RAW",
__func__, __LINE__);
bufferCnt = zslQBuffers + minCircularBufNum;
} else if (mParameters.isZSLMode()) {
bufferCnt = zslQBuffers + minCircularBufNum;
} else {
bufferCnt = minCaptureBuffers*CAMERA_PPROC_OUT_BUFFER_MULTIPLIER +
mParameters.getNumOfExtraHDRInBufsIfNeeded() -
mParameters.getNumOfExtraHDROutBufsIfNeeded() +
mParameters.getNumOfExtraBuffersForImageProc();
if (bufferCnt > maxStreamBuf) {
bufferCnt = maxStreamBuf;
}
}
break;
case CAM_STREAM_TYPE_VIDEO:
{
bufferCnt = CAMERA_MIN_VIDEO_BUFFERS;
}
break;
case CAM_STREAM_TYPE_METADATA:
{
if (mParameters.isZSLMode()) {
bufferCnt = zslQBuffers + minCircularBufNum +
mParameters.getNumOfExtraHDRInBufsIfNeeded() -
mParameters.getNumOfExtraHDROutBufsIfNeeded() +
mParameters.getNumOfExtraBuffersForImageProc();
} else {
bufferCnt = minCaptureBuffers +
mParameters.getNumOfExtraHDRInBufsIfNeeded() -
mParameters.getNumOfExtraHDROutBufsIfNeeded() +
mParameters.getMaxUnmatchedFramesInQueue() +
CAMERA_MIN_STREAMING_BUFFERS +
mParameters.getNumOfExtraBuffersForImageProc();
if (bufferCnt > zslQBuffers + minCircularBufNum) {
bufferCnt = zslQBuffers + minCircularBufNum;
}
}
}
break;
case CAM_STREAM_TYPE_OFFLINE_PROC:
{
bufferCnt = minCaptureBuffers;
if (mLongshotEnabled) {
bufferCnt = CAMERA_LONGSHOT_STAGES;
}
}
break;
case CAM_STREAM_TYPE_DEFAULT:
case CAM_STREAM_TYPE_MAX:
default:
bufferCnt = 0;
break;
}
return bufferCnt;
}
/*===========================================================================
* FUNCTION : allocateStreamBuf
*
* DESCRIPTION: allocate stream buffers
*
* PARAMETERS :
* @stream_type : type of stream
* @size : size of buffer
* @stride : stride of buffer
* @scanline : scanline of buffer
* @bufferCnt : [IN/OUT] minimum num of buffers to be allocated.
* could be modified during allocation if more buffers needed
*
* RETURN : ptr to a memory obj that holds stream buffers.
* NULL if failed
*==========================================================================*/
QCameraMemory *QCamera2HardwareInterface::allocateStreamBuf(cam_stream_type_t stream_type,
int size,
int stride,
int scanline,
uint8_t &bufferCnt)
{
int rc = NO_ERROR;
QCameraMemory *mem = NULL;
bool bCachedMem = QCAMERA_ION_USE_CACHE;
bool bPoolMem = false;
char value[PROPERTY_VALUE_MAX];
property_get("persist.camera.mem.usepool", value, "1");
if (atoi(value) == 1) {
bPoolMem = true;
}
// Allocate stream buffer memory object
switch (stream_type) {
case CAM_STREAM_TYPE_PREVIEW:
{
if (isNoDisplayMode()) {
mem = new QCameraStreamMemory(mGetMemory,
bCachedMem,
(bPoolMem) ? &m_memoryPool : NULL,
stream_type);
} else {
cam_dimension_t dim;
QCameraGrallocMemory *grallocMemory =
new QCameraGrallocMemory(mGetMemory);
mParameters.getStreamDimension(stream_type, dim);
if (grallocMemory)
grallocMemory->setWindowInfo(mPreviewWindow, dim.width,
dim.height, stride, scanline,
mParameters.getPreviewHalPixelFormat());
mem = grallocMemory;
}
}
break;
case CAM_STREAM_TYPE_POSTVIEW:
{
if (isNoDisplayMode() || isPreviewRestartEnabled()) {
mem = new QCameraStreamMemory(mGetMemory, bCachedMem);
} else {
cam_dimension_t dim;
QCameraGrallocMemory *grallocMemory =
new QCameraGrallocMemory(mGetMemory);
mParameters.getStreamDimension(stream_type, dim);
if (grallocMemory)
grallocMemory->setWindowInfo(mPreviewWindow, dim.width,
dim.height, stride, scanline,
mParameters.getPreviewHalPixelFormat());
mem = grallocMemory;
}
}
break;
case CAM_STREAM_TYPE_SNAPSHOT:
case CAM_STREAM_TYPE_RAW:
case CAM_STREAM_TYPE_METADATA:
case CAM_STREAM_TYPE_OFFLINE_PROC:
mem = new QCameraStreamMemory(mGetMemory,
bCachedMem,
(bPoolMem) ? &m_memoryPool : NULL,
stream_type);
break;
case CAM_STREAM_TYPE_VIDEO:
{
char value[PROPERTY_VALUE_MAX];
property_get("persist.camera.mem.usecache", value, "1");
if (atoi(value) == 0) {
bCachedMem = QCAMERA_ION_USE_NOCACHE;
}
CDBG_HIGH("%s: vidoe buf using cached memory = %d", __func__, bCachedMem);
mem = new QCameraVideoMemory(mGetMemory, bCachedMem);
}
break;
case CAM_STREAM_TYPE_DEFAULT:
case CAM_STREAM_TYPE_MAX:
default:
break;
}
if (!mem) {
return NULL;
}
if (bufferCnt > 0) {
if (mParameters.isSecureMode() &&
(stream_type == CAM_STREAM_TYPE_RAW) &&
(mParameters.isRdiMode())) {
ALOGD("%s: Allocating %d secure buffers of size %d ", __func__, bufferCnt, size);
rc = mem->allocate(bufferCnt, size, SECURE);
} else {
rc = mem->allocate(bufferCnt, size, NON_SECURE);
}
if (rc < 0) {
delete mem;
return NULL;
}
bufferCnt = mem->getCnt();
}
return mem;
}
/*===========================================================================
* FUNCTION : allocateMoreStreamBuf
*
* DESCRIPTION: allocate more stream buffers from the memory object
*
* PARAMETERS :
* @mem_obj : memory object ptr
* @size : size of buffer
* @bufferCnt : [IN/OUT] additional number of buffers to be allocated.
* output will be the number of total buffers
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::allocateMoreStreamBuf(QCameraMemory *mem_obj,
int size,
uint8_t &bufferCnt)
{
int rc = NO_ERROR;
if (bufferCnt > 0) {
rc = mem_obj->allocateMore(bufferCnt, size);
bufferCnt = mem_obj->getCnt();
}
return rc;
}
/*===========================================================================
* FUNCTION : allocateStreamInfoBuf
*
* DESCRIPTION: allocate stream info buffer
*
* PARAMETERS :
* @stream_type : type of stream
*
* RETURN : ptr to a memory obj that holds stream info buffer.
* NULL if failed
*==========================================================================*/
QCameraHeapMemory *QCamera2HardwareInterface::allocateStreamInfoBuf(
cam_stream_type_t stream_type)
{
int rc = NO_ERROR;
const char *effect;
char value[PROPERTY_VALUE_MAX];
bool raw_yuv = false;
QCameraHeapMemory *streamInfoBuf = new QCameraHeapMemory(QCAMERA_ION_USE_CACHE);
if (!streamInfoBuf) {
ALOGE("allocateStreamInfoBuf: Unable to allocate streamInfo object");
return NULL;
}
rc = streamInfoBuf->allocate(1, sizeof(cam_stream_info_t), NON_SECURE);
if (rc < 0) {
ALOGE("allocateStreamInfoBuf: Failed to allocate stream info memory");
delete streamInfoBuf;
return NULL;
}
cam_stream_info_t *streamInfo = (cam_stream_info_t *)streamInfoBuf->getPtr(0);
memset(streamInfo, 0, sizeof(cam_stream_info_t));
streamInfo->stream_type = stream_type;
rc = mParameters.getStreamFormat(stream_type, streamInfo->fmt);
rc = mParameters.getStreamDimension(stream_type, streamInfo->dim);
rc = mParameters.getStreamRotation(stream_type, streamInfo->pp_config, streamInfo->dim);
streamInfo->num_bufs = getBufNumRequired(stream_type);
streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
streamInfo->is_secure = NON_SECURE;
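// Adjust streaming mode, burst count and the secure flag per stream type.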
switch (stream_type) {
case CAM_STREAM_TYPE_SNAPSHOT:
if ((mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) ||
mLongshotEnabled) {
streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
} else {
streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
streamInfo->num_of_burst = mParameters.getNumOfSnapshots()
+ mParameters.getNumOfExtraHDRInBufsIfNeeded()
- mParameters.getNumOfExtraHDROutBufsIfNeeded()
+ mParameters.getNumOfExtraBuffersForImageProc();
}
break;
case CAM_STREAM_TYPE_RAW:
property_get("persist.camera.raw_yuv", value, "0");
raw_yuv = atoi(value) > 0 ? true : false;
if (mParameters.isZSLMode() || isRdiMode() || raw_yuv) {
CDBG_HIGH("RDI_DEBUG %s[%d]: CAM_STREAM_TYPE_RAW",
__func__, __LINE__);
streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
} else {
streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
streamInfo->num_of_burst = mParameters.getNumOfSnapshots();
}
if (mParameters.isSecureMode() && mParameters.isRdiMode()) {
streamInfo->is_secure = SECURE;
} else {
streamInfo->is_secure = NON_SECURE;
}
break;
case CAM_STREAM_TYPE_POSTVIEW:
if (mLongshotEnabled) {
streamInfo->streaming_mode = CAM_STREAMING_MODE_CONTINUOUS;
} else {
streamInfo->streaming_mode = CAM_STREAMING_MODE_BURST;
streamInfo->num_of_burst = mParameters.getNumOfSnapshots()
+ mParameters.getNumOfExtraHDRInBufsIfNeeded()
- mParameters.getNumOfExtraHDROutBufsIfNeeded()
+ mParameters.getNumOfExtraBuffersForImageProc();
}
break;
case CAM_STREAM_TYPE_VIDEO:
streamInfo->useAVTimer = mParameters.isAVTimerEnabled();
streamInfo->dis_enable = mParameters.isDISEnabled();
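// fall through: the video stream also inherits the IS type and secure-mode
// settings computed in the preview case below.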
case CAM_STREAM_TYPE_PREVIEW:
if (mParameters.getRecordingHintValue()) {
const char* dis_param = mParameters.get(QCameraParameters::KEY_QC_DIS);
bool disEnabled = (dis_param != NULL)
&& !strcmp(dis_param,QCameraParameters::VALUE_ENABLE);
if(disEnabled) {
char value[PROPERTY_VALUE_MAX];
property_get("persist.camera.is_type", value, "0");
streamInfo->is_type = static_cast<cam_is_type_t>(atoi(value));
} else {
streamInfo->is_type = IS_TYPE_NONE;
}
}
if (mParameters.isSecureMode()) {
streamInfo->is_secure = SECURE;
}
break;
default:
break;
}
ALOGD("%s: Stream type %d is secure: %d", __func__, stream_type, streamInfo->is_secure);
if ((!isZSLMode() ||
(isZSLMode() && (stream_type != CAM_STREAM_TYPE_SNAPSHOT))) &&
!mParameters.isHDREnabled()) {
//set flip mode based on Stream type;
int flipMode = mParameters.getFlipMode(stream_type);
if (flipMode > 0) {
streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_FLIP;
streamInfo->pp_config.flip = flipMode;
}
}
if (!isZSLMode()) {
if ((gCamCaps[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) &&
!mParameters.isOptiZoomEnabled()) {
streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS;
streamInfo->pp_config.sharpness = mParameters.getInt(QCameraParameters::KEY_QC_SHARPNESS);
}
if (gCamCaps[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_EFFECT) {
streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_EFFECT;
effect = mParameters.get(CameraParameters::KEY_EFFECT);
streamInfo->pp_config.effect = getEffectValue(effect);
}
if (mParameters.isWNREnabled() && (mParameters.getRecordingHintValue() == false)) {
streamInfo->pp_config.feature_mask |= CAM_QCOM_FEATURE_DENOISE2D;
streamInfo->pp_config.denoise2d.denoise_enable = 1;
streamInfo->pp_config.denoise2d.process_plates = mParameters.getWaveletDenoiseProcessPlate();
}
}
return streamInfoBuf;
}
/*===========================================================================
* FUNCTION : setPreviewWindow
*
* DESCRIPTION: set preview window impl
*
* PARAMETERS :
* @window : ptr to window ops table struct
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::setPreviewWindow(
struct preview_stream_ops *window)
{
mPreviewWindow = window;
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : setCallBacks
*
* DESCRIPTION: set callbacks impl
*
* PARAMETERS :
* @notify_cb : notify cb
* @data_cb : data cb
* @data_cb_timestamp : data cb with time stamp
* @get_memory : request memory ops table
* @user : user data ptr
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::setCallBacks(camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void *user)
{
mNotifyCb = notify_cb;
mDataCb = data_cb;
mDataCbTimestamp = data_cb_timestamp;
mGetMemory = get_memory;
mCallbackCookie = user;
m_cbNotifier.setCallbacks(notify_cb, data_cb, data_cb_timestamp, user);
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : enableMsgType
*
* DESCRIPTION: enable msg type impl
*
* PARAMETERS :
* @msg_type : msg type mask to be enabled
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::enableMsgType(int32_t msg_type)
{
mMsgEnabled |= msg_type;
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : disableMsgType
*
* DESCRIPTION: disable msg type impl
*
* PARAMETERS :
* @msg_type : msg type mask to be disabled
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::disableMsgType(int32_t msg_type)
{
mMsgEnabled &= ~msg_type;
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : msgTypeEnabled
*
* DESCRIPTION: impl to determine if certain msg_type is enabled
*
* PARAMETERS :
* @msg_type : msg type mask
*
* RETURN : 0 -- not enabled
* non-zero -- enabled
*==========================================================================*/
int QCamera2HardwareInterface::msgTypeEnabled(int32_t msg_type)
{
return (mMsgEnabled & msg_type);
}
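/*
 * Illustrative sketch (comment only): enableMsgType/disableMsgType/msgTypeEnabled
 * are plain bitmask operations on mMsgEnabled. CAMERA_MSG_SHUTTER is used here
 * purely as an example mask from the framework's camera message types.
 *
 *   mMsgEnabled |= CAMERA_MSG_SHUTTER;                   // enableMsgType
 *   mMsgEnabled &= ~CAMERA_MSG_SHUTTER;                  // disableMsgType
 *   bool on = (mMsgEnabled & CAMERA_MSG_SHUTTER) != 0;   // msgTypeEnabled
 */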
/*===========================================================================
* FUNCTION : msgTypeEnabledWithLock
*
* DESCRIPTION: impl to determine if certain msg_type is enabled with lock
*
* PARAMETERS :
* @msg_type : msg type mask
*
* RETURN : 0 -- not enabled
* non-zero -- enabled
*==========================================================================*/
int QCamera2HardwareInterface::msgTypeEnabledWithLock(int32_t msg_type)
{
int enabled = 0;
lockAPI();
enabled = mMsgEnabled & msg_type;
unlockAPI();
return enabled;
}
/*===========================================================================
* FUNCTION : startPreview
*
* DESCRIPTION: start preview impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::startPreview()
{
int32_t rc = NO_ERROR;
CDBG_HIGH("%s: E", __func__);
// start preview stream
if (mParameters.isZSLMode() && mParameters.getRecordingHintValue() != true) {
rc = startChannel(QCAMERA_CH_TYPE_ZSL);
} else {
rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
}
CDBG_HIGH("%s: X", __func__);
return rc;
}
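/*
 * Summary of the channel selection above (derived from the code):
 *
 *   ZSL mode on,  recording hint off -> start QCAMERA_CH_TYPE_ZSL
 *   ZSL mode on,  recording hint on  -> start QCAMERA_CH_TYPE_PREVIEW
 *   ZSL mode off                     -> start QCAMERA_CH_TYPE_PREVIEW
 */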
/*===========================================================================
* FUNCTION : stopPreview
*
* DESCRIPTION: stop preview impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::stopPreview()
{
CDBG_HIGH("%s: E", __func__);
// stop preview stream
stopChannel(QCAMERA_CH_TYPE_ZSL);
stopChannel(QCAMERA_CH_TYPE_PREVIEW);
// delete all channels from preparePreview
unpreparePreview();
//reset focus state
m_currentFocusState = CAM_AF_NOT_FOCUSED;
CDBG_HIGH("%s: X", __func__);
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : storeMetaDataInBuffers
*
* DESCRIPTION: enable store meta data in buffers for video frames impl
*
* PARAMETERS :
* @enable : flag if need enable
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::storeMetaDataInBuffers(int enable)
{
mStoreMetaDataInFrame = enable;
return NO_ERROR;
}
/*===========================================================================
* FUNCTION : startRecording
*
* DESCRIPTION: start recording impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::startRecording()
{
int32_t rc = NO_ERROR;
CDBG_HIGH("%s: E", __func__);
if (mParameters.getRecordingHintValue() == false) {
ALOGE("%s: start recording when hint is false, stop preview first", __func__);
stopPreview();
// Set recording hint to TRUE
mParameters.updateRecordingHintValue(TRUE);
rc = preparePreview();
if (rc == NO_ERROR) {
rc = startChannel(QCAMERA_CH_TYPE_PREVIEW);
}
}
if (rc == NO_ERROR) {
rc = startChannel(QCAMERA_CH_TYPE_VIDEO);
}
#ifdef HAS_MULTIMEDIA_HINTS
if (rc == NO_ERROR) {
if (m_pPowerModule) {
if (m_pPowerModule->powerHint) {
m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=1");
}
}
}
#endif
CDBG_HIGH("%s: X", __func__);
return rc;
}
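/*
 * Illustrative note (assuming the stock power HAL interface guarded by
 * HAS_MULTIMEDIA_HINTS): the video-encode power hint raised here must be
 * paired with the release issued in stopRecording().
 *
 *   m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=1"); // startRecording
 *   // ... encode session ...
 *   m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=0"); // stopRecording
 */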
/*===========================================================================
* FUNCTION : stopRecording
*
* DESCRIPTION: stop recording impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::stopRecording()
{
CDBG_HIGH("%s: E", __func__);
int rc = stopChannel(QCAMERA_CH_TYPE_VIDEO);
#ifdef HAS_MULTIMEDIA_HINTS
if (m_pPowerModule) {
if (m_pPowerModule->powerHint) {
m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE, (void *)"state=0");
}
}
#endif
CDBG_HIGH("%s: X", __func__);
return rc;
}
/*===========================================================================
* FUNCTION : releaseRecordingFrame
*
* DESCRIPTION: return video frame impl
*
* PARAMETERS :
* @opaque : ptr to video frame to be returned
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::releaseRecordingFrame(const void * opaque)
{
int32_t rc = UNKNOWN_ERROR;
QCameraVideoChannel *pChannel =
(QCameraVideoChannel *)m_channels[QCAMERA_CH_TYPE_VIDEO];
CDBG_HIGH("%s: opaque data = %p", __func__, opaque);
if(pChannel != NULL) {
rc = pChannel->releaseFrame(opaque, mStoreMetaDataInFrame > 0);
}
return rc;
}
/*===========================================================================
* FUNCTION : autoFocus
*
* DESCRIPTION: start auto focus impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::autoFocus()
{
int rc = NO_ERROR;
cam_focus_mode_type focusMode = mParameters.getFocusMode();
switch (focusMode) {
case CAM_FOCUS_MODE_AUTO:
case CAM_FOCUS_MODE_MACRO:
case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
rc = mCameraHandle->ops->do_auto_focus(mCameraHandle->camera_handle);
break;
case CAM_FOCUS_MODE_INFINITY:
case CAM_FOCUS_MODE_FIXED:
case CAM_FOCUS_MODE_EDOF:
default:
ALOGE("%s: No ops in focusMode (%d)", __func__, focusMode);
rc = sendEvtNotify(CAMERA_MSG_FOCUS, true, 0);
break;
}
return rc;
}
/*===========================================================================
* FUNCTION : cancelAutoFocus
*
* DESCRIPTION: cancel auto focus impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::cancelAutoFocus()
{
int rc = NO_ERROR;
cam_focus_mode_type focusMode = mParameters.getFocusMode();
switch (focusMode) {
case CAM_FOCUS_MODE_AUTO:
case CAM_FOCUS_MODE_MACRO:
case CAM_FOCUS_MODE_CONTINOUS_VIDEO:
case CAM_FOCUS_MODE_CONTINOUS_PICTURE:
rc = mCameraHandle->ops->cancel_auto_focus(mCameraHandle->camera_handle);
m_currentFocusState = CAM_AF_CANCELLED;
break;
case CAM_FOCUS_MODE_INFINITY:
case CAM_FOCUS_MODE_FIXED:
case CAM_FOCUS_MODE_EDOF:
default:
CDBG("%s: No ops in focusMode (%d)", __func__, focusMode);
break;
}
return rc;
}
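/*
 * Illustrative summary of the focus-mode dispatch used by autoFocus() and
 * cancelAutoFocus() above:
 *
 *   AUTO / MACRO / CONTINOUS_VIDEO / CONTINOUS_PICTURE -> do_auto_focus / cancel_auto_focus
 *   INFINITY / FIXED / EDOF                            -> no backend command
 *                                                         (autoFocus() just reports CAMERA_MSG_FOCUS)
 */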
/*===========================================================================
* FUNCTION : processUFDumps
*
* DESCRIPTION: process UF jpeg dumps for refocus support
*
* PARAMETERS :
* @evt : payload of jpeg event, including information about jpeg encoding
* status, jpeg size and so on.
*
* RETURN : bool
*              true -- UbiFocus refocus is not active, or the final
*                      all-focus image has been dumped
*              false -- more refocus output images are pending, or an
*                       error occurred
*
* NOTE : none
*==========================================================================*/
bool QCamera2HardwareInterface::processUFDumps(qcamera_jpeg_evt_payload_t *evt)
{
bool ret = true;
if (mParameters.isUbiRefocus()) {
int index = getOutputImageCount();
bool allFocusImage = (index == ((int)mParameters.UfOutputCount()-1));
char name[CAM_FN_CNT];
camera_memory_t *jpeg_mem = NULL;
omx_jpeg_ouput_buf_t *jpeg_out = NULL;
uint32_t dataLen;
uint8_t *dataPtr;
if (!m_postprocessor.getJpegMemOpt()) {
dataLen = evt->out_data.buf_filled_len;
dataPtr = evt->out_data.buf_vaddr;
} else {
jpeg_out = (omx_jpeg_ouput_buf_t*) evt->out_data.buf_vaddr;
if (!jpeg_out) {
ALOGE("%s:%d] Null pointer detected", __func__, __LINE__);
return false;
}
jpeg_mem = (camera_memory_t *)jpeg_out->mem_hdl;
if (!jpeg_mem) {
ALOGE("%s:%d] Null pointer detected", __func__, __LINE__);
return false;
}
dataPtr = (uint8_t *)jpeg_mem->data;
dataLen = jpeg_mem->size;
}
if (allFocusImage) {
strncpy(name, "AllFocusImage", CAM_FN_CNT - 1);
index = -1;
} else {
strncpy(name, "0", CAM_FN_CNT - 1);
}
CAM_DUMP_TO_FILE("/data/local/ubifocus", name, index, "jpg",
dataPtr, dataLen);
CDBG("%s:%d] Dump the image %d %d allFocusImage %d", __func__, __LINE__,
getOutputImageCount(), index, allFocusImage);
setOutputImageCount(getOutputImageCount() + 1);
if (!allFocusImage) {
ret = false;
}
}
return ret;
}
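/*
 * Worked example (illustrative count): with mParameters.UfOutputCount() == 3,
 * the dumps produced by processUFDumps() under /data/local/ubifocus are:
 *
 *   output #0 -> name "0",             index 0   (ret = false, more pending)
 *   output #1 -> name "0",             index 1   (ret = false, more pending)
 *   output #2 -> name "AllFocusImage", index -1  (ret = true, sequence done)
 */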
/*===========================================================================
* FUNCTION : configureAdvancedCapture
*
* DESCRIPTION: configure Advanced Capture.
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::configureAdvancedCapture()
{
CDBG_HIGH("%s: E",__func__);
int32_t rc = NO_ERROR;
setOutputImageCount(0);
mParameters.setDisplayFrame(FALSE);
if (mParameters.isUbiFocusEnabled()) {
rc = configureAFBracketing();
} else if (mParameters.isOptiZoomEnabled()) {
rc = configureOptiZoom();
} else if (mParameters.isChromaFlashEnabled()) {
rc = configureFlashBracketing();
} else if (mParameters.isHDREnabled()) {
rc = configureHDRBracketing();
} else if (mParameters.isAEBracketEnabled()) {
rc = configureAEBracketing();
} else {
ALOGE("%s: No Advanced Capture feature enabled!! ", __func__);
rc = BAD_VALUE;
}
CDBG_HIGH("%s: X",__func__);
return rc;
}
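/*
 * Note (derived from the checks above): the advanced-capture modes are treated
 * as mutually exclusive and are evaluated in this priority order:
 *
 *   UbiFocus > OptiZoom > ChromaFlash > HDR > AE bracketing
 *
 * If none of them is enabled, configureAdvancedCapture() returns BAD_VALUE.
 */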
/*===========================================================================
* FUNCTION : configureAFBracketing
*
* DESCRIPTION: configure AF Bracketing.
*
* PARAMETERS :
* @enable : flag to enable or disable AF bracketing
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::configureAFBracketing(bool enable)
{
CDBG_HIGH("%s: E",__func__);
int32_t rc = NO_ERROR;
cam_af_bracketing_t *af_bracketing_need;
af_bracketing_need =
&gCamCaps[mCameraId]->ubifocus_af_bracketing_need;
//Enable AF Bracketing.
cam_af_bracketing_t afBracket;
memset(&afBracket, 0, sizeof(cam_af_bracketing_t));
afBracket.enable = enable;
afBracket.burst_count = af_bracketing_need->burst_count;
for(int8_t i = 0; i < MAX_AF_BRACKETING_VALUES; i++) {
afBracket.focus_steps[i] = af_bracketing_need->focus_steps[i];
CDBG_HIGH("%s: focus_step[%d] = %d", __func__, i, afBracket.focus_steps[i]);
}
//Send cmd to backend to set AF Bracketing for Ubi Focus.
rc = mParameters.commitAFBracket(afBracket);
if ( NO_ERROR != rc ) {
ALOGE("%s: cannot configure AF bracketing", __func__);
return rc;
}
if (enable) {
mParameters.set3ALock(QCameraParameters::VALUE_TRUE);
mIs3ALocked = true;
}
CDBG_HIGH("%s: X",__func__);
return rc;
}
/*===========================================================================
* FUNCTION : configureFlashBracketing
*
* DESCRIPTION: configure Flash Bracketing.
*
* PARAMETERS :
* @enable : flag to enable or disable flash bracketing
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::configureFlashBracketing(bool enable)
{
CDBG_HIGH("%s: E",__func__);
int32_t rc = NO_ERROR;
cam_flash_bracketing_t flashBracket;
memset(&flashBracket, 0, sizeof(cam_flash_bracketing_t));
flashBracket.enable = enable;
//TODO: Hardcoded value.
flashBracket.burst_count = 2;
//Send cmd to backend to set Flash Bracketing for chroma flash.
rc = mParameters.commitFlashBracket(flashBracket);
if ( NO_ERROR != rc ) {
ALOGE("%s: cannot configure flash bracketing", __func__);
}
CDBG_HIGH("%s: X",__func__);
return rc;
}
/*===========================================================================
* FUNCTION : configureHDRBracketing
*
* DESCRIPTION: configure HDR Bracketing.
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::configureHDRBracketing()
{
CDBG_HIGH("%s: E",__func__);
int32_t rc = NO_ERROR;
// 'values' should be in "idx1,idx2,idx3,..." format
uint8_t hdrFrameCount = gCamCaps[mCameraId]->hdr_bracketing_setting.num_frames;
ALOGE("%s : HDR values %d, %d frame count: %d",
__func__,
(int8_t) gCamCaps[mCameraId]->hdr_bracketing_setting.exp_val.values[0],
(int8_t) gCamCaps[mCameraId]->hdr_bracketing_setting.exp_val.values[1],
hdrFrameCount);
// Enable AE Bracketing for HDR
cam_exp_bracketing_t aeBracket;
memset(&aeBracket, 0, sizeof(cam_exp_bracketing_t));
aeBracket.mode =
gCamCaps[mCameraId]->hdr_bracketing_setting.exp_val.mode;
String8 tmp;
for ( unsigned int i = 0; i < hdrFrameCount ; i++ ) {
tmp.appendFormat("%d",
(int8_t) gCamCaps[mCameraId]->hdr_bracketing_setting.exp_val.values[i]);
tmp.append(",");
}
if (mParameters.isHDR1xFrameEnabled()
&& mParameters.isHDR1xExtraBufferNeeded()) {
tmp.appendFormat("%d", 0);
tmp.append(",");
}
if( !tmp.isEmpty() &&
( MAX_EXP_BRACKETING_LENGTH > tmp.length() ) ) {
//Trim last comma
memset(aeBracket.values, '\0', MAX_EXP_BRACKETING_LENGTH);
memcpy(aeBracket.values, tmp.string(), tmp.length() - 1);
}
ALOGE("%s : HDR config values %s",
__func__,
aeBracket.values);
rc = mParameters.setHDRAEBracket(aeBracket);
if ( NO_ERROR != rc ) {
ALOGE("%s: cannot configure HDR bracketing", __func__);
return rc;
}
CDBG_HIGH("%s: X",__func__);
return rc;
}
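/*
 * Worked example (illustrative values): with
 * hdr_bracketing_setting.exp_val.values = {-6, 6} and num_frames = 2, the loop
 * above builds "-6,6,", the optional HDR 1x entry appends "0," giving "-6,6,0,",
 * and the trailing comma is trimmed when copying into aeBracket.values, so the
 * backend receives "-6,6" or "-6,6,0".
 */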
/*===========================================================================
* FUNCTION : configureAEBracketing
*
* DESCRIPTION: configure AE Bracketing.
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::configureAEBracketing()
{
CDBG_HIGH("%s: E",__func__);
int32_t rc = NO_ERROR;
rc = mParameters.setAEBracketing();
if ( NO_ERROR != rc ) {
ALOGE("%s: cannot configure AE bracketing", __func__);
return rc;
}
CDBG_HIGH("%s: X",__func__);
return rc;
}
/*===========================================================================
* FUNCTION : configureOptiZoom
*
* DESCRIPTION: configure Opti Zoom.
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::configureOptiZoom()
{
int32_t rc = NO_ERROR;
//store current zoom level.
mZoomLevel = (uint8_t) mParameters.getInt(CameraParameters::KEY_ZOOM);
//set zoom level to 1x;
mParameters.setAndCommitZoom(0);
mParameters.set3ALock(QCameraParameters::VALUE_TRUE);
mIs3ALocked = true;
return rc;
}
/*===========================================================================
* FUNCTION : startAdvancedCapture
*
* DESCRIPTION: starts advanced capture based on capture type
*
* PARAMETERS :
* @pChannel : channel.
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::startAdvancedCapture(
QCameraPicChannel *pChannel)
{
CDBG_HIGH("%s: Start bracketing", __func__);
int32_t rc = NO_ERROR;
if(mParameters.isUbiFocusEnabled()) {
rc = pChannel->startAdvancedCapture(MM_CAMERA_AF_BRACKETING);
} else if (mParameters.isChromaFlashEnabled()) {
rc = pChannel->startAdvancedCapture(MM_CAMERA_FLASH_BRACKETING);
} else if (mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
rc = pChannel->startAdvancedCapture(MM_CAMERA_AE_BRACKETING);
} else if (mParameters.isOptiZoomEnabled()) {
rc = pChannel->startAdvancedCapture(MM_CAMERA_ZOOM_1X);
} else {
ALOGE("%s: No Advanced Capture feature enabled!",__func__);
rc = BAD_VALUE;
}
return rc;
}
/*===========================================================================
* FUNCTION : takePicture
*
* DESCRIPTION: take picture impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::takePicture()
{
int rc = NO_ERROR;
// Get total number of snapshots (retro + regular)
uint8_t numSnapshots = mParameters.getNumOfSnapshots();
// Get number of retro-active snapshots
uint8_t numRetroSnapshots = mParameters.getNumOfRetroSnapshots();
CDBG_HIGH("%s: E", __func__);
// Reset retro snapshot count if retro-active snapshots do not apply
if (!isRetroPicture() || !mParameters.isZSLMode()) {
numRetroSnapshots = 0;
CDBG_HIGH("%s: [ZSL Retro] Reset retro snapshot count to zero", __func__);
}
if (mParameters.isUbiFocusEnabled() ||
mParameters.isOptiZoomEnabled() ||
mParameters.isHDREnabled() ||
mParameters.isChromaFlashEnabled() ||
mParameters.isAEBracketEnabled()) {
rc = configureAdvancedCapture();
if (rc == NO_ERROR) {
numSnapshots = mParameters.getBurstCountForAdvancedCapture();
}
}
CDBG_HIGH("%s: [ZSL Retro] numSnapshots = %d, numRetroSnapshots = %d",
__func__, numSnapshots, numRetroSnapshots);
getOrientation();
if (mParameters.isZSLMode()) {
QCameraPicChannel *pZSLChannel =
(QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
if (NULL != pZSLChannel) {
// start postprocessor
rc = m_postprocessor.start(pZSLChannel);
if (rc != NO_ERROR) {
ALOGE("%s: cannot start postprocessor", __func__);
return rc;
}
if (mParameters.isUbiFocusEnabled() ||
mParameters.isOptiZoomEnabled() ||
mParameters.isHDREnabled() ||
mParameters.isChromaFlashEnabled() ||
mParameters.isAEBracketEnabled()) {
rc = startAdvancedCapture(pZSLChannel);
if (rc != NO_ERROR) {
ALOGE("%s: cannot start zsl advanced capture", __func__);
return rc;
}
}
if ( mLongshotEnabled ) {
mCameraHandle->ops->start_zsl_snapshot(
mCameraHandle->camera_handle,
pZSLChannel->getMyHandle());
}
rc = pZSLChannel->takePicture(numSnapshots, numRetroSnapshots);
if (rc != NO_ERROR) {
ALOGE("%s: cannot take ZSL picture, stop pproc", __func__);
m_postprocessor.stop();
return rc;
}
} else {
ALOGE("%s: ZSL channel is NULL", __func__);
return UNKNOWN_ERROR;
}
} else {
// start snapshot
if (mParameters.isJpegPictureFormat() ||
mParameters.isNV16PictureFormat() ||
mParameters.isNV21PictureFormat()) {
if (!isLongshotEnabled()) {
rc = addCaptureChannel();
// normal capture case
// need to stop preview channel
stopChannel(QCAMERA_CH_TYPE_PREVIEW);
delChannel(QCAMERA_CH_TYPE_PREVIEW);
rc = declareSnapshotStreams();
if (NO_ERROR != rc) {
delChannel(QCAMERA_CH_TYPE_CAPTURE);
return rc;
}
waitDefferedWork(mSnapshotJob);
waitDefferedWork(mMetadataJob);
waitDefferedWork(mRawdataJob);
{
DefferWorkArgs args;
DefferAllocBuffArgs allocArgs;
memset(&args, 0, sizeof(DefferWorkArgs));
memset(&allocArgs, 0, sizeof(DefferAllocBuffArgs));
allocArgs.ch = m_channels[QCAMERA_CH_TYPE_CAPTURE];
allocArgs.type = CAM_STREAM_TYPE_POSTVIEW;
args.allocArgs = allocArgs;
mPostviewJob = queueDefferedWork(CMD_DEFF_ALLOCATE_BUFF,
args);
if ( mPostviewJob == -1)
rc = UNKNOWN_ERROR;
}
waitDefferedWork(mPostviewJob);
} else {
// longshot capture case
// need to stop preview channel
stopChannel(QCAMERA_CH_TYPE_PREVIEW);
delChannel(QCAMERA_CH_TYPE_PREVIEW);
rc = declareSnapshotStreams();
if (NO_ERROR != rc) {
return rc;
}
rc = addCaptureChannel();
}
if ((rc == NO_ERROR) &&
(NULL != m_channels[QCAMERA_CH_TYPE_CAPTURE])) {
// configure capture channel
rc = m_channels[QCAMERA_CH_TYPE_CAPTURE]->config();
if (rc != NO_ERROR) {
ALOGE("%s: cannot configure capture channel", __func__);
delChannel(QCAMERA_CH_TYPE_CAPTURE);
return rc;
}
DefferWorkArgs args;
memset(&args, 0, sizeof(DefferWorkArgs));
args.pprocArgs = m_channels[QCAMERA_CH_TYPE_CAPTURE];
mReprocJob = queueDefferedWork(CMD_DEFF_PPROC_START,
args);
// start capture channel
rc = m_channels[QCAMERA_CH_TYPE_CAPTURE]->start();
if (rc != NO_ERROR) {
ALOGE("%s: cannot start capture channel", __func__);
delChannel(QCAMERA_CH_TYPE_CAPTURE);
return rc;
}
QCameraPicChannel *pCapChannel =
(QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
if (NULL != pCapChannel) {
if (mParameters.isUbiFocusEnabled() ||
mParameters.isChromaFlashEnabled()) {
rc = startAdvancedCapture(pCapChannel);
if (rc != NO_ERROR) {
ALOGE("%s: cannot start advanced capture", __func__);
return rc;
}
}
}
if ( mLongshotEnabled ) {
rc = longShot();
if (NO_ERROR != rc) {
delChannel(QCAMERA_CH_TYPE_CAPTURE);
return rc;
}
}
} else {
ALOGE("%s: cannot add capture channel", __func__);
return rc;
}
} else {
stopChannel(QCAMERA_CH_TYPE_PREVIEW);
delChannel(QCAMERA_CH_TYPE_PREVIEW);
rc = addRawChannel();
if (rc == NO_ERROR) {
// start postprocessor
rc = m_postprocessor.start(m_channels[QCAMERA_CH_TYPE_RAW]);
if (rc != NO_ERROR) {
ALOGE("%s: cannot start postprocessor", __func__);
delChannel(QCAMERA_CH_TYPE_RAW);
return rc;
}
rc = startChannel(QCAMERA_CH_TYPE_RAW);
if (rc != NO_ERROR) {
ALOGE("%s: cannot start raw channel", __func__);
m_postprocessor.stop();
delChannel(QCAMERA_CH_TYPE_RAW);
return rc;
}
} else {
ALOGE("%s: cannot add raw channel", __func__);
return rc;
}
}
}
CDBG_HIGH("%s: X", __func__);
return rc;
}
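/*
 * Illustrative sketch of the deferred-work pattern used in takePicture() above
 * (job-handle type and the -1 sentinel as used by the calls in this function):
 *
 *   DefferWorkArgs args;
 *   memset(&args, 0, sizeof(DefferWorkArgs));
 *   args.allocArgs.ch   = m_channels[QCAMERA_CH_TYPE_CAPTURE];
 *   args.allocArgs.type = CAM_STREAM_TYPE_POSTVIEW;
 *   int32_t job = queueDefferedWork(CMD_DEFF_ALLOCATE_BUFF, args);
 *   if (job != -1) {
 *       waitDefferedWork(job);   // block until the postview buffers are allocated
 *   }
 */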
/*===========================================================================
* FUNCTION : declareSnapshotStreams
*
* DESCRIPTION: Configure backend with expected snapshot streams
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::declareSnapshotStreams()
{
int rc = NO_ERROR;
// Update stream info configuration
pthread_mutex_lock(&m_parm_lock);
rc = mParameters.setStreamConfigure(true, mLongshotEnabled);
if (rc != NO_ERROR) {
ALOGE("%s: setStreamConfigure failed %d", __func__, rc);
pthread_mutex_unlock(&m_parm_lock);
return rc;
}
pthread_mutex_unlock(&m_parm_lock);
return rc;
}
/*===========================================================================
* FUNCTION : longShot
*
* DESCRIPTION: Queue one more ZSL frame
* in the longshot pipe.
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int32_t QCamera2HardwareInterface::longShot()
{
int32_t rc = NO_ERROR;
uint8_t numSnapshots = mParameters.getNumOfSnapshots();
QCameraPicChannel *pChannel = NULL;
if (mParameters.isZSLMode()) {
pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
} else {
pChannel = (QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_CAPTURE];
}
if (NULL != pChannel) {
rc = pChannel->takePicture(numSnapshots, 0);
} else {
ALOGE("%s: Capture channel not initialized!", __func__);
rc = NO_INIT;
}
return rc;
}
/*===========================================================================
* FUNCTION : stopCaptureChannel
*
* DESCRIPTION: Stops capture channel
*
* PARAMETERS :
* @destroy : Set to true to stop and delete camera channel.
* Set to false to only stop capture channel.
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::stopCaptureChannel(bool destroy)
{
if (mParameters.isJpegPictureFormat() ||
mParameters.isNV16PictureFormat() ||
mParameters.isNV21PictureFormat()) {
stopChannel(QCAMERA_CH_TYPE_CAPTURE);
if (destroy) {
// Destroy camera channel but don't release context
delChannel(QCAMERA_CH_TYPE_CAPTURE, false);
}
}
return NO_ERROR;
}
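/*
 * Illustrative usage of the destroy flag handled above:
 *
 *   stopCaptureChannel(false);  // stop streaming, keep the capture channel object
 *   stopCaptureChannel(true);   // stop and delete the channel, keep the camera context
 */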
/*===========================================================================
* FUNCTION : cancelPicture
*
* DESCRIPTION: cancel picture impl
*
* PARAMETERS : none
*
* RETURN : int32_t type of status
* NO_ERROR -- success
* non-zero failure code
*==========================================================================*/
int QCamera2HardwareInterface::cancelPicture()
{
waitDefferedWork(mReprocJob);
//stop post processor
m_postprocessor.stop();
mParameters.setDisplayFrame(TRUE);
if ( mParameters.isHDREnabled() || mParameters.isAEBracketEnabled()) {
mParameters.stopAEBracket();
}
if (mParameters.isZSLMode()) {
QCameraPicChannel *pZSLChannel =
(QCameraPicChannel *)m_channels[QCAMERA_CH_TYPE_ZSL];
if (NULL != pZSLChannel) {
pZSLChannel->cancelPicture();
}
} else {
// normal capture case
if (mParameters.isJpegPictureFormat() ||
mParameters.isNV16PictureFormat() ||
mParameters.isNV21PictureFormat()) {
stopChannel(QCAMERA_CH_TYPE_CAPTURE);
delChannel(QCAMERA_CH_TYPE_CAPTURE);
} else {
stopChannel(QCAMERA_CH_TYPE_RAW);
delChannel(QCAMERA_CH_TYPE_RAW);