/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2010, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
/*!
* \file ExynosCameraHWInterface.h
* \brief source file for Android Camera HAL
* \author thun.hwang(thun.hwang@samsung.com)
* \date 2010/06/03
*
* <b>Revision History: </b>
* - 2011/12/31 : thun.hwang(thun.hwang@samsung.com) \n
* Initial version
*
* - 2012/02/01 : Sangwoo, Park(sw5771.park@samsung.com) \n
* Adjust Android Standard features
*
* - 2012/03/14 : sangwoo.park(sw5771.park@samsung.com) \n
* Change file, class name to ExynosXXX.
*
*/
#include <sys/types.h>
#include <sys/stat.h>
//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHWInterface"
#include <utils/Log.h>
#include "ExynosCameraHWInterface.h"
#include "exynos_format.h"
#define VIDEO_COMMENT_MARKER_H (0xFFBE)
#define VIDEO_COMMENT_MARKER_L (0xFFBF)
#define VIDEO_COMMENT_MARKER_LENGTH (4)
#define JPEG_EOI_MARKER (0xFFD9)
#define HIBYTE(x) (((x) >> 8) & 0xFF)
#define LOBYTE(x) ((x) & 0xFF)
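// 0xFFD9 is the standard JPEG end-of-image (EOI) marker; the comment markers
// above are presumably used to locate a vendor comment segment when parsing the
// JPEG stream produced by the hardware encoder. HIBYTE/LOBYTE simply split a
// 16-bit marker value into its two bytes.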
/* TODO: These values will be changed */
#define BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR "0.10,1.20,Infinity"
#define FRONT_CAMERA_FOCUS_DISTANCES_STR "0.20,0.25,Infinity"
#define BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR "0.10,0.20,Infinity"
#define BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR "0.10,1.20,Infinity"
#define BACK_CAMERA_FOCUS_DISTANCE_INFINITY "Infinity"
#define FRONT_CAMERA_FOCUS_DISTANCE_INFINITY "Infinity"
// This hack does two things:
// -- it sets preview to NV21 (YUV420SP)
// -- it sets gralloc to YV12
//
// The reason being: the samsung encoder understands only yuv420sp, and gralloc
// does yv12 and rgb565. So what we do is we break up the interleaved UV in
// separate V and U planes, which makes preview look good and enables the
// encoder as well.
//
// FIXME: Samsung needs to enable support for proper yv12 coming out of the
// camera, and to fix their video encoder to work with yv12.
// FIXME: It also seems like either Samsung's YUV420SP (NV21) or img's YV12 has
// the color planes switched. We need to figure out which side is doing it
// wrong and have the respective party fix it.
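// Rough sketch of the two layouts for a WxH frame (ignoring stride/alignment):
//   NV21 (YUV420SP): [ Y: W*H bytes ][ V/U interleaved: W*H/2 bytes ]
//   YV12 (YUV420P) : [ Y: W*H bytes ][ V: W*H/4 bytes ][ U: W*H/4 bytes ]
// so de-interleaving the chroma plane into separate V and U planes is enough to
// hand gralloc a YV12 buffer while the encoder keeps consuming yuv420sp.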
namespace android {
static const int INITIAL_SKIP_FRAME = 8;
static const int EFFECT_SKIP_FRAME = 1;
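// INITIAL_SKIP_FRAME is presumably the number of preview frames dropped right
// after the stream starts while the sensor settles; EFFECT_SKIP_FRAME is the
// number of frames dropped after a color-effect change (see the
// m_setSkipFrame(EFFECT_SKIP_FRAME) call in setParameters()).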
gralloc_module_t const* ExynosCameraHWInterface::m_grallocHal;
ExynosCameraHWInterface::ExynosCameraHWInterface(int cameraId, camera_device_t *dev)
:
m_captureInProgress(false),
m_skipFrame(0),
m_notifyCb(0),
m_dataCb(0),
m_dataCbTimestamp(0),
m_callbackCookie(0),
m_msgEnabled(0),
m_faceDetected(false),
m_halDevice(dev),
m_numOfAvailableVideoBuf(0)
{
ALOGV("DEBUG(%s):", __func__);
int ret = 0;
m_previewWindow = NULL;
m_secCamera = ExynosCamera::createInstance();
for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
m_previewHeap[i] = NULL;
m_previewBufHandle[i] = NULL;
m_previewStride[i] = 0;
m_avaliblePreviewBufHandle[i] = false;
m_flagGrallocLocked[i] = false;
m_matchedGrallocIndex[i] = -1;
m_grallocVirtAddr[i] = NULL;
}
m_minUndequeuedBufs = 0;
#ifndef USE_3DNR_DMAOUT
m_cntVideoBuf = 0;
#endif
m_oldPictureBufQueueHead = NULL;
m_getMemoryCb = NULL;
m_exynosPreviewCSC = NULL;
m_exynosPictureCSC = NULL;
m_exynosVideoCSC = NULL;
m_frameMetadata.number_of_faces = 0;
m_frameMetadata.faces = m_faces;
for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
m_videoHeap[i] = NULL;
m_resizedVideoHeap[i] = NULL;
}
m_ion_client = ion_client_create();
for (int i = 0; i < NUM_OF_PICTURE_BUF; i++)
m_pictureHeap[i] = NULL;
m_rawHeap = NULL;
m_exitAutoFocusThread = false;
m_exitPreviewThread = false;
m_exitVideoThread = false;
/* whether the PreviewThread is actively previewing or stopped. we
 * create the thread, but it is initially in the stopped state.
 */
m_previewRunning = false;
m_videoRunning = false;
m_pictureRunning = false;
#ifndef USE_3DNR_DMAOUT
m_videoStart = false;
#endif
m_previewStartDeferred = false;
m_recordingHint = false;
if (!m_grallocHal) {
ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
if (ret)
ALOGE("ERR(%s):Fail on loading gralloc HAL", __func__);
}
if (m_secCamera->create(cameraId) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->create(%d)", __func__, cameraId);
return;
}
m_initDefaultParameters(cameraId);
CSC_METHOD cscMethod = CSC_METHOD_HW;
m_exynosPreviewCSC = csc_init(cscMethod);
if (m_exynosPreviewCSC == NULL)
ALOGE("ERR(%s):csc_init() fail", __func__);
m_exynosPictureCSC = csc_init(cscMethod);
if (m_exynosPictureCSC == NULL)
ALOGE("ERR(%s):csc_init() fail", __func__);
m_exynosVideoCSC = csc_init(cscMethod);
if (m_exynosVideoCSC == NULL)
ALOGE("ERR(%s):csc_init() fail", __func__);
m_previewThread = new PreviewThread(this);
m_videoThread = new VideoThread(this);
m_autoFocusThread = new AutoFocusThread(this);
m_pictureThread = new PictureThread(this);
}
ExynosCameraHWInterface::~ExynosCameraHWInterface()
{
close(m_ion_client);
this->release();
}
status_t ExynosCameraHWInterface::setPreviewWindow(preview_stream_ops *w)
{
m_previewWindow = w;
ALOGV("DEBUG(%s):m_previewWindow %p", __func__, m_previewWindow);
if (m_previewWindow == NULL) {
ALOGV("DEBUG(%s):preview window is NULL!", __func__);
return OK;
}
m_previewLock.lock();
if (m_previewRunning == true && m_previewStartDeferred == false) {
ALOGV("DEBUG(%s):stop preview (window change)", __func__);
m_stopPreviewInternal();
}
if (m_previewWindow->get_min_undequeued_buffer_count(m_previewWindow, &m_minUndequeuedBufs) != 0) {
ALOGE("ERR(%s):could not retrieve min undequeued buffer count", __func__);
m_previewLock.unlock();
return INVALID_OPERATION;
}
if (NUM_OF_PREVIEW_BUF <= m_minUndequeuedBufs) {
ALOGE("ERR(%s):min undequeued buffer count %d is too high (expecting at most %d)", __func__,
m_minUndequeuedBufs, NUM_OF_PREVIEW_BUF - 1);
}
if (m_previewWindow->set_buffer_count(m_previewWindow, NUM_OF_PREVIEW_BUF) != 0) {
ALOGE("ERR(%s):could not set buffer count", __func__);
m_previewLock.unlock();
return INVALID_OPERATION;
}
int previewW, previewH;
int hal_pixel_format = HAL_PIXEL_FORMAT_YV12;
m_params.getPreviewSize(&previewW, &previewH);
const char *str_preview_format = m_params.getPreviewFormat();
ALOGV("DEBUG(%s):str preview format %s width : %d height : %d ", __func__, str_preview_format, previewW, previewH);
if (!strcmp(str_preview_format,
CameraParameters::PIXEL_FORMAT_RGB565)) {
hal_pixel_format = HAL_PIXEL_FORMAT_RGB_565;
} else if (!strcmp(str_preview_format,
CameraParameters::PIXEL_FORMAT_RGBA8888)) {
hal_pixel_format = HAL_PIXEL_FORMAT_RGBA_8888;
} else if (!strcmp(str_preview_format,
CameraParameters::PIXEL_FORMAT_YUV420SP)) {
hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
} else if (!strcmp(str_preview_format,
CameraParameters::PIXEL_FORMAT_YUV420P))
hal_pixel_format = HAL_PIXEL_FORMAT_YV12;
if (m_previewWindow->set_usage(m_previewWindow,
GRALLOC_USAGE_SW_WRITE_OFTEN |
#ifdef USE_EGL
#else
GRALLOC_USAGE_HWC_HWOVERLAY |
#endif
GRALLOC_USAGE_HW_ION) != 0) {
ALOGE("ERR(%s):could not set usage on gralloc buffer", __func__);
return INVALID_OPERATION;
}
if (m_previewWindow->set_buffers_geometry(m_previewWindow,
previewW, previewH,
hal_pixel_format) != 0) {
ALOGE("ERR(%s):could not set buffers geometry to %s",
__func__, str_preview_format);
return INVALID_OPERATION;
}
if (m_previewRunning == true && m_previewStartDeferred == true) {
ALOGV("DEBUG(%s):start/resume preview", __func__);
if (m_startPreviewInternal() == true) {
m_previewStartDeferred = false;
m_previewCondition.signal();
}
}
m_previewLock.unlock();
return OK;
}
void ExynosCameraHWInterface::setCallbacks(camera_notify_callback notify_cb,
camera_data_callback data_cb,
camera_data_timestamp_callback data_cb_timestamp,
camera_request_memory get_memory,
void *user)
{
m_notifyCb = notify_cb;
m_dataCb = data_cb;
m_dataCbTimestamp = data_cb_timestamp;
m_getMemoryCb = get_memory;
m_callbackCookie = user;
}
void ExynosCameraHWInterface::enableMsgType(int32_t msgType)
{
ALOGV("DEBUG(%s):msgType = 0x%x, m_msgEnabled before = 0x%x",
__func__, msgType, m_msgEnabled);
m_msgEnabled |= msgType;
m_previewLock.lock();
if ( msgType & CAMERA_MSG_PREVIEW_FRAME
&& m_previewRunning == true
&& m_previewStartDeferred == true) {
ALOGV("DEBUG(%s):starting deferred preview", __func__);
if (m_startPreviewInternal() == true) {
m_previewStartDeferred = false;
m_previewCondition.signal();
}
}
m_previewLock.unlock();
ALOGV("DEBUG(%s):m_msgEnabled = 0x%x", __func__, m_msgEnabled);
}
void ExynosCameraHWInterface::disableMsgType(int32_t msgType)
{
ALOGV("DEBUG(%s):msgType = 0x%x, m_msgEnabled before = 0x%x",
__func__, msgType, m_msgEnabled);
m_msgEnabled &= ~msgType;
ALOGV("DEBUG(%s):m_msgEnabled = 0x%x", __func__, m_msgEnabled);
}
bool ExynosCameraHWInterface::msgTypeEnabled(int32_t msgType)
{
return (m_msgEnabled & msgType);
}
status_t ExynosCameraHWInterface::startPreview()
{
int ret = OK;
ALOGV("DEBUG(%s):", __func__);
Mutex::Autolock lock(m_stateLock);
if (m_captureInProgress == true) {
ALOGE("%s : capture in progress, not allowed", __func__);
return INVALID_OPERATION;
}
m_previewLock.lock();
if (m_previewRunning == true) {
ALOGE("%s : preview thread already running", __func__);
m_previewLock.unlock();
return INVALID_OPERATION;
}
m_previewRunning = true;
m_previewStartDeferred = false;
if (m_previewWindow == NULL) {
if (!(m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
ALOGV("DEBUG(%s):deferring", __func__);
m_previewStartDeferred = true;
m_previewLock.unlock();
return NO_ERROR;
}
ALOGE("%s(%d): m_previewWindow is NULL", __func__, __LINE__);
return UNKNOWN_ERROR;
}
if (m_startPreviewInternal() == true) {
m_previewCondition.signal();
ret = OK;
} else {
ret = UNKNOWN_ERROR;
}
m_previewLock.unlock();
return ret;
}
void ExynosCameraHWInterface::stopPreview()
{
ALOGV("DEBUG(%s):", __func__);
/* request that the preview thread stop. */
m_previewLock.lock();
m_stopPreviewInternal();
m_previewLock.unlock();
}
bool ExynosCameraHWInterface::previewEnabled()
{
Mutex::Autolock lock(m_previewLock);
ALOGV("DEBUG(%s):%d", __func__, m_previewRunning);
return m_previewRunning;
}
status_t ExynosCameraHWInterface::storeMetaDataInBuffers(bool enable)
{
if (!enable) {
ALOGE("Non-m_frameMetadata buffer mode is not supported!");
return INVALID_OPERATION;
}
return OK;
}
status_t ExynosCameraHWInterface::startRecording()
{
ALOGV("DEBUG(%s):", __func__);
Mutex::Autolock lock(m_videoLock);
int videoW, videoH, videoFormat, videoFramesize;
m_secCamera->getVideoSize(&videoW, &videoH);
videoFormat = m_secCamera->getVideoFormat();
videoFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), videoW, videoH);
int orgVideoFrameSize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), m_orgVideoRect.w, m_orgVideoRect.h);
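// videoFramesize covers the video size currently configured in the driver,
// while orgVideoFrameSize covers the size originally requested through
// setVideoSize() (m_orgVideoRect) and is presumably what the resized-copy
// heaps below are sized for.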
for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
#ifdef USE_3DNR_DMAOUT
ExynosBuffer videoBuf;
if (m_videoHeap[i] != NULL) {
m_videoHeap[i]->release(m_videoHeap[i]);
m_videoHeap[i] = 0;
}
m_videoHeap[i] = m_getMemoryCb(-1, videoFramesize, 1, NULL);
if (!m_videoHeap[i]) {
ALOGE("ERR(%s):m_getMemoryCb(m_videoHeap[%d], size(%d) fail", __func__, i, videoFramesize);
return UNKNOWN_ERROR;
}
m_getAlignedYUVSize(videoFormat, videoW, videoH, &videoBuf);
videoBuf.virt.extP[0] = (char *)m_videoHeap[i]->data;
for (int j = 1; j < 3; j++) {
if (videoBuf.size.extS[j] != 0)
videoBuf.virt.extP[j] = videoBuf.virt.extP[j-1] + videoBuf.size.extS[j-1];
else
videoBuf.virt.extP[j] = NULL;
}
videoBuf.reserved.p = i;
m_secCamera->setVideoBuf(&videoBuf);
#endif
// heap for the original (requested) video size
if (m_resizedVideoHeap[i] != NULL) {
m_resizedVideoHeap[i]->release(m_resizedVideoHeap[i]);
m_resizedVideoHeap[i] = 0;
}
m_resizedVideoHeap[i] = m_getMemoryCb(-1, orgVideoFrameSize, 1, NULL);
if (!m_resizedVideoHeap[i]) {
ALOGE("ERR(%s):m_getMemoryCb(m_resizedVideoHeap[%d], size(%d) fail", __func__, i, orgVideoFrameSize);
return UNKNOWN_ERROR;
}
}
if (m_videoRunning == false) {
if (m_secCamera->startVideo() == false) {
ALOGE("ERR(%s):Fail on m_secCamera->startVideo()", __func__);
return UNKNOWN_ERROR;
}
m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;
#ifdef USE_3DNR_DMAOUT
m_videoRunning = true;
m_videoCondition.signal();
#else
m_videoStart = true;
#endif
}
return NO_ERROR;
}
void ExynosCameraHWInterface::stopRecording()
{
ALOGV("DEBUG(%s):", __func__);
#ifndef USE_3DNR_DMAOUT
m_videoStart = false;
#endif
if (m_videoRunning == true) {
m_videoRunning = false;
Mutex::Autolock lock(m_videoLock);
m_videoCondition.signal();
/* wait until video thread is stopped */
m_videoStoppedCondition.wait(m_videoLock);
} else
ALOGV("DEBUG(%s):video not running, doing nothing", __func__);
}
bool ExynosCameraHWInterface::recordingEnabled()
{
return m_videoStart;
}
void ExynosCameraHWInterface::releaseRecordingFrame(const void *opaque)
{
// This lock makes video lock up
// Mutex::Autolock lock(m_videoLock);
int i;
bool find = false;
// HACK : this makes recording slow
/*
for (i = 0; i < NUM_OF_VIDEO_BUF; i++) {
if ((char *)m_videoHeap[i]->data == (char *)opaque) {
find = true;
break;
}
}
if (find == true) {
ExynosBuffer videoBuf;
videoBuf.reserved.p = i;
m_secCamera->putVideoBuf(&videoBuf);
m_numOfAvailableVideoBuf++;
if (NUM_OF_VIDEO_BUF <= m_numOfAvailableVideoBuf)
m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;
} else {
ALOGV("DEBUG(%s):no matched index(%p)", __func__, (char *)opaque);
}
*/
}
status_t ExynosCameraHWInterface::autoFocus()
{
ALOGV("DEBUG(%s):", __func__);
/* signal m_autoFocusThread to run once */
m_focusCondition.signal();
return NO_ERROR;
}
status_t ExynosCameraHWInterface::cancelAutoFocus()
{
if (m_secCamera->cancelAutoFocus() == false) {
ALOGE("ERR(%s):Fail on m_secCamera->cancelAutoFocus()", __func__);
return UNKNOWN_ERROR;
}
return NO_ERROR;
}
status_t ExynosCameraHWInterface::takePicture()
{
Mutex::Autolock lock(m_stateLock);
if (m_captureInProgress == true) {
ALOGE("%s : capture already in progress", __func__);
return INVALID_OPERATION;
}
if (m_pictureRunning == false) {
ALOGI("%s(%d): m_pictureRunning is false", __func__, __LINE__);
if (m_startPictureInternal() == false) {
ALOGE("%s(%d): m_startPictureInternal() fail!!!", __func__, __LINE__);
return INVALID_OPERATION;
}
}
m_pictureLock.lock();
m_captureInProgress = true;
m_pictureLock.unlock();
if (m_pictureThread->run("CameraPictureThread", PRIORITY_DEFAULT) != NO_ERROR) {
ALOGE("%s : couldn't run picture thread", __func__);
return INVALID_OPERATION;
}
return NO_ERROR;
}
status_t ExynosCameraHWInterface::cancelPicture()
{
ALOGV("DEBUG(%s):", __func__);
if (m_pictureThread.get()) {
ALOGV("DEBUG(%s):waiting for picture thread to exit", __func__);
m_pictureThread->requestExitAndWait();
ALOGV("DEBUG(%s):picture thread has exited", __func__);
}
return NO_ERROR;
}
status_t ExynosCameraHWInterface::setParameters(const CameraParameters& params)
{
ALOGV("DEBUG(%s):", __func__);
status_t ret = NO_ERROR;
/* if someone calls us while the picture thread is running, it could
 * screw up the sensor quite a bit, so wait (with a timeout) for the
 * in-flight capture to finish before applying new parameters.
 */
m_stateLock.lock();
if (m_captureInProgress == true) {
    m_stateLock.unlock();
    m_pictureLock.lock();
    /* wait up to 2 seconds (the timeout value is in nanoseconds) */
    m_pictureCondition.waitRelative(m_pictureLock, (2000 * 1000000));
    m_pictureLock.unlock();
    m_stateLock.lock();
}
m_stateLock.unlock();
///////////////////////////////////////////////////
// Google Official API : Camera.Parameters
// http://developer.android.com/reference/android/hardware/Camera.Parameters.html
///////////////////////////////////////////////////
// recording hint
const char *newRecordingHint = params.get(CameraParameters::KEY_RECORDING_HINT);
if (newRecordingHint != NULL) {
if (strcmp(newRecordingHint, "true") == 0)
m_recordingHint = true;
else
m_recordingHint = false;
m_secCamera->setRecordingHint(m_recordingHint);
}
// preview size
int newPreviewW = 0;
int newPreviewH = 0;
int newCalPreviewW = 0;
int newCalPreviewH = 0;
int previewMaxW = 0;
int previewMaxH = 0;
params.getPreviewSize(&newPreviewW, &newPreviewH);
// By default, run the preview at the maximum supported preview size
m_secCamera->getSupportedPreviewSizes(&previewMaxW, &previewMaxH);
newCalPreviewW = previewMaxW;
newCalPreviewH = previewMaxH;
// When recording, run the preview at the maximum supported video size
if (m_recordingHint == true) {
m_secCamera->getSupportedVideoSizes(&newCalPreviewW, &newCalPreviewH);
if ( previewMaxW < newCalPreviewW
|| previewMaxH < newCalPreviewH) {
newCalPreviewW = previewMaxW;
newCalPreviewH = previewMaxH;
}
}
m_orgPreviewRect.w = newPreviewW;
m_orgPreviewRect.h = newPreviewH;
// TODO : calibrate original preview ratio
//m_getRatioSize(newCalPreviewW, newCalPreviewH, newPreviewW, newPreviewH, &newPreviewW, &newPreviewH);
newPreviewW = newCalPreviewW;
newPreviewH = newCalPreviewH;
const char *strNewPreviewFormat = params.getPreviewFormat();
ALOGV("DEBUG(%s):newPreviewW x newPreviewH = %dx%d, format = %s",
__func__, newPreviewW, newPreviewH, strNewPreviewFormat);
if (0 < newPreviewW &&
0 < newPreviewH &&
strNewPreviewFormat != NULL &&
m_isSupportedPreviewSize(newPreviewW, newPreviewH) == true) {
int newPreviewFormat = 0;
if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGB565))
newPreviewFormat = V4L2_PIX_FMT_RGB565;
else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGBA8888))
newPreviewFormat = V4L2_PIX_FMT_RGB32;
else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420SP))
newPreviewFormat = V4L2_PIX_FMT_NV21;
else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420P))
newPreviewFormat = V4L2_PIX_FMT_YVU420M;
else if (!strcmp(strNewPreviewFormat, "yuv420sp_custom"))
newPreviewFormat = V4L2_PIX_FMT_NV12T;
else if (!strcmp(strNewPreviewFormat, "yuv422i"))
newPreviewFormat = V4L2_PIX_FMT_YUYV;
else if (!strcmp(strNewPreviewFormat, "yuv422p"))
newPreviewFormat = V4L2_PIX_FMT_YUV422P;
else
newPreviewFormat = V4L2_PIX_FMT_NV21; //for 3rd party
m_orgPreviewRect.colorFormat = newPreviewFormat;
int curPreviewW, curPreviewH;
m_secCamera->getPreviewSize(&curPreviewW, &curPreviewH);
int curPreviewFormat = m_secCamera->getPreviewFormat();
if (curPreviewW != newPreviewW ||
curPreviewH != newPreviewH ||
curPreviewFormat != newPreviewFormat) {
if ( m_secCamera->setPreviewSize(newPreviewW, newPreviewH) == false
|| m_secCamera->setPreviewFormat(newPreviewFormat) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setPreviewSize(width(%d), height(%d), format(%d))",
__func__, newPreviewW, newPreviewH, newPreviewFormat);
ret = UNKNOWN_ERROR;
} else {
if (m_previewWindow) {
if (m_previewRunning == true && m_previewStartDeferred == false) {
ALOGE("ERR(%s):preview is running, cannot change size and format!", __func__);
ret = INVALID_OPERATION;
}
ALOGV("DEBUG(%s):m_previewWindow (%p) set_buffers_geometry", __func__, m_previewWindow);
ALOGV("DEBUG(%s):m_previewWindow->set_buffers_geometry (%p)", __func__,
m_previewWindow->set_buffers_geometry);
m_previewWindow->set_buffers_geometry(m_previewWindow,
newPreviewW, newPreviewH,
newPreviewFormat);
ALOGV("DEBUG(%s):DONE m_previewWindow (%p) set_buffers_geometry", __func__, m_previewWindow);
}
m_params.setPreviewSize(newPreviewW, newPreviewH);
m_params.setPreviewFormat(strNewPreviewFormat);
}
}
else {
ALOGV("DEBUG(%s):preview size and format has not changed", __func__);
}
} else {
ALOGE("ERR(%s):Invalid preview size(%dx%d)", __func__, newPreviewW, newPreviewH);
ret = INVALID_OPERATION;
}
int newPictureW = 0;
int newPictureH = 0;
params.getPictureSize(&newPictureW, &newPictureH);
ALOGV("DEBUG(%s):newPictureW x newPictureH = %dx%d", __func__, newPictureW, newPictureH);
if (0 < newPictureW && 0 < newPictureH) {
int orgPictureW, orgPictureH = 0;
m_secCamera->getPictureSize(&orgPictureW, &orgPictureH);
if (m_secCamera->setPictureSize(newPictureW, newPictureH) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setPictureSize(width(%d), height(%d))",
__func__, newPictureW, newPictureH);
ret = UNKNOWN_ERROR;
} else {
int tempW, tempH = 0;
m_secCamera->getPictureSize(&tempW, &tempH);
if (tempW != orgPictureW || tempH != orgPictureH) {
if (m_pictureRunning == true) {
if (m_stopPictureInternal() == false)
ALOGE("ERR(%s):m_stopPictureInternal() fail", __func__);
if (m_startPictureInternal() == false)
ALOGE("ERR(%s):m_startPictureInternal() fail", __func__);
}
}
m_orgPictureRect.w = newPictureW;
m_orgPictureRect.h = newPictureH;
m_params.setPictureSize(newPictureW, newPictureH);
}
}
// picture format
const char *newPictureFormat = params.getPictureFormat();
ALOGV("DEBUG(%s):newPictureFormat %s", __func__, newPictureFormat);
if (newPictureFormat != NULL) {
int value = 0;
if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_RGB565))
value = V4L2_PIX_FMT_RGB565;
else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_RGBA8888))
value = V4L2_PIX_FMT_RGB32;
else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_YUV420SP))
value = V4L2_PIX_FMT_NV21;
else if (!strcmp(newPictureFormat, "yuv420sp_custom"))
value = V4L2_PIX_FMT_NV12T;
else if (!strcmp(newPictureFormat, "yuv420p"))
value = V4L2_PIX_FMT_YUV420;
else if (!strcmp(newPictureFormat, "yuv422i"))
value = V4L2_PIX_FMT_YUYV;
else if (!strcmp(newPictureFormat, "uyv422i_custom")) //Zero copy UYVY format
value = V4L2_PIX_FMT_UYVY;
else if (!strcmp(newPictureFormat, "uyv422i")) //Non-zero copy UYVY format
value = V4L2_PIX_FMT_UYVY;
else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_JPEG))
value = V4L2_PIX_FMT_YUYV;
else if (!strcmp(newPictureFormat, "yuv422p"))
value = V4L2_PIX_FMT_YUV422P;
else
value = V4L2_PIX_FMT_NV21; //for 3rd party
if (value != m_secCamera->getPictureFormat()) {
if (m_secCamera->setPictureFormat(value) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setPictureFormat(format(%d))", __func__, value);
ret = UNKNOWN_ERROR;
} else {
m_orgPictureRect.colorFormat = value;
m_params.setPictureFormat(newPictureFormat);
}
}
}
// JPEG image quality
int newJpegQuality = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
ALOGV("DEBUG(%s):newJpegQuality %d", __func__, newJpegQuality);
// we ignore bad values
if (newJpegQuality >=1 && newJpegQuality <= 100) {
if (m_secCamera->setJpegQuality(newJpegQuality) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setJpegQuality(quality(%d))", __func__, newJpegQuality);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_JPEG_QUALITY, newJpegQuality);
}
}
// JPEG thumbnail size
int newJpegThumbnailW = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
int newJpegThumbnailH = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
if (0 <= newJpegThumbnailW && 0 <= newJpegThumbnailH) {
if (m_secCamera->setJpegThumbnailSize(newJpegThumbnailW, newJpegThumbnailH) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setJpegThumbnailSize(width(%d), height(%d))", __func__, newJpegThumbnailW, newJpegThumbnailH);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, newJpegThumbnailW);
m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, newJpegThumbnailH);
}
}
// JPEG thumbnail quality
int newJpegThumbnailQuality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
ALOGV("DEBUG(%s):newJpegThumbnailQuality %d", __func__, newJpegThumbnailQuality);
// we ignore bad values
if (newJpegThumbnailQuality >=1 && newJpegThumbnailQuality <= 100) {
if (m_secCamera->setJpegThumbnailQuality(newJpegThumbnailQuality) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setJpegThumbnailQuality(quality(%d))",
__func__, newJpegThumbnailQuality);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, newJpegThumbnailQuality);
}
}
// Video size
int newVideoW = 0;
int newVideoH = 0;
params.getVideoSize(&newVideoW, &newVideoH);
ALOGV("DEBUG(%s):newVideoW (%d) newVideoH (%d)", __func__, newVideoW, newVideoH);
if (0 < newVideoW && 0 < newVideoH && m_videoStart == false) {
m_orgVideoRect.w = newVideoW;
m_orgVideoRect.h = newVideoH;
if (m_secCamera->setVideoSize(newVideoW, newVideoH) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setVideoSize(width(%d), height(%d))",
__func__, newVideoW, newVideoH);
ret = UNKNOWN_ERROR;
}
m_params.setVideoSize(newVideoW, newVideoH);
}
// video stabilization
const char *newVideoStabilization = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
bool currVideoStabilization = m_secCamera->getVideoStabilization();
ALOGV("DEBUG(%s):newVideoStabilization %s", __func__, newVideoStabilization);
if (newVideoStabilization != NULL) {
bool toggle = false;
if (!strcmp(newVideoStabilization, "true"))
toggle = true;
if ( currVideoStabilization != toggle) {
if (m_secCamera->setVideoStabilization(toggle) == false) {
ALOGE("ERR(%s):setVideoStabilization() fail", __func__);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_VIDEO_STABILIZATION, newVideoStabilization);
}
}
}
// 3dnr
const char *new3dnr = params.get("3dnr");
ALOGV("DEBUG(%s):new3drn %s", __func__, new3dnr);
if (new3dnr != NULL) {
bool toggle = false;
if (!strcmp(new3dnr, "true"))
toggle = true;
if (m_secCamera->set3DNR(toggle) == false) {
ALOGE("ERR(%s):set3DNR() fail", __func__);
ret = UNKNOWN_ERROR;
} else {
m_params.set("3dnr", new3dnr);
}
}
// odc
const char *newOdc = params.get("odc");
ALOGV("DEBUG(%s):newOdc %s", __func__, new3dnr);
if (newOdc != NULL) {
bool toggle = false;
if (!strcmp(newOdc, "true"))
toggle = true;
if (m_secCamera->setODC(toggle) == false) {
ALOGE("ERR(%s):setODC() fail", __func__);
ret = UNKNOWN_ERROR;
} else {
m_params.set("odc", newOdc);
}
}
// frame rate
int newFrameRate = params.getPreviewFrameRate();
ALOGV("DEBUG(%s):newFrameRate %d", __func__, newFrameRate);
// ignore any fps request; we determine fps automatically based
// on the scene mode. don't return an error because that causes a CTS failure.
if (newFrameRate != m_params.getPreviewFrameRate()) {
if (m_secCamera->setPreviewFrameRate(newFrameRate) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setPreviewFrameRate(%d)", __func__, newFrameRate);
ret = UNKNOWN_ERROR;
} else {
m_params.setPreviewFrameRate(newFrameRate);
}
}
// zoom
int newZoom = params.getInt(CameraParameters::KEY_ZOOM);
ALOGV("DEBUG(%s):newZoom %d", __func__, newZoom);
if (0 <= newZoom) {
if (m_secCamera->setZoom(newZoom) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setZoom(newZoom(%d))", __func__, newZoom);
ret = UNKNOWN_ERROR;
}
else {
m_params.set(CameraParameters::KEY_ZOOM, newZoom);
}
}
// rotation
int newRotation = params.getInt(CameraParameters::KEY_ROTATION);
ALOGV("DEBUG(%s):newRotation %d", __func__, newRotation);
if (0 <= newRotation) {
ALOGV("DEBUG(%s):set orientation:%d", __func__, newRotation);
if (m_secCamera->setRotation(newRotation) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setRotation(%d)", __func__, newRotation);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_ROTATION, newRotation);
}
}
// auto exposure lock
const char *newAutoExposureLock = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
if (newAutoExposureLock != NULL) {
bool toggle = false;
if (!strcmp(newAutoExposureLock, "true"))
toggle = true;
if (m_secCamera->setAutoExposureLock(toggle) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setAutoExposureLock()", __func__);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, newAutoExposureLock);
}
}
// exposure
int minExposureCompensation = params.getInt(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION);
int maxExposureCompensation = params.getInt(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION);
int newExposureCompensation = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
ALOGV("DEBUG(%s):newExposureCompensation %d", __func__, newExposureCompensation);
if ((minExposureCompensation <= newExposureCompensation) &&
(newExposureCompensation <= maxExposureCompensation)) {
if (m_secCamera->setExposureCompensation(newExposureCompensation) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setExposureCompensation(exposure(%d))", __func__, newExposureCompensation);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, newExposureCompensation);
}
}
// auto white balance lock
const char *newAutoWhitebalanceLock = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
if (newAutoWhitebalanceLock != NULL) {
bool toggle = false;
if (!strcmp(newAutoWhitebalanceLock, "true"))
toggle = true;
if (m_secCamera->setAutoWhiteBalanceLock(toggle) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setAutoWhiteBalanceLock()", __func__);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, newAutoWhitebalanceLock);
}
}
// white balance
const char *newWhiteBalance = params.get(CameraParameters::KEY_WHITE_BALANCE);
ALOGV("DEBUG(%s):newWhiteBalance %s", __func__, newWhiteBalance);
if (newWhiteBalance != NULL) {
int value = -1;
if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_AUTO))
value = ExynosCamera::WHITE_BALANCE_AUTO;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_INCANDESCENT))
value = ExynosCamera::WHITE_BALANCE_INCANDESCENT;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_FLUORESCENT))
value = ExynosCamera::WHITE_BALANCE_FLUORESCENT;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT))
value = ExynosCamera::WHITE_BALANCE_WARM_FLUORESCENT;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_DAYLIGHT))
value = ExynosCamera::WHITE_BALANCE_DAYLIGHT;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT))
value = ExynosCamera::WHITE_BALANCE_CLOUDY_DAYLIGHT;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_TWILIGHT))
value = ExynosCamera::WHITE_BALANCE_TWILIGHT;
else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_SHADE))
value = ExynosCamera::WHITE_BALANCE_SHADE;
else {
ALOGE("ERR(%s):Invalid white balance(%s)", __func__, newWhiteBalance); //twilight, shade, warm_flourescent
ret = UNKNOWN_ERROR;
}
if (0 <= value) {
if (m_secCamera->setWhiteBalance(value) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setWhiteBalance(white(%d))", __func__, value);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_WHITE_BALANCE, newWhiteBalance);
}
}
}
// Metering
// This is an additional API (not a Google API),
// but it must be set before KEY_METERING_AREAS below.
const char *strNewMetering = params.get("metering");
ALOGV("DEBUG(%s):strNewMetering %s", __func__, strNewMetering);
if (strNewMetering != NULL) {
int newMetering = -1;
if (!strcmp(strNewMetering, "average"))
newMetering = ExynosCamera::METERING_MODE_AVERAGE;
else if (!strcmp(strNewMetering, "center"))
newMetering = ExynosCamera::METERING_MODE_CENTER;
else if (!strcmp(strNewMetering, "matrix"))
newMetering = ExynosCamera::METERING_MODE_MATRIX;
else if (!strcmp(strNewMetering, "spot"))
newMetering = ExynosCamera::METERING_MODE_SPOT;
else {
ALOGE("ERR(%s):Invalid metering newMetering(%s)", __func__, strNewMetering);
ret = UNKNOWN_ERROR;
}
if (0 <= newMetering) {
if (m_secCamera->setMeteringMode(newMetering) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setMeteringMode(%d)", __func__, newMetering);
ret = UNKNOWN_ERROR;
} else {
m_params.set("metering", strNewMetering);
}
}
}
// metering areas
const char *newMeteringAreas = params.get(CameraParameters::KEY_METERING_AREAS);
int maxNumMeteringAreas = m_secCamera->getMaxNumMeteringAreas();
if (newMeteringAreas != NULL && maxNumMeteringAreas != 0) {
// ex : (-10,-10,0,0,300),(0,0,10,10,700)
ExynosRect2 *rect2s = new ExynosRect2[maxNumMeteringAreas];
int *weights = new int[maxNumMeteringAreas];
int validMeteringAreas = m_bracketsStr2Ints((char *)newMeteringAreas, maxNumMeteringAreas, rect2s, weights);
if (0 < validMeteringAreas) {
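// Metering areas arrive in the Android (-1000,-1000)..(1000,1000) coordinate
// space; the +1000 offset below shifts them to 0..2000 before
// m_calibratePosition (presumably) rescales them to preview pixel coordinates.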
for (int i = 0; i < validMeteringAreas; i++) {
rect2s[i].x1 = m_calibratePosition(2000, newPreviewW, rect2s[i].x1 + 1000);
rect2s[i].y1 = m_calibratePosition(2000, newPreviewH, rect2s[i].y1 + 1000);
rect2s[i].x2 = m_calibratePosition(2000, newPreviewW, rect2s[i].x2 + 1000);
rect2s[i].y2 = m_calibratePosition(2000, newPreviewH, rect2s[i].y2 + 1000);
}
if (m_secCamera->setMeteringAreas(validMeteringAreas, rect2s, weights) == false) {
ALOGE("ERR(%s):setMeteringAreas(%s) fail", __func__, newMeteringAreas);
ret = UNKNOWN_ERROR;
}
else {
m_params.set(CameraParameters::KEY_METERING_AREAS, newMeteringAreas);
}
}
delete [] rect2s;
delete [] weights;
}
// anti banding
const char *newAntibanding = params.get(CameraParameters::KEY_ANTIBANDING);
ALOGV("DEBUG(%s):newAntibanding %s", __func__, newAntibanding);
if (newAntibanding != NULL) {
int value = -1;
if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_AUTO))
value = ExynosCamera::ANTIBANDING_AUTO;
else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_50HZ))
value = ExynosCamera::ANTIBANDING_50HZ;
else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_60HZ))
value = ExynosCamera::ANTIBANDING_60HZ;
else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_OFF))
value = ExynosCamera::ANTIBANDING_OFF;
else {
ALOGE("ERR(%s):Invalid antibanding value(%s)", __func__, newAntibanding);
ret = UNKNOWN_ERROR;
}
if (0 <= value) {
if (m_secCamera->setAntibanding(value) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setAntibanding(%d)", __func__, value);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_ANTIBANDING, newAntibanding);
}
}
}
// scene mode
const char *strNewSceneMode = params.get(CameraParameters::KEY_SCENE_MODE);
const char *strCurSceneMode = m_params.get(CameraParameters::KEY_SCENE_MODE);
// fps range
int newMinFps = 0;
int newMaxFps = 0;
int curMinFps = 0;
int curMaxFps = 0;
params.getPreviewFpsRange(&newMinFps, &newMaxFps);
m_params.getPreviewFpsRange(&curMinFps, &curMaxFps);
/* our fps range is determined by the sensor, so reject any request
 * that isn't exactly what we're already at. this check is only
 * performed when the request changes the fps range without also
 * changing the scene mode.
 */
if (strNewSceneMode && strCurSceneMode) {
if (!strcmp(strNewSceneMode, strCurSceneMode)) {
if ((newMinFps != curMinFps) || (newMaxFps != curMaxFps)) {
ALOGW("%s : requested newMinFps = %d, newMaxFps = %d not allowed",
__func__, newMinFps, newMaxFps);
ALOGE("%s : curMinFps = %d, curMaxFps = %d",
__func__, curMinFps, curMaxFps);
ret = UNKNOWN_ERROR;
}
}
} else {
/* perform basic validation when the scene mode differs */
if ((newMaxFps < newMinFps) ||
(newMinFps < 0) || (newMaxFps < 0))
ret = UNKNOWN_ERROR;
}
if (strNewSceneMode != NULL) {
int newSceneMode = -1;
const char *strNewFlashMode = params.get(CameraParameters::KEY_FLASH_MODE);
const char *strNewFocusMode = params.get(CameraParameters::KEY_FOCUS_MODE);
// fps range is (15000,30000) by default.
m_params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000)");
m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "15000,30000");
if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_AUTO)) {
newSceneMode = ExynosCamera::SCENE_MODE_AUTO;
} else {
// defaults for non-auto scene modes
if (m_secCamera->getSupportedFocusModes() != 0)
strNewFocusMode = CameraParameters::FOCUS_MODE_AUTO;
strNewFlashMode = CameraParameters::FLASH_MODE_OFF;
if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_ACTION)) {
newSceneMode = ExynosCamera::SCENE_MODE_ACTION;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PORTRAIT)) {
newSceneMode = ExynosCamera::SCENE_MODE_PORTRAIT;
strNewFlashMode = CameraParameters::FLASH_MODE_AUTO;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_LANDSCAPE)) {
newSceneMode = ExynosCamera::SCENE_MODE_LANDSCAPE;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT)) {
newSceneMode = ExynosCamera::SCENE_MODE_NIGHT;
m_params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(4000,30000)");
m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "4000,30000");
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT_PORTRAIT)) {
newSceneMode = ExynosCamera::SCENE_MODE_NIGHT_PORTRAIT;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_THEATRE)) {
newSceneMode = ExynosCamera::SCENE_MODE_THEATRE;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_BEACH)) {
newSceneMode = ExynosCamera::SCENE_MODE_BEACH;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SNOW)) {
newSceneMode = ExynosCamera::SCENE_MODE_SNOW;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SUNSET)) {
newSceneMode = ExynosCamera::SCENE_MODE_SUNSET;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_STEADYPHOTO)) {
newSceneMode = ExynosCamera::SCENE_MODE_STEADYPHOTO;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_FIREWORKS)) {
newSceneMode = ExynosCamera::SCENE_MODE_FIREWORKS;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SPORTS)) {
newSceneMode = ExynosCamera::SCENE_MODE_SPORTS;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PARTY)) {
newSceneMode = ExynosCamera::SCENE_MODE_PARTY;
strNewFlashMode = CameraParameters::FLASH_MODE_AUTO;
} else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_CANDLELIGHT)) {
newSceneMode = ExynosCamera::SCENE_MODE_CANDLELIGHT;
} else {
ALOGE("ERR(%s):unmatched scene_mode(%s)",
__func__, strNewSceneMode); //action, night-portrait, theatre, steadyphoto
ret = UNKNOWN_ERROR;
}
}
// focus mode
if (strNewFocusMode != NULL) {
int newFocusMode = -1;
if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_AUTO)) {
newFocusMode = ExynosCamera::FOCUS_MODE_AUTO;
m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR);
} else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_INFINITY)) {
newFocusMode = ExynosCamera::FOCUS_MODE_INFINITY;
m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR);
} else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_MACRO)) {
newFocusMode = ExynosCamera::FOCUS_MODE_MACRO;
m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR);
} else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_FIXED)) {
newFocusMode = ExynosCamera::FOCUS_MODE_FIXED;
} else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_EDOF)) {
newFocusMode = ExynosCamera::FOCUS_MODE_EDOF;
} else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
newFocusMode = ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO;
} else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
newFocusMode = ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE;
} else {
ALOGE("ERR(%s):unmatched focus_mode(%s)", __func__, strNewFocusMode);
ret = UNKNOWN_ERROR;
}
if (0 <= newFocusMode) {
if (m_secCamera->setFocusMode(newFocusMode) == false) {
ALOGE("ERR(%s):m_secCamera->setFocusMode(%d) fail", __func__, newFocusMode);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_FOCUS_MODE, strNewFocusMode);
}
}
}
// flash mode
if (strNewFlashMode != NULL) {
int newFlashMode = -1;
if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_OFF))
newFlashMode = ExynosCamera::FLASH_MODE_OFF;
else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_AUTO))
newFlashMode = ExynosCamera::FLASH_MODE_AUTO;
else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_ON))
newFlashMode = ExynosCamera::FLASH_MODE_ON;
else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_RED_EYE))
newFlashMode = ExynosCamera::FLASH_MODE_RED_EYE;
else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_TORCH))
newFlashMode = ExynosCamera::FLASH_MODE_TORCH;
else {
ALOGE("ERR(%s):unmatched flash_mode(%s)", __func__, strNewFlashMode); //red-eye
ret = UNKNOWN_ERROR;
}
if (0 <= newFlashMode) {
if (m_secCamera->setFlashMode(newFlashMode) == false) {
ALOGE("ERR(%s):m_secCamera->setFlashMode(%d) fail", __func__, newFlashMode);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_FLASH_MODE, strNewFlashMode);
}
}
}
// scene mode
if (0 <= newSceneMode) {
if (m_secCamera->setSceneMode(newSceneMode) == false) {
ALOGE("ERR(%s):m_secCamera->setSceneMode(%d) fail", __func__, newSceneMode);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_SCENE_MODE, strNewSceneMode);
}
}
}
// focus areas
const char *newFocusAreas = params.get(CameraParameters::KEY_FOCUS_AREAS);
int maxNumFocusAreas = m_secCamera->getMaxNumFocusAreas();
if (newFocusAreas != NULL && maxNumFocusAreas != 0) {
int curFocusMode = m_secCamera->getFocusMode();
// In CameraParameters.h
// Focus area only has effect if the cur focus mode is FOCUS_MODE_AUTO,
// FOCUS_MODE_MACRO, FOCUS_MODE_CONTINUOUS_VIDEO, or
// FOCUS_MODE_CONTINUOUS_PICTURE.
if ( curFocusMode & ExynosCamera::FOCUS_MODE_AUTO
|| curFocusMode & ExynosCamera::FOCUS_MODE_MACRO
|| curFocusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO
|| curFocusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE) {
// ex : (-10,-10,0,0,300),(0,0,10,10,700)
ExynosRect2 *rect2s = new ExynosRect2[maxNumFocusAreas];
int *weights = new int[maxNumFocusAreas];
int validFocusedAreas = m_bracketsStr2Ints((char *)newFocusAreas, maxNumFocusAreas, rect2s, weights);
if (0 < validFocusedAreas) {
// CameraParameters.h
// A special case of single focus area (0,0,0,0,0) means driver to decide
// the focus area. For example, the driver may use more signals to decide
// focus areas and change them dynamically. Apps can set (0,0,0,0,0) if they
// want the driver to decide focus areas.
if ( validFocusedAreas == 1
&& rect2s[0].x1 == 0 && rect2s[0].y1 == 0 && rect2s[0].x2 == 0 && rect2s[0].y2 == 0) {
rect2s[0].x1 = 0;
rect2s[0].y1 = 0;
rect2s[0].x2 = newPreviewW;
rect2s[0].y2 = newPreviewH;
} else {
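// Focus areas also use the (-1000,-1000)..(1000,1000) space;
// (x + 1000) * 1023 / 2000 remaps each coordinate into a 0..1023 range
// (e.g. -1000 -> 0, 1000 -> 1023), presumably what the driver expects.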
for (int i = 0; i < validFocusedAreas; i++) {
rect2s[i].x1 = (rect2s[i].x1 + 1000) * 1023 / 2000;
rect2s[i].y1 = (rect2s[i].y1 + 1000) * 1023 / 2000;
rect2s[i].x2 = (rect2s[i].x2 + 1000) * 1023 / 2000;
rect2s[i].y2 = (rect2s[i].y2 + 1000) * 1023 / 2000;
}
if (m_secCamera->setFocusAreas(validFocusedAreas, rect2s, weights) == false) {
ALOGE("ERR(%s):setFocusAreas(%s) fail", __func__, newFocusAreas);
ret = UNKNOWN_ERROR;
} else {
m_params.set(CameraParameters::KEY_FOCUS_AREAS, newFocusAreas);
}
}
}
delete [] rect2s;
delete [] weights;
}
}
// image effect
const char *strNewEffect = params.get(CameraParameters::KEY_EFFECT);
if (strNewEffect != NULL) {
int newEffect = -1;
if (!strcmp(strNewEffect, CameraParameters::EFFECT_NONE)) {
newEffect = ExynosCamera::EFFECT_NONE;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_MONO)) {
newEffect = ExynosCamera::EFFECT_MONO;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_NEGATIVE)) {
newEffect = ExynosCamera::EFFECT_NEGATIVE;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_SOLARIZE)) {
newEffect = ExynosCamera::EFFECT_SOLARIZE;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_SEPIA)) {
newEffect = ExynosCamera::EFFECT_SEPIA;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_POSTERIZE)) {
newEffect = ExynosCamera::EFFECT_POSTERIZE;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_WHITEBOARD)) {
newEffect = ExynosCamera::EFFECT_WHITEBOARD;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_BLACKBOARD)) {
newEffect = ExynosCamera::EFFECT_BLACKBOARD;
} else if (!strcmp(strNewEffect, CameraParameters::EFFECT_AQUA)) {
newEffect = ExynosCamera::EFFECT_AQUA;
} else {
ALOGE("ERR(%s):Invalid effect(%s)", __func__, strNewEffect);
ret = UNKNOWN_ERROR;
}
if (0 <= newEffect) {
if (m_secCamera->setColorEffect(newEffect) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setColorEffect(effect(%d))", __func__, newEffect);
ret = UNKNOWN_ERROR;
} else {
const char *oldStrEffect = m_params.get(CameraParameters::KEY_EFFECT);
if (oldStrEffect) {
if (strcmp(oldStrEffect, strNewEffect)) {
m_setSkipFrame(EFFECT_SKIP_FRAME);
}
}
m_params.set(CameraParameters::KEY_EFFECT, strNewEffect);
}
}
}
// gps altitude
const char *strNewGpsAltitude = params.get(CameraParameters::KEY_GPS_ALTITUDE);
if (m_secCamera->setGpsAltitude(strNewGpsAltitude) == false) {
ALOGE("ERR(%s):m_secCamera->setGpsAltitude(%s) fail", __func__, strNewGpsAltitude);
ret = UNKNOWN_ERROR;
} else {
if (strNewGpsAltitude)
m_params.set(CameraParameters::KEY_GPS_ALTITUDE, strNewGpsAltitude);
else
m_params.remove(CameraParameters::KEY_GPS_ALTITUDE);
}
// gps latitude
const char *strNewGpsLatitude = params.get(CameraParameters::KEY_GPS_LATITUDE);
if (m_secCamera->setGpsLatitude(strNewGpsLatitude) == false) {
ALOGE("ERR(%s):m_secCamera->setGpsLatitude(%s) fail", __func__, strNewGpsLatitude);
ret = UNKNOWN_ERROR;
} else {
if (strNewGpsLatitude)
m_params.set(CameraParameters::KEY_GPS_LATITUDE, strNewGpsLatitude);
else
m_params.remove(CameraParameters::KEY_GPS_LATITUDE);
}
// gps longitude
const char *strNewGpsLongtitude = params.get(CameraParameters::KEY_GPS_LONGITUDE);
if (m_secCamera->setGpsLongitude(strNewGpsLongtitude) == false) {
ALOGE("ERR(%s):m_secCamera->setGpsLongitude(%s) fail", __func__, strNewGpsLongtitude);
ret = UNKNOWN_ERROR;
} else {
if (strNewGpsLongtitude)
m_params.set(CameraParameters::KEY_GPS_LONGITUDE, strNewGpsLongtitude);
else
m_params.remove(CameraParameters::KEY_GPS_LONGITUDE);
}
// gps processing method
const char *strNewGpsProcessingMethod = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
if (m_secCamera->setGpsProcessingMethod(strNewGpsProcessingMethod) == false) {
ALOGE("ERR(%s):m_secCamera->setGpsProcessingMethod(%s) fail", __func__, strNewGpsProcessingMethod);
ret = UNKNOWN_ERROR;
} else {
if (strNewGpsProcessingMethod)
m_params.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, strNewGpsProcessingMethod);
else
m_params.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
}
// gps timestamp
const char *strNewGpsTimestamp = params.get(CameraParameters::KEY_GPS_TIMESTAMP);
if (m_secCamera->setGpsTimeStamp(strNewGpsTimestamp) == false) {
ALOGE("ERR(%s):m_secCamera->setGpsTimeStamp(%s) fail", __func__, strNewGpsTimestamp);
ret = UNKNOWN_ERROR;
} else {
if (strNewGpsTimestamp)
m_params.set(CameraParameters::KEY_GPS_TIMESTAMP, strNewGpsTimestamp);
else
m_params.remove(CameraParameters::KEY_GPS_TIMESTAMP);
}
///////////////////////////////////////////////////
// Additional API.
///////////////////////////////////////////////////
// brightness
int newBrightness = params.getInt("brightness");
int maxBrightness = params.getInt("brightness-max");
int minBrightness = params.getInt("brightness-min");
ALOGV("DEBUG(%s):newBrightness %d", __func__, newBrightness);
if ((minBrightness <= newBrightness) && (newBrightness <= maxBrightness)) {
if (m_secCamera->setBrightness(newBrightness) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setBrightness(%d)", __func__, newBrightness);
ret = UNKNOWN_ERROR;
} else {
m_params.set("brightness", newBrightness);
}
}
// saturation
int newSaturation = params.getInt("saturation");
int maxSaturation = params.getInt("saturation-max");
int minSaturation = params.getInt("saturation-min");
ALOGV("DEBUG(%s):newSaturation %d", __func__, newSaturation);
if ((minSaturation <= newSaturation) && (newSaturation <= maxSaturation)) {
if (m_secCamera->setSaturation(newSaturation) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setSaturation(%d)", __func__, newSaturation);
ret = UNKNOWN_ERROR;
} else {
m_params.set("saturation", newSaturation);
}
}
// sharpness
int newSharpness = params.getInt("sharpness");
int maxSharpness = params.getInt("sharpness-max");
int minSharpness = params.getInt("sharpness-min");
ALOGV("DEBUG(%s):newSharpness %d", __func__, newSharpness);
if ((minSharpness <= newSharpness) && (newSharpness <= maxSharpness)) {
if (m_secCamera->setSharpness(newSharpness) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setSharpness(%d)", __func__, newSharpness);
ret = UNKNOWN_ERROR;
} else {
m_params.set("sharpness", newSharpness);
}
}
// hue
int newHue = params.getInt("hue");
int maxHue = params.getInt("hue-max");
int minHue = params.getInt("hue-min");
ALOGV("DEBUG(%s):newHue %d", __func__, newHue);
if ((minHue <= newHue) && (maxHue >= newHue)) {
if (m_secCamera->setHue(newHue) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setHue(hue(%d))", __func__, newHue);
ret = UNKNOWN_ERROR;
} else {
m_params.set("hue", newHue);
}
}
// ISO
const char *strNewISO = params.get("iso");
ALOGV("DEBUG(%s):strNewISO %s", __func__, strNewISO);
if (strNewISO != NULL) {
int newISO = -1;
if (!strcmp(strNewISO, "auto"))
newISO = 0;
else {
newISO = (int)atoi(strNewISO);
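// atoi() returns 0 for non-numeric strings, so the newISO == 0 check below
// doubles as the "could not parse" check (a literal "0" is rejected the same way).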
if (newISO == 0) {
ALOGE("ERR(%s):Invalid iso value(%s)", __func__, strNewISO);
ret = UNKNOWN_ERROR;
}
}
if (0 <= newISO) {
if (m_secCamera->setISO(newISO) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setISO(iso(%d))", __func__, newISO);
ret = UNKNOWN_ERROR;
} else {
m_params.set("iso", strNewISO);
}
}
}
//contrast
const char *strNewContrast = params.get("contrast");
ALOGV("DEBUG(%s):strNewContrast %s", __func__, strNewContrast);
if (strNewContrast != NULL) {
int newContrast = -1;
if (!strcmp(strNewContrast, "auto"))
newContrast = ExynosCamera::CONTRAST_AUTO;
else if (!strcmp(strNewContrast, "-2"))
newContrast = ExynosCamera::CONTRAST_MINUS_2;
else if (!strcmp(strNewContrast, "-1"))
newContrast = ExynosCamera::CONTRAST_MINUS_1;
else if (!strcmp(strNewContrast, "0"))
newContrast = ExynosCamera::CONTRAST_DEFAULT;
else if (!strcmp(strNewContrast, "1"))
newContrast = ExynosCamera::CONTRAST_PLUS_1;
else if (!strcmp(strNewContrast, "2"))
newContrast = ExynosCamera::CONTRAST_PLUS_2;
else {
ALOGE("ERR(%s):Invalid contrast value(%s)", __func__, strNewContrast);
ret = UNKNOWN_ERROR;
}
if (0 <= newContrast) {
if (m_secCamera->setContrast(newContrast) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setContrast(contrast(%d))", __func__, newContrast);
ret = UNKNOWN_ERROR;
} else {
m_params.set("contrast", strNewContrast);
}
}
}
//WDR
int newWdr = params.getInt("wdr");
ALOGV("DEBUG(%s):newWdr %d", __func__, newWdr);
if (0 <= newWdr) {
if (m_secCamera->setWDR(newWdr) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setWDR(%d)", __func__, newWdr);
ret = UNKNOWN_ERROR;
}
}
//anti shake
int newAntiShake = m_internalParams.getInt("anti-shake");
ALOGV("DEBUG(%s):newAntiShake %d", __func__, newAntiShake);
if (0 <= newAntiShake) {
bool toggle = false;
if (newAntiShake == 1)
toggle = true;
if (m_secCamera->setAntiShake(toggle) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setAntiShake(%d)", __func__, newAntiShake);
ret = UNKNOWN_ERROR;
}
}
//gamma
const char *strNewGamma = m_internalParams.get("video_recording_gamma");
ALOGV("DEBUG(%s):strNewGamma %s", __func__, strNewGamma);
if (strNewGamma != NULL) {
int newGamma = -1;
if (!strcmp(strNewGamma, "off"))
newGamma = 0;
else if (!strcmp(strNewGamma, "on"))
newGamma = 1;
else {
ALOGE("ERR(%s):unmatched gamma(%s)", __func__, strNewGamma);
ret = UNKNOWN_ERROR;
}
if (0 <= newGamma) {
bool toggle = false;
if (newGamma == 1)
toggle = true;
if (m_secCamera->setGamma(toggle) == false) {
ALOGE("ERR(%s):m_secCamera->setGamma(%s) fail", __func__, strNewGamma);
ret = UNKNOWN_ERROR;
}
}
}
//slow ae
const char *strNewSlowAe = m_internalParams.get("slow_ae");
ALOGV("DEBUG(%s):strNewSlowAe %s", __func__, strNewSlowAe);
if (strNewSlowAe != NULL) {
int newSlowAe = -1;
if (!strcmp(strNewSlowAe, "off"))
newSlowAe = 0;
else if (!strcmp(strNewSlowAe, "on"))
newSlowAe = 1;
else {
ALOGE("ERR(%s):unmatched slow_ae(%s)", __func__, strNewSlowAe);
ret = UNKNOWN_ERROR;
}
if (0 <= newSlowAe) {
bool toggle = false;
if (newSlowAe == 1)
toggle = true;
if (m_secCamera->setSlowAE(toggle) == false) {
ALOGE("ERR(%s):m_secCamera->setSlowAE(%d) fail", __func__, newSlowAe);
ret = UNKNOWN_ERROR;
}
}
}
// Shot mode
int newShotMode = m_internalParams.getInt("shot_mode");
ALOGV("DEBUG(%s):newShotMode %d", __func__, newShotMode);
if (0 <= newShotMode) {
if (m_secCamera->setShotMode(newShotMode) == false) {
ALOGE("ERR(%s):Fail on m_secCamera->setShotMode(%d)", __func__, newShotMode);
ret = UNKNOWN_ERROR;
}
} else {
newShotMode=0;
}
ALOGV("DEBUG(%s):return ret = %d", __func__, ret);
return ret;
}
CameraParameters ExynosCameraHWInterface::getParameters() const
{
ALOGV("DEBUG(%s):", __func__);
return m_params;
}
status_t ExynosCameraHWInterface::sendCommand(int32_t command, int32_t arg1, int32_t arg2)
{
switch (command) {
case CAMERA_CMD_START_FACE_DETECTION:
case CAMERA_CMD_STOP_FACE_DETECTION:
if (m_secCamera->getMaxNumDetectedFaces() == 0) {
ALOGE("ERR(%s):getMaxNumDetectedFaces == 0", __func__);
return BAD_VALUE;
}
if (arg1 == CAMERA_FACE_DETECTION_SW) {
ALOGE("ERR(%s):only support HW face dectection", __func__);
return BAD_VALUE;
}
if (command == CAMERA_CMD_START_FACE_DETECTION) {
if ( m_secCamera->flagStartFaceDetection() == false
&& m_secCamera->startFaceDetection() == false) {
ALOGE("ERR(%s):startFaceDetection() fail", __func__);
return BAD_VALUE;
}
} else { // if (command == CAMERA_CMD_STOP_FACE_DETECTION)
if ( m_secCamera->flagStartFaceDetection() == true
&& m_secCamera->stopFaceDetection() == false) {
ALOGE("ERR(%s):stopFaceDetection() fail", __func__);
return BAD_VALUE;
}
}
break;
default:
ALOGE("ERR(%s):unexpectect command(%d) fail", __func__, command);
return BAD_VALUE;
break;
}
return NO_ERROR;
}
void ExynosCameraHWInterface::release()
{
ALOGV("DEBUG(%s):", __func__);
/* shut down any threads we have that might be running. do it here
* instead of the destructor. we're guaranteed to be on another thread
* than the ones below. if we used the destructor, since the threads
* have a reference to this object, we could wind up trying to wait
* for ourselves to exit, which is a deadlock.
*/
if (m_videoThread != NULL) {
m_videoThread->requestExit();
m_exitVideoThread = true;
m_videoRunning = true; // let it run so it can exit
m_videoCondition.signal();
m_videoThread->requestExitAndWait();
m_videoThread.clear();
}
if (m_previewThread != NULL) {
/* this thread is normally already in its threadLoop but blocked
* on the condition variable or running. signal it so it wakes
* up and can exit.
*/
m_previewThread->requestExit();
m_exitPreviewThread = true;
m_previewRunning = true; // let it run so it can exit
m_previewCondition.signal();
m_previewThread->requestExitAndWait();
m_previewThread.clear();
}
if (m_autoFocusThread != NULL) {
/* this thread is normally already in its threadLoop but blocked
* on the condition variable. signal it so it wakes up and can exit.
*/
m_focusLock.lock();
m_autoFocusThread->requestExit();
m_exitAutoFocusThread = true;
m_focusCondition.signal();
m_focusLock.unlock();
m_autoFocusThread->requestExitAndWait();
m_autoFocusThread.clear();
}
if (m_pictureThread != NULL) {
m_pictureThread->requestExitAndWait();
m_pictureThread.clear();
}
for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
if (m_videoHeap[i]) {
m_videoHeap[i]->release(m_videoHeap[i]);
m_videoHeap[i] = 0;
}
if (m_resizedVideoHeap[i]) {
m_resizedVideoHeap[i]->release(m_resizedVideoHeap[i]);
m_resizedVideoHeap[i] = 0;
}
}
for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
if (m_previewHeap[i]) {
m_previewHeap[i]->release(m_previewHeap[i]);
m_previewHeap[i] = 0;
}
}
if (m_pictureRunning == true) {
if (m_stopPictureInternal() == false)
ALOGE("ERR(%s):m_stopPictureInternal() fail", __func__);
}
if (m_exynosVideoCSC)
csc_deinit(m_exynosVideoCSC);
m_exynosVideoCSC = NULL;
if (m_exynosPictureCSC)
csc_deinit(m_exynosPictureCSC);
m_exynosPictureCSC = NULL;
if (m_exynosPreviewCSC)
csc_deinit(m_exynosPreviewCSC);
m_exynosPreviewCSC = NULL;
/* close after all the heaps are cleared since those
* could have dup'd our file descriptor.
*/
if (m_secCamera->flagCreate() == true)
m_secCamera->destroy();
}
status_t ExynosCameraHWInterface::dump(int fd) const
{
const size_t SIZE = 256;
char buffer[SIZE];
String8 result;
const Vector<String16> args;
if (m_secCamera != 0) {
m_params.dump(fd, args);
m_internalParams.dump(fd, args);
snprintf(buffer, 255, " preview running(%s)\n", m_previewRunning?"true": "false");
result.append(buffer);
} else {
result.append("No camera client yet.\n");
}
write(fd, result.string(), result.size());
return NO_ERROR;
}
int ExynosCameraHWInterface::getCameraId() const
{
return m_secCamera->getCameraId();
}
void ExynosCameraHWInterface::m_initDefaultParameters(int cameraId)
{
if (m_secCamera == NULL) {
ALOGE("ERR(%s):m_secCamera object is NULL", __func__);
return;
}
CameraParameters p;
CameraParameters ip;
String8 parameterString;
char * cameraName;
cameraName = m_secCamera->getCameraName();
if (cameraName == NULL)
ALOGE("ERR(%s):getCameraName() fail", __func__);
/*
if (cameraId == ExynosCamera::CAMERA_ID_BACK) {
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
"3264x2448,2576x1948,1920x1080,1280x720,800x480,720x480,640x480,320x240,528x432,176x144");
p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
"3264x2448,1920x1080,1280x720,800x480,720x480,640x480");
p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
"1920x1080,1280x720,640x480,176x144");
} else {
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
"1392x1392,1280x720,640x480,352x288,320x240,176x144");
p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
"1392x1392,1280x960,640x480");
p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
"1280x720,640x480,176x144");
}
*/
char strBuf[256];
String8 listString;
// preview
int previewMaxW = 0;
int previewMaxH = 0;
m_secCamera->getSupportedPreviewSizes(&previewMaxW, &previewMaxH);
listString.setTo("");
if (m_getResolutionList(listString, strBuf, previewMaxW, previewMaxH) == false) {
ALOGE("ERR(%s):m_getResolutionList() fail", __func__);
previewMaxW = 640;
previewMaxH = 480;
listString = String8::format("%dx%d", previewMaxW, previewMaxH);
}
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, listString.string());
p.setPreviewSize(previewMaxW, previewMaxH);
p.getSupportedPreviewSizes(m_supportedPreviewSizes);
listString.setTo("");
listString = String8::format("%s,%s", CameraParameters::PIXEL_FORMAT_YUV420SP, CameraParameters::PIXEL_FORMAT_YUV420P);
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, listString);
p.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420P);
// video
int videoMaxW = 0;
int videoMaxH = 0;
m_secCamera->getSupportedVideoSizes(&videoMaxW, &videoMaxH);
listString.setTo("");
if (m_getResolutionList(listString, strBuf, videoMaxW, videoMaxH) == false) {
ALOGE("ERR(%s):m_getResolutionList() fail", __func__);
videoMaxW = 640;
videoMaxH = 480;
listString = String8::format("%dx%d", videoMaxW, videoMaxH);
}
p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, listString.string());
p.setVideoSize(videoMaxW, videoMaxH);
int preferredPreviewW = 0;
int preferredPreviewH = 0;
m_secCamera->getPreferredPreivewSizeForVideo(&preferredPreviewW, &preferredPreviewH);
listString.setTo("");
listString = String8::format("%dx%d", preferredPreviewW, preferredPreviewH);
p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, listString.string());
p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV420SP);
if (m_secCamera->isVideoSnapshotSupported() == true)
p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "true");
else
p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "false");
if (m_secCamera->isVideoStabilizationSupported() == true)
p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "true");
else
p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "false");
// picture
int pictureMaxW = 0;
int pictureMaxH = 0;
m_secCamera->getSupportedPictureSizes(&pictureMaxW, &pictureMaxH);
listString.setTo("");
if (m_getResolutionList(listString, strBuf, pictureMaxW, pictureMaxH) == false) {
ALOGE("ERR(%s):m_getResolutionList() fail", __func__);
pictureMaxW = 640;
pictureMaxH = 480;
listString = String8::format("%dx%d", pictureMaxW, pictureMaxH);
}
p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, listString.string());
p.setPictureSize(pictureMaxW, pictureMaxH);
p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
CameraParameters::PIXEL_FORMAT_JPEG);
p.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);
p.set(CameraParameters::KEY_JPEG_QUALITY, "100"); // maximum quality
// thumbnail
int thumbnailMaxW = 0;
int thumbnailMaxH = 0;
m_secCamera->getSupportedJpegThumbnailSizes(&thumbnailMaxW, &thumbnailMaxH);
listString = String8::format("%dx%d", thumbnailMaxW, thumbnailMaxH);
listString.append(",0x0");
p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, listString.string());
p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, thumbnailMaxW);
p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, thumbnailMaxH);
p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "100");
// exposure
p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, m_secCamera->getMinExposureCompensation());
p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, m_secCamera->getMaxExposureCompensation());
p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, m_secCamera->getExposureCompensation());
p.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, m_secCamera->getExposureCompensationStep());
if (m_secCamera->isAutoExposureLockSupported() == true)
p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true");
else
p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false");
// face detection
p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, m_secCamera->getMaxNumDetectedFaces());
p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, 0);
// focus mode
int focusMode = m_secCamera->getSupportedFocusModes();
parameterString.setTo("");
if (focusMode & ExynosCamera::FOCUS_MODE_AUTO) {
parameterString.append(CameraParameters::FOCUS_MODE_AUTO);
parameterString.append(",");
}
if (focusMode & ExynosCamera::FOCUS_MODE_INFINITY) {
parameterString.append(CameraParameters::FOCUS_MODE_INFINITY);
parameterString.append(",");
}
if (focusMode & ExynosCamera::FOCUS_MODE_MACRO) {
parameterString.append(CameraParameters::FOCUS_MODE_MACRO);
parameterString.append(",");
}
if (focusMode & ExynosCamera::FOCUS_MODE_FIXED) {
parameterString.append(CameraParameters::FOCUS_MODE_FIXED);
parameterString.append(",");
}
if (focusMode & ExynosCamera::FOCUS_MODE_EDOF) {
parameterString.append(CameraParameters::FOCUS_MODE_EDOF);
parameterString.append(",");
}
if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO) {
parameterString.append(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
parameterString.append(",");
}
if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE)
parameterString.append(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE);
p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES,
parameterString.string());
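// Choose the default focus mode by preference: continuous-picture first,
// then continuous-video, then auto; fall back to fixed when none of those
// are supported.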
if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE)
p.set(CameraParameters::KEY_FOCUS_MODE,
CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE);
else if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO)
p.set(CameraParameters::KEY_FOCUS_MODE,
CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO);
else if (focusMode & ExynosCamera::FOCUS_MODE_AUTO)
p.set(CameraParameters::KEY_FOCUS_MODE,
CameraParameters::FOCUS_MODE_AUTO);
else
p.set(CameraParameters::KEY_FOCUS_MODE,
CameraParameters::FOCUS_MODE_FIXED);
// HACK
if (cameraId == ExynosCamera::CAMERA_ID_BACK) {
p.set(CameraParameters::KEY_FOCUS_DISTANCES,
BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR);
p.set(CameraParameters::FOCUS_DISTANCE_INFINITY,
BACK_CAMERA_FOCUS_DISTANCE_INFINITY);
} else {
p.set(CameraParameters::KEY_FOCUS_DISTANCES,
FRONT_CAMERA_FOCUS_DISTANCES_STR);
p.set(CameraParameters::FOCUS_DISTANCE_INFINITY,
FRONT_CAMERA_FOCUS_DISTANCE_INFINITY);
}
if (focusMode & ExynosCamera::FOCUS_MODE_TOUCH)
p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, m_secCamera->getMaxNumFocusAreas());
// flash
int flashMode = m_secCamera->getSupportedFlashModes();
parameterString.setTo("");
if (flashMode & ExynosCamera::FLASH_MODE_OFF) {
parameterString.append(CameraParameters::FLASH_MODE_OFF);
parameterString.append(",");
}
if (flashMode & ExynosCamera::FLASH_MODE_AUTO) {
parameterString.append(CameraParameters::FLASH_MODE_AUTO);
parameterString.append(",");
}
if (flashMode & ExynosCamera::FLASH_MODE_ON) {
parameterString.append(CameraParameters::FLASH_MODE_ON);
parameterString.append(",");
}
if (flashMode & ExynosCamera::FLASH_MODE_RED_EYE) {
parameterString.append(CameraParameters::FLASH_MODE_RED_EYE);
parameterString.append(",");
}
if (flashMode & ExynosCamera::FLASH_MODE_TORCH)
parameterString.append(CameraParameters::FLASH_MODE_TORCH);
p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, parameterString.string());
p.set(CameraParameters::KEY_FLASH_MODE, CameraParameters::FLASH_MODE_OFF);
// scene mode
int sceneMode = m_secCamera->getSupportedSceneModes();
parameterString.setTo("");
if (sceneMode & ExynosCamera::SCENE_MODE_AUTO) {
parameterString.append(CameraParameters::SCENE_MODE_AUTO);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_ACTION) {
parameterString.append(CameraParameters::SCENE_MODE_ACTION);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_PORTRAIT) {
parameterString.append(CameraParameters::SCENE_MODE_PORTRAIT);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_LANDSCAPE) {
parameterString.append(CameraParameters::SCENE_MODE_LANDSCAPE);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_NIGHT) {
parameterString.append(CameraParameters::SCENE_MODE_NIGHT);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_NIGHT_PORTRAIT) {
parameterString.append(CameraParameters::SCENE_MODE_NIGHT_PORTRAIT);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_THEATRE) {
parameterString.append(CameraParameters::SCENE_MODE_THEATRE);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_BEACH) {
parameterString.append(CameraParameters::SCENE_MODE_BEACH);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_SNOW) {
parameterString.append(CameraParameters::SCENE_MODE_SNOW);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_SUNSET) {
parameterString.append(CameraParameters::SCENE_MODE_SUNSET);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_STEADYPHOTO) {
parameterString.append(CameraParameters::SCENE_MODE_STEADYPHOTO);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_FIREWORKS) {
parameterString.append(CameraParameters::SCENE_MODE_FIREWORKS);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_SPORTS) {
parameterString.append(CameraParameters::SCENE_MODE_SPORTS);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_PARTY) {
parameterString.append(CameraParameters::SCENE_MODE_PARTY);
parameterString.append(",");
}
if (sceneMode & ExynosCamera::SCENE_MODE_CANDLELIGHT)
parameterString.append(CameraParameters::SCENE_MODE_CANDLELIGHT);
p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES,
parameterString.string());
p.set(CameraParameters::KEY_SCENE_MODE,
CameraParameters::SCENE_MODE_AUTO);
// effect
int effect = m_secCamera->getSupportedColorEffects();
parameterString.setTo("");
if (effect & ExynosCamera::EFFECT_NONE) {
parameterString.append(CameraParameters::EFFECT_NONE);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_MONO) {
parameterString.append(CameraParameters::EFFECT_MONO);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_NEGATIVE) {
parameterString.append(CameraParameters::EFFECT_NEGATIVE);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_SOLARIZE) {
parameterString.append(CameraParameters::EFFECT_SOLARIZE);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_SEPIA) {
parameterString.append(CameraParameters::EFFECT_SEPIA);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_POSTERIZE) {
parameterString.append(CameraParameters::EFFECT_POSTERIZE);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_WHITEBOARD) {
parameterString.append(CameraParameters::EFFECT_WHITEBOARD);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_BLACKBOARD) {
parameterString.append(CameraParameters::EFFECT_BLACKBOARD);
parameterString.append(",");
}
if (effect & ExynosCamera::EFFECT_AQUA)
parameterString.append(CameraParameters::EFFECT_AQUA);
p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, parameterString.string());
p.set(CameraParameters::KEY_EFFECT, CameraParameters::EFFECT_NONE);
// white balance
int whiteBalance = m_secCamera->getSupportedWhiteBalance();
parameterString.setTo("");
if (whiteBalance & ExynosCamera::WHITE_BALANCE_AUTO) {
parameterString.append(CameraParameters::WHITE_BALANCE_AUTO);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_INCANDESCENT) {
parameterString.append(CameraParameters::WHITE_BALANCE_INCANDESCENT);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_FLUORESCENT) {
parameterString.append(CameraParameters::WHITE_BALANCE_FLUORESCENT);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_WARM_FLUORESCENT) {
parameterString.append(CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_DAYLIGHT) {
parameterString.append(CameraParameters::WHITE_BALANCE_DAYLIGHT);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_CLOUDY_DAYLIGHT) {
parameterString.append(CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_TWILIGHT) {
parameterString.append(CameraParameters::WHITE_BALANCE_TWILIGHT);
parameterString.append(",");
}
if (whiteBalance & ExynosCamera::WHITE_BALANCE_SHADE)
parameterString.append(CameraParameters::WHITE_BALANCE_SHADE);
p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE,
parameterString.string());
p.set(CameraParameters::KEY_WHITE_BALANCE, CameraParameters::WHITE_BALANCE_AUTO);
if (m_secCamera->isAutoWhiteBalanceLockSupported() == true)
p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true");
else
p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false");
// anti banding
int antiBanding = m_secCamera->getSupportedAntibanding();
parameterString.setTo("");
if (antiBanding & ExynosCamera::ANTIBANDING_AUTO) {
parameterString.append(CameraParameters::ANTIBANDING_AUTO);
parameterString.append(",");
}
if (antiBanding & ExynosCamera::ANTIBANDING_50HZ) {
parameterString.append(CameraParameters::ANTIBANDING_50HZ);
parameterString.append(",");
}
if (antiBanding & ExynosCamera::ANTIBANDING_60HZ) {
parameterString.append(CameraParameters::ANTIBANDING_60HZ);
parameterString.append(",");
}
if (antiBanding & ExynosCamera::ANTIBANDING_OFF)
parameterString.append(CameraParameters::ANTIBANDING_OFF);
p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING,
parameterString.string());
p.set(CameraParameters::KEY_ANTIBANDING, CameraParameters::ANTIBANDING_OFF);
// rotation
p.set(CameraParameters::KEY_ROTATION, 0);
// view angle
p.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, m_secCamera->getHorizontalViewAngle());
p.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, m_secCamera->getVerticalViewAngle());
// metering
if (0 < m_secCamera->getMaxNumMeteringAreas())
p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, m_secCamera->getMaxNumMeteringAreas());
// zoom
if (m_secCamera->isZoomSupported() == true) {
int maxZoom = m_secCamera->getMaxZoom();
if (0 < maxZoom) {
p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "true");
if (m_secCamera->isSmoothZoomSupported() == true)
p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
else
p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false");
p.set(CameraParameters::KEY_MAX_ZOOM, maxZoom);
p.set(CameraParameters::KEY_ZOOM, m_secCamera->getZoom());
int max_zoom_ratio = m_secCamera->getMaxZoomRatio();
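// KEY_ZOOM_RATIOS is a comma-separated list of ratios in percent
// (100 == 1.0x); build it from 100 up to the driver's maximum ratio,
// or advertise only 1.0x if the list cannot be generated.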
listString.setTo("");
if (m_getZoomRatioList(listString, strBuf, maxZoom, 100, max_zoom_ratio) == true)
p.set(CameraParameters::KEY_ZOOM_RATIOS, listString.string());
else
p.set(CameraParameters::KEY_ZOOM_RATIOS, "100");
} else {
p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false");
p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false");
}
} else {
p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false");
p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false");
}
// fps
int minPreviewFps, maxPreviewFps;
m_secCamera->getPreviewFpsRange(&minPreviewFps, &maxPreviewFps);
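// Build the supported frame-rate list as the minimum fps followed by
// 5-fps steps up to the maximum, e.g. a 15..30 range becomes "15,20,25,30".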
int baseFps = ((minPreviewFps + 5) / 5) * 5;
listString.setTo("");
snprintf(strBuf, 256, "%d", minPreviewFps);
listString.append(strBuf);
for (int i = baseFps; i <= maxPreviewFps; i += 5) {
snprintf(strBuf, 256, ",%d", i);
listString.append(strBuf);
}
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, listString.string());
p.setPreviewFrameRate(maxPreviewFps);
int minFpsRange = minPreviewFps * 1000; // 15 -> 15000
int maxFpsRange = maxPreviewFps * 1000; // 30 -> 30000
snprintf(strBuf, 256, "(%d,%d)", minFpsRange, maxFpsRange);
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, strBuf);
snprintf(strBuf, 256, "%d,%d", minFpsRange, maxFpsRange);
p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, strBuf);
//p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000)");
//p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "15000,30000")
// focal length
int num = 0;
int den = 0;
int precision = 0;
m_secCamera->getFocalLength(&num, &den);
switch (den) {
default:
case 1000:
precision = 3;
break;
case 100:
precision = 2;
break;
case 10:
precision = 1;
break;
case 1:
precision = 0;
break;
}
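// Print the focal length with just enough decimals for the reported
// denominator, e.g. num = 343, den = 100 -> "3.43".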
snprintf(strBuf, 256, "%.*f", precision, ((float)num / (float)den));
p.set(CameraParameters::KEY_FOCAL_LENGTH, strBuf);
//p.set(CameraParameters::KEY_FOCAL_LENGTH, "3.43");
//p.set(CameraParameters::KEY_FOCAL_LENGTH, "0.9");
// Additional params.
p.set("contrast", "auto");
p.set("iso", "auto");
p.set("wdr", 0);
p.set("metering", "center");
p.set("brightness", 0);
p.set("brightness-max", 2);
p.set("brightness-min", -2);
p.set("saturation", 0);
p.set("saturation-max", 2);
p.set("saturation-min", -2);
p.set("sharpness", 0);
p.set("sharpness-max", 2);
p.set("sharpness-min", -2);
p.set("hue", 0);
p.set("hue-max", 2);
p.set("hue-min", -2);
m_params = p;
m_internalParams = ip;
/* make sure m_secCamera has all the settings we do. applications
* aren't required to call setParameters themselves (only if they
* want to change something).
*/
setParameters(p);
m_secCamera->setPreviewFrameRate(maxPreviewFps);
}
bool ExynosCameraHWInterface::m_startPreviewInternal(void)
{
ALOGV("DEBUG(%s):", __func__);
int i;
int previewW, previewH, previewFormat, previewFramesize;
m_secCamera->getPreviewSize(&previewW, &previewH);
previewFormat = m_secCamera->getPreviewFormat();
// we will use previewFramesize for m_previewHeap[i]
previewFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat), m_orgPreviewRect.w, m_orgPreviewRect.h);
ExynosBuffer previewBuf;
void *virtAddr[3];
int fd[3];
for (i = 0; i < 3; i++) {
virtAddr[i] = NULL;
fd[i] = -1;
}
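// Dequeue and gralloc-lock every preview buffer up front so that their
// virtual addresses and plane fds can be registered with the driver via
// setPreviewBuf() before streaming starts.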
for (i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
m_avaliblePreviewBufHandle[i] = false;
if (m_previewWindow->dequeue_buffer(m_previewWindow, &m_previewBufHandle[i], &m_previewStride[i]) != 0) {
ALOGE("ERR(%s):Could not dequeue gralloc buffer[%d]!!", __func__, i);
continue;
} else {
if (m_previewWindow->lock_buffer(m_previewWindow, m_previewBufHandle[i]) != 0)
ALOGE("ERR(%s):Could not lock gralloc buffer[%d]!!", __func__, i);
}
if (m_flagGrallocLocked[i] == false) {
if (m_grallocHal->lock(m_grallocHal,
*m_previewBufHandle[i],
GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR,
0, 0, previewW, previewH, virtAddr) != 0) {
ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[i]) != 0)
ALOGE("ERR(%s):Could not cancel_buffer gralloc buffer[%d]!!", __func__, i);
continue;
}
const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*m_previewBufHandle[i]);
fd[0] = priv_handle->fd;
fd[1] = priv_handle->u_fd;
fd[2] = priv_handle->v_fd;
m_grallocVirtAddr[i] = virtAddr[0];
m_matchedGrallocIndex[i] = i;
m_flagGrallocLocked[i] = true;
}
m_getAlignedYUVSize(previewFormat, previewW, previewH, &previewBuf);
previewBuf.reserved.p = i;
previewBuf.virt.extP[0] = (char *)virtAddr[0];
previewBuf.virt.extP[1] = (char *)virtAddr[1];
previewBuf.virt.extP[2] = (char *)virtAddr[2];
previewBuf.fd.extFd[0] = fd[0];
previewBuf.fd.extFd[1] = fd[1];
previewBuf.fd.extFd[2] = fd[2];
m_secCamera->setPreviewBuf(&previewBuf);
if (m_previewHeap[i]) {
m_previewHeap[i]->release(m_previewHeap[i]);
m_previewHeap[i] = 0;
}
m_previewHeap[i] = m_getMemoryCb(-1, previewFramesize, 1, 0);
if (!m_previewHeap[i]) {
ALOGE("ERR(%s):m_getMemoryCb(m_previewHeap[%d], size(%d) fail", __func__, i, previewFramesize);
continue;
}
m_avaliblePreviewBufHandle[i] = true;
}
if (m_secCamera->startPreview() == false) {
ALOGE("ERR(%s):Fail on m_secCamera->startPreview()", __func__);
return false;
}
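// The preview window keeps a minimum number of buffers un-dequeued, so
// unlock and cancel m_minUndequeuedBufs of them back to the window; only
// the remaining buffers circulate through the driver.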
for (i = NUM_OF_PREVIEW_BUF - m_minUndequeuedBufs; i < NUM_OF_PREVIEW_BUF; i++) {
if (m_secCamera->getPreviewBuf(&previewBuf) == false) {
ALOGE("ERR(%s):getPreviewBuf() fail", __func__);
return false;
}
if (m_grallocHal && m_flagGrallocLocked[previewBuf.reserved.p] == true) {
m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[previewBuf.reserved.p]);
m_flagGrallocLocked[previewBuf.reserved.p] = false;
}
if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[previewBuf.reserved.p]) != 0)
ALOGE("ERR(%s):Could not cancel_buffer gralloc buffer[%d]!!", __func__, previewBuf.reserved.p);
m_avaliblePreviewBufHandle[previewBuf.reserved.p] = false;
}
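// Drop the first few frames after stream-on; the initial sensor/ISP
// output is typically not yet stabilized.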
m_setSkipFrame(INITIAL_SKIP_FRAME);
if (m_pictureRunning == false
&& m_startPictureInternal() == false)
ALOGE("ERR(%s):m_startPictureInternal() fail", __func__);
return true;
}
void ExynosCameraHWInterface::m_stopPreviewInternal(void)
{
ALOGV("DEBUG(%s):", __func__);
/* request that the preview thread stop. */
if (m_previewRunning == true) {
m_previewRunning = false;
if (m_previewStartDeferred == false) {
m_previewCondition.signal();
/* wait until preview thread is stopped */
m_previewStoppedCondition.wait(m_previewLock);
for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
if (m_previewBufHandle[i] != NULL) {
if (m_grallocHal && m_flagGrallocLocked[i] == true) {
m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[i]);
m_flagGrallocLocked[i] = false;
}
if (m_avaliblePreviewBufHandle[i] == true) {
if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[i]) != 0) {
ALOGE("ERR(%s):Fail to cancel buffer(%d)", __func__, i);
} else {
m_previewBufHandle[i] = NULL;
m_previewStride[i] = 0;
}
m_avaliblePreviewBufHandle[i] = false;
}
}
}
} else {
ALOGV("DEBUG(%s):preview running but deferred, doing nothing", __func__);
}
} else {
ALOGV("DEBUG(%s):preview not running, doing nothing", __func__);
}
}
bool ExynosCameraHWInterface::m_previewThreadFuncWrapper(void)
{
ALOGV("DEBUG(%s):starting", __func__);
while (1) {
m_previewLock.lock();
while (m_previewRunning == false) {
if ( m_secCamera->flagStartPreview() == true
&& m_secCamera->stopPreview() == false)
ALOGE("ERR(%s):Fail on m_secCamera->stopPreview()", __func__);
ALOGV("DEBUG(%s):calling m_secCamera->stopPreview() and waiting", __func__);
m_previewStoppedCondition.signal();
m_previewCondition.wait(m_previewLock);
ALOGV("DEBUG(%s):return from wait", __func__);
}
m_previewLock.unlock();
if (m_exitPreviewThread == true) {
if ( m_secCamera->flagStartPreview() == true
&& m_secCamera->stopPreview() == false)
ALOGE("ERR(%s):Fail on m_secCamera->stopPreview()", __func__);
return true;
}
m_previewThreadFunc();
}
}
bool ExynosCameraHWInterface::m_previewThreadFunc(void)
{
ExynosBuffer previewBuf, callbackBuf;
int stride;
int previewW, previewH;
bool doPutPreviewBuf = true;
if (m_secCamera->getPreviewBuf(&previewBuf) == false) {
ALOGE("ERR(%s):getPreviewBuf() fail", __func__);
return false;
}
#ifndef USE_3DNR_DMAOUT
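// Without 3DNR DMA-out, recording is fed from the preview path: remember
// the current preview buffer and wake the video thread so it can consume
// this frame.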
if (m_videoStart == true) {
copy_previewBuf = previewBuf;
m_videoRunning = true;
m_videoCondition.signal();
}
#endif
m_skipFrameLock.lock();
if (0 < m_skipFrame) {
m_skipFrame--;
m_skipFrameLock.unlock();
ALOGV("DEBUG(%s):skipping %d frame", __func__, previewBuf.reserved.p);
if ( doPutPreviewBuf == true
&& m_secCamera->putPreviewBuf(&previewBuf) == false) {
ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p);
return false;
}
return true;
}
m_skipFrameLock.unlock();
callbackBuf = previewBuf;
m_secCamera->getPreviewSize(&previewW, &previewH);
if (m_previewWindow && m_grallocHal && m_previewRunning == true) {
bool findGrallocBuf = false;
buffer_handle_t *bufHandle = NULL;
void *virtAddr[3];
int fd[3];
/* Unlock grallocHal buffer if locked */
if (m_flagGrallocLocked[previewBuf.reserved.p] == true) {
m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[previewBuf.reserved.p]);
m_flagGrallocLocked[previewBuf.reserved.p] = false;
} else {
// bufHandle has not been dequeued yet at this point, so lock the window
// buffer that carried the frame we just received instead.
if (m_previewWindow->lock_buffer(m_previewWindow, m_previewBufHandle[previewBuf.reserved.p]) != 0)
    ALOGE("ERR(%s):Could not lock gralloc buffer!!", __func__);
}
/* Enqueue the latest buffer */
if (m_avaliblePreviewBufHandle[previewBuf.reserved.p] == true) {
if (m_previewWindow->enqueue_buffer(m_previewWindow,
m_previewBufHandle[previewBuf.reserved.p]) != 0) {
ALOGE("ERR(%s):Could not enqueue gralloc buffer[%d]!!", __func__, previewBuf.reserved.p);
goto callbacks;
}
m_avaliblePreviewBufHandle[previewBuf.reserved.p] = false;
}
/* Dequeue buffer from Gralloc */
if (m_previewWindow->dequeue_buffer(m_previewWindow,
&bufHandle,
&stride) != 0) {
ALOGE("ERR(%s):Could not dequeue gralloc buffer!!", __func__);
goto callbacks;
}
/* Get virtual address from dequeued buf */
if (m_grallocHal->lock(m_grallocHal,
*bufHandle,
GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR,
0, 0, previewW, previewH, virtAddr) != 0) {
ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
goto callbacks;
}
const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*bufHandle);
fd[0] = priv_handle->fd;
fd[1] = priv_handle->u_fd;
fd[2] = priv_handle->v_fd;
for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
if (m_grallocVirtAddr[i] == virtAddr[0]) {
findGrallocBuf = true;
m_previewBufHandle[i] = bufHandle;
m_previewStride[i] = stride;
previewBuf.reserved.p = i;
previewBuf.virt.extP[0] = (char *)virtAddr[0];
previewBuf.virt.extP[1] = (char *)virtAddr[1];
previewBuf.virt.extP[2] = (char *)virtAddr[2];
previewBuf.fd.extFd[0] = fd[0];
previewBuf.fd.extFd[1] = fd[1];
previewBuf.fd.extFd[2] = fd[2];
m_secCamera->setPreviewBuf(&previewBuf);
m_matchedGrallocIndex[previewBuf.reserved.p] = i;
m_avaliblePreviewBufHandle[i] = true;
break;
}
}
if (findGrallocBuf == false) {
ALOGE("%s:addr(%x) is not matched any gralloc buffer's addr", __func__, virtAddr[0]);
goto callbacks;
}
if ( doPutPreviewBuf == true
&& m_secCamera->putPreviewBuf(&previewBuf) == false)
ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p);
else
doPutPreviewBuf = false;
}
callbacks:
if ( m_previewRunning == true
&& m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
// resize from previewBuf(max size) to m_previewHeap(user's set size)
if (m_exynosPreviewCSC) {
int previewFormat = m_secCamera->getPreviewFormat();
csc_set_src_format(m_exynosPreviewCSC,
previewW, previewH - 8,
0, 0, previewW, previewH - 8,
V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat),
0);
csc_set_dst_format(m_exynosPreviewCSC,
m_orgPreviewRect.w, m_orgPreviewRect.h,
0, 0, m_orgPreviewRect.w, m_orgPreviewRect.h,
V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat),
1);
csc_set_src_buffer(m_exynosPreviewCSC,
(unsigned char *)callbackBuf.virt.extP[0],
(unsigned char *)callbackBuf.virt.extP[1],
(unsigned char *)callbackBuf.virt.extP[2],
0);
ExynosBuffer dstBuf;
m_getAlignedYUVSize(m_orgPreviewRect.colorFormat, m_orgPreviewRect.w, m_orgPreviewRect.h, &dstBuf);
dstBuf.virt.extP[0] = (char *)m_previewHeap[callbackBuf.reserved.p]->data;
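// The callback heap is a single contiguous allocation; derive each
// following plane pointer from the previous plane's size.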
for (int i = 1; i < 3; i++) {
if (dstBuf.size.extS[i] != 0)
dstBuf.virt.extP[i] = dstBuf.virt.extP[i-1] + dstBuf.size.extS[i-1];
}
csc_set_dst_buffer(m_exynosPreviewCSC,
(unsigned char *)dstBuf.virt.extP[0],
(unsigned char *)dstBuf.virt.extP[1],
(unsigned char *)dstBuf.virt.extP[2],
0);
if (csc_convert(m_exynosPreviewCSC) != 0)
ALOGE("ERR(%s):csc_convert() fail", __func__);
} else {
ALOGE("ERR(%s):m_exynosPreviewCSC == NULL", __func__);
}
}
/* TODO: We need better error handling scheme than this scheme */
if ( doPutPreviewBuf == true
&& m_secCamera->putPreviewBuf(&previewBuf) == false)
ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p);
else
doPutPreviewBuf = false;
if ( m_previewRunning == true
&& m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
m_dataCb(CAMERA_MSG_PREVIEW_FRAME, m_previewHeap[callbackBuf.reserved.p], 0, NULL, m_callbackCookie);
}
/* Face detection */
if ( m_previewRunning == true
&& m_msgEnabled & CAMERA_MSG_PREVIEW_METADATA
&& m_secCamera->flagStartFaceDetection() == true) {
camera_frame_metadata_t *ptrMetadata = NULL;
int id[NUM_OF_DETECTED_FACES];
int score[NUM_OF_DETECTED_FACES];
ExynosRect2 detectedFace[NUM_OF_DETECTED_FACES];
ExynosRect2 detectedLeftEye[NUM_OF_DETECTED_FACES];
ExynosRect2 detectedRightEye[NUM_OF_DETECTED_FACES];
ExynosRect2 detectedMouth[NUM_OF_DETECTED_FACES];
int numOfDetectedFaces = m_secCamera->getDetectedFacesAreas(NUM_OF_DETECTED_FACES,
id,
score,
detectedFace,
detectedLeftEye,
detectedRightEye,
detectedMouth);
if (0 < numOfDetectedFaces) {
// Per camera.h, face rect coordinates are mapped into [-1000, 1000];
// eye and mouth coordinates are reported as (-2000, -2000) when they
// cannot be detected.
int realNumOfDetectedFaces = 0;
m_faceDetected = true;
for (int i = 0;