/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2012, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
** http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/
/*!
* \file ExynosCameraHWInterface2.cpp
* \brief source file for Android Camera API 2.0 HAL
* \author Sungjoong Kang(sj3.kang@samsung.com)
* \date 2012/07/10
*
* <b>Revision History: </b>
* - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
* Initial Release
*
* - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
* 2nd Release
*
*/
//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHAL2"
#include <sys/time.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <utils/Log.h>
#include <math.h>
#include "ExynosCameraHWInterface2.h"
#include "exynos_format.h"
namespace android {
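/*
 * Debug helper that dumps a raw buffer (e.g. a postview image) to a file so
 * it can be pulled from the device for offline inspection.
 */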
void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
{
int nw;
int cnt = 0;
uint32_t written = 0;
ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
int fd = open(fname, O_RDWR | O_CREAT, 0644);
if (fd < 0) {
ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
return;
}
ALOGV("writing %d bytes to file [%s]", size, fname);
while (written < size) {
nw = ::write(fd, buf + written, size - written);
if (nw < 0) {
ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
break;
}
written += nw;
cnt++;
}
ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
::close(fd);
}
int get_pixel_depth(uint32_t fmt)
{
int depth = 0;
switch (fmt) {
case V4L2_PIX_FMT_JPEG:
depth = 8;
break;
case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420M:
case V4L2_PIX_FMT_NV12M:
case V4L2_PIX_FMT_NV12MT:
depth = 12;
break;
case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_VYUY:
case V4L2_PIX_FMT_NV16:
case V4L2_PIX_FMT_NV61:
case V4L2_PIX_FMT_YUV422P:
case V4L2_PIX_FMT_SBGGR10:
case V4L2_PIX_FMT_SBGGR12:
case V4L2_PIX_FMT_SBGGR16:
depth = 16;
break;
case V4L2_PIX_FMT_RGB32:
depth = 32;
break;
default:
ALOGE("Get depth failed(format : %d)", fmt);
break;
}
return depth;
}
int cam_int_s_fmt(node_info_t *node)
{
struct v4l2_format v4l2_fmt;
int ret;
memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
v4l2_fmt.type = node->type;
if (node->planes >= 1) {
v4l2_fmt.fmt.pix_mp.width = node->width;
v4l2_fmt.fmt.pix_mp.height = node->height;
v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
} else {
ALOGE("%s:S_FMT, invalid plane count (%d)", __FUNCTION__, node->planes);
return -1;
}
/* Set up for capture */
ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
if (ret < 0)
ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
return ret;
}
int cam_int_reqbufs(node_info_t *node)
{
struct v4l2_requestbuffers req;
int ret;
memset(&req, 0, sizeof(req)); // reserved fields must be zeroed for REQBUFS
req.count = node->buffers;
req.type = node->type;
req.memory = node->memory;
ret = exynos_v4l2_reqbufs(node->fd, &req);
if (ret < 0) {
ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)", __FUNCTION__, node->fd, ret);
return ret;
}
return req.count;
}
int cam_int_qbuf(node_info_t *node, int index)
{
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
int i;
int ret = 0;
memset(&v4l2_buf, 0, sizeof(v4l2_buf)); // leave no garbage in flags/reserved
memset(planes, 0, sizeof(planes));
v4l2_buf.m.planes = planes;
v4l2_buf.type = node->type;
v4l2_buf.memory = node->memory;
v4l2_buf.index = index;
v4l2_buf.length = node->planes;
for(i = 0; i < node->planes; i++){
v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]);
}
ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
if (ret < 0)
ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
return ret;
}
int cam_int_streamon(node_info_t *node)
{
enum v4l2_buf_type type = node->type;
int ret;
ret = exynos_v4l2_streamon(node->fd, type);
if (ret < 0)
ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
ALOGV("On streaming I/O... ... fd(%d)", node->fd);
return ret;
}
int cam_int_streamoff(node_info_t *node)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
int ret;
ALOGV("Off streaming I/O... fd(%d)", node->fd);
ret = exynos_v4l2_streamoff(node->fd, type);
if (ret < 0)
ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
return ret;
}
int isp_int_streamoff(node_info_t *node)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
int ret;
ALOGV("Off streaming I/O... fd(%d)", node->fd);
ret = exynos_v4l2_streamoff(node->fd, type);
if (ret < 0)
ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
return ret;
}
int cam_int_dqbuf(node_info_t *node)
{
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
int ret;
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
v4l2_buf.type = node->type;
v4l2_buf.memory = node->memory;
v4l2_buf.m.planes = planes;
v4l2_buf.length = node->planes;
ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
if (ret < 0) {
ALOGE("%s: VIDIOC_DQBUF failed (%d)", __FUNCTION__, ret);
return ret; // do not return an uninitialized index on failure
}
return v4l2_buf.index;
}
int cam_int_dqbuf(node_info_t *node, int num_plane)
{
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
int ret;
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
v4l2_buf.type = node->type;
v4l2_buf.memory = node->memory;
v4l2_buf.m.planes = planes;
v4l2_buf.length = num_plane;
ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
if (ret < 0) {
ALOGE("%s: VIDIOC_DQBUF failed (%d)", __FUNCTION__, ret);
return ret; // do not return an uninitialized index on failure
}
return v4l2_buf.index;
}
int cam_int_s_input(node_info_t *node, int index)
{
int ret;
ret = exynos_v4l2_s_input(node->fd, index);
if (ret < 0)
ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
return ret;
}
gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
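/*
 * RequestManager tracks in-flight capture requests in a fixed-size circular
 * buffer (entries[NUM_MAX_REQUEST_MGR_ENTRY]). As used below, each entry
 * moves through the following states:
 *
 *   EMPTY -> REGISTERED  (RegisterRequest: request accepted from the service)
 *         -> REQUESTED   (MarkProcessingRequest: shot queued to the sensor)
 *         -> CAPTURED    (FindFrameCnt: matching frame returned by the ISP)
 *         -> METADONE    (ApplyDynamicMetadata: dynamic metadata merged)
 *         -> COMPLETED   (CheckCompleted: all stream outputs reported)
 *         -> EMPTY       (DeregisterRequest: frame handed back to the service)
 *
 * m_entryInsertionIndex, m_entryProcessingIndex and m_entryFrameOutputIndex
 * track the head of each stage within the ring.
 */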
RequestManager::RequestManager(SignalDrivenThread* main_thread):
m_vdisEnable(false),
m_lastCompletedFrameCnt(-1),
m_lastAeMode(0),
m_lastAaMode(0),
m_lastAwbMode(0),
m_lastAeComp(0),
m_vdisBubbleEn(false)
{
m_metadataConverter = new MetadataConverter;
m_mainThread = main_thread;
ResetEntry();
m_sensorPipelineSkipCnt = 0;
return;
}
RequestManager::~RequestManager()
{
ALOGV("%s", __FUNCTION__);
if (m_metadataConverter != NULL) {
delete m_metadataConverter;
m_metadataConverter = NULL;
}
releaseSensorQ();
return;
}
void RequestManager::ResetEntry()
{
Mutex::Autolock lock(m_requestMutex);
Mutex::Autolock lock2(m_numOfEntriesLock);
for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
entries[i].internal_shot.shot.ctl.request.frameCount = -1;
}
m_numOfEntries = 0;
m_entryInsertionIndex = -1;
m_entryProcessingIndex = -1;
m_entryFrameOutputIndex = -1;
}
int RequestManager::GetNumEntries()
{
Mutex::Autolock lock(m_numOfEntriesLock);
return m_numOfEntries;
}
void RequestManager::SetDefaultParameters(int cropX)
{
m_cropX = cropX;
}
bool RequestManager::IsRequestQueueFull()
{
Mutex::Autolock lock(m_requestMutex);
Mutex::Autolock lock2(m_numOfEntriesLock);
return (m_numOfEntries >= NUM_MAX_REQUEST_MGR_ENTRY);
}
void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
Mutex::Autolock lock(m_requestMutex);
Mutex::Autolock lock2(m_numOfEntriesLock);
request_manager_entry * newEntry = NULL;
int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );
newEntry = &(entries[newInsertionIndex]);
if (newEntry->status != EMPTY) {
ALOGE("ERR(%s): circular buffer abnormal, cannot register request", __FUNCTION__);
return;
}
newEntry->status = REGISTERED;
newEntry->original_request = new_request;
memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
newEntry->output_stream_count = 0;
if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
newEntry->output_stream_count++;
if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
newEntry->output_stream_count++;
m_numOfEntries++;
m_entryInsertionIndex = newInsertionIndex;
*afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}
void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
int frame_index;
request_manager_entry * currentEntry;
Mutex::Autolock lock(m_requestMutex);
Mutex::Autolock lock2(m_numOfEntriesLock);
frame_index = GetCompletedIndex();
currentEntry = &(entries[frame_index]);
if (currentEntry->status != COMPLETED) {
CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
return;
}
if (deregistered_request) *deregistered_request = currentEntry->original_request;
m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
currentEntry->status = EMPTY;
currentEntry->original_request = NULL;
memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
currentEntry->output_stream_count = 0;
m_numOfEntries--;
ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
CheckCompleted(GetNextIndex(frame_index));
return;
}
bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
camera_metadata_t ** prepared_frame, int afState)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
Mutex::Autolock lock(m_requestMutex);
status_t res = NO_ERROR;
int tempFrameOutputIndex = GetCompletedIndex();
request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
if (currentEntry->status != COMPLETED) {
ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
return false;
}
m_entryFrameOutputIndex = tempFrameOutputIndex;
m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
m_tempFrameMetadata);
if (res!=NO_ERROR) {
ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
return false;
}
*num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
*frame_size = get_camera_metadata_size(m_tempFrameMetadata);
*prepared_frame = m_tempFrameMetadata;
ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
// Dump();
return true;
}
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
{
struct camera2_shot_ext * shot_ext;
struct camera2_shot_ext * request_shot;
int targetStreamIndex = 0;
request_manager_entry * newEntry = NULL;
static int count = 0;
Mutex::Autolock lock(m_requestMutex);
Mutex::Autolock lock2(m_numOfEntriesLock);
if (m_numOfEntries == 0) {
CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
return -1;
}
if ((m_entryProcessingIndex == m_entryInsertionIndex)
&& (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
return -1;
}
int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
newEntry = &(entries[newProcessingIndex]);
request_shot = &(newEntry->internal_shot);
if (newEntry->status != REGISTERED) {
CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
}
return -1;
}
newEntry->status = REQUESTED;
shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
shot_ext->request_sensor = 1;
shot_ext->dis_bypass = 1;
shot_ext->dnr_bypass = 1;
shot_ext->fd_bypass = 1;
shot_ext->setfile = 0;
targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
if (targetStreamIndex & MASK_OUTPUT_SCP)
shot_ext->request_scp = 1;
if (targetStreamIndex & MASK_OUTPUT_SCC)
shot_ext->request_scc = 1;
// consult the request's face-detect setting; shot_ext was zeroed above
if (request_shot->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
shot_ext->fd_bypass = 0;
if (count == 0) {
shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
} else {
shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
}
count++;
shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
shot_ext->shot.magicNumber = 0x23456789;
shot_ext->shot.ctl.sensor.exposureTime = 0;
shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
shot_ext->shot.ctl.sensor.sensitivity = 0;
shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
m_entryProcessingIndex = newProcessingIndex;
return newProcessingIndex;
}
void RequestManager::NotifyStreamOutput(int frameCnt)
{
int index;
Mutex::Autolock lock(m_requestMutex);
ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
index = FindEntryIndexByFrameCnt(frameCnt);
if (index == -1) {
ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
return;
}
ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count);
entries[index].output_stream_count--; //TODO : match stream id also
CheckCompleted(index);
}
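/*
 * An entry is considered complete once its dynamic metadata has arrived
 * (METADONE) and every requested stream output has been reported
 * (output_stream_count <= 0). Frames are released to the framework strictly
 * in frameCount order: the main thread is signalled only when the completed
 * entry is the next expected frame (m_lastCompletedFrameCnt + 1).
 */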
void RequestManager::CheckCompleted(int index)
{
if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
&& (entries[index].output_stream_count <= 0)){
ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
index, entries[index].internal_shot.shot.ctl.request.frameCount );
entries[index].status = COMPLETED;
if (m_lastCompletedFrameCnt + 1 == (int)entries[index].internal_shot.shot.ctl.request.frameCount)
m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
}
}
int RequestManager::GetCompletedIndex()
{
return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
}
void RequestManager::pushSensorQ(int index)
{
Mutex::Autolock lock(m_requestMutex);
m_sensorQ.push_back(index);
}
int RequestManager::popSensorQ()
{
List<int>::iterator sensor_token;
int index;
Mutex::Autolock lock(m_requestMutex);
if (m_sensorQ.size() == 0)
return -1;
sensor_token = m_sensorQ.begin(); // postfix ++ on the temporary was a no-op
index = *sensor_token;
m_sensorQ.erase(sensor_token);
return index;
}
void RequestManager::releaseSensorQ()
{
Mutex::Autolock lock(m_requestMutex);
ALOGV("(%s) m_sensorQ.size : %zu", __FUNCTION__, m_sensorQ.size());
m_sensorQ.clear();
return;
}
void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
int index;
struct camera2_shot_ext * request_shot;
nsecs_t timeStamp;
int i;
Mutex::Autolock lock(m_requestMutex);
ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
if (entries[i].internal_shot.shot.ctl.request.frameCount
== shot_ext->shot.ctl.request.frameCount) {
if (entries[i].status == CAPTURED) {
entries[i].status = METADONE;
break;
}
if (entries[i].status == METADONE) {
return;
}
}
}
if (i == NUM_MAX_REQUEST_MGR_ENTRY){
ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
return;
}
request_manager_entry * newEntry = &(entries[i]);
request_shot = &(newEntry->internal_shot);
timeStamp = request_shot->shot.dm.sensor.timeStamp;
memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
request_shot->shot.dm.sensor.timeStamp = timeStamp;
m_lastTimeStamp = timeStamp;
CheckCompleted(i);
}
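/*
 * Copies the per-request controls into the shot that is sent to the ISP.
 * The 3A settings (aa.mode, aeMode, awbMode, aeExpCompensation) are
 * delta-encoded: when a value is unchanged from the previous frame, 0/NONE
 * is sent instead, so the firmware is not re-triggered with the same mode
 * on every frame.
 */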
void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
{
int index, targetStreamIndex;
struct camera2_shot_ext * request_shot;
ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
if (frameCnt < 0)
return;
index = FindEntryIndexByFrameCnt(frameCnt);
if (index == -1) {
ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
return;
}
request_manager_entry * newEntry = &(entries[index]);
request_shot = &(newEntry->internal_shot);
memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
shot_ext->shot.ctl.request.frameCount = frameCnt;
shot_ext->request_sensor = 1;
shot_ext->dis_bypass = 1;
shot_ext->dnr_bypass = 1;
shot_ext->fd_bypass = 1;
shot_ext->drc_bypass = 1;
shot_ext->setfile = 0;
shot_ext->request_scc = 0;
shot_ext->request_scp = 0;
shot_ext->isReprocessing = request_shot->isReprocessing;
shot_ext->reprocessInput = request_shot->reprocessInput;
shot_ext->shot.ctl.request.outputStreams[0] = 0;
shot_ext->awb_mode_dm = request_shot->awb_mode_dm;
shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
// mapping flash UI mode from aeMode
if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW ||
request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
}
// Apply ae/awb lock or unlock
if (request_shot->ae_lock == AEMODE_LOCK_ON)
request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
if (request_shot->awb_lock == AWBMODE_LOCK_ON)
request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
}
else {
shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
}
if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
}
else {
shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
}
if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
}
else {
shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
}
if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
shot_ext->shot.ctl.aa.aeExpCompensation = 0;
}
else {
shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
}
if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) {
m_vdisBubbleEn = true;
shot_ext->dis_bypass = 0;
shot_ext->dnr_bypass = 0;
} else {
m_vdisBubbleEn = false;
shot_ext->dis_bypass = 1;
shot_ext->dnr_bypass = 1;
}
shot_ext->shot.ctl.aa.afTrigger = 0;
targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
if (targetStreamIndex & MASK_OUTPUT_SCP)
shot_ext->request_scp = 1;
if (targetStreamIndex & MASK_OUTPUT_SCC)
shot_ext->request_scc = 1;
if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
shot_ext->fd_bypass = 0;
shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
(int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
(int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
(int)(shot_ext->shot.ctl.aa.afMode));
}
bool RequestManager::IsVdisEnable(void)
{
return m_vdisBubbleEn;
}
int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
{
for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
if ((int)entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
return i;
}
return -1;
}
void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
int index = FindEntryIndexByFrameCnt(frameCnt);
if (index == -1) {
ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
return;
}
request_manager_entry * currentEntry = &(entries[index]);
if (currentEntry->internal_shot.isReprocessing == 1) {
ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
} else {
currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
}
}
nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt)
{
int index = FindEntryIndexByFrameCnt(frameCnt);
if (index == -1) {
ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
return m_lastTimeStamp;
}
else
return GetTimestamp(index);
}
nsecs_t RequestManager::GetTimestamp(int index)
{
Mutex::Autolock lock(m_requestMutex);
if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
return 0;
}
request_manager_entry * currentEntry = &(entries[index]);
nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
if (frameTime == 0) {
ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__);
frameTime = m_lastTimeStamp;
}
ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
return frameTime;
}
uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
{
int index = FindEntryIndexByFrameCnt(frameCnt);
if (index == -1) {
ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
return 0;
}
else
return GetOutputStream(index);
}
uint8_t RequestManager::GetOutputStream(int index)
{
Mutex::Autolock lock(m_requestMutex);
if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
return 0;
}
request_manager_entry * currentEntry = &(entries[index]);
return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
}
camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
{
int index = FindEntryIndexByFrameCnt(frameCnt);
if (index == -1) {
ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
return 0;
}
else
return GetInternalShotExt(index);
}
camera2_shot_ext * RequestManager::GetInternalShotExt(int index)
{
Mutex::Autolock lock(m_requestMutex);
if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
return 0;
}
request_manager_entry * currentEntry = &(entries[index]);
return &currentEntry->internal_shot;
}
int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext, bool drain)
{
Mutex::Autolock lock(m_requestMutex);
Mutex::Autolock lock2(m_numOfEntriesLock);
int i;
if (m_numOfEntries == 0) {
CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
return -1;
}
for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
continue;
if (entries[i].status == REQUESTED) {
entries[i].status = CAPTURED;
return entries[i].internal_shot.shot.ctl.request.frameCount;
}
if (drain && (entries[i].status >= CAPTURED)) {
return entries[i].internal_shot.shot.ctl.request.frameCount;
}
CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
}
CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
return -1;
}
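/*
 * Pipeline-restart frame skipping: SetInitialSkip() arms a skip counter
 * (e.g. SetInitialSkip(6) when the ISP is started in
 * notifyRequestQueueNotEmpty) and GetSkipCnt() counts it down, letting the
 * caller discard the first frames produced after a restart.
 */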
void RequestManager::SetInitialSkip(int count)
{
ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
if (count > m_sensorPipelineSkipCnt)
m_sensorPipelineSkipCnt = count;
}
int RequestManager::GetSkipCnt()
{
ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
if (m_sensorPipelineSkipCnt == 0)
return m_sensorPipelineSkipCnt;
else
return --m_sensorPipelineSkipCnt;
}
void RequestManager::Dump(void)
{
int i = 0;
request_manager_entry * currentEntry;
Mutex::Autolock lock(m_numOfEntriesLock);
ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
currentEntry = &(entries[i]);
ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
currentEntry->output_stream_count,
currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
}
}
int RequestManager::GetNextIndex(int index)
{
index++;
if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
index = 0;
return index;
}
int RequestManager::GetPrevIndex(int index)
{
index--;
if (index < 0)
index = NUM_MAX_REQUEST_MGR_ENTRY-1;
return index;
}
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
m_requestQueueOps(NULL),
m_frameQueueOps(NULL),
m_callbackCookie(NULL),
m_numOfRemainingReqInSvc(0),
m_isRequestQueuePending(false),
m_isRequestQueueNull(true),
m_halDevice(dev),
m_ionCameraClient(0),
m_isIspStarted(false),
m_sccLocalBufferValid(false),
m_cameraId(cameraId),
m_scp_closing(false),
m_scp_closed(false),
m_wideAspect(false),
m_zoomRatio(1),
m_vdisBubbleCnt(0),
m_vdisDupFrame(0),
m_jpegEncodingCount(0),
m_scpForceSuspended(false),
m_afState(HAL_AFSTATE_INACTIVE),
m_afTriggerId(0),
m_afMode(NO_CHANGE),
m_afMode2(NO_CHANGE),
m_IsAfModeUpdateRequired(false),
m_IsAfTriggerRequired(false),
m_IsAfLockRequired(false),
m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
m_afPendingTriggerId(0),
m_afModeWaitingCnt(0),
m_scpOutputSignalCnt(0),
m_scpOutputImageCnt(0),
m_nightCaptureCnt(0),
m_nightCaptureFrameCnt(0),
m_lastSceneMode(0),
m_thumbNailW(160),
m_thumbNailH(120)
{
ALOGD("(%s): ENTER", __FUNCTION__);
int ret = 0;
int res = 0;
m_exynosPictureCSC = NULL;
m_exynosVideoCSC = NULL;
if (!m_grallocHal) {
ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
if (ret)
ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
}
m_camera2 = camera;
m_ionCameraClient = createIonClient(m_ionCameraClient);
if(m_ionCameraClient == 0)
ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
m_BayerManager = new BayerBufManager();
m_mainThread = new MainThread(this);
m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
*openInvalid = InitializeISPChain();
if (*openInvalid < 0) {
ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
// clean process
// 1. close video nodes
// SCP
res = exynos_v4l2_close(m_camera_info.scp.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
// SCC
res = exynos_v4l2_close(m_camera_info.capture.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
// Sensor
res = exynos_v4l2_close(m_camera_info.sensor.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
// ISP
res = exynos_v4l2_close(m_camera_info.isp.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
} else {
m_sensorThread = new SensorThread(this);
m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
CSC_METHOD cscMethod = CSC_METHOD_HW;
m_exynosPictureCSC = csc_init(cscMethod);
if (m_exynosPictureCSC == NULL)
ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
m_exynosVideoCSC = csc_init(cscMethod);
if (m_exynosVideoCSC == NULL)
ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
m_setExifFixedAttribute();
// control information clear
// flash
m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
m_ctlInfo.flash.m_afFlashDoneFlg = false;
m_ctlInfo.flash.m_flashEnableFlg = false;
m_ctlInfo.flash.m_flashFrameCount = 0;
m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
m_ctlInfo.flash.m_flashTimeOut = 0;
m_ctlInfo.flash.m_flashDecisionResult = false;
m_ctlInfo.flash.m_flashTorchMode = false;
m_ctlInfo.flash.m_precaptureState = 0;
m_ctlInfo.flash.m_precaptureTriggerId = 0;
// ae
m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
// af
m_ctlInfo.af.m_afTriggerTimeOut = 0;
// scene
m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
}
ALOGD("(%s): EXIT", __FUNCTION__);
}
ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
ALOGD("(%s): ENTER", __FUNCTION__);
this->release();
ALOGD("(%s): EXIT", __FUNCTION__);
}
void ExynosCameraHWInterface2::release()
{
int i, res;
ALOGD("(HAL2::release): ENTER");
if (m_streamThreads[1] != NULL) {
m_streamThreads[1]->release();
m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
}
if (m_streamThreads[0] != NULL) {
m_streamThreads[0]->release();
m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
}
if (m_sensorThread != NULL) {
m_sensorThread->release();
}
if (m_mainThread != NULL) {
m_mainThread->release();
}
if (m_exynosPictureCSC)
csc_deinit(m_exynosPictureCSC);
m_exynosPictureCSC = NULL;
if (m_exynosVideoCSC)
csc_deinit(m_exynosVideoCSC);
m_exynosVideoCSC = NULL;
if (m_streamThreads[1] != NULL) {
ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
while (!m_streamThreads[1]->IsTerminated())
usleep(SIG_WAITING_TICK);
ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
m_streamThreads[1] = NULL;
}
if (m_streamThreads[0] != NULL) {
ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
while (!m_streamThreads[0]->IsTerminated())
usleep(SIG_WAITING_TICK);
ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
m_streamThreads[0] = NULL;
}
if (m_sensorThread != NULL) {
ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
while (!m_sensorThread->IsTerminated())
usleep(SIG_WAITING_TICK);
ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
m_sensorThread = NULL;
}
if (m_mainThread != NULL) {
ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
while (!m_mainThread->IsTerminated())
usleep(SIG_WAITING_TICK);
ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
m_mainThread = NULL;
}
if (m_requestManager != NULL) {
delete m_requestManager;
m_requestManager = NULL;
}
if (m_BayerManager != NULL) {
delete m_BayerManager;
m_BayerManager = NULL;
}
for (i = 0; i < NUM_BAYER_BUFFERS; i++)
freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
if (m_sccLocalBufferValid) {
for (i = 0; i < NUM_SCC_BUFFERS; i++) {
#ifdef ENABLE_FRAME_SYNC
freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
}
}
else {
for (i = 0; i < NUM_SCC_BUFFERS; i++)
freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
}
ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
res = exynos_v4l2_close(m_camera_info.sensor.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
res = exynos_v4l2_close(m_camera_info.isp.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
res = exynos_v4l2_close(m_camera_info.capture.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
res = exynos_v4l2_close(m_camera_info.scp.fd);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
deleteIonClient(m_ionCameraClient);
ALOGD("(HAL2::release): EXIT");
}
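/*
 * Opens the four FIMC-IS video nodes and wires them into the capture chain:
 *
 *   NODE_PREFIX + 40  sensor   bayer output  (V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
 *   NODE_PREFIX + 41  isp      bayer input   (V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
 *   NODE_PREFIX + 42  capture  ScalerC (SCC), still capture output
 *   NODE_PREFIX + 44  scp      ScalerP, preview output
 *
 * The sensor and ISP nodes share the same DMABUF-backed buffers (plane 1
 * carries the camera2_shot_ext metadata), so bayer frames flow from sensor
 * to ISP without a copy.
 */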
int ExynosCameraHWInterface2::InitializeISPChain()
{
char node_name[30];
int fd = 0;
int i;
int ret = 0;
/* Open Sensor */
memset(node_name, 0x00, sizeof(node_name));
snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 40);
fd = exynos_v4l2_open(node_name, O_RDWR, 0);
if (fd < 0) {
ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
}
else {
ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
}
m_camera_info.sensor.fd = fd;
/* Open ISP */
memset(node_name, 0x00, sizeof(node_name));
snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 41);
fd = exynos_v4l2_open(node_name, O_RDWR, 0);
if (fd < 0) {
ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
}
else {
ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
}
m_camera_info.isp.fd = fd;
/* Open ScalerC */
memset(node_name, 0x00, sizeof(node_name));
snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 42);
fd = exynos_v4l2_open(node_name, O_RDWR, 0);
if (fd < 0) {
ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
}
else {
ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
}
m_camera_info.capture.fd = fd;
/* Open ScalerP */
memset(node_name, 0x00, sizeof(node_name));
snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 44);
fd = exynos_v4l2_open(node_name, O_RDWR, 0);
if (fd < 0) {
ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
}
else {
ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
}
m_camera_info.scp.fd = fd;
if(m_cameraId == 0)
m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
else
m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
m_camera_info.dummy_shot.dis_bypass = 1;
m_camera_info.dummy_shot.dnr_bypass = 1;
m_camera_info.dummy_shot.fd_bypass = 1;
/*sensor setting*/
m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
/*request setting*/
m_camera_info.dummy_shot.request_sensor = 1;
m_camera_info.dummy_shot.request_scc = 0;
m_camera_info.dummy_shot.request_scp = 0;
m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
m_camera_info.sensor.width = m_camera2->getSensorRawW();
m_camera_info.sensor.height = m_camera2->getSensorRawH();
m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
m_camera_info.sensor.planes = 2;
m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
for(i = 0; i < m_camera_info.sensor.buffers; i++){
int res;
initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: the driver uses 8*1024; this should come from a predefined value
res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
if (res) {
ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
// Free allocated sensor buffers
for (int j = 0; j < i; j++) {
freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
}
return -1; // negative so that the (*openInvalid < 0) check in the constructor fires
}
}
m_camera_info.isp.width = m_camera_info.sensor.width;
m_camera_info.isp.height = m_camera_info.sensor.height;
m_camera_info.isp.format = m_camera_info.sensor.format;
m_camera_info.isp.planes = m_camera_info.sensor.planes;
m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
for(i = 0; i < m_camera_info.isp.buffers; i++){
initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
}
/* init ISP */
ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
if (ret < 0) {
ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id);
return -1; // negative so that the (*openInvalid < 0) check in the constructor fires
}
cam_int_s_fmt(&(m_camera_info.isp));
ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
cam_int_reqbufs(&(m_camera_info.isp));
ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
/* init Sensor */
cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
}
ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
cam_int_reqbufs(&(m_camera_info.sensor));
ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
for (i = 0; i < m_camera_info.sensor.buffers; i++) {
ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
sizeof(struct camera2_shot_ext));
}
for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
cam_int_qbuf(&(m_camera_info.sensor), i);
for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
m_requestManager->pushSensorQ(i);
ALOGV("== stream_on :: sensor");
cam_int_streamon(&(m_camera_info.sensor));
m_camera_info.sensor.status = true;
/* init Capture */
m_camera_info.capture.width = m_camera2->getSensorW();
m_camera_info.capture.height = m_camera2->getSensorH();
m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
#ifdef ENABLE_FRAME_SYNC
m_camera_info.capture.planes = 2;
#else
m_camera_info.capture.planes = 1;
#endif
m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
m_camera_info.capture.status = false;
return true;
}
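/*
 * The SCC (still capture) stream runs as an indirect stream thread: the
 * capture node fills HAL-local ION buffers (m_sccLocalBuffer) instead of
 * gralloc buffers dequeued from the service. The buffers survive a thread
 * suspend/restart (threadExists == true), so they are allocated only once.
 */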
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
ALOGV("(%s)", __FUNCTION__);
StreamThread *AllocatedStream;
stream_parameters_t newParameters;
uint32_t format_actual;
if (!threadExists) {
m_streamThreads[1] = new StreamThread(this, 1);
}
AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
if (!threadExists) {
AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
AllocatedStream->m_numRegisteredStream = 1;
}
AllocatedStream->m_index = 1;
format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
newParameters.width = m_camera2->getSensorW();
newParameters.height = m_camera2->getSensorH();
newParameters.format = format_actual;
newParameters.streamOps = NULL;
newParameters.numHwBuffers = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
newParameters.planes = 2;
#else
newParameters.planes = 1;
#endif
newParameters.numSvcBufsInHal = 0;
newParameters.node = &m_camera_info.capture;
AllocatedStream->streamType = STREAM_TYPE_INDIRECT;
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
if (!threadExists) {
if (!m_sccLocalBufferValid) {
for (int i = 0; i < m_camera_info.capture.buffers; i++){
initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: the driver uses 4*1024; this should come from a predefined value
allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
}
m_sccLocalBufferValid = true;
}
} else {
if (m_sccLocalBufferValid) {
for (int i = 0; i < m_camera_info.capture.buffers; i++)
m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
} else {
ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
}
}
cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
cam_int_s_fmt(newParameters.node);
ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
cam_int_reqbufs(newParameters.node);
ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
for (int i = 0; i < newParameters.node->buffers; i++) {
ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
cam_int_qbuf(newParameters.node, i);
newParameters.svcBufStatus[i] = ON_DRIVER;
}
ALOGV("== stream_on :: capture");
if (cam_int_streamon(newParameters.node) < 0) {
ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
} else {
m_camera_info.capture.status = true;
}
AllocatedStream->setParameter(&newParameters);
AllocatedStream->m_activated = true;
AllocatedStream->m_isBufferInit = true;
}
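/*
 * Starts the ISP output node and then enables sensor streaming through the
 * V4L2_CID_IS_S_STREAM control.
 */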
void ExynosCameraHWInterface2::StartISP()
{
ALOGV("== stream_on :: isp");
cam_int_streamon(&(m_camera_info.isp));
exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
int ExynosCameraHWInterface2::getCameraId() const
{
return m_cameraId;
}
int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
&& (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
return 0;
}
else {
ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
return 1;
}
}
int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
int i = 0;
ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
return 0;
}
m_isRequestQueueNull = false;
if (m_requestManager->GetNumEntries() == 0)
m_requestManager->SetInitialSkip(0);
if (m_isIspStarted == false) {
/* isp */
m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
cam_int_s_fmt(&(m_camera_info.isp));
cam_int_reqbufs(&(m_camera_info.isp));
/* sensor */
if (m_camera_info.sensor.status == false) {
cam_int_s_fmt(&(m_camera_info.sensor));
cam_int_reqbufs(&(m_camera_info.sensor));
for (i = 0; i < m_camera_info.sensor.buffers; i++) {
ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
sizeof(struct camera2_shot_ext));
}
for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
cam_int_qbuf(&(m_camera_info.sensor), i);
for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
m_requestManager->pushSensorQ(i);
ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
cam_int_streamon(&(m_camera_info.sensor));
m_camera_info.sensor.status = true;
}
}
if (!(m_streamThreads[1].get())) {
ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
StartSCCThread(false);
} else {
if (m_streamThreads[1]->m_activated == false) {
ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
StartSCCThread(true);
} else {
if (m_camera_info.capture.status == false) {
m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
cam_int_s_fmt(&(m_camera_info.capture));
ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
cam_int_reqbufs(&(m_camera_info.capture));
ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
StreamThread * targetStream = m_streamThreads[1].get();
stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
node_info_t *currentNode = targetStreamParms->node;
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
memset(&v4l2_buf, 0, sizeof(v4l2_buf)); // re-zero per iteration; flags/reserved must not carry garbage
memset(planes, 0, sizeof(planes));
v4l2_buf.m.planes = planes;
v4l2_buf.type = currentNode->type;
v4l2_buf.memory = currentNode->memory;
v4l2_buf.length = currentNode->planes;
v4l2_buf.index = i;
ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
v4l2_buf.length += targetStreamParms->metaPlanes;
v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
}
ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
}
else {
targetStreamParms->svcBufStatus[i] = ON_SERVICE;
}
}
} else {
for (int i = 0; i < m_camera_info.capture.buffers; i++) {
ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
cam_int_qbuf(&(m_camera_info.capture), i);
}
}
ALOGV("== stream_on :: capture");
if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
} else {
m_camera_info.capture.status = true;
}
}
if (m_scpForceSuspended) {
m_scpForceSuspended = false;
}
}
}
if (m_isIspStarted == false) {
StartISP();
ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
m_requestManager->SetInitialSkip(6);
m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
m_isIspStarted = true;
}
m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
return 0;
}
int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
&& (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
return 0;
}
else {
ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
return 1;
}
}
int ExynosCameraHWInterface2::getInProgressCount()
{
int inProgressJpeg;
int inProgressCount;
{
Mutex::Autolock lock(m_jpegEncoderLock);
inProgressJpeg = m_jpegEncodingCount;
inProgressCount = m_requestManager->GetNumEntries();
}
ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
return (inProgressCount + inProgressJpeg);
}
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
return 0;
}
int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
{
ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
if (request == NULL) return BAD_VALUE;
if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
return BAD_VALUE;
}
status_t res;
// Pass 1, calculate size and allocate
res = m_camera2->constructDefaultRequest(request_template,
request,
true);
if (res != OK) {
return res;
}
// Pass 2, build request
res = m_camera2->constructDefaultRequest(request_template,
request,
false);
if (res != OK) {
ALOGE("Unable to populate new request for template %d",
request_template);
}
return res;
}
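/*
 * Maps a stream requested by the service (width, height, format) onto one of
 * the fixed HAL streams:
 *
 *   - implementation-defined/opaque format at a supported preview size
 *       -> STREAM_ID_PREVIEW on the SCP node (stream thread 0), or
 *          STREAM_ID_RECORD as a substream when preview is already active
 *   - CAMERA2_HAL_PIXEL_FORMAT_ZSL at full sensor resolution
 *       -> STREAM_ID_ZSL on the SCC node (stream thread 1)
 *   - HAL_PIXEL_FORMAT_BLOB at a supported JPEG size
 *       -> STREAM_ID_JPEG as a substream of the SCC stream
 */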
int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
{
ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
bool useDirectOutput = false;
StreamThread *AllocatedStream;
stream_parameters_t newParameters;
substream_parameters_t *subParameters;
StreamThread *parentStream;
status_t res;
int allocCase = 0;
if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) &&
m_camera2->isSupportedResolution(width, height)) {
if (!(m_streamThreads[0].get())) {
ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
allocCase = 0;
}
else {
if ((m_streamThreads[0].get())->m_activated == true) {
ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
allocCase = 1;
}
else {
ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
allocCase = 2;
}
}
// TODO: calculate the aspect ratio instead and select based on the computed ratio
if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
|| (width == 720 && height == 480) || (width == 1440 && height == 960)
|| (width == 1344 && height == 896)) {
m_wideAspect = true;
} else {
m_wideAspect = false;
}
ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
if (allocCase == 0 || allocCase == 2) {
*stream_id = STREAM_ID_PREVIEW;
m_streamThreads[0] = new StreamThread(this, *stream_id);
AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
*format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
*usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
if (m_wideAspect)
*usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
*max_buffers = 7;
newParameters.width = width;
newParameters.height = height;
newParameters.format = *format_actual;
newParameters.streamOps = stream_ops;
newParameters.usage = *usage;
newParameters.numHwBuffers = NUM_SCP_BUFFERS;
newParameters.numOwnSvcBuffers = *max_buffers;
newParameters.planes = NUM_PLANES(*format_actual);
newParameters.metaPlanes = 1;
newParameters.numSvcBufsInHal = 0;
newParameters.minUndequedBuffer = 3;
newParameters.needsIonMap = true;
newParameters.node = &m_camera_info.scp;
newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
newParameters.node->memory = V4L2_MEMORY_DMABUF;
AllocatedStream->streamType = STREAM_TYPE_DIRECT;
AllocatedStream->m_index = 0;
AllocatedStream->setParameter(&newParameters);
AllocatedStream->m_activated = true;
AllocatedStream->m_numRegisteredStream = 1;
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
// set video stabilization killswitch
m_requestManager->m_vdisEnable = width > 352 && height > 288;
return 0;
} else if (allocCase == 1) {
*stream_id = STREAM_ID_RECORD;
subParameters = &m_subStreams[STREAM_ID_RECORD];
memset(subParameters, 0, sizeof(substream_parameters_t));
parentStream = (StreamThread*)(m_streamThreads[0].get());
if (!parentStream) {
return 1;
}
*format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
*usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
if (m_wideAspect)
*usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
*max_buffers = 7;
subParameters->type = SUBSTREAM_TYPE_RECORD;
subParameters->width = width;
subParameters->height = height;
subParameters->format = *format_actual;
subParameters->svcPlanes = NUM_PLANES(*format_actual);
subParameters->streamOps = stream_ops;
subParameters->usage = *usage;
subParameters->numOwnSvcBuffers = *max_buffers;
subParameters->numSvcBufsInHal = 0;
subParameters->needBufferInit = false;
subParameters->minUndequedBuffer = 2;
res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
if (res != NO_ERROR) {
ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
return 1;
}
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
ALOGV("(%s): Enabling Record", __FUNCTION__);
return 0;
}
}
else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
&& ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) {
if (!(m_streamThreads[1].get())) {
ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
useDirectOutput = true;
}
else {
ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
useDirectOutput = false;
}
if (useDirectOutput) {
*stream_id = STREAM_ID_ZSL;
m_streamThreads[1] = new StreamThread(this, *stream_id);
AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
*format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
*usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
if (m_wideAspect)
*usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
*max_buffers = 7;
newParameters.width = width;
newParameters.height = height;
newParameters.format = *format_actual;
newParameters.streamOps = stream_ops;
newParameters.usage = *usage;
newParameters.numHwBuffers = NUM_SCC_BUFFERS;
newParameters.numOwnSvcBuffers = *max_buffers;
newParameters.planes = NUM_PLANES(*format_actual);
newParameters.metaPlanes = 1;
newParameters.numSvcBufsInHal = 0;
newParameters.minUndequedBuffer = 2;
newParameters.needsIonMap = false;
newParameters.node = &m_camera_info.capture;
newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
newParameters.node->memory = V4L2_MEMORY_DMABUF;
AllocatedStream->streamType = STREAM_TYPE_DIRECT;
AllocatedStream->m_index = 1;
AllocatedStream->setParameter(&newParameters);
AllocatedStream->m_activated = true;
AllocatedStream->m_numRegisteredStream = 1;
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
return 0;
} else {
bool bJpegExists = false;
AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
subParameters = &m_subStreams[STREAM_ID_JPEG];
if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
ALOGD("(%s): jpeg stream exists", __FUNCTION__);
bJpegExists = true;
AllocatedStream->detachSubStream(STREAM_ID_JPEG);
}
AllocatedStream->m_releasing = true;
ALOGD("START stream thread 1 release %d", __LINE__);
do {
AllocatedStream->release();
usleep(SIG_WAITING_TICK);
} while (AllocatedStream->m_releasing);
ALOGD("END stream thread 1 release %d", __LINE__);
*stream_id = STREAM_ID_ZSL;
m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
*format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
*usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
if (m_wideAspect)
*usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
*max_buffers = 7;
newParameters.width = width;
newParameters.height = height;
newParameters.format = *format_actual;
newParameters.streamOps = stream_ops;
newParameters.usage = *usage;
newParameters.numHwBuffers = NUM_SCC_BUFFERS;
newParameters.numOwnSvcBuffers = *max_buffers;
newParameters.planes = NUM_PLANES(*format_actual);
newParameters.metaPlanes = 1;
newParameters.numSvcBufsInHal = 0;
newParameters.minUndequedBuffer = 2;
newParameters.needsIonMap = false;
newParameters.node = &m_camera_info.capture;
newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
newParameters.node->memory = V4L2_MEMORY_DMABUF;
AllocatedStream->streamType = STREAM_TYPE_DIRECT;
AllocatedStream->m_index = 1;
AllocatedStream->setParameter(&newParameters);
AllocatedStream->m_activated = true;
AllocatedStream->m_numRegisteredStream = 1;
if (bJpegExists) {
AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
}
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
return 0;
}
}
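// JPEG (BLOB) output has no dedicated hardware path; it attaches to stream
// thread 1 (SCC) as a substream, starting or resuming that thread on demand.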
else if (format == HAL_PIXEL_FORMAT_BLOB
&& m_camera2->isSupportedJpegResolution(width, height)) {
*stream_id = STREAM_ID_JPEG;
subParameters = &m_subStreams[*stream_id];
memset(subParameters, 0, sizeof(substream_parameters_t));
if (!(m_streamThreads[1].get())) {
ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
StartSCCThread(false);
}
else if (m_streamThreads[1]->m_activated == false) {
ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
StartSCCThread(true);
}
parentStream = (StreamThread*)(m_streamThreads[1].get());
*format_actual = HAL_PIXEL_FORMAT_BLOB;
*usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
if (m_wideAspect)
*usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
*max_buffers = 5;
subParameters->type = SUBSTREAM_TYPE_JPEG;
subParameters->width = width;
subParameters->height = height;
subParameters->format = *format_actual;
subParameters->svcPlanes = 1;
subParameters->streamOps = stream_ops;
subParameters->usage = *usage;
subParameters->numOwnSvcBuffers = *max_buffers;
subParameters->numSvcBufsInHal = 0;
subParameters->needBufferInit = false;
subParameters->minUndequedBuffer = 2;
res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
if (res != NO_ERROR) {
ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
return 1;
}
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
return 0;
}
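// Preview callback stream: attached to the preview thread (stream thread 0)
// as a substream. internalFormat below is the Exynos-specific layout the
// hardware actually produces; conversion to the requested service format
// presumably happens when frames are delivered.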
else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
*stream_id = STREAM_ID_PRVCB;
subParameters = &m_subStreams[STREAM_ID_PRVCB];
memset(subParameters, 0, sizeof(substream_parameters_t));
parentStream = (StreamThread*)(m_streamThreads[0].get());
if (!parentStream) {
return 1;
}
*format_actual = format;
*usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
if (m_wideAspect)
*usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
*max_buffers = 7;
subParameters->type = SUBSTREAM_TYPE_PRVCB;
subParameters->width = width;
subParameters->height = height;
subParameters->format = *format_actual;
subParameters->svcPlanes = NUM_PLANES(*format_actual);
subParameters->streamOps = stream_ops;
subParameters->usage = *usage;
subParameters->numOwnSvcBuffers = *max_buffers;
subParameters->numSvcBufsInHal = 0;
subParameters->needBufferInit = false;
subParameters->minUndequedBuffer = 2;
if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
}
else {
subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
}
res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
if (res != NO_ERROR) {
ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
return 1;
}
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
ALOGV("(%s): Enabling previewcb", __FUNCTION__);
return 0;
}
ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
return 1;
}
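/*
 * registerStreamBuffers: receives the gralloc buffers the service allocated
 * for a stream. Substreams (JPEG / record / preview callback) only need the
 * buffers locked so their fds and mapped addresses can be recorded; direct
 * streams additionally program the V4L2 node (s_input / s_fmt / reqbufs),
 * queue the first numHwBuffers buffers, and start streaming.
 */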
int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
int num_buffers, buffer_handle_t *registeringBuffers)
{
int i;
void *virtAddr[3];
int plane_index = 0;
StreamThread * targetStream;
stream_parameters_t *targetStreamParms;
node_info_t *currentNode;
struct v4l2_buffer v4l2_buf;
struct v4l2_plane planes[VIDEO_MAX_PLANES];
ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
stream_id, num_buffers, (uint32_t)registeringBuffers);
if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
targetStream = m_streamThreads[0].get();
targetStreamParms = &(m_streamThreads[0]->m_parameters);
}
else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
substream_parameters_t *targetParms;
targetParms = &m_subStreams[stream_id];
targetParms->numSvcBuffers = num_buffers;
for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
i, stream_id, (uint32_t)(registeringBuffers[i]));
if (m_grallocHal) {
if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
targetParms->usage, 0, 0,
targetParms->width, targetParms->height, virtAddr) != 0) {
ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
}
else {
ExynosBuffer currentBuf;
const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
if (targetParms->svcPlanes == 1) {
currentBuf.fd.extFd[0] = priv_handle->fd;
currentBuf.size.extS[0] = priv_handle->size;
currentBuf.size.extS[1] = 0;
currentBuf.size.extS[2] = 0;
} else if (targetParms->svcPlanes == 2) {
currentBuf.fd.extFd[0] = priv_handle->fd;
currentBuf.fd.extFd[1] = priv_handle->fd1;
} else if (targetParms->svcPlanes == 3) {
currentBuf.fd.extFd[0] = priv_handle->fd;
currentBuf.fd.extFd[1] = priv_handle->fd1;
currentBuf.fd.extFd[2] = priv_handle->fd2;
}
for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
__FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
(unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
}
targetParms->svcBufStatus[i] = ON_SERVICE;
targetParms->svcBuffers[i] = currentBuf;
targetParms->svcBufHandle[i] = registeringBuffers[i];
}
}
}
targetParms->needBufferInit = true;
return 0;
}
else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
targetStream = m_streamThreads[1].get();
targetStreamParms = &(m_streamThreads[1]->m_parameters);
}
else {
ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
return 1;
}
if (targetStream->streamType == STREAM_TYPE_DIRECT) {
if (num_buffers < targetStreamParms->numHwBuffers) {
ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
__FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
return 1;
}
}
CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
__FUNCTION__, targetStreamParms->format, targetStreamParms->width,
targetStreamParms->height, targetStreamParms->planes);
targetStreamParms->numSvcBuffers = num_buffers;
currentNode = targetStreamParms->node;
currentNode->width = targetStreamParms->width;
currentNode->height = targetStreamParms->height;
currentNode->format = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
currentNode->planes = targetStreamParms->planes;
currentNode->buffers = targetStreamParms->numHwBuffers;
cam_int_s_input(currentNode, m_camera_info.sensor_id);
cam_int_s_fmt(currentNode);
cam_int_reqbufs(currentNode);
for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
i, (uint32_t)(registeringBuffers[i]));
memset(&v4l2_buf, 0, sizeof(v4l2_buf));
v4l2_buf.m.planes = planes;
v4l2_buf.type = currentNode->type;
v4l2_buf.memory = currentNode->memory;
v4l2_buf.index = i;
v4l2_buf.length = currentNode->planes;
ExynosBuffer currentBuf;
ExynosBuffer metaBuf;
const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
m_getAlignedYUVSize(currentNode->format,
currentNode->width, currentNode->height, &currentBuf);
ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
if (currentNode->planes == 1) {
v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
currentBuf.fd.extFd[0] = priv_handle->fd;
currentBuf.size.extS[0] = priv_handle->size;
currentBuf.size.extS[1] = 0;
currentBuf.size.extS[2] = 0;
} else if (currentNode->planes == 2) {
v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
currentBuf.fd.extFd[0] = priv_handle->fd;
currentBuf.fd.extFd[1] = priv_handle->fd1;
} else if (currentNode->planes == 3) {
v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
currentBuf.fd.extFd[0] = priv_handle->fd;
currentBuf.fd.extFd[2] = priv_handle->fd1;
currentBuf.fd.extFd[1] = priv_handle->fd2;
}
for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
if (targetStreamParms->needsIonMap)
currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index];
ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
__FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
(unsigned int)currentBuf.virt.extP[plane_index],
v4l2_buf.m.planes[plane_index].length);
}
if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
/* add an extra plane carrying per-frame metadata */
metaBuf.size.extS[0] = 4*1024;
allocCameraMemory(m_ionCameraClient, &metaBuf, 1, 1<<0);
v4l2_buf.length += targetStreamParms->metaPlanes;
v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
__FUNCTION__, stream_id, currentNode->fd);
} else {
ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
__FUNCTION__, stream_id, currentNode->fd);
}
targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
}
else {
targetStreamParms->svcBufStatus[i] = ON_SERVICE;
}
targetStreamParms->svcBuffers[i] = currentBuf;
targetStreamParms->metaBuffers[i] = metaBuf;
targetStreamParms->svcBufHandle[i] = registeringBuffers[i];
}
ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id);
cam_int_streamon(targetStreamParms->node);
ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
currentNode->status = true;
ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
return 0;
}
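/*
 * releaseStream: tears down the stream with the given id. Substreams are
 * detached from their parent thread; direct streams unmap their ion mappings
 * and their stream thread is drained. Releasing the main preview stream also
 * stops the sensor thread and fully terminates stream thread 0.
 */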
int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
{
StreamThread *targetStream;
status_t res = NO_ERROR;
ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
bool releasingScpMain = false;
if (stream_id == STREAM_ID_PREVIEW) {
targetStream = (StreamThread*)(m_streamThreads[0].get());
if (!targetStream) {
ALOGW("(%s): Stream Not Exists", __FUNCTION__);
return NO_ERROR;
}
targetStream->m_numRegisteredStream--;
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
releasingScpMain = true;
if (targetStream->m_parameters.needsIonMap) {
for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
for (int j = 0; j < targetStream->m_parameters.planes; j++) {
ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
targetStream->m_parameters.svcBuffers[i].size.extS[j]);
ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
}
}
}
} else if (stream_id == STREAM_ID_JPEG) {
if (m_resizeBuf.size.s != 0) {
freeCameraMemory(&m_resizeBuf, 1);
}
memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
targetStream = (StreamThread*)(m_streamThreads[1].get());
if (!targetStream) {
ALOGW("(%s): Stream Not Exists", __FUNCTION__);
return NO_ERROR;
}
if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
return 1;
}
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
return 0;
} else if (stream_id == STREAM_ID_RECORD) {
memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
targetStream = (StreamThread*)(m_streamThreads[0].get());
if (!targetStream) {
ALOGW("(%s): Stream Not Exists", __FUNCTION__);
return NO_ERROR;
}
if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
return 1;
}
if (targetStream->m_numRegisteredStream != 0)
return 0;
} else if (stream_id == STREAM_ID_PRVCB) {
if (m_previewCbBuf.size.s != 0) {
freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
}
memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
targetStream = (StreamThread*)(m_streamThreads[0].get());
if (!targetStream) {
ALOGW("(%s): Stream Not Exists", __FUNCTION__);
return NO_ERROR;
}
if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
return 1;
}
if (targetStream->m_numRegisteredStream != 0)
return 0;
} else if (stream_id == STREAM_ID_ZSL) {
targetStream = (StreamThread*)(m_streamThreads[1].get());
if (!targetStream) {
ALOGW("(%s): Stream Not Exists", __FUNCTION__);
return NO_ERROR;
}
targetStream->m_numRegisteredStream--;
ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
if (targetStream->m_parameters.needsIonMap) {
for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
for (int j = 0; j < targetStream->m_parameters.planes; j++) {
ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
targetStream->m_parameters.svcBuffers[i].size.extS[j]);
ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
}
}
}
} else {
ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
return 1;
}
if (m_sensorThread != NULL && releasingScpMain) {
m_sensorThread->release();
ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
while (!m_sensorThread->IsTerminated())
usleep(SIG_WAITING_TICK);
ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__);
}
if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
targetStream = (StreamThread*)(m_streamThreads[1].get());
targetStream->m_releasing = true;
ALOGD("START stream thread release %d", __LINE__);
do {
targetStream->release();
usleep(SIG_WAITING_TICK);
} while (targetStream->m_releasing);
m_camera_info.capture.status = false;
ALOGD("END stream thread release %d", __LINE__);
}
if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
targetStream = (StreamThread*)(m_streamThreads[0].get());
targetStream->m_releasing = true;
ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
do {
targetStream->release();
usleep(SIG_WAITING_TICK);
} while (targetStream->m_releasing);
ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
if (targetStream != NULL) {
ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
while (!targetStream->IsTerminated())
usleep(SIG_WAITING_TICK);
ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__);
m_streamThreads[0] = NULL;
}
if (m_camera_info.capture.status == true) {
m_scpForceSuspended = true;
}
m_isIspStarted = false;
}
ALOGV("(%s): END", __FUNCTION__);
return 0;
}
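/* Reprocess streams are created from an existing output stream via
 * allocateReprocessStreamFromStream() below; this generic entry point is a
 * no-op stub. */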
int ExynosCameraHWInterface2::allocateReprocessStream(
uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/,
const camera2_stream_in_ops_t* /*reprocess_stream_ops*/,
uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
return 0;
}
int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
uint32_t output_stream_id,
const camera2_stream_in_ops_t *reprocess_stream_ops,
// outputs
uint32_t *stream_id)
{
ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
*stream_id = STREAM_ID_JPEG_REPROCESS;
m_reprocessStreamId = *stream_id;
m_reprocessOps = reprocess_stream_ops;
m_reprocessOutputStreamId = output_stream_id;
return 0;
}
int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
{
ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
if (stream_id == STREAM_ID_JPEG_REPROCESS) {
m_reprocessStreamId = 0;
m_reprocessOps = NULL;
m_reprocessOutputStreamId = 0;
return 0;
}
return 1;
}
int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
{
Mutex::Autolock lock(m_afModeTriggerLock);
ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
switch (trigger_id) {
case CAMERA2_TRIGGER_AUTOFOCUS:
ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
OnAfTrigger(ext1);
break;
case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
OnAfCancel(ext1);
break;
case CAMERA2_TRIGGER_PRECAPTURE_METERING:
ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
OnPrecaptureMeteringTriggerStart(ext1);
break;
default:
break;
}
return 0;
}
int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
{
ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
m_notifyCb = notify_cb;
m_callbackCookie = user;
return 0;
}
int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
*ops = NULL;
return 0;
}
int ExynosCameraHWInterface2::dump(int /*fd*/)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
return 0;
}
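/*
 * m_getAlignedYUVSize: fills in the per-plane byte sizes for the given V4L2
 * color format, honoring the alignment the Exynos pipeline requires. For
 * example, V4L2_PIX_FMT_NV12M at 1920x1080 yields a luma plane of
 * ALIGN(1920,16) * ALIGN(1080,16) = 1920 * 1088 = 2088960 bytes and a chroma
 * plane of ALIGN(2088960 / 2, 256) = 1044480 bytes.
 */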
void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
{
switch (colorFormat) {
// 1p
case V4L2_PIX_FMT_RGB565 :
case V4L2_PIX_FMT_YUYV :
case V4L2_PIX_FMT_UYVY :
case V4L2_PIX_FMT_VYUY :
case V4L2_PIX_FMT_YVYU :
buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
buf->size.extS[1] = 0;
buf->size.extS[2] = 0;
break;
// 2p
case V4L2_PIX_FMT_NV12 :
case V4L2_PIX_FMT_NV12T :
case V4L2_PIX_FMT_NV21 :
buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
buf->size.extS[2] = 0;
break;
case V4L2_PIX_FMT_NV12M :
case V4L2_PIX_FMT_NV12MT_16X16 :
case V4L2_PIX_FMT_NV21M:
buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
buf->size.extS[2] = 0;
break;
case V4L2_PIX_FMT_NV16 :
case V4L2_PIX_FMT_NV61 :
buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16);
buf->size.extS[2] = 0;
break;
// 3p
case V4L2_PIX_FMT_YUV420 :
case V4L2_PIX_FMT_YVU420 :
buf->size.extS[0] = (w * h);
buf->size.extS[1] = (w * h) >> 2;
buf->size.extS[2] = (w * h) >> 2;
break;
case V4L2_PIX_FMT_YUV420M:
case V4L2_PIX_FMT_YVU420M :
buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16);
buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
break;
case V4L2_PIX_FMT_YUV422P :
buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
break;
default:
ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
return;
}
}
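/*
 * m_getRatioSize: computes a centered crop of the source that matches the
 * destination aspect ratio, shrinks it further for the requested zoom level,
 * and rounds offsets and sizes to even values. For example, fitting
 * 1920x1080 (ratio 1.78) to 352x288 (ratio 1.22) shrinks the width:
 * crop_w = 1080 * (352/288) = 1320, crop_h = 1080.
 */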
bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h,
int dst_w, int dst_h,
int *crop_x, int *crop_y,
int *crop_w, int *crop_h,
int zoom)
{
*crop_w = src_w;
*crop_h = src_h;
if ( src_w != dst_w
|| src_h != dst_h) {
float src_ratio = 1.0f;
float dst_ratio = 1.0f;
// ex : 1024 / 768
src_ratio = (float)src_w / (float)src_h;
// ex : 352 / 288
dst_ratio = (float)dst_w / (float)dst_h;
if (dst_w * dst_h < src_w * src_h) {
if (dst_ratio <= src_ratio) {
// shrink w
*crop_w = src_h * dst_ratio;
*crop_h = src_h;
} else {
// shrink h
*crop_w = src_w;
*crop_h = src_w / dst_ratio;
}
} else {
if (dst_ratio <= src_ratio) {
// shrink w
*crop_w = src_h * dst_ratio;
*crop_h = src_h;
} else {
// shrink h
*crop_w = src_w;
*crop_h = src_w / dst_ratio;
}
}
}
if (zoom != 0) {
float zoomLevel = ((float)zoom + 10.0) / 10.0;
*crop_w = (int)((float)*crop_w / zoomLevel);
*crop_h = (int)((float)*crop_h / zoomLevel);
}
#define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2)
unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
if (w_align != 0) {
if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
&& (int)(*crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align)) <= dst_w) {
*crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
}
else
*crop_w -= w_align;
}
#define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
if (h_align != 0) {
if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
&& (int)(*crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align)) <= dst_h) {
*crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
}
else
*crop_h -= h_align;
}
*crop_x = (src_w - *crop_w) >> 1;
*crop_y = (src_h - *crop_h) >> 1;
if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
*crop_x -= 1;
if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
*crop_y -= 1;
return true;
}
BayerBufManager::BayerBufManager()
{
ALOGV("DEBUG(%s): ", __FUNCTION__);
for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
entries[i].status = BAYER_ON_HAL_EMPTY;
entries[i].reqFrameCnt = 0;
}
sensorEnqueueHead = 0;
sensorDequeueHead = 0;
ispEnqueueHead = 0;
ispDequeueHead = 0;
numOnSensor = 0;
numOnIsp = 0;
numOnHalFilled = 0;
numOnHalEmpty = NUM_BAYER_BUFFERS;
}
BayerBufManager::~BayerBufManager()
{
ALOGV("%s", __FUNCTION__);
}
int BayerBufManager::GetIndexForSensorEnqueue()
{
int ret = 0;
if (numOnHalEmpty == 0)
ret = -1;
else
ret = sensorEnqueueHead;
ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
return ret;
}
int BayerBufManager::MarkSensorEnqueue(int index)
{
ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
// sanity check
if (index != sensorEnqueueHead) {
ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
return -1;
}
if (entries[index].status != BAYER_ON_HAL_EMPTY) {
ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
index, entries[index].status, BAYER_ON_HAL_EMPTY);
return -1;
}
entries[index].status = BAYER_ON_SENSOR;
entries[index].reqFrameCnt = 0;
numOnHalEmpty--;
numOnSensor++;
sensorEnqueueHead = GetNextIndex(index);
ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
__FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
return 0;
}
int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t* /*timeStamp*/)
{
ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
if (entries[index].status != BAYER_ON_SENSOR) {
ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
index, entries[index].status, BAYER_ON_SENSOR);
return -1;
}
entries[index].status = BAYER_ON_HAL_FILLED;
numOnHalFilled++;
numOnSensor--;
return 0;
}
int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
{
int ret = 0;
if (numOnHalFilled == 0)
ret = -1;
else {
*reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
ret = ispEnqueueHead;
}
ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
return ret;
}
int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
{
int ret = 0;
if (numOnIsp == 0)