/*--------------------------------------------------------------------------
Copyright (c) 2010 - 2017, The Linux Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of The Linux Foundation nor
the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------*/
/*============================================================================
O p e n M A X w r a p p e r s
O p e n M A X C o r e
This module contains the implementation of the OpenMAX core & component.
*//*========================================================================*/
//////////////////////////////////////////////////////////////////////////////
// Include Files
//////////////////////////////////////////////////////////////////////////////
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#include <string.h>
#include <pthread.h>
#include <sys/prctl.h>
#include <stdlib.h>
#include <unistd.h>
#include <errno.h>
#include "omx_vdec.h"
#include <fcntl.h>
#include <limits.h>
#include <media/hardware/HardwareAPI.h>
#include <sys/eventfd.h>
#include <nativebase/nativebase.h>
#if !defined(_ANDROID_) || defined(SYS_IOCTL)
#include <sys/ioctl.h>
#include <sys/mman.h>
#endif
#ifdef _ANDROID_
#include <cutils/properties.h>
#undef USE_EGL_IMAGE_GPU
#ifdef _QUERY_DISP_RES_
#include "display_config.h"
#endif
#endif
#ifdef _USE_GLIB_
#include <glib.h>
#define strlcpy g_strlcpy
#endif
#include <qdMetaData.h>
#include <gralloc_priv.h>
#ifdef ANDROID_JELLYBEAN_MR2
#include "QComOMXMetadata.h"
#endif
#ifdef USE_EGL_IMAGE_GPU
#include <EGL/egl.h>
#include <EGL/eglQCOM.h>
#define EGL_BUFFER_HANDLE 0x4F00
#define EGL_BUFFER_OFFSET 0x4F01
#endif
#define BUFFER_LOG_LOC "/data/vendor/media"
#ifdef OUTPUT_EXTRADATA_LOG
FILE *outputExtradataFile;
char output_extradata_filename [] = "/data/vendor/media/extradata";
#endif
#define DEFAULT_FPS 30
#define MAX_SUPPORTED_FPS 240
#define DEFAULT_WIDTH_ALIGNMENT 128
#define DEFAULT_HEIGHT_ALIGNMENT 32
#define VC1_SP_MP_START_CODE 0xC5000000
#define VC1_SP_MP_START_CODE_MASK 0xFF000000
#define VC1_AP_SEQ_START_CODE 0x0F010000
#define VC1_STRUCT_C_PROFILE_MASK 0xF0
#define VC1_STRUCT_B_LEVEL_MASK 0xE0000000
#define VC1_SIMPLE_PROFILE 0
#define VC1_MAIN_PROFILE 1
#define VC1_ADVANCE_PROFILE 3
#define VC1_SIMPLE_PROFILE_LOW_LEVEL 0
#define VC1_SIMPLE_PROFILE_MED_LEVEL 2
#define VC1_STRUCT_C_LEN 4
#define VC1_STRUCT_C_POS 8
#define VC1_STRUCT_A_POS 12
#define VC1_STRUCT_B_POS 24
#define VC1_SEQ_LAYER_SIZE 36
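// Note: POLL_TIMEOUT below is 0x7fffffff ms (~24.8 days); the async thread effectively
// blocks until there is driver activity or the exit eventfd is signalled.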
#define POLL_TIMEOUT 0x7fffffff
#define MEM_DEVICE "/dev/ion"
#ifdef _ANDROID_
extern "C" {
#include<utils/Log.h>
}
#endif//_ANDROID_
#define SZ_4K 0x1000
#define SZ_1M 0x100000
#define Log2(number, power) { OMX_U32 temp = number; power = 0; while( (0 == (temp & 0x1)) && power < 16) { temp >>=0x1; power++; } }
#define Q16ToFraction(q,num,den) { OMX_U32 power; Log2(q,power); num = q >> power; den = 0x1 << (16 - power); }
#define EXTRADATA_IDX(__num_planes) ((__num_planes) ? (__num_planes) - 1 : 0)
#define ALIGN(x, to_align) ((((unsigned) x) + (to_align - 1)) & ~(to_align - 1))
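/* Illustrative examples for the macros above (values are assumptions, not used by the code):
* - Q16ToFraction(0x1E000, num, den): Log2 finds 13 trailing zero bits, so num = 0x1E000 >> 13 = 15
*   and den = 1 << (16 - 13) = 8, i.e. 15/8 = 1.875, which matches 0x1E000 / 65536.
* - ALIGN(1080, 32) = 1088; ALIGN(1920, 128) = 1920 (already aligned).
*/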
#define DEFAULT_EXTRADATA (OMX_INTERLACE_EXTRADATA | OMX_FRAMEPACK_EXTRADATA | OMX_OUTPUTCROP_EXTRADATA \
| OMX_DISPLAY_INFO_EXTRADATA | OMX_HDR_COLOR_INFO_EXTRADATA)
#define DEFAULT_CONCEAL_COLOR "32784" //0x8010, black by default
#ifndef ION_FLAG_CP_BITSTREAM
#define ION_FLAG_CP_BITSTREAM 0
#endif
#ifndef ION_FLAG_CP_PIXEL
#define ION_FLAG_CP_PIXEL 0
#endif
#ifdef MASTER_SIDE_CP
#define MEM_HEAP_ID ION_SECURE_HEAP_ID
#define SECURE_ALIGN SZ_4K
#define SECURE_FLAGS_INPUT_BUFFER (ION_SECURE | ION_FLAG_CP_BITSTREAM)
#define SECURE_FLAGS_OUTPUT_BUFFER (ION_SECURE | ION_FLAG_CP_PIXEL)
#else //SLAVE_SIDE_CP
#define MEM_HEAP_ID ION_CP_MM_HEAP_ID
#define SECURE_ALIGN SZ_1M
#define SECURE_FLAGS_INPUT_BUFFER ION_SECURE
#define SECURE_FLAGS_OUTPUT_BUFFER ION_SECURE
#endif
#define LUMINANCE_DIV_FACTOR 10000.0
#define MIN(x,y) (((x) < (y)) ? (x) : (y))
#define MAX(x,y) (((x) > (y)) ? (x) : (y))
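// Default smooth-streaming ceiling: 1920x1088 (1080 rounded up to DEFAULT_HEIGHT_ALIGNMENT of 32).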
static OMX_U32 maxSmoothStreamingWidth = 1920;
static OMX_U32 maxSmoothStreamingHeight = 1088;
void* async_message_thread (void *input)
{
OMX_BUFFERHEADERTYPE *buffer;
struct v4l2_plane plane[VIDEO_MAX_PLANES];
struct pollfd pfds[2];
struct v4l2_buffer v4l2_buf;
memset((void *)&v4l2_buf,0,sizeof(v4l2_buf));
struct v4l2_event dqevent;
omx_vdec *omx = reinterpret_cast<omx_vdec*>(input);
pfds[0].events = POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM | POLLRDBAND | POLLPRI;
pfds[1].events = POLLIN | POLLERR;
pfds[0].fd = omx->drv_ctx.video_driver_fd;
pfds[1].fd = omx->m_poll_efd;
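/* pfds[0] watches the video driver fd for buffer completions (POLLIN/POLLOUT) and events
* (POLLPRI); pfds[1] watches the eventfd, which the destructor signals to wake this thread
* so it can exit cleanly. */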
int error_code = 0,rc=0,bytes_read = 0,bytes_written = 0;
DEBUG_PRINT_HIGH("omx_vdec: Async thread start");
prctl(PR_SET_NAME, (unsigned long)"VideoDecCallBackThread", 0, 0, 0);
while (!omx->async_thread_force_stop) {
rc = poll(pfds, 2, POLL_TIMEOUT);
if (!rc) {
DEBUG_PRINT_ERROR("Poll timedout");
break;
} else if (rc < 0 && errno != EINTR && errno != EAGAIN) {
DEBUG_PRINT_ERROR("Error while polling: %d, errno = %d", rc, errno);
break;
}
if ((pfds[1].revents & POLLIN) || (pfds[1].revents & POLLERR)) {
DEBUG_PRINT_HIGH("async_message_thread interrupted to be exited");
break;
}
if ((pfds[0].revents & POLLIN) || (pfds[0].revents & POLLRDNORM)) {
struct vdec_msginfo vdec_msg;
memset(&vdec_msg, 0, sizeof(vdec_msg));
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
v4l2_buf.memory = V4L2_MEMORY_USERPTR;
v4l2_buf.length = omx->drv_ctx.num_planes;
v4l2_buf.m.planes = plane;
while (!ioctl(pfds[0].fd, VIDIOC_DQBUF, &v4l2_buf)) {
vdec_msg.msgcode=VDEC_MSG_RESP_OUTPUT_BUFFER_DONE;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.client_data=(void*)&v4l2_buf;
vdec_msg.msgdata.output_frame.len=plane[0].bytesused;
vdec_msg.msgdata.output_frame.bufferaddr=(void*)plane[0].m.userptr;
vdec_msg.msgdata.output_frame.time_stamp= ((uint64_t)v4l2_buf.timestamp.tv_sec * (uint64_t)1000000) +
(uint64_t)v4l2_buf.timestamp.tv_usec;
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
}
if ((pfds[0].revents & POLLOUT) || (pfds[0].revents & POLLWRNORM)) {
struct vdec_msginfo vdec_msg;
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
v4l2_buf.memory = V4L2_MEMORY_USERPTR;
v4l2_buf.length = 1;
v4l2_buf.m.planes = plane;
while (!ioctl(pfds[0].fd, VIDIOC_DQBUF, &v4l2_buf)) {
vdec_msg.msgcode=VDEC_MSG_RESP_INPUT_BUFFER_DONE;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.input_frame_clientdata=(void*)&v4l2_buf;
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
}
if (pfds[0].revents & POLLPRI) {
rc = ioctl(pfds[0].fd, VIDIOC_DQEVENT, &dqevent);
if (dqevent.type == V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT ) {
struct vdec_msginfo vdec_msg;
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
vdec_msg.msgcode=VDEC_MSG_EVT_CONFIG_CHANGED;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.picsize.frame_height = ptr[0];
vdec_msg.msgdata.output_frame.picsize.frame_width = ptr[1];
DEBUG_PRINT_HIGH("VIDC Port Reconfig received insufficient");
if(ptr[2] & V4L2_EVENT_BITDEPTH_FLAG) {
omx->dpb_bit_depth = ptr[3];
DEBUG_PRINT_HIGH("VIDC Port Reconfig Bitdepth change - %d", ptr[3]);
}
if(ptr[2] & V4L2_EVENT_PICSTRUCT_FLAG) {
omx->m_progressive = ptr[4];
DEBUG_PRINT_HIGH("VIDC Port Reconfig PicStruct change - %d", ptr[4]);
}
if(ptr[2] & V4L2_EVENT_COLOUR_SPACE_FLAG) {
if (ptr[5] == MSM_VIDC_BT2020) {
omx->m_color_space = omx_vdec::BT2020;
} else {
omx->m_color_space = omx_vdec::EXCEPT_BT2020;
}
DEBUG_PRINT_HIGH("VIDC Port Reconfig ColorSpace change - %d", omx->m_color_space);
}
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_FLUSH_DONE) {
struct vdec_msginfo vdec_msg;
uint32_t flush_type = *(uint32_t *)dqevent.u.data;
// Older drivers do not send the flush type information.
// For backward compatibility, fall back to flushing both ports
// if flush_type is not present.
vdec_msg.status_code=VDEC_S_SUCCESS;
if (!flush_type || (flush_type & V4L2_QCOM_CMD_FLUSH_OUTPUT)) {
vdec_msg.msgcode=VDEC_MSG_RESP_FLUSH_INPUT_DONE;
DEBUG_PRINT_HIGH("VIDC Input Flush Done Recieved");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
if (!flush_type || (flush_type & V4L2_QCOM_CMD_FLUSH_CAPTURE)) {
vdec_msg.msgcode=VDEC_MSG_RESP_FLUSH_OUTPUT_DONE;
DEBUG_PRINT_HIGH("VIDC Output Flush Done Recieved");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_HW_OVERLOAD) {
struct vdec_msginfo vdec_msg;
vdec_msg.msgcode=VDEC_MSG_EVT_HW_OVERLOAD;
vdec_msg.status_code=VDEC_S_SUCCESS;
DEBUG_PRINT_ERROR("HW Overload received");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_HW_UNSUPPORTED) {
struct vdec_msginfo vdec_msg;
vdec_msg.msgcode=VDEC_MSG_EVT_HW_UNSUPPORTED;
vdec_msg.status_code=VDEC_S_SUCCESS;
DEBUG_PRINT_ERROR("HW Unsupported received");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_SYS_ERROR) {
struct vdec_msginfo vdec_msg;
vdec_msg.msgcode = VDEC_MSG_EVT_HW_ERROR;
vdec_msg.status_code = VDEC_S_SUCCESS;
DEBUG_PRINT_HIGH("SYS Error Recieved");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_RELEASE_BUFFER_REFERENCE) {
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
DEBUG_PRINT_LOW("REFERENCE RELEASE EVENT RECVD fd = %d offset = %d", ptr[0], ptr[1]);
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_RELEASE_UNQUEUED_BUFFER) {
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
struct vdec_msginfo vdec_msg;
DEBUG_PRINT_LOW("Release unqueued buffer event recvd fd = %d offset = %d", ptr[0], ptr[1]);
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
v4l2_buf.memory = V4L2_MEMORY_USERPTR;
v4l2_buf.length = omx->drv_ctx.num_planes;
v4l2_buf.m.planes = plane;
v4l2_buf.index = ptr[5];
v4l2_buf.flags = 0;
vdec_msg.msgcode = VDEC_MSG_RESP_OUTPUT_BUFFER_DONE;
vdec_msg.status_code = VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.client_data = (void*)&v4l2_buf;
vdec_msg.msgdata.output_frame.len = 0;
vdec_msg.msgdata.output_frame.bufferaddr = (void*)(intptr_t)ptr[2];
vdec_msg.msgdata.output_frame.time_stamp = ((uint64_t)ptr[3] * (uint64_t)1000000) +
(uint64_t)ptr[4];
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exitedn");
break;
}
} else {
DEBUG_PRINT_HIGH("VIDC Some Event recieved");
continue;
}
}
}
DEBUG_PRINT_HIGH("omx_vdec: Async thread stop");
return NULL;
}
void* message_thread_dec(void *input)
{
omx_vdec* omx = reinterpret_cast<omx_vdec*>(input);
unsigned char id;
int n;
fd_set readFds;
int res = 0;
struct timeval tv;
DEBUG_PRINT_HIGH("omx_vdec: message thread start");
prctl(PR_SET_NAME, (unsigned long)"VideoDecMsgThread", 0, 0, 0);
while (!omx->message_thread_stop) {
tv.tv_sec = 2;
tv.tv_usec = 0;
FD_ZERO(&readFds);
FD_SET(omx->m_pipe_in, &readFds);
res = select(omx->m_pipe_in + 1, &readFds, NULL, NULL, &tv);
if (res < 0) {
DEBUG_PRINT_ERROR("select() ERROR: %s", strerror(errno));
continue;
} else if (res == 0 /*timeout*/ || omx->message_thread_stop) {
continue;
}
n = read(omx->m_pipe_in, &id, 1);
if (0 == n) {
break;
}
if (1 == n) {
omx->process_event_cb(omx, id);
}
if ((n < 0) && (errno != EINTR)) {
DEBUG_PRINT_LOW("ERROR: read from pipe failed, ret %d errno %d", n, errno);
break;
}
}
DEBUG_PRINT_HIGH("omx_vdec: message thread stop");
return 0;
}
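/* post_message() wakes message_thread_dec(): the byte written to m_pipe_out is the event id
* read by the message thread from the other end of the pipe (m_pipe_in) and handed to
* process_event_cb(). */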
void post_message(omx_vdec *omx, unsigned char id)
{
int ret_value;
DEBUG_PRINT_LOW("omx_vdec: post_message %d pipe out%d", id,omx->m_pipe_out);
ret_value = write(omx->m_pipe_out, &id, 1);
if (ret_value <= 0) {
DEBUG_PRINT_ERROR("post_message to pipe failed : %s", strerror(errno));
} else {
DEBUG_PRINT_LOW("post_message to pipe done %d",ret_value);
}
}
// omx_cmd_queue destructor
omx_vdec::omx_cmd_queue::~omx_cmd_queue()
{
// Nothing to do
}
// omx cmd queue constructor
omx_vdec::omx_cmd_queue::omx_cmd_queue(): m_read(0),m_write(0),m_size(0)
{
memset(m_q,0,sizeof(omx_event)*OMX_CORE_CONTROL_CMDQ_SIZE);
}
// omx cmd queue insert
bool omx_vdec::omx_cmd_queue::insert_entry(unsigned long p1, unsigned long p2, unsigned long id)
{
bool ret = true;
if (m_size < OMX_CORE_CONTROL_CMDQ_SIZE) {
m_q[m_write].id = id;
m_q[m_write].param1 = p1;
m_q[m_write].param2 = p2;
m_write++;
m_size ++;
if (m_write >= OMX_CORE_CONTROL_CMDQ_SIZE) {
m_write = 0;
}
} else {
ret = false;
DEBUG_PRINT_ERROR("ERROR: %s()::Command Queue Full", __func__);
}
return ret;
}
// omx cmd queue pop
bool omx_vdec::omx_cmd_queue::pop_entry(unsigned long *p1, unsigned long *p2, unsigned long *id)
{
bool ret = true;
if (m_size > 0) {
*id = m_q[m_read].id;
*p1 = m_q[m_read].param1;
*p2 = m_q[m_read].param2;
// Move the read pointer ahead
++m_read;
--m_size;
if (m_read >= OMX_CORE_CONTROL_CMDQ_SIZE) {
m_read = 0;
}
} else {
ret = false;
}
return ret;
}
// Retrieve the first message type in the queue (assumes the queue is not empty)
unsigned omx_vdec::omx_cmd_queue::get_q_msg_type()
{
return m_q[m_read].id;
}
#ifdef _ANDROID_
omx_vdec::ts_arr_list::ts_arr_list()
{
//initialize timestamps array
memset(m_ts_arr_list, 0, ( sizeof(ts_entry) * MAX_NUM_INPUT_OUTPUT_BUFFERS) );
}
omx_vdec::ts_arr_list::~ts_arr_list()
{
// nothing to free: m_ts_arr_list is a fixed-size member array
}
bool omx_vdec::ts_arr_list::insert_ts(OMX_TICKS ts)
{
bool ret = true;
bool duplicate_ts = false;
int idx = 0;
//insert at the first available empty location
for ( ; idx < MAX_NUM_INPUT_OUTPUT_BUFFERS; idx++) {
if (!m_ts_arr_list[idx].valid) {
//found invalid or empty entry, save timestamp
m_ts_arr_list[idx].valid = true;
m_ts_arr_list[idx].timestamp = ts;
DEBUG_PRINT_LOW("Insert_ts(): Inserting TIMESTAMP (%lld) at idx (%d)",
ts, idx);
break;
}
}
if (idx == MAX_NUM_INPUT_OUTPUT_BUFFERS) {
DEBUG_PRINT_LOW("Timestamp array list is FULL. Unsuccessful insert");
ret = false;
}
return ret;
}
bool omx_vdec::ts_arr_list::pop_min_ts(OMX_TICKS &ts)
{
bool ret = true;
int min_idx = -1;
OMX_TICKS min_ts = 0;
int idx = 0;
for ( ; idx < MAX_NUM_INPUT_OUTPUT_BUFFERS; idx++) {
if (m_ts_arr_list[idx].valid) {
//found valid entry, save index
if (min_idx < 0) {
//first valid entry
min_ts = m_ts_arr_list[idx].timestamp;
min_idx = idx;
} else if (m_ts_arr_list[idx].timestamp < min_ts) {
min_ts = m_ts_arr_list[idx].timestamp;
min_idx = idx;
}
}
}
if (min_idx < 0) {
//no valid entries found
DEBUG_PRINT_LOW("Timestamp array list is empty. Unsuccessful pop");
ts = 0;
ret = false;
} else {
ts = m_ts_arr_list[min_idx].timestamp;
m_ts_arr_list[min_idx].valid = false;
DEBUG_PRINT_LOW("Pop_min_ts:Timestamp (%lld), index(%d)",
ts, min_idx);
}
return ret;
}
bool omx_vdec::ts_arr_list::reset_ts_list()
{
bool ret = true;
int idx = 0;
DEBUG_PRINT_LOW("reset_ts_list(): Resetting timestamp array list");
for ( ; idx < MAX_NUM_INPUT_OUTPUT_BUFFERS; idx++) {
m_ts_arr_list[idx].valid = false;
}
return ret;
}
#endif
// factory function executed by the core to create instances
void *get_omx_component_factory_fn(void)
{
return (new omx_vdec);
}
bool is_platform_tp10capture_supported()
{
char platform_name[PROPERTY_VALUE_MAX] = {0};
property_get("ro.board.platform", platform_name, "0");
if (!strncmp(platform_name, "msm8998", 7)) {
DEBUG_PRINT_HIGH("TP10 on capture port is supported");
return true;
}
DEBUG_PRINT_HIGH("TP10 on capture port is not supported");
return false;
}
/* ======================================================================
FUNCTION
omx_vdec::omx_vdec
DESCRIPTION
Constructor
PARAMETERS
None
RETURN VALUE
None.
========================================================================== */
omx_vdec::omx_vdec(): m_error_propogated(false),
m_state(OMX_StateInvalid),
m_app_data(NULL),
m_inp_mem_ptr(NULL),
m_out_mem_ptr(NULL),
m_client_output_extradata_mem_ptr(NULL),
input_flush_progress (false),
output_flush_progress (false),
input_use_buffer (false),
output_use_buffer (false),
ouput_egl_buffers(false),
m_use_output_pmem(OMX_FALSE),
m_out_mem_region_smi(OMX_FALSE),
m_out_pvt_entry_pmem(OMX_FALSE),
pending_input_buffers(0),
pending_output_buffers(0),
m_out_bm_count(0),
m_inp_bm_count(0),
m_out_extradata_bm_count(0),
m_inp_bPopulated(OMX_FALSE),
m_out_bPopulated(OMX_FALSE),
m_flags(0),
m_inp_bEnabled(OMX_TRUE),
m_out_bEnabled(OMX_TRUE),
m_in_alloc_cnt(0),
m_platform_list(NULL),
m_platform_entry(NULL),
m_pmem_info(NULL),
h264_parser(NULL),
arbitrary_bytes (true),
psource_frame (NULL),
pdest_frame (NULL),
m_inp_heap_ptr (NULL),
m_phdr_pmem_ptr(NULL),
m_heap_inp_bm_count (0),
codec_type_parse ((codec_type)0),
first_frame_meta (true),
frame_count (0),
nal_count (0),
nal_length(0),
look_ahead_nal (false),
first_frame(0),
first_buffer(NULL),
first_frame_size (0),
m_device_file_ptr(NULL),
m_vc1_profile((vc1_profile_type)0),
h264_last_au_ts(LLONG_MAX),
h264_last_au_flags(0),
m_disp_hor_size(0),
m_disp_vert_size(0),
prev_ts(LLONG_MAX),
prev_ts_actual(LLONG_MAX),
rst_prev_ts(true),
frm_int(0),
m_fps_received(0),
m_fps_prev(0),
m_drc_enable(0),
in_reconfig(false),
m_display_id(NULL),
client_extradata(0),
m_reject_avc_1080p_mp (0),
#ifdef _ANDROID_
m_enable_android_native_buffers(OMX_FALSE),
m_use_android_native_buffers(OMX_FALSE),
#endif
m_desc_buffer_ptr(NULL),
secure_mode(false),
allocate_native_handle(false),
m_other_extradata(NULL),
m_profile(0),
m_need_turbo(0),
client_set_fps(false),
stereo_output_mode(HAL_NO_3D),
m_last_rendered_TS(-1),
m_queued_codec_config_count(0),
current_perf_level(V4L2_CID_MPEG_VIDC_PERF_LEVEL_NOMINAL),
secure_scaling_to_non_secure_opb(false),
m_force_compressed_for_dpb(true),
m_is_display_session(false),
m_buffer_error(false)
{
m_pipe_in = -1;
m_pipe_out = -1;
m_poll_efd = -1;
drv_ctx.video_driver_fd = -1;
drv_ctx.extradata_info.ion.fd_ion_data.fd = -1;
/* Assumption is that, to begin with, all frames are with the decoder */
DEBUG_PRINT_HIGH("In %u bit OMX vdec Constructor", (unsigned int)sizeof(long) * 8);
memset(&m_debug,0,sizeof(m_debug));
#ifdef _ANDROID_
char property_value[PROPERTY_VALUE_MAX] = {0};
property_get("vendor.vidc.debug.level", property_value, "1");
debug_level = strtoul(property_value, NULL, 16);
property_value[0] = '\0';
DEBUG_PRINT_HIGH("In OMX vdec Constructor");
property_get("vendor.vidc.dec.debug.perf", property_value, "0");
perf_flag = atoi(property_value);
if (perf_flag) {
DEBUG_PRINT_HIGH("vidc.dec.debug.perf is %d", perf_flag);
dec_time.start();
}
proc_frms = latency = 0;
prev_n_filled_len = 0;
property_value[0] = '\0';
property_get("vendor.vidc.dec.debug.ts", property_value, "0");
m_debug_timestamp = atoi(property_value);
DEBUG_PRINT_HIGH("vidc.dec.debug.ts value is %d",m_debug_timestamp);
if (m_debug_timestamp) {
time_stamp_dts.set_timestamp_reorder_mode(true);
time_stamp_dts.enable_debug_print(true);
}
property_value[0] = '\0';
property_get("vendor.vidc.dec.debug.concealedmb", property_value, "0");
m_debug_concealedmb = atoi(property_value);
DEBUG_PRINT_HIGH("vidc.dec.debug.concealedmb value is %d",m_debug_concealedmb);
property_value[0] = '\0';
property_get("vendor.vidc.dec.profile.check", property_value, "0");
m_reject_avc_1080p_mp = atoi(property_value);
DEBUG_PRINT_HIGH("vendor.vidc.dec.profile.check value is %d",m_reject_avc_1080p_mp);
property_value[0] = '\0';
property_get("vendor.vidc.dec.log.in", property_value, "0");
m_debug.in_buffer_log = atoi(property_value);
property_value[0] = '\0';
property_get("vendor.vidc.dec.log.out", property_value, "0");
m_debug.out_buffer_log = atoi(property_value);
snprintf(m_debug.log_loc, PROPERTY_VALUE_MAX, "%s", BUFFER_LOG_LOC);
property_value[0] = '\0';
property_get("vendor.vidc.dec.meta.log.out", property_value, "0");
m_debug.out_meta_buffer_log = atoi(property_value);
snprintf(m_debug.log_loc, PROPERTY_VALUE_MAX, "%s", BUFFER_LOG_LOC);
property_value[0] = '\0';
property_get("vendor.vidc.log.loc", property_value, "");
if (*property_value)
strlcpy(m_debug.log_loc, property_value, PROPERTY_VALUE_MAX);
property_value[0] = '\0';
property_get("vendor.vidc.dec.120fps.enabled", property_value, "0");
// If this feature is not enabled, m_last_rendered_TS remains at its negative (-1) default
if(atoi(property_value)) {
DEBUG_PRINT_LOW("feature 120 FPS decode enabled");
m_last_rendered_TS = 0;
}
property_value[0] = '\0';
property_get("vendor.vidc.dec.debug.dyn.disabled", property_value, "0");
m_disable_dynamic_buf_mode = atoi(property_value);
DEBUG_PRINT_HIGH("vidc.dec.debug.dyn.disabled value is %d",m_disable_dynamic_buf_mode);
property_value[0] = '\0';
property_get("vendor.vidc.dec.drc.enable", property_value, "0");
if (atoi(property_value)) {
m_drc_enable = true;
DEBUG_PRINT_HIGH("DRC enabled");
}
#ifdef _UBWC_
property_value[0] = '\0';
property_get("debug.gralloc.gfx_ubwc_disable", property_value, "0");
m_disable_ubwc_mode = atoi(property_value);
DEBUG_PRINT_HIGH("UBWC mode is %s", m_disable_ubwc_mode ? "disabled" : "enabled");
#else
m_disable_ubwc_mode = true;
#endif
#endif
memset(&m_cmp,0,sizeof(m_cmp));
memset(&m_cb,0,sizeof(m_cb));
memset (&drv_ctx,0,sizeof(drv_ctx));
memset (&h264_scratch,0,sizeof (OMX_BUFFERHEADERTYPE));
memset (m_hwdevice_name,0,sizeof(m_hwdevice_name));
memset(m_demux_offsets, 0, ( sizeof(OMX_U32) * 8192) );
memset(&m_custom_buffersize, 0, sizeof(m_custom_buffersize));
memset(&m_client_color_space, 0, sizeof(DescribeColorAspectsParams));
memset(&m_internal_color_space, 0, sizeof(DescribeColorAspectsParams));
memset(&m_client_hdr_info, 0, sizeof(DescribeHDRStaticInfoParams));
memset(&m_internal_hdr_info, 0, sizeof(DescribeHDRStaticInfoParams));
memset(&m_color_mdata, 0, sizeof(ColorMetaData));
m_demux_entries = 0;
msg_thread_id = 0;
async_thread_id = 0;
msg_thread_created = false;
async_thread_created = false;
async_thread_force_stop = false;
message_thread_stop = false;
#ifdef _ANDROID_ICS_
memset(&native_buffer, 0 ,(sizeof(struct nativebuffer) * MAX_NUM_INPUT_OUTPUT_BUFFERS));
#endif
memset(&drv_ctx.extradata_info, 0, sizeof(drv_ctx.extradata_info));
/* invalidate m_frame_pack_arrangement */
memset(&m_frame_pack_arrangement, 0, sizeof(OMX_QCOM_FRAME_PACK_ARRANGEMENT));
m_frame_pack_arrangement.cancel_flag = 1;
drv_ctx.timestamp_adjust = false;
m_vendor_config.pData = NULL;
pthread_mutex_init(&m_lock, NULL);
pthread_mutex_init(&c_lock, NULL);
pthread_mutex_init(&buf_lock, NULL);
sem_init(&m_cmd_lock,0,0);
sem_init(&m_safe_flush, 0, 0);
streaming[CAPTURE_PORT] =
streaming[OUTPUT_PORT] = false;
#ifdef _ANDROID_
char extradata_value[PROPERTY_VALUE_MAX] = {0};
property_get("vendor.vidc.dec.debug.extradata", extradata_value, "0");
m_debug_extradata = atoi(extradata_value);
DEBUG_PRINT_HIGH("vendor.vidc.dec.debug.extradata value is %d",m_debug_extradata);
#endif
m_fill_output_msg = OMX_COMPONENT_GENERATE_FTB;
client_buffers.set_vdec_client(this);
dynamic_buf_mode = false;
out_dynamic_list = NULL;
is_down_scalar_enabled = false;
m_enable_downscalar = 0;
m_downscalar_width = 0;
m_downscalar_height = 0;
m_force_down_scalar = 0;
m_reconfig_height = 0;
m_reconfig_width = 0;
m_smoothstreaming_mode = false;
m_smoothstreaming_width = 0;
m_smoothstreaming_height = 0;
m_decode_order_mode = false;
is_q6_platform = false;
m_perf_control.send_hint_to_mpctl(true);
m_input_pass_buffer_fd = false;
memset(&m_extradata_info, 0, sizeof(m_extradata_info));
m_client_color_space.nPortIndex = (OMX_U32)OMX_CORE_INPUT_PORT_INDEX;
m_client_color_space.sAspects.mRange = ColorAspects::RangeUnspecified;
m_client_color_space.sAspects.mPrimaries = ColorAspects::PrimariesUnspecified;
m_client_color_space.sAspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
m_client_color_space.sAspects.mTransfer = ColorAspects::TransferUnspecified;
m_internal_color_space.nPortIndex = (OMX_U32)OMX_CORE_OUTPUT_PORT_INDEX;
m_internal_color_space.sAspects.mRange = ColorAspects::RangeUnspecified;
m_internal_color_space.sAspects.mPrimaries = ColorAspects::PrimariesUnspecified;
m_internal_color_space.sAspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
m_internal_color_space.sAspects.mTransfer = ColorAspects::TransferUnspecified;
m_internal_color_space.nSize = sizeof(DescribeColorAspectsParams);
m_client_hdr_info.nPortIndex = (OMX_U32)OMX_CORE_INPUT_PORT_INDEX;
m_internal_hdr_info.nPortIndex = (OMX_U32)OMX_CORE_OUTPUT_PORT_INDEX;
m_change_client_hdr_info = false;
pthread_mutex_init(&m_hdr_info_client_lock, NULL);
char dither_value[PROPERTY_VALUE_MAX] = {0};
property_get("vendor.vidc.dec.dither", dither_value, "0");
if ((atoi(dither_value) > DITHER_ALL_COLORSPACE) ||
(atoi(dither_value) < DITHER_DISABLE)) {
m_dither_config = DITHER_ALL_COLORSPACE;
} else {
m_dither_config = is_platform_tp10capture_supported() ? (dither_type)atoi(dither_value) : DITHER_ALL_COLORSPACE;
}
DEBUG_PRINT_HIGH("Dither config is %d", m_dither_config);
m_color_space = EXCEPT_BT2020;
}
static const int event_type[] = {
V4L2_EVENT_MSM_VIDC_FLUSH_DONE,
V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_SUFFICIENT,
V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT,
V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_BITDEPTH_CHANGED_INSUFFICIENT,
V4L2_EVENT_MSM_VIDC_RELEASE_BUFFER_REFERENCE,
V4L2_EVENT_MSM_VIDC_RELEASE_UNQUEUED_BUFFER,
V4L2_EVENT_MSM_VIDC_SYS_ERROR,
V4L2_EVENT_MSM_VIDC_HW_OVERLOAD,
V4L2_EVENT_MSM_VIDC_HW_UNSUPPORTED
};
static OMX_ERRORTYPE subscribe_to_events(int fd)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_event_subscription sub;
int array_sz = sizeof(event_type)/sizeof(int);
int i,rc;
if (fd < 0) {
DEBUG_PRINT_ERROR("Invalid input: %d", fd);
return OMX_ErrorBadParameter;
}
for (i = 0; i < array_sz; ++i) {
memset(&sub, 0, sizeof(sub));
sub.type = event_type[i];
rc = ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
if (rc) {
DEBUG_PRINT_ERROR("Failed to subscribe event: 0x%x", sub.type);
break;
}
}
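// If any subscription failed, roll back the ones that already succeeded before returning an error.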
if (i < array_sz) {
for (--i; i >=0 ; i--) {
memset(&sub, 0, sizeof(sub));
sub.type = event_type[i];
rc = ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
if (rc)
DEBUG_PRINT_ERROR("Failed to unsubscribe event: 0x%x", sub.type);
}
eRet = OMX_ErrorNotImplemented;
}
return eRet;
}
static OMX_ERRORTYPE unsubscribe_to_events(int fd)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_event_subscription sub;
int array_sz = sizeof(event_type)/sizeof(int);
int i,rc;
if (fd < 0) {
DEBUG_PRINT_ERROR("Invalid input: %d", fd);
return OMX_ErrorBadParameter;
}
for (i = 0; i < array_sz; ++i) {
memset(&sub, 0, sizeof(sub));
sub.type = event_type[i];
rc = ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
if (rc) {
DEBUG_PRINT_ERROR("Failed to unsubscribe event: 0x%x", sub.type);
break;
}
}
return eRet;
}
/* ======================================================================
FUNCTION
omx_vdec::~omx_vdec
DESCRIPTION
Destructor
PARAMETERS
None
RETURN VALUE
None.
========================================================================== */
omx_vdec::~omx_vdec()
{
m_pmem_info = NULL;
DEBUG_PRINT_HIGH("In OMX vdec Destructor");
if (msg_thread_created) {
DEBUG_PRINT_HIGH("Signalling close to OMX Msg Thread");
message_thread_stop = true;
post_message(this, OMX_COMPONENT_CLOSE_MSG);
DEBUG_PRINT_HIGH("Waiting on OMX Msg Thread exit");
pthread_join(msg_thread_id,NULL);
}
close(m_pipe_in);
close(m_pipe_out);
m_pipe_in = -1;
m_pipe_out = -1;
DEBUG_PRINT_HIGH("Waiting on OMX Async Thread exit");
if(eventfd_write(m_poll_efd, 1)) {
DEBUG_PRINT_ERROR("eventfd_write failed for fd: %d, errno = %d, force stop async_thread", m_poll_efd, errno);
async_thread_force_stop = true;
}
if (async_thread_created)
pthread_join(async_thread_id,NULL);
unsubscribe_to_events(drv_ctx.video_driver_fd);
close(m_poll_efd);
close(drv_ctx.video_driver_fd);
pthread_mutex_destroy(&m_lock);
pthread_mutex_destroy(&c_lock);
pthread_mutex_destroy(&buf_lock);
sem_destroy(&m_cmd_lock);
pthread_mutex_destroy(&m_hdr_info_client_lock);
if (perf_flag) {
DEBUG_PRINT_HIGH("--> TOTAL PROCESSING TIME");
dec_time.end();
}
DEBUG_PRINT_INFO("Exit OMX vdec Destructor: fd=%d",drv_ctx.video_driver_fd);
m_perf_control.send_hint_to_mpctl(false);
}
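/* Buffers are released on a V4L2 queue by issuing VIDIOC_REQBUFS with count = 0 on that
* buffer type (CAPTURE_MPLANE = decoded output, OUTPUT_MPLANE = bitstream input). */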
int release_buffers(omx_vdec* obj, enum vdec_buffer buffer_type)
{
struct v4l2_requestbuffers bufreq;
int rc = 0;
if (buffer_type == VDEC_BUFFER_TYPE_OUTPUT) {
bufreq.memory = V4L2_MEMORY_USERPTR;
bufreq.count = 0;
bufreq.type=V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
rc = ioctl(obj->drv_ctx.video_driver_fd,VIDIOC_REQBUFS, &bufreq);
} else if(buffer_type == VDEC_BUFFER_TYPE_INPUT) {
bufreq.memory = V4L2_MEMORY_USERPTR;
bufreq.count = 0;
bufreq.type=V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
rc = ioctl(obj->drv_ctx.video_driver_fd,VIDIOC_REQBUFS, &bufreq);
}
return rc;
}
OMX_ERRORTYPE omx_vdec::set_dpb(bool is_split_mode, int dpb_color_format)
{
int rc = 0;
struct v4l2_ext_control ctrl[2];
struct v4l2_ext_controls controls;
DEBUG_PRINT_HIGH("DPB mode: %s DPB color format: %s OPB color format: %s",
is_split_mode ? "split" : "combined",
dpb_color_format == V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_UBWC ? "nv12_ubwc":
dpb_color_format == V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_TP10_UBWC ? "nv12_10bit_ubwc":
dpb_color_format == V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE ? "same as opb":
"unknown",
capture_capability == V4L2_PIX_FMT_NV12 ? "nv12":
capture_capability == V4L2_PIX_FMT_NV12_UBWC ? "nv12_ubwc":
capture_capability == V4L2_PIX_FMT_NV12_TP10_UBWC ? "nv12_10bit_ubwc":
"unknown");
ctrl[0].id = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE;
if (is_split_mode) {
ctrl[0].value = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_SECONDARY;
} else {
ctrl[0].value = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_PRIMARY;
}
ctrl[1].id = V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT;
ctrl[1].value = dpb_color_format;
controls.count = 2;
controls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
controls.controls = ctrl;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_EXT_CTRLS, &controls);
if (rc) {
DEBUG_PRINT_ERROR("Failed to set ext ctrls for opb_dpb: %d\n", rc);
return OMX_ErrorUnsupportedSetting;
}
return OMX_ErrorNone;
}
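/* Summary of the DPB/OPB decision implemented below (derived from the code paths, for reference):
* OPB needs CPU access (linear NV12):
*   8-bit : split with DPB=UBWC when the content is progressive, not VP9, not a thumbnail
*           (IDR-only) session and split mode is not disabled; otherwise split with DPB
*           following the OPB format, or combined linear.
*   10-bit: split with DPB=TP10 UBWC and OPB=linear.
* OPB has no CPU access (UBWC):
*   8-bit : split with DPB=UBWC when the caller requests the same color format on both,
*           otherwise combined UBWC.
*   10-bit: split with DPB=TP10 UBWC and OPB=UBWC when dithering, otherwise combined TP10 UBWC.
*/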
OMX_ERRORTYPE omx_vdec::decide_dpb_buffer_mode(bool split_opb_dpb_with_same_color_fmt)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_format fmt;
int rc = 0;
bool cpu_access = (capture_capability != V4L2_PIX_FMT_NV12_UBWC) &&
capture_capability != V4L2_PIX_FMT_NV12_TP10_UBWC;
bool tp10_enable = !drv_ctx.idr_only_decoding &&
dpb_bit_depth == MSM_VIDC_BIT_DEPTH_10;
bool dither_enable = true;
switch (m_dither_config) {
case DITHER_DISABLE:
dither_enable = false;
break;
case DITHER_COLORSPACE_EXCEPTBT2020:
dither_enable = (m_color_space == EXCEPT_BT2020);
break;
case DITHER_ALL_COLORSPACE:
dither_enable = true;
break;
default:
DEBUG_PRINT_ERROR("Unsupported dither configuration:%d", m_dither_config);
}
if (tp10_enable && !dither_enable) {
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12_TP10_UBWC;
capture_capability = V4L2_PIX_FMT_NV12_TP10_UBWC;
cpu_access = false;
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_G_FMT, &fmt);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed get format on capture mplane", __func__);
return OMX_ErrorUnsupportedSetting;
}
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_FMT, &fmt);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed set format on capture mplane", __func__);
return OMX_ErrorUnsupportedSetting;
}
}
if (!BITMASK_PRESENT(&m_flags ,OMX_COMPONENT_IDLE_PENDING) &&
!BITMASK_PRESENT(&m_flags, OMX_COMPONENT_OUTPUT_ENABLE_PENDING)) {
DEBUG_PRINT_LOW("Invalid state to decide on dpb-opb split");
return eRet;
}
if (cpu_access) {
if (dpb_bit_depth == MSM_VIDC_BIT_DEPTH_8) {
/* Split mode is disabled for VP9. In split mode the DPB buffers are part of the internal
* scratch buffers, and the driver does not do reference buffer management for
* scratch buffers. In case of VP9 with spatial scalability, when a sequence-change
* event is received with the new resolution and a flush is issued by the driver, the
* driver releases all references to the internal scratch buffers. However, per VP9
* spatial scalability, even after the flush, buffers that have not yet received a
* release-reference event must not be unmapped and freed. Since reference buffer
* management of the internal scratch buffers is currently not implemented in the
* driver, the DPB buffers would get unmapped. For other codecs this does not matter,
* as the DPB is flushed with the new SPS/PPS.
*/
bool is_not_vp9 = eCompressionFormat != OMX_VIDEO_CodingVP9;
bool eligible_for_split_dpb_ubwc =
m_progressive == MSM_VIDC_PIC_STRUCT_PROGRESSIVE && //@ Venus limitation: split mode is supported only for progressive content.
is_not_vp9 && //@ Split mode is disabled for VP9.
!drv_ctx.idr_only_decoding && //@ Split mode is disabled for the thumbnail (IDR-only) use case.
!m_disable_split_mode; //@ Property can disable split mode.
//Since opb is linear, dpb should also be linear.
if (split_opb_dpb_with_same_color_fmt) {
eligible_for_split_dpb_ubwc = false;
}
if (eligible_for_split_dpb_ubwc) {
//split DPB-OPB
//DPB -> UBWC , OPB -> Linear
eRet = set_dpb(true, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_UBWC);
} else if (split_opb_dpb_with_same_color_fmt) {
//DPB -> Linear, OPB -> Linear
eRet = set_dpb(true, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE);
} else {
//DPB-OPB combined linear
eRet = set_dpb(false, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE);
}
} else if (dpb_bit_depth == MSM_VIDC_BIT_DEPTH_10) {
//split DPB-OPB
//DPB -> UBWC, OPB -> Linear
eRet = set_dpb(true, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_TP10_UBWC);
}
} else { //no cpu access
if (dpb_bit_depth == MSM_VIDC_BIT_DEPTH_8) {
if (split_opb_dpb_with_same_color_fmt) {
//split DPB-OPB
//DPB -> UBWC, OPB -> UBWC
eRet = set_dpb(true, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_UBWC);
} else {
//DPB-OPB combined UBWC
eRet = set_dpb(false, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE);
}
} else if (dpb_bit_depth == MSM_VIDC_BIT_DEPTH_10) {
if (dither_enable) {
//split DPB-OPB
//DPB -> TP10UBWC, OPB -> UBWC
eRet = set_dpb(true, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_TP10_UBWC);
} else {
//combined DPB-OPB
//DPB -> TP10UBWC, OPB -> TP10UBWC
eRet = set_dpb(false, V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_TP10_UBWC);
}
}
}
if (eRet) {
DEBUG_PRINT_HIGH("Failed to set DPB buffer mode: %d", eRet);
}
return eRet;
}
int omx_vdec::enable_downscalar()
{
int rc = 0;
struct v4l2_control control;
struct v4l2_format fmt;
if (is_down_scalar_enabled) {
DEBUG_PRINT_LOW("%s: already enabled", __func__);
return 0;
}
DEBUG_PRINT_LOW("omx_vdec::enable_downscalar");
rc = decide_dpb_buffer_mode(true);
if (rc) {
DEBUG_PRINT_ERROR("%s: decide_dpb_buffer_mode Failed ", __func__);
return rc;
}
is_down_scalar_enabled = true;
memset(&control, 0x0, sizeof(struct v4l2_control));
control.id = V4L2_CID_MPEG_VIDC_VIDEO_KEEP_ASPECT_RATIO;
control.value = 1;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_CTRL, &control);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed to set VIDEO_KEEP_ASPECT_RATIO", __func__);
return rc;
}
return 0;
}
int omx_vdec::disable_downscalar()
{
int rc = 0;
struct v4l2_control control;
if (!is_down_scalar_enabled) {
DEBUG_PRINT_LOW("omx_vdec::disable_downscalar: already disabled");
return 0;
}
rc = decide_dpb_buffer_mode(false);
if (rc < 0) {
DEBUG_PRINT_ERROR("%s:decide_dpb_buffer_mode failed\n", __func__);
return rc;
}
is_down_scalar_enabled = false;
return rc;
}
int omx_vdec::decide_downscalar()
{
int rc = 0;
struct v4l2_format fmt;
enum color_fmts color_format;
OMX_U32 width, height;
OMX_BOOL isPortraitVideo = OMX_FALSE;
if (capture_capability == V4L2_PIX_FMT_NV12_TP10_UBWC) {
rc = disable_downscalar();
if (rc) {
DEBUG_PRINT_ERROR("Disable downscalar failed!");
return rc;
}
return 0;
}
if (!m_enable_downscalar) {
DEBUG_PRINT_LOW("%s: downscalar not supported", __func__);
return 0;
}
#ifdef _QUERY_DISP_RES_
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_G_FMT, &fmt);
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: Failed to get format on capture mplane", __func__);
return rc;
}
isPortraitVideo = fmt.fmt.pix_mp.width < fmt.fmt.pix_mp.height ? OMX_TRUE : OMX_FALSE;
if (!m_downscalar_width || !m_downscalar_height) {
qdutils::DisplayAttributes dpa = {}, dsa = {}, dva = {};
int prim_config, ext_config, virt_config;
prim_config = qdutils::getActiveConfig(qdutils::DISPLAY_PRIMARY);
dpa = qdutils::getDisplayAttributes(prim_config, qdutils::DISPLAY_PRIMARY);
DEBUG_PRINT_HIGH("%s: Primary dpa.xres = %d dpa.yres=%d dpa.xdpi = %f dpa.ydpi = %f ",
__func__, dpa.xres, dpa.yres, dpa.xdpi, dpa.ydpi);
ext_config = qdutils::getActiveConfig(qdutils::DISPLAY_EXTERNAL);
dsa = qdutils::getDisplayAttributes(ext_config, qdutils::DISPLAY_EXTERNAL);
DEBUG_PRINT_HIGH("%s: HDMI dsa.xres = %d dsa.yres = %d dsa.xdpi = %f dsa.ydpi = %f ",
__func__, dsa.xres, dsa.yres, dsa.xdpi, dsa.ydpi);
virt_config = qdutils::getActiveConfig(qdutils::DISPLAY_VIRTUAL);
dva = qdutils::getDisplayAttributes(virt_config, qdutils::DISPLAY_VIRTUAL);
DEBUG_PRINT_HIGH("%s: Virtual dva.xres = %d dva.yres = %d dva.xdpi = %f dva.ydpi = %f ",
__func__, dva.xres, dva.yres, dva.xdpi, dva.ydpi);
/* The logic below handles the following conditions:
* 1. Choose the display resolution as the maximum resolution across all connected
* displays (primary, external, virtual), so that we do not downscale content
* unnecessarily and lose quality on a display that could show it at full resolution.
* 2. Connected displays may be in landscape or portrait mode, so xres may be smaller
* or greater than yres. We therefore take the max of the two as the width and the
* min of the two as the height, and swap them if point 3 applies.
* 3. The video itself may be in portrait orientation, in which case the downscalar
* width and height are swapped.
*/
if (dsa.xres * dsa.yres > dpa.xres * dpa.yres) {
m_downscalar_width = MAX(dsa.xres, dsa.yres);
m_downscalar_height = MIN(dsa.xres, dsa.yres);
} else if (dva.xres * dva.yres > dpa.xres * dpa.yres) {
m_downscalar_width = MAX(dva.xres, dva.yres);
m_downscalar_height = MIN(dva.xres, dva.yres);
} else {
m_downscalar_width = MAX(dpa.xres, dpa.yres);
m_downscalar_height = MIN(dpa.xres, dpa.yres);
}
if (isPortraitVideo) {
// Swap width and height
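// (in-place XOR swap; equivalent to std::swap(m_downscalar_width, m_downscalar_height))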
m_downscalar_width = m_downscalar_width ^ m_downscalar_height;
m_downscalar_height = m_downscalar_width ^ m_downscalar_height;
m_downscalar_width = m_downscalar_width ^ m_downscalar_height;
}
}
m_downscalar_width = ALIGN(m_downscalar_width, 128);
m_downscalar_height = ALIGN(m_downscalar_height, 32);
#endif
if (!m_downscalar_width || !m_downscalar_height) {
DEBUG_PRINT_LOW("%s: Invalid downscalar configuration", __func__);
return 0;
}
if (m_force_down_scalar) {
DEBUG_PRINT_LOW("%s: m_force_down_scalar %d ", __func__, m_force_down_scalar);
return 0;
}
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_G_FMT, &fmt);
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: Failed to get format on capture mplane", __func__);
return rc;
}
height = fmt.fmt.pix_mp.height;
width = fmt.fmt.pix_mp.width;
DEBUG_PRINT_HIGH("%s: driver wxh = %dx%d, downscalar wxh = %dx%d m_is_display_session = %d", __func__,
fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height, m_downscalar_width, m_downscalar_height, m_is_display_session);
if ((fmt.fmt.pix_mp.width * fmt.fmt.pix_mp.height > m_downscalar_width * m_downscalar_height) &&
m_is_display_session) {
rc = enable_downscalar();
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: enable_downscalar failed\n", __func__);
return rc;
}
width = m_downscalar_width > fmt.fmt.pix_mp.width ?
fmt.fmt.pix_mp.width : m_downscalar_width;
height = m_downscalar_height > fmt.fmt.pix_mp.height ?
fmt.fmt.pix_mp.height : m_downscalar_height;
switch (capture_capability) {
case V4L2_PIX_FMT_NV12:
color_format = COLOR_FMT_NV12;
break;
case V4L2_PIX_FMT_NV12_UBWC:
color_format = COLOR_FMT_NV12_UBWC;
break;
case V4L2_PIX_FMT_NV12_TP10_UBWC:
color_format = COLOR_FMT_NV12_BPP10_UBWC;
break;
default:
DEBUG_PRINT_ERROR("Color format not recognized\n");
rc = OMX_ErrorUndefined;
return rc;
}
} else {
rc = disable_downscalar();
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: disable_downscalar failed\n", __func__);
return rc;
}
}
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.height = height;
fmt.fmt.pix_mp.width = width;
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_FMT, &fmt);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed set format on capture mplane", __func__);
return rc;
}
rc = get_buffer_req(&drv_ctx.op_buf);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed to get output buffer requirements", __func__);
return rc;
}
return rc;
}
/* ======================================================================
FUNCTION
omx_vdec::OMXCntrlProcessMsgCb
DESCRIPTION
IL Client callbacks are generated through this routine. The decoder
provides the thread context for this routine.
PARAMETERS
ctxt -- Context information related to the self.
id -- Event identifier. This could be any of the following:
1. Command completion event
2. Buffer done callback event
3. Frame done callback event
RETURN VALUE
None.
========================================================================== */
void omx_vdec::process_event_cb(void *ctxt, unsigned char id)
{
unsigned long p1; // Parameter - 1
unsigned long p2; // Parameter - 2
unsigned long ident;
unsigned qsize=0; // qsize
omx_vdec *pThis = (omx_vdec *) ctxt;
if (!pThis) {
DEBUG_PRINT_ERROR("ERROR: %s()::Context is incorrect, bailing out",
__func__);
return;
}
// Protect the shared queue data structure
do {
/* Read the message ids from the queue */
pthread_mutex_lock(&pThis->m_lock);
qsize = pThis->m_cmd_q.m_size;
if (qsize) {
pThis->m_cmd_q.pop_entry(&p1, &p2, &ident);
}
if (qsize == 0 && pThis->m_state != OMX_StatePause) {
qsize = pThis->m_ftb_q.m_size;
if (qsize) {
pThis->m_ftb_q.pop_entry(&p1, &p2, &ident);
}
}
if (qsize == 0 && pThis->m_state != OMX_StatePause) {
qsize = pThis->m_etb_q.m_size;
if (qsize) {
pThis->m_etb_q.pop_entry(&p1, &p2, &ident);
}
}
pthread_mutex_unlock(&pThis->m_lock);
/*process message if we have one*/
if (qsize > 0) {
id = ident;
switch (id) {
case OMX_COMPONENT_GENERATE_EVENT:
if (pThis->m_cb.EventHandler) {
switch (p1) {
case OMX_CommandStateSet:
pThis->m_state = (OMX_STATETYPE) p2;
DEBUG_PRINT_HIGH("OMX_CommandStateSet complete, m_state = %d",
pThis->m_state);
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete, p1, p2, NULL);
break;
case OMX_EventError:
if (p2 == OMX_StateInvalid) {
DEBUG_PRINT_ERROR("OMX_EventError: p2 is OMX_StateInvalid");
pThis->m_state = (OMX_STATETYPE) p2;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventError, OMX_ErrorInvalidState, p2, NULL);
} else if (p2 == (unsigned long)OMX_ErrorHardware) {
pThis->omx_report_error();
} else {
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventError, p2, (OMX_U32)NULL, NULL );
}
break;
case OMX_CommandPortDisable:
DEBUG_PRINT_HIGH("OMX_CommandPortDisable complete for port [%lu]", p2);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_OUTPUT_FLUSH_IN_DISABLE_PENDING)) {
BITMASK_SET(&pThis->m_flags, OMX_COMPONENT_DISABLE_OUTPUT_DEFERRED);
break;
}
if (p2 == OMX_CORE_OUTPUT_PORT_INDEX) {
OMX_ERRORTYPE eRet = OMX_ErrorNone;
pThis->stream_off(OMX_CORE_OUTPUT_PORT_INDEX);
if (release_buffers(pThis, VDEC_BUFFER_TYPE_OUTPUT))
DEBUG_PRINT_HIGH("Failed to release output buffers");
eRet = pThis->get_buffer_req(&pThis->drv_ctx.op_buf);
if (eRet != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("get_buffer_req failed eRet = %d", eRet);
pThis->omx_report_error();
break;
}
}
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete, p1, p2, NULL );
break;
case OMX_CommandPortEnable:
DEBUG_PRINT_HIGH("OMX_CommandPortEnable complete for port [%lu]", p2);
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,\
OMX_EventCmdComplete, p1, p2, NULL );
pThis->in_reconfig = false;
break;
default:
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete, p1, p2, NULL );
break;
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_ETB_ARBITRARY:
if (pThis->empty_this_buffer_proxy_arbitrary((OMX_HANDLETYPE)p1,\
(OMX_BUFFERHEADERTYPE *)(intptr_t)p2) != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("empty_this_buffer_proxy_arbitrary failure");
pThis->omx_report_error ();
}
break;
case OMX_COMPONENT_GENERATE_ETB: {
OMX_ERRORTYPE iret;
iret = pThis->empty_this_buffer_proxy((OMX_HANDLETYPE)p1, (OMX_BUFFERHEADERTYPE *)p2);
if (iret == OMX_ErrorInsufficientResources) {
DEBUG_PRINT_ERROR("empty_this_buffer_proxy failure due to HW overload");
pThis->omx_report_hw_overload ();
} else if (iret != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("empty_this_buffer_proxy failure");
pThis->omx_report_error ();
}
}
break;
case OMX_COMPONENT_GENERATE_FTB:
if ( pThis->fill_this_buffer_proxy((OMX_HANDLETYPE)(intptr_t)p1,\
(OMX_BUFFERHEADERTYPE *)(intptr_t)p2) != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("fill_this_buffer_proxy failure");
pThis->omx_report_error ();
}
break;
case OMX_COMPONENT_GENERATE_COMMAND:
pThis->send_command_proxy(&pThis->m_cmp,(OMX_COMMANDTYPE)p1,\
(OMX_U32)p2,(OMX_PTR)NULL);
break;
case OMX_COMPONENT_GENERATE_EBD:
if (p2 != VDEC_S_SUCCESS && p2 != VDEC_S_INPUT_BITSTREAM_ERR) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_EBD failure");
pThis->omx_report_error ();
} else {
if (p2 == VDEC_S_INPUT_BITSTREAM_ERR && p1) {
pThis->time_stamp_dts.remove_time_stamp(
((OMX_BUFFERHEADERTYPE *)(intptr_t)p1)->nTimeStamp,
(pThis->drv_ctx.interlace != VDEC_InterlaceFrameProgressive)
?true:false);
}
if ( pThis->empty_buffer_done(&pThis->m_cmp,
(OMX_BUFFERHEADERTYPE *)(intptr_t)p1) != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("empty_buffer_done failure");
pThis->omx_report_error ();
}
}
break;
case OMX_COMPONENT_GENERATE_INFO_FIELD_DROPPED: {
int64_t *timestamp = (int64_t *)(intptr_t)p1;
if (p1) {
pThis->time_stamp_dts.remove_time_stamp(*timestamp,
(pThis->drv_ctx.interlace != VDEC_InterlaceFrameProgressive)
?true:false);
free(timestamp);
}
}
break;
case OMX_COMPONENT_GENERATE_FBD:
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_FBD failure");
pThis->omx_report_error ();
} else if ( pThis->fill_buffer_done(&pThis->m_cmp,
(OMX_BUFFERHEADERTYPE *)(intptr_t)p1) != OMX_ErrorNone ) {
DEBUG_PRINT_ERROR("fill_buffer_done failure");
pThis->omx_report_error ();
}
break;
case OMX_COMPONENT_GENERATE_EVENT_INPUT_FLUSH:
DEBUG_PRINT_HIGH("Driver flush i/p Port complete");
if (!pThis->input_flush_progress) {
DEBUG_PRINT_HIGH("WARNING: Unexpected flush from driver");
} else {
pThis->execute_input_flush();
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_EVENT_INPUT_FLUSH failure");
pThis->omx_report_error ();
} else {
/* Check if we need to generate an event for flush done */
pThis->notify_flush_done(ctxt);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_IDLE_PENDING)) {
if (pThis->stream_off(OMX_CORE_INPUT_PORT_INDEX)) {
DEBUG_PRINT_ERROR("Failed to call streamoff on OUTPUT Port");
pThis->omx_report_error ();
} else {
pThis->streaming[OUTPUT_PORT] = false;
}
if (!pThis->output_flush_progress) {
DEBUG_PRINT_LOW("Input flush done hence issue stop");
pThis->post_event ((unsigned int)NULL, VDEC_S_SUCCESS,\
OMX_COMPONENT_GENERATE_STOP_DONE);
}
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
}
break;
case OMX_COMPONENT_GENERATE_EVENT_OUTPUT_FLUSH:
DEBUG_PRINT_HIGH("Driver flush o/p Port complete");
if (!pThis->output_flush_progress) {
DEBUG_PRINT_HIGH("WARNING: Unexpected flush from driver");
} else {
pThis->execute_output_flush();
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_EVENT_OUTPUT_FLUSH failed");
pThis->omx_report_error ();
} else {
/* Check if we need to generate an event for flush done */
pThis->notify_flush_done(ctxt);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_OUTPUT_FLUSH_IN_DISABLE_PENDING)) {
DEBUG_PRINT_LOW("Internal flush complete");
BITMASK_CLEAR (&pThis->m_flags,
OMX_COMPONENT_OUTPUT_FLUSH_IN_DISABLE_PENDING);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_DISABLE_OUTPUT_DEFERRED)) {
pThis->post_event(OMX_CommandPortDisable,
OMX_CORE_OUTPUT_PORT_INDEX,
OMX_COMPONENT_GENERATE_EVENT);
BITMASK_CLEAR (&pThis->m_flags,
OMX_COMPONENT_DISABLE_OUTPUT_DEFERRED);
BITMASK_CLEAR (&pThis->m_flags,
OMX_COMPONENT_OUTPUT_DISABLE_PENDING);
}
}
if (BITMASK_PRESENT(&pThis->m_flags ,OMX_COMPONENT_IDLE_PENDING)) {
if (pThis->stream_off(OMX_CORE_OUTPUT_PORT_INDEX)) {
DEBUG_PRINT_ERROR("Failed to call streamoff on CAPTURE Port");
pThis->omx_report_error ();
break;
}
pThis->streaming[CAPTURE_PORT] = false;
if (!pThis->input_flush_progress) {
DEBUG_PRINT_LOW("Output flush done hence issue stop");
pThis->post_event ((unsigned int)NULL, VDEC_S_SUCCESS,\
OMX_COMPONENT_GENERATE_STOP_DONE);
}
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
}
break;
case OMX_COMPONENT_GENERATE_START_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_START_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_START_DONE Failure");
pThis->omx_report_error ();
} else {
DEBUG_PRINT_LOW("OMX_COMPONENT_GENERATE_START_DONE Success");
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_EXECUTE_PENDING)) {
DEBUG_PRINT_LOW("Move to executing");
// Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_EXECUTE_PENDING);
pThis->m_state = OMX_StateExecuting;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StateExecuting, NULL);
} else if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_PAUSE_PENDING)) {
if (/*ioctl (pThis->drv_ctx.video_driver_fd,
VDEC_IOCTL_CMD_PAUSE,NULL ) < */0) {
DEBUG_PRINT_ERROR("VDEC_IOCTL_CMD_PAUSE failed");
pThis->omx_report_error ();
}
}
}
} else {
DEBUG_PRINT_LOW("Event Handler callback is NULL");
}
break;
case OMX_COMPONENT_GENERATE_PAUSE_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_PAUSE_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_PAUSE_DONE ret failed");
pThis->omx_report_error ();
} else {
pThis->complete_pending_buffer_done_cbs();
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_PAUSE_PENDING)) {
DEBUG_PRINT_LOW("OMX_COMPONENT_GENERATE_PAUSE_DONE nofity");
//Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_PAUSE_PENDING);
pThis->m_state = OMX_StatePause;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StatePause, NULL);
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_RESUME_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_RESUME_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_RESUME_DONE failed");
pThis->omx_report_error ();
} else {
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_EXECUTE_PENDING)) {
DEBUG_PRINT_LOW("Moving the decoder to execute state");
// Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_EXECUTE_PENDING);
pThis->m_state = OMX_StateExecuting;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StateExecuting,NULL);
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_STOP_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_STOP_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_STOP_DONE ret failed");
pThis->omx_report_error ();
} else {
pThis->complete_pending_buffer_done_cbs();
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_IDLE_PENDING)) {
DEBUG_PRINT_LOW("OMX_COMPONENT_GENERATE_STOP_DONE Success");
// Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_IDLE_PENDING);
pThis->m_state = OMX_StateIdle;
DEBUG_PRINT_LOW("Move to Idle State");
pThis->m_cb.EventHandler(&pThis->m_cmp,pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StateIdle,NULL);
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_PORT_RECONFIG:
if (p2 == OMX_IndexParamPortDefinition) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_IndexParamPortDefinition");
pThis->in_reconfig = true;
pThis->m_need_turbo &= ~TURBO_MODE_HIGH_FPS;
} else if (p2 == OMX_IndexConfigCommonOutputCrop) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_IndexConfigCommonOutputCrop");
/* Check if resolution is changed in smooth streaming mode */
if (pThis->m_smoothstreaming_mode &&
((pThis->framesize.nWidth !=
pThis->drv_ctx.video_resolution.frame_width) ||
(pThis->framesize.nHeight !=
pThis->drv_ctx.video_resolution.frame_height))) {
DEBUG_PRINT_HIGH("Resolution changed from: wxh = %dx%d to: wxh = %dx%d",
pThis->framesize.nWidth,
pThis->framesize.nHeight,
pThis->drv_ctx.video_resolution.frame_width,
pThis->drv_ctx.video_resolution.frame_height);
/* Update new resolution */
pThis->framesize.nWidth =
pThis->drv_ctx.video_resolution.frame_width;
pThis->framesize.nHeight =
pThis->drv_ctx.video_resolution.frame_height;
/* Update C2D with new resolution */
if (!pThis->client_buffers.update_buffer_req()) {
DEBUG_PRINT_ERROR("Setting C2D buffer requirements failed");
}
}
/* Update new crop information */
pThis->rectangle.nLeft = pThis->drv_ctx.frame_size.left;
pThis->rectangle.nTop = pThis->drv_ctx.frame_size.top;
pThis->rectangle.nWidth = pThis->drv_ctx.frame_size.right;
pThis->rectangle.nHeight = pThis->drv_ctx.frame_size.bottom;
/* Validate the new crop information */
if (pThis->rectangle.nLeft + pThis->rectangle.nWidth >
pThis->drv_ctx.video_resolution.frame_width) {
DEBUG_PRINT_HIGH("Crop L[%u] + R[%u] > W[%u]",
pThis->rectangle.nLeft, pThis->rectangle.nWidth,
pThis->drv_ctx.video_resolution.frame_width);
pThis->rectangle.nLeft = 0;
if (pThis->rectangle.nWidth >
pThis->drv_ctx.video_resolution.frame_width) {
DEBUG_PRINT_HIGH("Crop R[%u] > W[%u]",
pThis->rectangle.nWidth,
pThis->drv_ctx.video_resolution.frame_width);
pThis->rectangle.nWidth =
pThis->drv_ctx.video_resolution.frame_width;
}
}
if (pThis->rectangle.nTop + pThis->rectangle.nHeight >
pThis->drv_ctx.video_resolution.frame_height) {
DEBUG_PRINT_HIGH("Crop T[%u] + B[%u] > H[%u]",
pThis->rectangle.nTop, pThis->rectangle.nHeight,
pThis->drv_ctx.video_resolution.frame_height);
pThis->rectangle.nTop = 0;
if (pThis->rectangle.nHeight >
pThis->drv_ctx.video_resolution.frame_height) {
DEBUG_PRINT_HIGH("Crop B[%u] > H[%u]",
pThis->rectangle.nHeight,
pThis->drv_ctx.video_resolution.frame_height);
pThis->rectangle.nHeight =
pThis->drv_ctx.video_resolution.frame_height;
}
}
DEBUG_PRINT_HIGH("Updated Crop Info: L: %u, T: %u, R: %u, B: %u",
pThis->rectangle.nLeft, pThis->rectangle.nTop,
pThis->rectangle.nWidth, pThis->rectangle.nHeight);
} else if (p2 == OMX_QTIIndexConfigDescribeColorAspects) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_QTIIndexConfigDescribeColorAspects");
} else if (p2 == OMX_QTIIndexConfigDescribeHDRColorInfo) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_QTIIndexConfigDescribeHDRcolorinfo");
} else {
DEBUG_PRINT_ERROR("Rxd Invalid PORT_RECONFIG event (%lu)", p2);
break;
}
if (pThis->m_debug.outfile) {
fclose(pThis->m_debug.outfile);
pThis->m_debug.outfile = NULL;
}
if (pThis->m_debug.out_ymeta_file) {
fclose(pThis->m_debug.out_ymeta_file);
pThis->m_debug.out_ymeta_file = NULL;
}
if (pThis->m_debug.out_uvmeta_file) {
fclose(pThis->m_debug.out_uvmeta_file);
pThis->m_debug.out_uvmeta_file = NULL;
}
if (pThis->secure_mode && pThis->m_cb.EventHandler && pThis->in_reconfig) {
pThis->prefetchNewBuffers();
}
if (pThis->m_cb.EventHandler) {
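/* Payload passed with OMX_EventPortSettingsChanged:
 * [0]/[1] = display (crop) height/width, [2]/[3] = frame height/width,
 * [4] = DPB bit depth, [5] = color space, [6] = dither configuration. */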
uint32_t frame_data[7];
frame_data[0] = (p2 == OMX_IndexParamPortDefinition) ?
pThis->m_reconfig_height : pThis->rectangle.nHeight;
frame_data[1] = (p2 == OMX_IndexParamPortDefinition) ?
pThis->m_reconfig_width : pThis->rectangle.nWidth;
frame_data[2] = (p2 == OMX_IndexParamPortDefinition) ?
frame_data[0] : pThis->drv_ctx.video_resolution.frame_height;
frame_data[3] = (p2 == OMX_IndexParamPortDefinition) ?
frame_data[1] : pThis->drv_ctx.video_resolution.frame_width;
frame_data[4] = pThis->dpb_bit_depth;
frame_data[5] = pThis->m_color_space;
frame_data[6] = pThis->m_dither_config;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventPortSettingsChanged, p1, p2, (void*) frame_data );
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_EOS_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_EOS_DONE");
if (pThis->m_cb.EventHandler) {
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data, OMX_EventBufferFlag,
OMX_CORE_OUTPUT_PORT_INDEX, OMX_BUFFERFLAG_EOS, NULL );
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
pThis->prev_ts = LLONG_MAX;
pThis->rst_prev_ts = true;
break;
case OMX_COMPONENT_GENERATE_HARDWARE_ERROR:
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_HARDWARE_ERROR");
pThis->omx_report_error();
break;
case OMX_COMPONENT_GENERATE_UNSUPPORTED_SETTING:
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_UNSUPPORTED_SETTING");
pThis->omx_report_unsupported_setting();
break;
case OMX_COMPONENT_GENERATE_HARDWARE_OVERLOAD:
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_HARDWARE_OVERLOAD");
pThis->omx_report_hw_overload();
break;
default:
break;
}
}
pthread_mutex_lock(&pThis->m_lock);
qsize = pThis->m_cmd_q.m_size;
if (pThis->m_state != OMX_StatePause)
qsize += (pThis->m_ftb_q.m_size + pThis->m_etb_q.m_size);
pthread_mutex_unlock(&pThis->m_lock);
} while (qsize>0);
}
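/* Record the resolution reported by the driver and refresh the crop
 * rectangle (unless the down-scalar is enabled); returns 1 if the width
 * or height changed, 0 otherwise. */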
int omx_vdec::update_resolution(int width, int height, int stride, int scan_lines)
{
int format_changed = 0;
if ((height != (int)drv_ctx.video_resolution.frame_height) ||
(width != (int)drv_ctx.video_resolution.frame_width)) {
DEBUG_PRINT_HIGH("NOTE_CIF: W/H %d (%d), %d (%d)",
width, drv_ctx.video_resolution.frame_width,
height,drv_ctx.video_resolution.frame_height);
format_changed = 1;
}
drv_ctx.video_resolution.frame_height = height;
drv_ctx.video_resolution.frame_width = width;
drv_ctx.video_resolution.scan_lines = scan_lines;
drv_ctx.video_resolution.stride = stride;
if (!is_down_scalar_enabled) {
rectangle.nLeft = m_extradata_info.output_crop_rect.nLeft;
rectangle.nTop = m_extradata_info.output_crop_rect.nTop;
rectangle.nWidth = m_extradata_info.output_crop_rect.nWidth;
rectangle.nHeight = m_extradata_info.output_crop_rect.nHeight;
}
return format_changed;
}
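/* Reject the session if the stream resolution lies outside the range
 * advertised in m_decoder_capability; H264 High/Main profile is further
 * capped at 1280x720 here. */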
OMX_ERRORTYPE omx_vdec::is_video_session_supported()
{
if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.avc",
OMX_MAX_STRINGNAME_SIZE) &&
(m_profile == HIGH_PROFILE || m_profile == MAIN_PROFILE)) {
m_decoder_capability.max_width = 1280;
m_decoder_capability.max_height = 720;
DEBUG_PRINT_HIGH("Set max_width=1280 & max_height=720 for H264 HP/MP");
}
if ((drv_ctx.video_resolution.frame_width *
drv_ctx.video_resolution.frame_height >
m_decoder_capability.max_width *
m_decoder_capability.max_height) ||
(drv_ctx.video_resolution.frame_width*
drv_ctx.video_resolution.frame_height <
m_decoder_capability.min_width *
m_decoder_capability.min_height)) {
DEBUG_PRINT_ERROR(
"Unsupported WxH = (%u)x(%u) supported range is min(%u)x(%u) - max(%u)x(%u)",
drv_ctx.video_resolution.frame_width,
drv_ctx.video_resolution.frame_height,
m_decoder_capability.min_width,
m_decoder_capability.min_height,
m_decoder_capability.max_width,
m_decoder_capability.max_height);
return OMX_ErrorUnsupportedSetting;
}
DEBUG_PRINT_HIGH("video session supported");
return OMX_ErrorNone;
}
int omx_vdec::log_input_buffers(const char *buffer_addr, int buffer_len)
{
if (m_debug.in_buffer_log && !m_debug.infile) {
if(!strncmp(drv_ctx.kind,"OMX.qcom.video.decoder.mpeg4", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.m4v",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind,"OMX.qcom.video.decoder.mpeg2", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.mpg", m_debug.log_loc,
drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.h263", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.263",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.avc", OMX_MAX_STRINGNAME_SIZE) ||
!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.mvc", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.264",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.hevc", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.265",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vc1", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.vc1",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.wmv", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.vc1",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.ivf",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.ivf",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.divx",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
}
m_debug.infile = fopen (m_debug.infile_name, "ab");
if (!m_debug.infile) {
DEBUG_PRINT_HIGH("Failed to open input file: %s for logging", m_debug.infile_name);
m_debug.infile_name[0] = '\0';
return -1;
}
if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", OMX_MAX_STRINGNAME_SIZE) ||
!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", OMX_MAX_STRINGNAME_SIZE)) {
struct ivf_file_header {
OMX_U8 signature[4]; // = 'DKIF'
OMX_U8 version;      // = 0
OMX_U8 headersize;   // = 32
OMX_U32 FourCC;
OMX_U8 width;
OMX_U8 height;
OMX_U32 rate;
OMX_U32 scale;
OMX_U32 length;
OMX_U8 unused[4];
} file_header;
memset((void *)&file_header,0,sizeof(file_header));
file_header.signature[0] = 'D';
file_header.signature[1] = 'K';
file_header.signature[2] = 'I';
file_header.signature[3] = 'F';
file_header.version = 0;
file_header.headersize = 32;
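/* FourCC is little-endian ASCII: 0x30385056 = "VP80", 0x30395056 = "VP90". */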
switch (drv_ctx.decoder_format) {
case VDEC_CODECTYPE_VP8:
file_header.FourCC = 0x30385056;
break;
case VDEC_CODECTYPE_VP9:
file_header.FourCC = 0x30395056;
break;
default:
DEBUG_PRINT_ERROR("unsupported format for VP8/VP9");
break;
}
fwrite((const char *)&file_header,
sizeof(file_header),1,m_debug.infile);
}
}
if (m_debug.infile && buffer_addr && buffer_len) {
if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", OMX_MAX_STRINGNAME_SIZE) ||
!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", OMX_MAX_STRINGNAME_SIZE)) {
struct vpx_ivf_frame_header {
OMX_U32 framesize;
OMX_U32 timestamp_lo;
OMX_U32 timestamp_hi;
} vpx_frame_header;
vpx_frame_header.framesize = buffer_len;
/* Currently FW doesn't use timestamp values */
vpx_frame_header.timestamp_lo = 0;
vpx_frame_header.timestamp_hi = 0;
fwrite((const char *)&vpx_frame_header,
sizeof(vpx_frame_header),1,m_debug.infile);
}
fwrite(buffer_addr, buffer_len, 1, m_debug.infile);
}
return 0;
}
int omx_vdec::log_output_buffers(OMX_BUFFERHEADERTYPE *buffer) {
int buf_index = 0;
char *temp = NULL;
if (!(m_debug.out_buffer_log || m_debug.out_meta_buffer_log) || !buffer || !buffer->nFilledLen)
return 0;
if (m_debug.out_buffer_log && !m_debug.outfile) {
snprintf(m_debug.outfile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_%d_%d_%p.yuv",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
m_debug.outfile = fopen (m_debug.outfile_name, "ab");
if (!m_debug.outfile) {
DEBUG_PRINT_HIGH("Failed to open output file: %s for logging", m_debug.log_loc);
m_debug.outfile_name[0] = '\0';
return -1;
}
}
if (m_debug.out_meta_buffer_log && !m_debug.out_ymeta_file && !m_debug.out_uvmeta_file) {
snprintf(m_debug.out_ymetafile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_%d_%d_%p.ymeta",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
snprintf(m_debug.out_uvmetafile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_%d_%d_%p.uvmeta",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
m_debug.out_ymeta_file = fopen (m_debug.out_ymetafile_name, "ab");
m_debug.out_uvmeta_file = fopen (m_debug.out_uvmetafile_name, "ab");
if (!m_debug.out_ymeta_file || !m_debug.out_uvmeta_file) {
DEBUG_PRINT_HIGH("Failed to open output y/uv meta file: %s for logging", m_debug.log_loc);
m_debug.out_ymetafile_name[0] = '\0';
m_debug.out_uvmetafile_name[0] = '\0';
return -1;
}
}
buf_index = buffer - m_out_mem_ptr;
temp = (char *)drv_ctx.ptr_outputbuffer[buf_index].bufferaddr;
if (drv_ctx.output_format == VDEC_YUV_FORMAT_NV12_UBWC ||
drv_ctx.output_format == VDEC_YUV_FORMAT_NV12_TP10_UBWC) {
DEBUG_PRINT_HIGH("Logging UBWC yuv width/height(%u/%u)",
drv_ctx.video_resolution.frame_width,
drv_ctx.video_resolution.frame_height);
if (m_debug.outfile)
fwrite(temp, buffer->nFilledLen, 1, m_debug.outfile);
if (m_debug.out_ymeta_file && m_debug.out_uvmeta_file) {
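/* UBWC frames are laid out as Y-meta | Y | UV-meta | UV, with each plane
 * aligned to 4096 bytes per the VENUS_* helpers; only the two meta planes
 * are extracted here (the full frame was already written above). */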
unsigned int width = 0, height = 0;
unsigned int y_plane, y_meta_plane;
int y_stride = 0, y_sclines = 0;
int y_meta_stride = 0, y_meta_scanlines = 0, uv_meta_stride = 0, uv_meta_scanlines = 0;
int color_fmt = (drv_ctx.output_format == VDEC_YUV_FORMAT_NV12_UBWC) ? COLOR_FMT_NV12_UBWC : COLOR_FMT_NV12_BPP10_UBWC;
int i;
int bytes_written = 0;
width = drv_ctx.video_resolution.frame_width;
height = drv_ctx.video_resolution.frame_height;
y_meta_stride = VENUS_Y_META_STRIDE(color_fmt, width);
y_meta_scanlines = VENUS_Y_META_SCANLINES(color_fmt, height);
y_stride = VENUS_Y_STRIDE(color_fmt, width);
y_sclines = VENUS_Y_SCANLINES(color_fmt, height);
uv_meta_stride = VENUS_UV_META_STRIDE(color_fmt, width);
uv_meta_scanlines = VENUS_UV_META_SCANLINES(color_fmt, height);
y_meta_plane = MSM_MEDIA_ALIGN(y_meta_stride * y_meta_scanlines, 4096);
y_plane = MSM_MEDIA_ALIGN(y_stride * y_sclines, 4096);
temp = (char *)drv_ctx.ptr_outputbuffer[buf_index].bufferaddr;
for (i = 0; i < y_meta_scanlines; i++) {
bytes_written = fwrite(temp, y_meta_stride, 1, m_debug.out_ymeta_file);
temp += y_meta_stride;
}
temp = (char *)drv_ctx.ptr_outputbuffer[buf_index].bufferaddr + y_meta_plane + y_plane;
for(i = 0; i < uv_meta_scanlines; i++) {
bytes_written += fwrite(temp, uv_meta_stride, 1, m_debug.out_uvmeta_file);
temp += uv_meta_stride;
}
}
} else if (m_debug.outfile && drv_ctx.output_format == VDEC_YUV_FORMAT_NV12) {
int stride = drv_ctx.video_resolution.stride;
int scanlines = drv_ctx.video_resolution.scan_lines;
if (m_smoothstreaming_mode) {
stride = drv_ctx.video_resolution.frame_width;
scanlines = drv_ctx.video_resolution.frame_height;
stride = (stride + DEFAULT_WIDTH_ALIGNMENT - 1) & (~(DEFAULT_WIDTH_ALIGNMENT - 1));
scanlines = (scanlines + DEFAULT_HEIGHT_ALIGNMENT - 1) & (~(DEFAULT_HEIGHT_ALIGNMENT - 1));
}
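/* Strip padding while dumping: write frame_width bytes per row and advance
 * by the padded stride; luma rows first, then frame_height/2 rows of
 * interleaved CbCr starting at stride * scanlines. */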
unsigned i;
DEBUG_PRINT_HIGH("Logging width/height(%u/%u) stride/scanlines(%u/%u)",
drv_ctx.video_resolution.frame_width,
drv_ctx.video_resolution.frame_height, stride, scanlines);
int bytes_written = 0;
for (i = 0; i < drv_ctx.video_resolution.frame_height; i++) {
bytes_written = fwrite(temp, drv_ctx.video_resolution.frame_width, 1, m_debug.outfile);
temp += stride;
}
temp = (char *)drv_ctx.ptr_outputbuffer[buf_index].bufferaddr + stride * scanlines;
int stride_c = stride;
for(i = 0; i < drv_ctx.video_resolution.frame_height/2; i++) {
bytes_written += fwrite(temp, drv_ctx.video_resolution.frame_width, 1, m_debug.outfile);
temp += stride_c;
}
}
return 0;
}
/* ======================================================================
FUNCTION
omx_vdec::component_init
DESCRIPTION
Initialize the component.
PARAMETERS
role -- Component role (decoder name) requested by the IL client.
RETURN VALUE
OMX_ErrorNone on success, an OMX error code on failure.
========================================================================== */
OMX_ERRORTYPE omx_vdec::component_init(OMX_STRING role)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_fmtdesc fdesc;
struct v4l2_format fmt;
struct v4l2_requestbuffers bufreq;
struct v4l2_control control;
struct v4l2_frmsizeenum frmsize;
unsigned int alignment = 0,buffer_size = 0;
int fds[2];
int r,ret=0;
bool codec_ambiguous = false;
OMX_STRING device_name = (OMX_STRING)"/dev/video32";
char property_value[PROPERTY_VALUE_MAX] = {0};
FILE *soc_file = NULL;
char buffer[10] = {0};
#ifdef _ANDROID_
char platform_name[PROPERTY_VALUE_MAX];
property_get("ro.board.platform", platform_name, "0");
if (!strncmp(platform_name, "msm8610", 7)) {
device_name = (OMX_STRING)"/dev/video/q6_dec";
is_q6_platform = true;
maxSmoothStreamingWidth = 1280;
maxSmoothStreamingHeight = 720;
}
#endif
is_thulium_v1 = false;
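/* soc_id 246 appears to identify MSM8996 ("thulium"); revision 1 of that
 * part needs arbitrary-bytes parsing enabled for H264 (see below). */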
soc_file = fopen("/sys/devices/soc0/soc_id", "r");
if (soc_file) {
fread(buffer, 1, 4, soc_file);
fclose(soc_file);
if (atoi(buffer) == 246) {
soc_file = fopen("/sys/devices/soc0/revision", "r");
if (soc_file) {
fread(buffer, 1, 4, soc_file);
fclose(soc_file);
if (atoi(buffer) == 1) {
is_thulium_v1 = true;
DEBUG_PRINT_HIGH("is_thulium_v1 = TRUE");
}
}
}
}
#ifdef _ANDROID_
/*
* turn off frame parsing for Android by default.
* Clients may configure OMX_QCOM_FramePacking_Arbitrary to enable this mode
*/
arbitrary_bytes = false;
property_get("vendor.vidc.dec.debug.arbitrarybytes.mode", property_value, "0");
if (atoi(property_value)) {
DEBUG_PRINT_HIGH("arbitrary_bytes mode enabled via property command");
arbitrary_bytes = true;
}
#endif
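/* Secure component names are mapped to their non-secure base role; the
 * same decoder is used with secure_mode set and arbitrary-bytes parsing
 * disabled. */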
if (!strncmp(role, "OMX.qcom.video.decoder.avc.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.avc";
} else if (!strncmp(role, "OMX.qcom.video.decoder.mpeg2.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.mpeg2";
} else if (!strncmp(role, "OMX.qcom.video.decoder.hevc.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.hevc";
} else if (!strncmp(role, "OMX.qcom.video.decoder.vc1.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.vc1";
} else if (!strncmp(role, "OMX.qcom.video.decoder.wmv.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.wmv";
} else if (!strncmp(role, "OMX.qcom.video.decoder.mpeg4.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.mpeg4";
} else if (!strncmp(role, "OMX.qcom.video.decoder.vp9.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.vp9";
}
else if (!strncmp(role, "OMX.qcom.video.decoder.vp8.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
arbitrary_bytes = false;
role = (OMX_STRING)"OMX.qcom.video.decoder.vp8";
}
drv_ctx.video_driver_fd = open(device_name, O_RDWR);
DEBUG_PRINT_INFO("component_init: %s : fd=%d", role, drv_ctx.video_driver_fd);
if (drv_ctx.video_driver_fd < 0) {
DEBUG_PRINT_ERROR("Omx_vdec::Comp Init Returning failure, errno %d", errno);
return OMX_ErrorInsufficientResources;
}
drv_ctx.frame_rate.fps_numerator = DEFAULT_FPS;
drv_ctx.frame_rate.fps_denominator = 1;
operating_frame_rate = DEFAULT_FPS;
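/* eventfd used to wake the poll() loop in async_message_thread without a
 * driver event (e.g. when the component is being stopped). */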
m_poll_efd = eventfd(0, 0);
if (m_poll_efd < 0) {
DEBUG_PRINT_ERROR("Failed to create event fd(%s)", strerror(errno));
return OMX_ErrorInsufficientResources;
}
ret = subscribe_to_events(drv_ctx.video_driver_fd);
if (!ret) {
async_thread_created = true;
ret = pthread_create(&async_thread_id,0,async_message_thread,this);
}
if (ret) {
DEBUG_PRINT_ERROR("Failed to create async_message_thread");
async_thread_created = false;
return OMX_ErrorInsufficientResources;
}
#ifdef OUTPUT_EXTRADATA_LOG
outputExtradataFile = fopen (output_extradata_filename, "ab");
#endif
// Copy the role information which provides the decoder kind
strlcpy(drv_ctx.kind,role,128);
if (!strncmp(drv_ctx.kind,"OMX.qcom.video.decoder.mpeg4",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.mpeg4",\
OMX_MAX_STRINGNAME_SIZE);
drv_ctx.timestamp_adjust = true;
drv_ctx.decoder_format = VDEC_CODECTYPE_MPEG4;
eCompressionFormat = OMX_VIDEO_CodingMPEG4;
output_capability=V4L2_PIX_FMT_MPEG4;
/*Initialize Start Code for MPEG4*/
codec_type_parse = CODEC_TYPE_MPEG4;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind,"OMX.qcom.video.decoder.mpeg2",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.mpeg2",\
OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_MPEG2;
output_capability = V4L2_PIX_FMT_MPEG2;
eCompressionFormat = OMX_VIDEO_CodingMPEG2;
/*Initialize Start Code for MPEG2*/
codec_type_parse = CODEC_TYPE_MPEG2;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.h263",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.h263",OMX_MAX_STRINGNAME_SIZE);
DEBUG_PRINT_LOW("H263 Decoder selected");
drv_ctx.decoder_format = VDEC_CODECTYPE_H263;
eCompressionFormat = OMX_VIDEO_CodingH263;
output_capability = V4L2_PIX_FMT_H263;
codec_type_parse = CODEC_TYPE_H263;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.divx311",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.divx",OMX_MAX_STRINGNAME_SIZE);
DEBUG_PRINT_LOW ("DIVX 311 Decoder selected");
drv_ctx.decoder_format = VDEC_CODECTYPE_DIVX_3;
output_capability = V4L2_PIX_FMT_DIVX_311;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingDivx;
codec_type_parse = CODEC_TYPE_DIVX;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.divx4",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.divx",OMX_MAX_STRINGNAME_SIZE);
DEBUG_PRINT_ERROR ("DIVX 4 Decoder selected");
drv_ctx.decoder_format = VDEC_CODECTYPE_DIVX_4;
output_capability = V4L2_PIX_FMT_DIVX;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingDivx;
codec_type_parse = CODEC_TYPE_DIVX;
codec_ambiguous = true;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.divx",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.divx",OMX_MAX_STRINGNAME_SIZE);
DEBUG_PRINT_ERROR ("DIVX 5/6 Decoder selected");
drv_ctx.decoder_format = VDEC_CODECTYPE_DIVX_6;
output_capability = V4L2_PIX_FMT_DIVX;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingDivx;
codec_type_parse = CODEC_TYPE_DIVX;
codec_ambiguous = true;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.avc",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.avc",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_H264;
output_capability=V4L2_PIX_FMT_H264;
eCompressionFormat = OMX_VIDEO_CodingAVC;
codec_type_parse = CODEC_TYPE_H264;
m_frame_parser.init_start_codes(codec_type_parse);
m_frame_parser.init_nal_length(nal_length);
if (is_thulium_v1) {
arbitrary_bytes = true;
DEBUG_PRINT_HIGH("Enable arbitrary_bytes for h264");
}
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.mvc",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.mvc", OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_MVC;
output_capability = V4L2_PIX_FMT_H264_MVC;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingMVC;
codec_type_parse = CODEC_TYPE_H264;
m_frame_parser.init_start_codes(codec_type_parse);
m_frame_parser.init_nal_length(nal_length);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.hevc",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.hevc",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_HEVC;
output_capability = V4L2_PIX_FMT_HEVC;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingHevc;
codec_type_parse = CODEC_TYPE_HEVC;
m_frame_parser.init_start_codes(codec_type_parse);
m_frame_parser.init_nal_length(nal_length);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vc1",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.vc1",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_VC1;
eCompressionFormat = OMX_VIDEO_CodingWMV;
codec_type_parse = CODEC_TYPE_VC1;
output_capability = V4L2_PIX_FMT_VC1_ANNEX_G;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.wmv",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.vc1",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_VC1_RCV;
eCompressionFormat = OMX_VIDEO_CodingWMV;
codec_type_parse = CODEC_TYPE_VC1;
output_capability = V4L2_PIX_FMT_VC1_ANNEX_L;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", \
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.vp8",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_VP8;
output_capability = V4L2_PIX_FMT_VP8;
eCompressionFormat = OMX_VIDEO_CodingVP8;
codec_type_parse = CODEC_TYPE_VP8;
arbitrary_bytes = false;
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", \
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.vp9",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_VP9;
output_capability = V4L2_PIX_FMT_VP9;
eCompressionFormat = OMX_VIDEO_CodingVP9;
codec_type_parse = CODEC_TYPE_VP9;
arbitrary_bytes = false;
} else {
DEBUG_PRINT_ERROR("ERROR:Unknown Component");
eRet = OMX_ErrorInvalidComponentName;
}
if (eRet == OMX_ErrorNone) {
OMX_COLOR_FORMATTYPE dest_color_format;
if (m_disable_ubwc_mode) {
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12;
} else {
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12_UBWC;
}
if (eCompressionFormat == (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingMVC)
dest_color_format = (OMX_COLOR_FORMATTYPE)
QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mMultiView;
else
dest_color_format = (OMX_COLOR_FORMATTYPE)
QOMX_COLOR_FORMATYUV420PackedSemiPlanar32m;
if (!client_buffers.set_color_format(dest_color_format)) {
DEBUG_PRINT_ERROR("Setting color format failed");
eRet = OMX_ErrorInsufficientResources;