/*--------------------------------------------------------------------------
Copyright (c) 2010 - 2019, The Linux Foundation. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of The Linux Foundation nor
the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------*/
/*============================================================================
O p e n M A X w r a p p e r s
O p e n M A X C o r e
This module contains the implementation of the OpenMAX core & component.
*//*========================================================================*/
//////////////////////////////////////////////////////////////////////////////
// Include Files
//////////////////////////////////////////////////////////////////////////////
#define __STDC_FORMAT_MACROS
#include <inttypes.h>
#include <string.h>
#include <pthread.h>
#include <sys/prctl.h>
#include <stdlib.h>
#include <unistd.h>
#include <errno.h>
#include "omx_vdec.h"
#include "vidc_common.h"
#include <fcntl.h>
#include <limits.h>
#include <stdlib.h>
#ifdef HYPERVISOR
#include "hypv_intercept.h"
#endif
#include <media/hardware/HardwareAPI.h>
#include <sys/eventfd.h>
#include "PlatformConfig.h"
#include <linux/dma-buf.h>
#include <linux/videodev2.h>
#if !defined(_ANDROID_) || defined(SYS_IOCTL)
#include <sys/ioctl.h>
#include <sys/mman.h>
#endif
#ifdef _ANDROID_
#include <cutils/properties.h>
#ifdef _QUERY_DISP_RES_
#include "display_config.h"
#endif
#endif
#ifdef _USE_GLIB_
#include <glib.h>
#define strlcpy g_strlcpy
#endif
#include <qdMetaData.h>
#include <gralloc_priv.h>
#ifdef ANDROID_JELLYBEAN_MR2
#include "QComOMXMetadata.h"
#endif
#define BUFFER_LOG_LOC "/data/vendor/media"
#ifdef OUTPUT_EXTRADATA_LOG
FILE *outputExtradataFile;
char output_extradata_filename [] = "/data/vendor/media/extradata";
#endif
#define DEFAULT_FPS 30
#define MAX_SUPPORTED_FPS 240
#define DEFAULT_WIDTH_ALIGNMENT 128
#define DEFAULT_HEIGHT_ALIGNMENT 32
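// Poll timeout below is in milliseconds; 0x7fffffff (INT_MAX) effectively disables the timeout.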
#define POLL_TIMEOUT 0x7fffffff
#ifdef _ANDROID_
extern "C" {
#include<utils/Log.h>
}
#endif//_ANDROID_
#define SZ_4K 0x1000
#define SZ_1M 0x100000
#define PREFETCH_PIXEL_BUFFER_COUNT 16
#define PREFETCH_NON_PIXEL_BUFFER_COUNT 1
#define Log2(number, power) { OMX_U32 temp = number; power = 0; while( (0 == (temp & 0x1)) && power < 16) { temp >>=0x1; power++; } }
#define Q16ToFraction(q,num,den) { OMX_U32 power; Log2(q,power); num = q >> power; den = 0x1 << (16 - power); }
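// Illustrative example of the Q16 helpers above: for q = 30 fps in Q16 (30 << 16 = 0x1E0000),
// Log2 counts trailing zero bits (capped at 16), giving power = 16, so Q16ToFraction yields
// num = 30, den = 1; for q = 0x8000 (0.5 in Q16), power = 15, num = 1, den = 2.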
#define EXTRADATA_IDX(__num_planes) ((__num_planes) ? (__num_planes) - 1 : 0)
#undef ALIGN
#define ALIGN(x, to_align) ((((unsigned) x) + (to_align - 1)) & ~(to_align - 1))
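// For reference: ALIGN assumes to_align is a power of two, e.g. ALIGN(1080, 32) = 1088
// and ALIGN(1920, 128) = 1920.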
#define DEFAULT_EXTRADATA (OMX_INTERLACE_EXTRADATA | OMX_OUTPUTCROP_EXTRADATA \
| OMX_DISPLAY_INFO_EXTRADATA | OMX_UBWC_CR_STATS_INFO_EXTRADATA)
// Y=16(0-9bits), Cb(10-19bits)=Cr(20-29bits)=128, black by default
#define DEFAULT_VIDEO_CONCEAL_COLOR_BLACK 0x8020010
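// For reference: 0x8020010 = (128 << 20) | (128 << 10) | 16, i.e. Cr = 128, Cb = 128, Y = 16
// in the 10-bit fields described above.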
#ifndef ION_FLAG_CP_BITSTREAM
#define ION_FLAG_CP_BITSTREAM 0
#endif
#ifndef ION_FLAG_CP_PIXEL
#define ION_FLAG_CP_PIXEL 0
#endif
#ifdef SLAVE_SIDE_CP
#define MEM_HEAP_ID ION_CP_MM_HEAP_ID
#define SECURE_ALIGN SZ_1M
#define SECURE_FLAGS_INPUT_BUFFER ION_FLAG_SECURE
#define SECURE_FLAGS_OUTPUT_BUFFER ION_FLAG_SECURE
#else //MASTER_SIDE_CP
#define MEM_HEAP_ID ION_SECURE_HEAP_ID
#define SECURE_ALIGN SZ_4K
#define SECURE_FLAGS_INPUT_BUFFER (ION_FLAG_SECURE | ION_FLAG_CP_BITSTREAM)
#define SECURE_FLAGS_OUTPUT_BUFFER (ION_FLAG_SECURE | ION_FLAG_CP_PIXEL)
#endif
#define LUMINANCE_DIV_FACTOR 10000.0
/* defined in mp-ctl.h */
#define MPCTLV3_VIDEO_DECODE_PB_HINT 0x41C04000
#define MIN(x,y) (((x) < (y)) ? (x) : (y))
#define MAX(x,y) (((x) > (y)) ? (x) : (y))
using namespace android;
#ifdef HYPERVISOR
#define ioctl(x, y, z) hypv_ioctl(x, y, z)
#define poll(x, y, z) hypv_poll(x, y, z)
#endif
static OMX_U32 maxSmoothStreamingWidth = 1920;
static OMX_U32 maxSmoothStreamingHeight = 1088;
void print_omx_buffer(const char *str, OMX_BUFFERHEADERTYPE *pHeader)
{
if (!pHeader)
return;
DEBUG_PRINT_HIGH("%s: Header %p buffer %p alloclen %d offset %d filledlen %d timestamp %lld flags %#x",
str, pHeader, pHeader->pBuffer, pHeader->nAllocLen,
pHeader->nOffset, pHeader->nFilledLen,
pHeader->nTimeStamp, pHeader->nFlags);
}
void print_v4l2_buffer(const char *str, struct v4l2_buffer *v4l2)
{
if (!v4l2)
return;
if (v4l2->length == 1)
DEBUG_PRINT_HIGH(
"%s: %s: idx %2d userptr %#lx fd %d off %d size %d filled %d flags %#x\n",
str, v4l2->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ?
"OUTPUT" : "CAPTURE", v4l2->index,
v4l2->m.planes[0].m.userptr, v4l2->m.planes[0].reserved[0],
v4l2->m.planes[0].reserved[1], v4l2->m.planes[0].length,
v4l2->m.planes[0].bytesused, v4l2->flags);
else
DEBUG_PRINT_HIGH(
"%s: %s: idx %2d userptr %#lx fd %d off %d size %d filled %d flags %#x, extradata: fd %d off %d size %d filled %d\n",
str, v4l2->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE ?
"OUTPUT" : "CAPTURE", v4l2->index,
v4l2->m.planes[0].m.userptr, v4l2->m.planes[0].reserved[0],
v4l2->m.planes[0].reserved[1], v4l2->m.planes[0].length,
v4l2->m.planes[0].bytesused, v4l2->flags,
v4l2->m.planes[1].reserved[0], v4l2->m.planes[1].reserved[1],
v4l2->m.planes[1].length, v4l2->m.planes[1].bytesused);
}
void* async_message_thread (void *input)
{
OMX_BUFFERHEADERTYPE *buffer;
struct v4l2_plane plane[VIDEO_MAX_PLANES];
struct pollfd pfds[2];
struct v4l2_buffer v4l2_buf;
memset((void *)&v4l2_buf,0,sizeof(v4l2_buf));
struct v4l2_event dqevent;
omx_vdec *omx = reinterpret_cast<omx_vdec*>(input);
pfds[0].events = POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM | POLLRDBAND | POLLPRI;
pfds[1].events = POLLIN | POLLERR;
pfds[0].fd = omx->drv_ctx.video_driver_fd;
pfds[1].fd = omx->m_poll_efd;
int rc = 0;
DEBUG_PRINT_HIGH("omx_vdec: Async thread start");
prctl(PR_SET_NAME, (unsigned long)"VideoDecCallBackThread", 0, 0, 0);
while (!omx->async_thread_force_stop) {
rc = poll(pfds, 2, POLL_TIMEOUT);
if (!rc) {
DEBUG_PRINT_ERROR("Poll timedout");
break;
} else if (rc < 0 && errno != EINTR && errno != EAGAIN) {
DEBUG_PRINT_ERROR("Error while polling: %d, errno = %d", rc, errno);
break;
}
if ((pfds[1].revents & POLLIN) || (pfds[1].revents & POLLERR)) {
DEBUG_PRINT_HIGH("async_message_thread interrupted to be exited");
break;
}
if ((pfds[0].revents & POLLIN) || (pfds[0].revents & POLLRDNORM)) {
struct vdec_msginfo vdec_msg;
memset(&vdec_msg, 0, sizeof(vdec_msg));
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
v4l2_buf.memory = V4L2_MEMORY_USERPTR;
v4l2_buf.length = omx->drv_ctx.num_planes;
v4l2_buf.m.planes = plane;
while (!ioctl(pfds[0].fd, VIDIOC_DQBUF, &v4l2_buf)) {
vdec_msg.msgcode=VDEC_MSG_RESP_OUTPUT_BUFFER_DONE;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.client_data=(void*)&v4l2_buf;
vdec_msg.msgdata.output_frame.len=plane[0].bytesused;
vdec_msg.msgdata.output_frame.bufferaddr=(void*)plane[0].m.userptr;
vdec_msg.msgdata.output_frame.time_stamp= ((uint64_t)v4l2_buf.timestamp.tv_sec * (uint64_t)1000000) +
(uint64_t)v4l2_buf.timestamp.tv_usec;
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
}
if ((pfds[0].revents & POLLOUT) || (pfds[0].revents & POLLWRNORM)) {
struct vdec_msginfo vdec_msg;
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
v4l2_buf.memory = V4L2_MEMORY_USERPTR;
v4l2_buf.length = 1;
v4l2_buf.m.planes = plane;
while (!ioctl(pfds[0].fd, VIDIOC_DQBUF, &v4l2_buf)) {
vdec_msg.msgcode=VDEC_MSG_RESP_INPUT_BUFFER_DONE;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.input_frame_clientdata=(void*)&v4l2_buf;
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
}
if (pfds[0].revents & POLLPRI) {
rc = ioctl(pfds[0].fd, VIDIOC_DQEVENT, &dqevent);
if (dqevent.type == V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT ) {
struct vdec_msginfo vdec_msg;
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
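// Event payload layout as consumed below: ptr[0] = frame height, ptr[1] = frame width,
// ptr[2] = bit depth, ptr[3] = pic struct, ptr[4] = colorspace.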
vdec_msg.msgcode=VDEC_MSG_EVT_CONFIG_CHANGED;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.picsize.frame_height = ptr[0];
vdec_msg.msgdata.output_frame.picsize.frame_width = ptr[1];
vdec_msg.msgdata.output_frame.flags = true; // INSUFFICIENT event
DEBUG_PRINT_HIGH("VIDC Port Reconfig received insufficient");
omx->dpb_bit_depth = ptr[2];
DEBUG_PRINT_HIGH("VIDC Port Reconfig Bitdepth - %d", ptr[3]);
omx->m_progressive = ptr[3];
DEBUG_PRINT_HIGH("VIDC Port Reconfig PicStruct - %d", ptr[4]);
omx->m_color_space = (ptr[4] == MSM_VIDC_BT2020 ? (omx_vdec::BT2020):
(omx_vdec:: EXCEPT_BT2020));
DEBUG_PRINT_HIGH("VIDC Port Reconfig ColorSpace - %d", omx->m_color_space);
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_SUFFICIENT) {
bool event_fields_changed = false;
bool send_msg = false;
omx_vdec::color_space_type tmp_color_space;
struct vdec_msginfo vdec_msg;
DEBUG_PRINT_HIGH("VIDC Port Reconfig received sufficient");
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
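// Same payload layout as the INSUFFICIENT case above; in addition, ptr[9] (V4L2 profile)
// and ptr[10] (V4L2 level) are consumed by the H264/HEVC sufficiency check below.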
int tmp_profile = 0;
int tmp_level = 0;
int codec = omx->get_session_codec_type();
event_fields_changed |= (omx->dpb_bit_depth != (int)ptr[2]);
event_fields_changed |= (omx->m_progressive != (int)ptr[3]);
tmp_color_space = (ptr[4] == MSM_VIDC_BT2020 ? (omx_vdec::BT2020):
(omx_vdec:: EXCEPT_BT2020));
event_fields_changed |= (omx->m_color_space != tmp_color_space);
/*
* If the resolution differs only due to 16/32 pixel alignment,
* treat it as Sufficient. Ex: 1080 & 1088 or 2160 & 2176.
* When FBD comes, component updates the clients with actual
* resolution through set_buffer_geometry.
*/
event_fields_changed |= (omx->drv_ctx.video_resolution.frame_height != ptr[0]);
event_fields_changed |= (omx->drv_ctx.video_resolution.frame_width != ptr[1]);
if ((codec == V4L2_PIX_FMT_H264) ||
(codec == V4L2_PIX_FMT_HEVC)) {
if (profile_level_converter::convert_v4l2_profile_to_omx(
codec, ptr[9], &tmp_profile) &&
profile_level_converter::convert_v4l2_level_to_omx(
codec, ptr[10], &tmp_level)) {
event_fields_changed |= (omx->mClientSessionForSufficiency &&
((tmp_profile != (int)omx->mClientSetProfile) ||
(tmp_level > (int)omx->mClientSetLevel)));
}
}
if (!omx->is_down_scalar_enabled && omx->m_is_split_mode &&
(omx->drv_ctx.video_resolution.frame_height != ptr[0] ||
omx->drv_ctx.video_resolution.frame_width != ptr[1])) {
event_fields_changed = true;
}
if (event_fields_changed) {
DEBUG_PRINT_HIGH("VIDC Port Reconfig Old Resolution(H,W) = (%d,%d) New Resolution(H,W) = (%d,%d))",
omx->drv_ctx.video_resolution.frame_height,
omx->drv_ctx.video_resolution.frame_width,
ptr[0], ptr[1]);
DEBUG_PRINT_HIGH("VIDC Port Reconfig Old bitdepth = %d New bitdepth = %d",
omx->dpb_bit_depth, ptr[2]);
DEBUG_PRINT_HIGH("VIDC Port Reconfig Old picstruct = %d New picstruct = %d",
omx->m_progressive, ptr[3]);
DEBUG_PRINT_HIGH("VIDC Port Reconfig Old colorSpace = %s New colorspace = %s",
(omx->m_color_space == omx_vdec::BT2020 ? "BT2020": "EXCEPT_BT2020"),
(tmp_color_space == omx_vdec::BT2020 ? "BT2020": "EXCEPT_BT2020"));
DEBUG_PRINT_HIGH("Client Session for sufficiency feature is %s", omx->mClientSessionForSufficiency ? "enabled": "disabled");
DEBUG_PRINT_HIGH("VIDC Port Reconfig Client (Profile,Level) = (%d,%d) bitstream(Profile,Level) = (%d,%d))",
omx->mClientSetProfile,
omx->mClientSetLevel,
tmp_profile, tmp_level);
omx->dpb_bit_depth = ptr[2];
omx->m_progressive = ptr[3];
omx->m_color_space = (ptr[4] == MSM_VIDC_BT2020 ? (omx_vdec::BT2020):
(omx_vdec:: EXCEPT_BT2020));
send_msg = true;
vdec_msg.msgcode=VDEC_MSG_EVT_CONFIG_CHANGED;
vdec_msg.status_code=VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.picsize.frame_height = ptr[0];
vdec_msg.msgdata.output_frame.picsize.frame_width = ptr[1];
vdec_msg.msgdata.output_frame.flags = false; // SUFFICIENT event
} else {
struct v4l2_decoder_cmd dec;
memset(&dec, 0, sizeof(dec));
dec.cmd = V4L2_QCOM_CMD_SESSION_CONTINUE;
rc = ioctl(pfds[0].fd, VIDIOC_DECODER_CMD, &dec);
if (rc < 0) {
DEBUG_PRINT_ERROR("Session continue failed");
send_msg = true;
vdec_msg.msgcode=VDEC_MSG_EVT_HW_ERROR;
vdec_msg.status_code=VDEC_S_SUCCESS;
} else {
DEBUG_PRINT_HIGH("Sent Session continue");
}
}
if (send_msg) {
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_FLUSH_DONE) {
struct vdec_msginfo vdec_msg;
uint32_t flush_type = *(uint32_t *)dqevent.u.data;
// Old driver doesn't send flushType information.
// To remain backward compatible, fall back to the old approach
// if flush_type is not present.
vdec_msg.status_code=VDEC_S_SUCCESS;
if (!flush_type || (flush_type & V4L2_QCOM_CMD_FLUSH_OUTPUT)) {
vdec_msg.msgcode=VDEC_MSG_RESP_FLUSH_INPUT_DONE;
DEBUG_PRINT_HIGH("VIDC Input Flush Done Recieved");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
if (!flush_type || (flush_type & V4L2_QCOM_CMD_FLUSH_CAPTURE)) {
vdec_msg.msgcode=VDEC_MSG_RESP_FLUSH_OUTPUT_DONE;
DEBUG_PRINT_HIGH("VIDC Output Flush Done Recieved");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_HW_OVERLOAD) {
struct vdec_msginfo vdec_msg;
vdec_msg.msgcode=VDEC_MSG_EVT_HW_OVERLOAD;
vdec_msg.status_code=VDEC_S_SUCCESS;
DEBUG_PRINT_ERROR("HW Overload received");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_HW_UNSUPPORTED) {
struct vdec_msginfo vdec_msg;
vdec_msg.msgcode=VDEC_MSG_EVT_HW_UNSUPPORTED;
vdec_msg.status_code=VDEC_S_SUCCESS;
DEBUG_PRINT_ERROR("HW Unsupported received");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_SYS_ERROR) {
struct vdec_msginfo vdec_msg;
vdec_msg.msgcode = VDEC_MSG_EVT_HW_ERROR;
vdec_msg.status_code = VDEC_S_SUCCESS;
DEBUG_PRINT_HIGH("SYS Error Recieved");
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exited");
break;
}
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_RELEASE_BUFFER_REFERENCE) {
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
DEBUG_PRINT_LOW("REFERENCE RELEASE EVENT RECVD fd = %d offset = %d", ptr[0], ptr[1]);
} else if (dqevent.type == V4L2_EVENT_MSM_VIDC_RELEASE_UNQUEUED_BUFFER) {
unsigned int *ptr = (unsigned int *)(void *)dqevent.u.data;
struct vdec_msginfo vdec_msg;
DEBUG_PRINT_LOW("Release unqueued buffer event recvd fd = %d offset = %d", ptr[0], ptr[1]);
v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
v4l2_buf.memory = V4L2_MEMORY_USERPTR;
v4l2_buf.length = omx->drv_ctx.num_planes;
v4l2_buf.m.planes = plane;
v4l2_buf.index = ptr[5];
v4l2_buf.flags = 0;
vdec_msg.msgcode = VDEC_MSG_RESP_OUTPUT_BUFFER_DONE;
vdec_msg.status_code = VDEC_S_SUCCESS;
vdec_msg.msgdata.output_frame.client_data = (void*)&v4l2_buf;
vdec_msg.msgdata.output_frame.len = 0;
vdec_msg.msgdata.output_frame.bufferaddr = (void*)(intptr_t)ptr[2];
vdec_msg.msgdata.output_frame.time_stamp = ((uint64_t)ptr[3] * (uint64_t)1000000) +
(uint64_t)ptr[4];
if (omx->async_message_process(input,&vdec_msg) < 0) {
DEBUG_PRINT_HIGH("async_message_thread Exitedn");
break;
}
} else {
DEBUG_PRINT_HIGH("VIDC Some Event recieved");
continue;
}
}
}
DEBUG_PRINT_HIGH("omx_vdec: Async thread stop");
return NULL;
}
void* message_thread_dec(void *input)
{
omx_vdec* omx = reinterpret_cast<omx_vdec*>(input);
int res = 0;
DEBUG_PRINT_HIGH("omx_vdec: message thread start");
prctl(PR_SET_NAME, (unsigned long)"VideoDecMsgThread", 0, 0, 0);
while (!omx->message_thread_stop) {
res = omx->signal.wait(2 * 1000000000);
if (res == ETIMEDOUT || omx->message_thread_stop) {
continue;
} else if (res) {
DEBUG_PRINT_ERROR("omx_vdec: message_thread_dec wait on condition failed, exiting");
break;
}
omx->process_event_cb(omx);
}
DEBUG_PRINT_HIGH("omx_vdec: message thread stop");
return 0;
}
void post_message(omx_vdec *omx, unsigned char id)
{
(void)id;
omx->signal.signal();
}
// omx_cmd_queue destructor
omx_vdec::omx_cmd_queue::~omx_cmd_queue()
{
// Nothing to do
}
// omx cmd queue constructor
omx_vdec::omx_cmd_queue::omx_cmd_queue(): m_read(0),m_write(0),m_size(0)
{
memset(m_q,0,sizeof(omx_event)*OMX_CORE_CONTROL_CMDQ_SIZE);
}
// omx cmd queue insert
bool omx_vdec::omx_cmd_queue::insert_entry(unsigned long p1, unsigned long p2, unsigned long id)
{
bool ret = true;
if (m_size < OMX_CORE_CONTROL_CMDQ_SIZE) {
m_q[m_write].id = id;
m_q[m_write].param1 = p1;
m_q[m_write].param2 = p2;
m_write++;
m_size ++;
if (m_write >= OMX_CORE_CONTROL_CMDQ_SIZE) {
m_write = 0;
}
} else {
ret = false;
DEBUG_PRINT_ERROR("ERROR: %s()::Command Queue Full", __func__);
}
return ret;
}
// omx cmd queue pop
bool omx_vdec::omx_cmd_queue::pop_entry(unsigned long *p1, unsigned long *p2, unsigned long *id)
{
bool ret = true;
if (m_size > 0) {
*id = m_q[m_read].id;
*p1 = m_q[m_read].param1;
*p2 = m_q[m_read].param2;
// Move the read pointer ahead
++m_read;
--m_size;
if (m_read >= OMX_CORE_CONTROL_CMDQ_SIZE) {
m_read = 0;
}
} else {
ret = false;
}
return ret;
}
// Retrieve the first message type in the queue
unsigned omx_vdec::omx_cmd_queue::get_q_msg_type()
{
return m_q[m_read].id;
}
#ifdef _ANDROID_
omx_vdec::ts_arr_list::ts_arr_list()
{
//initialize timestamps array
memset(m_ts_arr_list, 0, ( sizeof(ts_entry) * MAX_NUM_INPUT_OUTPUT_BUFFERS) );
}
omx_vdec::ts_arr_list::~ts_arr_list()
{
//free m_ts_arr_list?
}
bool omx_vdec::ts_arr_list::insert_ts(OMX_TICKS ts)
{
bool ret = true;
bool duplicate_ts = false;
int idx = 0;
//insert at the first available empty location
for ( ; idx < MAX_NUM_INPUT_OUTPUT_BUFFERS; idx++) {
if (!m_ts_arr_list[idx].valid) {
//found invalid or empty entry, save timestamp
m_ts_arr_list[idx].valid = true;
m_ts_arr_list[idx].timestamp = ts;
DEBUG_PRINT_LOW("Insert_ts(): Inserting TIMESTAMP (%lld) at idx (%d)",
ts, idx);
break;
}
}
if (idx == MAX_NUM_INPUT_OUTPUT_BUFFERS) {
DEBUG_PRINT_LOW("Timestamp array list is FULL. Unsuccessful insert");
ret = false;
}
return ret;
}
bool omx_vdec::ts_arr_list::pop_min_ts(OMX_TICKS &ts)
{
bool ret = true;
int min_idx = -1;
OMX_TICKS min_ts = 0;
int idx = 0;
for ( ; idx < MAX_NUM_INPUT_OUTPUT_BUFFERS; idx++) {
if (m_ts_arr_list[idx].valid) {
//found valid entry, save index
if (min_idx < 0) {
//first valid entry
min_ts = m_ts_arr_list[idx].timestamp;
min_idx = idx;
} else if (m_ts_arr_list[idx].timestamp < min_ts) {
min_ts = m_ts_arr_list[idx].timestamp;
min_idx = idx;
}
}
}
if (min_idx < 0) {
//no valid entries found
DEBUG_PRINT_LOW("Timestamp array list is empty. Unsuccessful pop");
ts = 0;
ret = false;
} else {
ts = m_ts_arr_list[min_idx].timestamp;
m_ts_arr_list[min_idx].valid = false;
DEBUG_PRINT_LOW("Pop_min_ts:Timestamp (%lld), index(%d)",
ts, min_idx);
}
return ret;
}
bool omx_vdec::ts_arr_list::reset_ts_list()
{
bool ret = true;
int idx = 0;
DEBUG_PRINT_LOW("reset_ts_list(): Resetting timestamp array list");
for ( ; idx < MAX_NUM_INPUT_OUTPUT_BUFFERS; idx++) {
m_ts_arr_list[idx].valid = false;
}
return ret;
}
#endif
// factory function executed by the core to create instances
void *get_omx_component_factory_fn(void)
{
return (new omx_vdec);
}
bool is_platform_tp10capture_supported()
{
DEBUG_PRINT_HIGH("TP10 on capture port is supported");
return true;
}
inline int omx_vdec::get_session_codec_type()
{
return output_capability;
}
/* ======================================================================
FUNCTION
omx_vdec::omx_vdec
DESCRIPTION
Constructor
PARAMETERS
None
RETURN VALUE
None.
========================================================================== */
omx_vdec::omx_vdec(): m_error_propogated(false),
m_state(OMX_StateInvalid),
m_app_data(NULL),
m_inp_mem_ptr(NULL),
m_out_mem_ptr(NULL),
m_intermediate_out_mem_ptr(NULL),
m_client_output_extradata_mem_ptr(NULL),
input_flush_progress (false),
output_flush_progress (false),
input_use_buffer (false),
output_use_buffer (false),
ouput_egl_buffers(false),
m_use_output_pmem(OMX_FALSE),
pending_input_buffers(0),
pending_output_buffers(0),
m_out_bm_count(0),
m_inp_bm_count(0),
m_out_extradata_bm_count(0),
m_inp_bPopulated(OMX_FALSE),
m_out_bPopulated(OMX_FALSE),
m_flags(0),
m_inp_bEnabled(OMX_TRUE),
m_out_bEnabled(OMX_TRUE),
m_in_alloc_cnt(0),
m_platform_list(NULL),
m_platform_entry(NULL),
m_pmem_info(NULL),
h264_parser(NULL),
arbitrary_bytes (false),
psource_frame (NULL),
pdest_frame (NULL),
m_inp_heap_ptr (NULL),
m_phdr_pmem_ptr(NULL),
m_heap_inp_bm_count (0),
codec_type_parse ((codec_type)0),
first_frame_meta (true),
frame_count (0),
nal_count (0),
nal_length(0),
look_ahead_nal (false),
first_frame(0),
first_buffer(NULL),
first_frame_size (0),
m_device_file_ptr(NULL),
h264_last_au_ts(LLONG_MAX),
h264_last_au_flags(0),
m_disp_hor_size(0),
m_disp_vert_size(0),
prev_ts(LLONG_MAX),
prev_ts_actual(LLONG_MAX),
rst_prev_ts(true),
frm_int(0),
m_fps_received(0),
m_fps_prev(0),
m_drc_enable(0),
in_reconfig(false),
c2d_enable_pending(false),
m_display_id(NULL),
client_extradata(0),
#ifdef _ANDROID_
m_enable_android_native_buffers(OMX_FALSE),
m_use_android_native_buffers(OMX_FALSE),
#endif
m_disable_dynamic_buf_mode(0),
m_desc_buffer_ptr(NULL),
secure_mode(false),
allocate_native_handle(false),
client_set_fps(false),
stereo_output_mode(HAL_NO_3D),
m_last_rendered_TS(-1),
m_dec_hfr_fps(0),
m_dec_secure_prefetch_size_internal(0),
m_dec_secure_prefetch_size_output(0),
m_arb_mode_override(0),
m_queued_codec_config_count(0),
secure_scaling_to_non_secure_opb(false),
m_force_compressed_for_dpb(true),
m_is_display_session(false),
m_prefetch_done(0),
m_is_split_mode(false),
m_buffer_error(false)
{
m_poll_efd = -1;
memset(&drv_ctx, 0, sizeof(drv_ctx));
drv_ctx.video_driver_fd = -1;
drv_ctx.extradata_info.ion.data_fd = -1;
drv_ctx.extradata_info.ion.dev_fd = -1;
/* Assumption is that, to begin with, all the frames are with the decoder */
DEBUG_PRINT_HIGH("In %u bit OMX vdec Constructor", (unsigned int)sizeof(long) * 8);
memset(&m_debug,0,sizeof(m_debug));
#ifdef _ANDROID_
char property_value[PROPERTY_VALUE_MAX] = {0};
property_get("vendor.vidc.debug.level", property_value, "1");
debug_level = strtoul(property_value, NULL, 16);
property_value[0] = '\0';
DEBUG_PRINT_HIGH("In OMX vdec Constructor");
// TODO: Support in XML
perf_flag = 0;
if (perf_flag) {
DEBUG_PRINT_HIGH("perf flag is %d", perf_flag);
dec_time.start();
}
proc_frms = latency = 0;
prev_n_filled_len = 0;
Platform::Config::getInt32(Platform::vidc_dec_log_in,
(int32_t *)&m_debug.in_buffer_log, 0);
Platform::Config::getInt32(Platform::vidc_dec_log_out,
(int32_t *)&m_debug.out_buffer_log, 0);
Platform::Config::getInt32(Platform::vidc_dec_sec_prefetch_size_internal,
(int32_t *)&m_dec_secure_prefetch_size_internal, 0);
Platform::Config::getInt32(Platform::vidc_dec_sec_prefetch_size_output,
(int32_t *)&m_dec_secure_prefetch_size_output, 0);
DEBUG_PRINT_HIGH("Prefetch size internal = %d, output = %d",
m_dec_secure_prefetch_size_internal, m_dec_secure_prefetch_size_output);
Platform::Config::getInt32(Platform::vidc_dec_arb_mode_override,
(int32_t *)&m_arb_mode_override, 0);
Platform::Config::getInt32(Platform::vidc_perf_control_enable,
(int32_t *)&m_perf_control.m_perf_control_enable, 0);
if (m_perf_control.m_perf_control_enable) {
DEBUG_PRINT_HIGH("perf cotrol enabled");
m_perf_control.load_perf_library();
}
property_value[0] = '\0';
property_get("vendor.vidc.dec.log.in", property_value, "0");
m_debug.in_buffer_log |= atoi(property_value);
DEBUG_PRINT_HIGH("vendor.vidc.dec.log.in value is %d", m_debug.in_buffer_log);
property_value[0] = '\0';
property_get("vendor.vidc.dec.log.out", property_value, "0");
m_debug.out_buffer_log |= atoi(property_value);
DEBUG_PRINT_HIGH("vendor.vidc.dec.log.out value is %d", m_debug.out_buffer_log);
property_value[0] = '\0';
property_get("vendor.vidc.dec.log.cc.out", property_value, "0");
m_debug.out_cc_buffer_log |= atoi(property_value);
DEBUG_PRINT_HIGH("vendor.vidc.dec.log.cc.out value is %d", m_debug.out_buffer_log);
property_value[0] = '\0';
property_get("vendor.vidc.dec.meta.log.out", property_value, "0");
m_debug.out_meta_buffer_log = atoi(property_value);
property_value[0] = '\0';
property_get("vendor.vidc.log.loc", property_value, BUFFER_LOG_LOC);
if (*property_value)
strlcpy(m_debug.log_loc, property_value, PROPERTY_VALUE_MAX);
struct timeval te;
gettimeofday(&te, NULL);
m_debug.session_id = te.tv_sec*1000LL + te.tv_usec/1000;
m_debug.seq_count = 0;
#ifdef _UBWC_
property_value[0] = '\0';
property_get("vendor.gralloc.disable_ubwc", property_value, "0");
m_disable_ubwc_mode = atoi(property_value);
DEBUG_PRINT_HIGH("UBWC mode is %s", m_disable_ubwc_mode ? "disabled" : "enabled");
#else
m_disable_ubwc_mode = true;
#endif
#endif
memset(&m_cmp,0,sizeof(m_cmp));
memset(&m_cb,0,sizeof(m_cb));
memset (&h264_scratch,0,sizeof (OMX_BUFFERHEADERTYPE));
memset (m_hwdevice_name,0,sizeof(m_hwdevice_name));
memset(m_demux_offsets, 0, ( sizeof(OMX_U32) * 8192) );
memset(&m_custom_buffersize, 0, sizeof(m_custom_buffersize));
memset(&m_client_color_space, 0, sizeof(DescribeColorAspectsParams));
memset(&m_internal_color_space, 0, sizeof(DescribeColorAspectsParams));
memset(&m_client_hdr_info, 0, sizeof(DescribeHDRStaticInfoParams));
memset(&m_internal_hdr_info, 0, sizeof(DescribeHDRStaticInfoParams));
m_demux_entries = 0;
msg_thread_id = 0;
async_thread_id = 0;
msg_thread_created = false;
async_thread_created = false;
async_thread_force_stop = false;
message_thread_stop = false;
#ifdef _ANDROID_ICS_
memset(&native_buffer, 0 ,(sizeof(struct nativebuffer) * MAX_NUM_INPUT_OUTPUT_BUFFERS));
#endif
/* invalidate m_frame_pack_arrangement */
memset(&m_frame_pack_arrangement, 0, sizeof(OMX_QCOM_FRAME_PACK_ARRANGEMENT));
m_frame_pack_arrangement.cancel_flag = 1;
drv_ctx.timestamp_adjust = false;
m_vendor_config.pData = NULL;
pthread_mutex_init(&m_lock, NULL);
pthread_mutex_init(&c_lock, NULL);
pthread_mutex_init(&buf_lock, NULL);
sem_init(&m_cmd_lock,0,0);
sem_init(&m_safe_flush, 0, 0);
streaming[CAPTURE_PORT] =
streaming[OUTPUT_PORT] = false;
#ifdef _ANDROID_
// TODO: Support in XML
m_debug_extradata = 0;
#endif
m_fill_output_msg = OMX_COMPONENT_GENERATE_FTB;
client_buffers.set_vdec_client(this);
dynamic_buf_mode = false;
is_down_scalar_enabled = false;
m_downscalar_width = 0;
m_downscalar_height = 0;
m_force_down_scalar = 0;
m_reconfig_height = 0;
m_reconfig_width = 0;
m_smoothstreaming_mode = false;
m_smoothstreaming_width = 0;
m_smoothstreaming_height = 0;
m_decode_order_mode = false;
m_perf_control.perf_lock_acquire();
m_client_req_turbo_mode = false;
is_q6_platform = false;
m_input_pass_buffer_fd = false;
memset(&m_extradata_info, 0, sizeof(m_extradata_info));
m_client_color_space.nPortIndex = (OMX_U32)OMX_CORE_INPUT_PORT_INDEX;
m_client_color_space.sAspects.mRange = ColorAspects::RangeUnspecified;
m_client_color_space.sAspects.mPrimaries = ColorAspects::PrimariesUnspecified;
m_client_color_space.sAspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
m_client_color_space.sAspects.mTransfer = ColorAspects::TransferUnspecified;
m_internal_color_space.nPortIndex = (OMX_U32)OMX_CORE_OUTPUT_PORT_INDEX;
m_internal_color_space.sAspects.mRange = ColorAspects::RangeUnspecified;
m_internal_color_space.sAspects.mPrimaries = ColorAspects::PrimariesUnspecified;
m_internal_color_space.sAspects.mMatrixCoeffs = ColorAspects::MatrixUnspecified;
m_internal_color_space.sAspects.mTransfer = ColorAspects::TransferUnspecified;
m_internal_color_space.nSize = sizeof(DescribeColorAspectsParams);
m_client_hdr_info.nPortIndex = (OMX_U32)OMX_CORE_INPUT_PORT_INDEX;
m_internal_hdr_info.nPortIndex = (OMX_U32)OMX_CORE_OUTPUT_PORT_INDEX;
m_dither_config = DITHER_DISABLE;
DEBUG_PRINT_HIGH("Dither config is %d", m_dither_config);
m_color_space = EXCEPT_BT2020;
init_color_aspects_map();
profile_level_converter::init();
mClientSessionForSufficiency = false;
mClientSetProfile = 0;
mClientSetLevel = 0;
#ifdef USE_GBM
drv_ctx.gbm_device_fd = -1;
#endif
}
static const int event_type[] = {
V4L2_EVENT_MSM_VIDC_FLUSH_DONE,
V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_SUFFICIENT,
V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT,
V4L2_EVENT_MSM_VIDC_RELEASE_BUFFER_REFERENCE,
V4L2_EVENT_MSM_VIDC_RELEASE_UNQUEUED_BUFFER,
V4L2_EVENT_MSM_VIDC_SYS_ERROR,
V4L2_EVENT_MSM_VIDC_HW_OVERLOAD,
V4L2_EVENT_MSM_VIDC_HW_UNSUPPORTED
};
static OMX_ERRORTYPE subscribe_to_events(int fd)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_event_subscription sub;
int array_sz = sizeof(event_type)/sizeof(int);
int i,rc;
if (fd < 0) {
DEBUG_PRINT_ERROR("Invalid input: %d", fd);
return OMX_ErrorBadParameter;
}
for (i = 0; i < array_sz; ++i) {
memset(&sub, 0, sizeof(sub));
sub.type = event_type[i];
rc = ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
if (rc) {
DEBUG_PRINT_ERROR("Failed to subscribe event: 0x%x", sub.type);
break;
}
}
if (i < array_sz) {
for (--i; i >=0 ; i--) {
memset(&sub, 0, sizeof(sub));
sub.type = event_type[i];
rc = ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
if (rc)
DEBUG_PRINT_ERROR("Failed to unsubscribe event: 0x%x", sub.type);
}
eRet = OMX_ErrorNotImplemented;
}
return eRet;
}
static OMX_ERRORTYPE unsubscribe_to_events(int fd)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_event_subscription sub;
int array_sz = sizeof(event_type)/sizeof(int);
int i,rc;
if (fd < 0) {
DEBUG_PRINT_ERROR("Invalid input: %d", fd);
return OMX_ErrorBadParameter;
}
for (i = 0; i < array_sz; ++i) {
memset(&sub, 0, sizeof(sub));
sub.type = event_type[i];
rc = ioctl(fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
if (rc) {
DEBUG_PRINT_ERROR("Failed to unsubscribe event: 0x%x", sub.type);
break;
}
}
return eRet;
}
/* ======================================================================
FUNCTION
omx_vdec::~omx_vdec
DESCRIPTION
Destructor
PARAMETERS
None
RETURN VALUE
None.
========================================================================== */
omx_vdec::~omx_vdec()
{
m_pmem_info = NULL;
DEBUG_PRINT_HIGH("In OMX vdec Destructor");
if (msg_thread_created) {
DEBUG_PRINT_HIGH("Signalling close to OMX Msg Thread");
message_thread_stop = true;
post_message(this, OMX_COMPONENT_CLOSE_MSG);
DEBUG_PRINT_HIGH("Waiting on OMX Msg Thread exit");
pthread_join(msg_thread_id,NULL);
}
DEBUG_PRINT_HIGH("Waiting on OMX Async Thread exit");
if(eventfd_write(m_poll_efd, 1)) {
DEBUG_PRINT_ERROR("eventfd_write failed for fd: %d, errno = %d, force stop async_thread", m_poll_efd, errno);
async_thread_force_stop = true;
}
if (async_thread_created)
pthread_join(async_thread_id,NULL);
if (m_prefetch_done & 0x1)
prefetch_buffers(PREFETCH_PIXEL_BUFFER_COUNT, m_dec_secure_prefetch_size_output, ION_IOC_DRAIN, ION_FLAG_CP_PIXEL);
if (m_prefetch_done & 0x2)
prefetch_buffers(PREFETCH_NON_PIXEL_BUFFER_COUNT, m_dec_secure_prefetch_size_internal, ION_IOC_DRAIN, ION_FLAG_CP_NON_PIXEL);
unsubscribe_to_events(drv_ctx.video_driver_fd);
close(m_poll_efd);
#ifdef HYPERVISOR
hypv_close(drv_ctx.video_driver_fd);
#else
close(drv_ctx.video_driver_fd);
#endif
pthread_mutex_destroy(&m_lock);
pthread_mutex_destroy(&c_lock);
pthread_mutex_destroy(&buf_lock);
sem_destroy(&m_cmd_lock);
if (perf_flag) {
DEBUG_PRINT_HIGH("--> TOTAL PROCESSING TIME");
dec_time.end();
}
DEBUG_PRINT_INFO("Exit OMX vdec Destructor: fd=%d",drv_ctx.video_driver_fd);
m_perf_control.perf_lock_release();
}
OMX_ERRORTYPE omx_vdec::set_dpb(bool is_split_mode)
{
int rc = 0;
struct v4l2_ext_control ctrl[1];
struct v4l2_ext_controls controls;
ctrl[0].id = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE;
if (is_split_mode) {
ctrl[0].value = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_SECONDARY;
} else {
ctrl[0].value = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_PRIMARY;
}
controls.count = 1;
controls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
controls.controls = ctrl;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_EXT_CTRLS, &controls);
if (rc) {
DEBUG_PRINT_ERROR("Failed to set ext ctrls for opb_dpb: %d\n", rc);
return OMX_ErrorUnsupportedSetting;
}
m_is_split_mode = is_split_mode;
return OMX_ErrorNone;
}
OMX_ERRORTYPE omx_vdec::decide_dpb_buffer_mode()
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_format fmt;
int rc = 0;
// Default is Combined Mode
bool enable_split = false;
bool is_client_dest_format_non_ubwc = (
capture_capability != V4L2_PIX_FMT_NV12_UBWC &&
capture_capability != V4L2_PIX_FMT_NV12_TP10_UBWC);
bool dither_enable = false;
bool capability_changed = false;
switch (m_dither_config) {
case DITHER_DISABLE:
dither_enable = false;
break;
case DITHER_COLORSPACE_EXCEPTBT2020:
dither_enable = (m_color_space == EXCEPT_BT2020);
break;
case DITHER_ALL_COLORSPACE:
dither_enable = true;
break;
default:
DEBUG_PRINT_ERROR("Unsupported dither configuration:%d", m_dither_config);
}
// Reset v4l2_format struct
memset(&fmt, 0x0, sizeof(struct v4l2_format));
if (is_client_dest_format_non_ubwc){
// Assuming all the else blocks are for 8 bit depth
if (dpb_bit_depth == MSM_VIDC_BIT_DEPTH_10) {
enable_split = true;
if(is_flexible_format){ // if flexible formats are expected, P010 is set for 10bit cases here
drv_ctx.output_format = VDEC_YUV_FORMAT_P010_VENUS;
capture_capability = V4L2_PIX_FMT_SDE_Y_CBCR_H2V2_P010_VENUS;
capability_changed = true;
}
} else if (m_progressive == MSM_VIDC_PIC_STRUCT_PROGRESSIVE) {
enable_split = true;
} else {
// Hardware does not support NV12+interlace clips.
// Request NV12_UBWC and convert it to NV12+interlace using C2D
// in combined mode
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12_UBWC;
capture_capability = V4L2_PIX_FMT_NV12_UBWC;
capability_changed = true;
}
} else {
if (dpb_bit_depth == MSM_VIDC_BIT_DEPTH_10) {
enable_split = dither_enable;
if (dither_enable) {
capture_capability = m_disable_ubwc_mode ?
V4L2_PIX_FMT_NV12 : V4L2_PIX_FMT_NV12_UBWC;
capability_changed = true;
} else {
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12_TP10_UBWC;
capture_capability = V4L2_PIX_FMT_NV12_TP10_UBWC;
capability_changed = true;
}
}
// 8 bit depth uses the default.
// Combined mode
// V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE
}
if (capability_changed == true) {
// Get format for CAPTURE port
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_G_FMT, &fmt);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed get format on capture mplane", __func__);
return OMX_ErrorUnsupportedSetting;
}
// Set Capability for CAPTURE port if there is a change
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_FMT, &fmt);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed set format on capture mplane", __func__);
return OMX_ErrorUnsupportedSetting;
}
}
// Check the component for its valid current state
if (!BITMASK_PRESENT(&m_flags ,OMX_COMPONENT_IDLE_PENDING) &&
!BITMASK_PRESENT(&m_flags, OMX_COMPONENT_OUTPUT_ENABLE_PENDING)) {
DEBUG_PRINT_LOW("Invalid state to decide on dpb-opb split");
return OMX_ErrorNone;
}
eRet = set_dpb(enable_split);
if (eRet) {
DEBUG_PRINT_HIGH("Failed to set DPB buffer mode: %d", eRet);
}
return eRet;
}
bool omx_vdec::check_supported_flexible_formats(OMX_COLOR_FORMATTYPE required_format)
{
if(required_format == (OMX_COLOR_FORMATTYPE)QOMX_COLOR_FORMATYUV420PackedSemiPlanar32m ||
required_format == (OMX_COLOR_FORMATTYPE)QOMX_COLOR_FORMATYUV420SemiPlanarP010Venus) {
//For now, flexible formats default to NV12 for 8-bit cases;
//they switch to P010 after the 10-bit port reconfig.
return TRUE;
}
else {
return FALSE;
}
}
int omx_vdec::enable_downscalar()
{
int rc = 0;
struct v4l2_control control;
struct v4l2_format fmt;
if (is_down_scalar_enabled) {
DEBUG_PRINT_LOW("%s: already enabled", __func__);
return 0;
}
DEBUG_PRINT_LOW("omx_vdec::enable_downscalar");
rc = decide_dpb_buffer_mode();
if (rc) {
DEBUG_PRINT_ERROR("%s: decide_dpb_buffer_mode Failed ", __func__);
return rc;
}
is_down_scalar_enabled = true;
return 0;
}
int omx_vdec::disable_downscalar()
{
int rc = 0;
struct v4l2_control control;
if (!is_down_scalar_enabled) {
DEBUG_PRINT_LOW("omx_vdec::disable_downscalar: already disabled");
return 0;
}
rc = decide_dpb_buffer_mode();
if (rc < 0) {
DEBUG_PRINT_ERROR("%s:decide_dpb_buffer_mode failed\n", __func__);
return rc;
}
is_down_scalar_enabled = false;
return rc;
}
int omx_vdec::decide_downscalar()
{
int rc = 0;
struct v4l2_format fmt;
enum color_fmts color_format;
OMX_U32 width, height;
OMX_BOOL isPortraitVideo = OMX_FALSE;
if (capture_capability == V4L2_PIX_FMT_NV12_TP10_UBWC) {
rc = disable_downscalar();
if (rc) {
DEBUG_PRINT_ERROR("Disable downscalar failed!");
return rc;
}
return 0;
}
#ifdef _QUERY_DISP_RES_
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_G_FMT, &fmt);
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: Failed to get format on capture mplane", __func__);
return rc;
}
isPortraitVideo = fmt.fmt.pix_mp.width < fmt.fmt.pix_mp.height ? OMX_TRUE : OMX_FALSE;
if (!m_downscalar_width || !m_downscalar_height) {
qdutils::DisplayAttributes dpa = {}, dsa = {}, dva = {};
int prim_config, ext_config, virt_config;
prim_config = qdutils::getActiveConfig(qdutils::DISPLAY_PRIMARY);
dpa = qdutils::getDisplayAttributes(prim_config, qdutils::DISPLAY_PRIMARY);
DEBUG_PRINT_HIGH("%s: Primary dpa.xres = %d dpa.yres=%d dpa.xdpi = %f dpa.ydpi = %f ",
__func__, dpa.xres, dpa.yres, dpa.xdpi, dpa.ydpi);
ext_config = qdutils::getActiveConfig(qdutils::DISPLAY_EXTERNAL);
dsa = qdutils::getDisplayAttributes(ext_config, qdutils::DISPLAY_EXTERNAL);
DEBUG_PRINT_HIGH("%s: HDMI dsa.xres = %d dsa.yres = %d dsa.xdpi = %f dsa.ydpi = %f ",
__func__, dsa.xres, dsa.yres, dsa.xdpi, dsa.ydpi);
virt_config = qdutils::getActiveConfig(qdutils::DISPLAY_VIRTUAL);
dva = qdutils::getDisplayAttributes(virt_config, qdutils::DISPLAY_VIRTUAL);
DEBUG_PRINT_HIGH("%s: Virtual dva.xres = %d dva.yres = %d dva.xdpi = %f dva.ydpi = %f ",
__func__, dva.xres, dva.yres, dva.xdpi, dva.ydpi);
/* The logic below handles the following conditions:
* 1. Choose the display resolution as the maximum resolution across all connected
* displays (primary, secondary, virtual), so that we do not downscale
* unnecessarily and lose quality on the display that supports the larger resolution.
* 2. Connected displays might be in landscape or portrait mode, so xres may be
* smaller or greater than yres. We therefore take the max of the two as the
* width and the min as the height, and swap them if the next point applies.
* 3. The video itself might be in portrait mode, so swap the downscalar width and
* height in that case.
*/
if (dsa.xres * dsa.yres > dpa.xres * dpa.yres) {
m_downscalar_width = MAX(dsa.xres, dsa.yres);
m_downscalar_height = MIN(dsa.xres, dsa.yres);
} else if (dva.xres * dva.yres > dpa.xres * dpa.yres) {
m_downscalar_width = MAX(dva.xres, dva.yres);
m_downscalar_height = MIN(dva.xres, dva.yres);
} else {
m_downscalar_width = MAX(dpa.xres, dpa.yres);
m_downscalar_height = MIN(dpa.xres, dpa.yres);
}
if (isPortraitVideo) {
// Swap width and height
m_downscalar_width = m_downscalar_width ^ m_downscalar_height;
m_downscalar_height = m_downscalar_width ^ m_downscalar_height;
m_downscalar_width = m_downscalar_width ^ m_downscalar_height;
}
}
m_downscalar_width = ALIGN(m_downscalar_width, 128);
m_downscalar_height = ALIGN(m_downscalar_height, 32);
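// Illustrative example: a 1920x1080 primary panel yields a downscalar target of
// ALIGN(1920, 128) x ALIGN(1080, 32) = 1920x1088.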
#endif
if (!m_downscalar_width || !m_downscalar_height) {
DEBUG_PRINT_LOW("%s: Invalid downscalar configuration", __func__);
return 0;
}
if (m_force_down_scalar) {
DEBUG_PRINT_LOW("%s: m_force_down_scalar %d ", __func__, m_force_down_scalar);
return 0;
}
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_G_FMT, &fmt);
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: Failed to get format on capture mplane", __func__);
return rc;
}
height = fmt.fmt.pix_mp.height;
width = fmt.fmt.pix_mp.width;
DEBUG_PRINT_HIGH("%s: driver wxh = %dx%d, downscalar wxh = %dx%d m_is_display_session = %d", __func__,
fmt.fmt.pix_mp.width, fmt.fmt.pix_mp.height, m_downscalar_width, m_downscalar_height, m_is_display_session);
if ((fmt.fmt.pix_mp.width * fmt.fmt.pix_mp.height > m_downscalar_width * m_downscalar_height) &&
m_is_display_session) {
rc = enable_downscalar();
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: enable_downscalar failed\n", __func__);
return rc;
}
width = m_downscalar_width > fmt.fmt.pix_mp.width ?
fmt.fmt.pix_mp.width : m_downscalar_width;
height = m_downscalar_height > fmt.fmt.pix_mp.height ?
fmt.fmt.pix_mp.height : m_downscalar_height;
switch (capture_capability) {
case V4L2_PIX_FMT_NV12:
color_format = COLOR_FMT_NV12;
break;
case V4L2_PIX_FMT_NV12_UBWC:
color_format = COLOR_FMT_NV12_UBWC;
break;
case V4L2_PIX_FMT_NV12_TP10_UBWC:
color_format = COLOR_FMT_NV12_BPP10_UBWC;
break;
default:
DEBUG_PRINT_ERROR("Color format not recognized\n");
rc = OMX_ErrorUndefined;
return rc;
}
} else {
rc = disable_downscalar();
if (rc < 0) {
DEBUG_PRINT_ERROR("%s: disable_downscalar failed\n", __func__);
return rc;
}
}
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.height = height;
fmt.fmt.pix_mp.width = width;
fmt.fmt.pix_mp.pixelformat = capture_capability;
rc = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_FMT, &fmt);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed set format on capture mplane", __func__);
return rc;
}
rc = get_buffer_req(&drv_ctx.op_buf);
if (rc) {
DEBUG_PRINT_ERROR("%s: Failed to get output buffer requirements", __func__);
return rc;
}
return rc;
}
/* ======================================================================
FUNCTION
omx_vdec::OMXCntrlProcessMsgCb
DESCRIPTION
IL Client callbacks are generated through this routine. The decoder
provides the thread context for this routine.
PARAMETERS
ctxt -- Context information related to the self.
id -- Event identifier. This could be any of the following:
1. Command completion event
2. Buffer done callback event
3. Frame done callback event
RETURN VALUE
None.
========================================================================== */
void omx_vdec::process_event_cb(void *ctxt)
{
unsigned long p1; // Parameter - 1
unsigned long p2; // Parameter - 2
unsigned long ident;
unsigned qsize=0; // qsize
omx_vdec *pThis = (omx_vdec *) ctxt;
if (!pThis) {
DEBUG_PRINT_ERROR("ERROR: %s()::Context is incorrect, bailing out",
__func__);
return;
}
// Protect the shared queue data structure
do {
/*Read the message id's from the queue*/
pthread_mutex_lock(&pThis->m_lock);
qsize = pThis->m_cmd_q.m_size;
if (qsize) {
pThis->m_cmd_q.pop_entry(&p1, &p2, &ident);
}
if (qsize == 0 && pThis->m_state != OMX_StatePause) {
qsize = pThis->m_ftb_q.m_size;
if (qsize) {
pThis->m_ftb_q.pop_entry(&p1, &p2, &ident);
}
}
if (qsize == 0 && pThis->m_state != OMX_StatePause) {
qsize = pThis->m_etb_q.m_size;
if (qsize) {
pThis->m_etb_q.pop_entry(&p1, &p2, &ident);
}
}
pthread_mutex_unlock(&pThis->m_lock);
/*process message if we have one*/
if (qsize > 0) {
switch (ident) {
case OMX_COMPONENT_GENERATE_EVENT:
if (pThis->m_cb.EventHandler) {
switch (p1) {
case OMX_CommandStateSet:
pThis->m_state = (OMX_STATETYPE) p2;
DEBUG_PRINT_HIGH("OMX_CommandStateSet complete, m_state = %d",
pThis->m_state);
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete, p1, p2, NULL);
break;
case OMX_EventError:
if (p2 == OMX_StateInvalid) {
DEBUG_PRINT_ERROR("OMX_EventError: p2 is OMX_StateInvalid");
pThis->m_state = (OMX_STATETYPE) p2;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventError, OMX_ErrorInvalidState, p2, NULL);
} else if (p2 == (unsigned long)OMX_ErrorHardware) {
pThis->omx_report_error();
} else {
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventError, p2, (OMX_U32)NULL, NULL );
}
break;
case OMX_CommandPortDisable:
DEBUG_PRINT_HIGH("OMX_CommandPortDisable complete for port [%lu]", p2);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_OUTPUT_FLUSH_IN_DISABLE_PENDING)) {
BITMASK_SET(&pThis->m_flags, OMX_COMPONENT_DISABLE_OUTPUT_DEFERRED);
break;
}
if (p2 == OMX_CORE_OUTPUT_PORT_INDEX) {
pThis->stream_off(OMX_CORE_OUTPUT_PORT_INDEX);
OMX_ERRORTYPE eRet = pThis->get_buffer_req(&pThis->drv_ctx.op_buf);
pThis->in_reconfig = false;
pThis->client_buffers.enable_color_conversion(pThis->c2d_enable_pending);
if (eRet != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("get_buffer_req failed eRet = %d", eRet);
pThis->omx_report_error();
break;
}
}
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete, p1, p2, NULL );
break;
case OMX_CommandPortEnable:
DEBUG_PRINT_HIGH("OMX_CommandPortEnable complete for port [%lu]", p2);
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,\
OMX_EventCmdComplete, p1, p2, NULL );
break;
default:
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete, p1, p2, NULL );
break;
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_ETB_ARBITRARY:
if (pThis->empty_this_buffer_proxy_arbitrary((OMX_HANDLETYPE)p1,\
(OMX_BUFFERHEADERTYPE *)(intptr_t)p2) != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("empty_this_buffer_proxy_arbitrary failure");
pThis->omx_report_error ();
}
break;
case OMX_COMPONENT_GENERATE_ETB: {
OMX_ERRORTYPE iret;
iret = pThis->empty_this_buffer_proxy((OMX_HANDLETYPE)p1, (OMX_BUFFERHEADERTYPE *)p2);
if (iret == OMX_ErrorInsufficientResources) {
DEBUG_PRINT_ERROR("empty_this_buffer_proxy failure due to HW overload");
pThis->omx_report_hw_overload ();
} else if (iret != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("empty_this_buffer_proxy failure");
pThis->omx_report_error ();
}
}
break;
case OMX_COMPONENT_GENERATE_FTB:
if ( pThis->fill_this_buffer_proxy((OMX_HANDLETYPE)(intptr_t)p1,\
(OMX_BUFFERHEADERTYPE *)(intptr_t)p2) != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("fill_this_buffer_proxy failure");
pThis->omx_report_error ();
}
break;
case OMX_COMPONENT_GENERATE_COMMAND:
pThis->send_command_proxy(&pThis->m_cmp,(OMX_COMMANDTYPE)p1,\
(OMX_U32)p2,(OMX_PTR)NULL);
break;
case OMX_COMPONENT_GENERATE_EBD:
if (p2 != VDEC_S_SUCCESS && p2 != VDEC_S_INPUT_BITSTREAM_ERR) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_EBD failure");
pThis->omx_report_error ();
} else {
if (p2 == VDEC_S_INPUT_BITSTREAM_ERR && p1) {
pThis->time_stamp_dts.remove_time_stamp(
((OMX_BUFFERHEADERTYPE *)(intptr_t)p1)->nTimeStamp,
(pThis->drv_ctx.interlace != VDEC_InterlaceFrameProgressive)
?true:false);
}
if ( pThis->empty_buffer_done(&pThis->m_cmp,
(OMX_BUFFERHEADERTYPE *)(intptr_t)p1) != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("empty_buffer_done failure");
pThis->omx_report_error ();
}
}
break;
case OMX_COMPONENT_GENERATE_INFO_FIELD_DROPPED: {
int64_t *timestamp = (int64_t *)(intptr_t)p1;
if (p1) {
pThis->time_stamp_dts.remove_time_stamp(*timestamp,
(pThis->drv_ctx.interlace != VDEC_InterlaceFrameProgressive)
?true:false);
free(timestamp);
}
}
break;
case OMX_COMPONENT_GENERATE_FBD:
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_FBD failure");
pThis->omx_report_error ();
} else if ( pThis->fill_buffer_done(&pThis->m_cmp,
(OMX_BUFFERHEADERTYPE *)(intptr_t)p1) != OMX_ErrorNone ) {
DEBUG_PRINT_ERROR("fill_buffer_done failure");
pThis->omx_report_error ();
}
break;
case OMX_COMPONENT_GENERATE_EVENT_INPUT_FLUSH:
DEBUG_PRINT_HIGH("Driver flush i/p Port complete, flags %#llx",
(unsigned long long)pThis->m_flags);
if (!pThis->input_flush_progress) {
DEBUG_PRINT_HIGH("WARNING: Unexpected flush from driver");
} else {
pThis->execute_input_flush();
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_EVENT_INPUT_FLUSH failure");
pThis->omx_report_error ();
} else {
/*Check if we need generate event for Flush done*/
pThis->notify_flush_done(ctxt);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_IDLE_PENDING)) {
if (pThis->stream_off(OMX_CORE_INPUT_PORT_INDEX)) {
DEBUG_PRINT_ERROR("Failed to call streamoff on OUTPUT Port");
pThis->omx_report_error ();
} else {
pThis->streaming[OUTPUT_PORT] = false;
}
if (!pThis->output_flush_progress) {
DEBUG_PRINT_LOW("Input flush done hence issue stop");
pThis->post_event ((unsigned int)NULL, VDEC_S_SUCCESS,\
OMX_COMPONENT_GENERATE_STOP_DONE);
}
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
}
break;
case OMX_COMPONENT_GENERATE_EVENT_OUTPUT_FLUSH:
DEBUG_PRINT_HIGH("Driver flush o/p Port complete, flags %#llx",
(unsigned long long)pThis->m_flags);
if (!pThis->output_flush_progress) {
DEBUG_PRINT_HIGH("WARNING: Unexpected flush from driver");
} else {
pThis->execute_output_flush();
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_EVENT_OUTPUT_FLUSH failed");
pThis->omx_report_error ();
} else {
/*Check if we need generate event for Flush done*/
pThis->notify_flush_done(ctxt);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_OUTPUT_FLUSH_IN_DISABLE_PENDING)) {
DEBUG_PRINT_LOW("Internal flush complete");
BITMASK_CLEAR (&pThis->m_flags,
OMX_COMPONENT_OUTPUT_FLUSH_IN_DISABLE_PENDING);
if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_DISABLE_OUTPUT_DEFERRED)) {
pThis->post_event(OMX_CommandPortDisable,
OMX_CORE_OUTPUT_PORT_INDEX,
OMX_COMPONENT_GENERATE_EVENT);
BITMASK_CLEAR (&pThis->m_flags,
OMX_COMPONENT_DISABLE_OUTPUT_DEFERRED);
BITMASK_CLEAR (&pThis->m_flags,
OMX_COMPONENT_OUTPUT_DISABLE_PENDING);
}
}
if (BITMASK_PRESENT(&pThis->m_flags ,OMX_COMPONENT_IDLE_PENDING)) {
if (pThis->stream_off(OMX_CORE_OUTPUT_PORT_INDEX)) {
DEBUG_PRINT_ERROR("Failed to call streamoff on CAPTURE Port");
pThis->omx_report_error ();
break;
}
pThis->streaming[CAPTURE_PORT] = false;
if (!pThis->input_flush_progress) {
DEBUG_PRINT_LOW("Output flush done hence issue stop");
pThis->post_event ((unsigned int)NULL, VDEC_S_SUCCESS,\
OMX_COMPONENT_GENERATE_STOP_DONE);
}
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
}
break;
case OMX_COMPONENT_GENERATE_START_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_START_DONE, flags %#llx",
(unsigned long long)pThis->m_flags);
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_START_DONE Failure");
pThis->omx_report_error ();
} else {
DEBUG_PRINT_LOW("OMX_COMPONENT_GENERATE_START_DONE Success");
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_EXECUTE_PENDING)) {
DEBUG_PRINT_LOW("Move to executing");
// Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_EXECUTE_PENDING);
pThis->m_state = OMX_StateExecuting;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StateExecuting, NULL);
} else if (BITMASK_PRESENT(&pThis->m_flags,
OMX_COMPONENT_PAUSE_PENDING)) {
if (/*ioctl (pThis->drv_ctx.video_driver_fd,
VDEC_IOCTL_CMD_PAUSE,NULL ) < */0) {
DEBUG_PRINT_ERROR("VDEC_IOCTL_CMD_PAUSE failed");
pThis->omx_report_error ();
}
}
}
} else {
DEBUG_PRINT_LOW("Event Handler callback is NULL");
}
break;
case OMX_COMPONENT_GENERATE_PAUSE_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_PAUSE_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_PAUSE_DONE ret failed");
pThis->omx_report_error ();
} else {
pThis->complete_pending_buffer_done_cbs();
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_PAUSE_PENDING)) {
DEBUG_PRINT_LOW("OMX_COMPONENT_GENERATE_PAUSE_DONE nofity");
//Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_PAUSE_PENDING);
pThis->m_state = OMX_StatePause;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StatePause, NULL);
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_RESUME_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_RESUME_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_RESUME_DONE failed");
pThis->omx_report_error ();
} else {
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_EXECUTE_PENDING)) {
DEBUG_PRINT_LOW("Moving the decoder to execute state");
// Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_EXECUTE_PENDING);
pThis->m_state = OMX_StateExecuting;
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StateExecuting,NULL);
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_STOP_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_STOP_DONE");
if (pThis->m_cb.EventHandler) {
if (p2 != VDEC_S_SUCCESS) {
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_STOP_DONE ret failed");
pThis->omx_report_error ();
} else {
pThis->complete_pending_buffer_done_cbs();
if (BITMASK_PRESENT(&pThis->m_flags,OMX_COMPONENT_IDLE_PENDING)) {
DEBUG_PRINT_LOW("OMX_COMPONENT_GENERATE_STOP_DONE Success");
// Send the callback now
BITMASK_CLEAR((&pThis->m_flags),OMX_COMPONENT_IDLE_PENDING);
pThis->m_state = OMX_StateIdle;
DEBUG_PRINT_LOW("Move to Idle State");
pThis->m_cb.EventHandler(&pThis->m_cmp,pThis->m_app_data,
OMX_EventCmdComplete,OMX_CommandStateSet,
OMX_StateIdle,NULL);
}
}
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_PORT_RECONFIG:
if (p2 == OMX_IndexParamPortDefinition) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_IndexParamPortDefinition");
pThis->in_reconfig = true;
pThis->prev_n_filled_len = 0;
} else if (p2 == OMX_IndexConfigCommonOutputCrop) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_IndexConfigCommonOutputCrop");
/* Check if resolution is changed in smooth streaming mode */
if (pThis->m_smoothstreaming_mode &&
((pThis->framesize.nWidth !=
pThis->drv_ctx.video_resolution.frame_width) ||
(pThis->framesize.nHeight !=
pThis->drv_ctx.video_resolution.frame_height))) {
DEBUG_PRINT_HIGH("Resolution changed from: wxh = %dx%d to: wxh = %dx%d",
pThis->framesize.nWidth,
pThis->framesize.nHeight,
pThis->drv_ctx.video_resolution.frame_width,
pThis->drv_ctx.video_resolution.frame_height);
/* Update new resolution */
pThis->framesize.nWidth =
pThis->drv_ctx.video_resolution.frame_width;
pThis->framesize.nHeight =
pThis->drv_ctx.video_resolution.frame_height;
/* Update C2D with new resolution */
if (!pThis->client_buffers.update_buffer_req()) {
DEBUG_PRINT_ERROR("Setting C2D buffer requirements failed");
}
}
/* Update new crop information */
pThis->rectangle.nLeft = pThis->drv_ctx.frame_size.left;
pThis->rectangle.nTop = pThis->drv_ctx.frame_size.top;
pThis->rectangle.nWidth = pThis->drv_ctx.frame_size.right;
pThis->rectangle.nHeight = pThis->drv_ctx.frame_size.bottom;
/* Validate the new crop information */
if (pThis->rectangle.nLeft + pThis->rectangle.nWidth >
pThis->drv_ctx.video_resolution.frame_width) {
DEBUG_PRINT_HIGH("Crop L[%u] + R[%u] > W[%u]",
pThis->rectangle.nLeft, pThis->rectangle.nWidth,
pThis->drv_ctx.video_resolution.frame_width);
pThis->rectangle.nLeft = 0;
if (pThis->rectangle.nWidth >
pThis->drv_ctx.video_resolution.frame_width) {
DEBUG_PRINT_HIGH("Crop R[%u] > W[%u]",
pThis->rectangle.nWidth,
pThis->drv_ctx.video_resolution.frame_width);
pThis->rectangle.nWidth =
pThis->drv_ctx.video_resolution.frame_width;
}
}
if (pThis->rectangle.nTop + pThis->rectangle.nHeight >
pThis->drv_ctx.video_resolution.frame_height) {
DEBUG_PRINT_HIGH("Crop T[%u] + B[%u] > H[%u]",
pThis->rectangle.nTop, pThis->rectangle.nHeight,
pThis->drv_ctx.video_resolution.frame_height);
pThis->rectangle.nTop = 0;
if (pThis->rectangle.nHeight >
pThis->drv_ctx.video_resolution.frame_height) {
DEBUG_PRINT_HIGH("Crop B[%u] > H[%u]",
pThis->rectangle.nHeight,
pThis->drv_ctx.video_resolution.frame_height);
pThis->rectangle.nHeight =
pThis->drv_ctx.video_resolution.frame_height;
}
}
DEBUG_PRINT_HIGH("Updated Crop Info: L: %u, T: %u, R: %u, B: %u",
pThis->rectangle.nLeft, pThis->rectangle.nTop,
pThis->rectangle.nWidth, pThis->rectangle.nHeight);
} else if (p2 == OMX_QTIIndexConfigDescribeColorAspects) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_QTIIndexConfigDescribeColorAspects");
} else if (p2 == OMX_QTIIndexConfigDescribeHDRColorInfo) {
DEBUG_PRINT_HIGH("Rxd PORT_RECONFIG: OMX_QTIIndexConfigDescribeHDRcolorinfo");
} else {
DEBUG_PRINT_ERROR("Rxd Invalid PORT_RECONFIG event (%lu)", p2);
break;
}
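// A port reconfig starts a new decode sequence: close any open output dump
// files so the next sequence logs to fresh files (seq_count below is part of the file names).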
if (pThis->m_debug.outfile) {
fclose(pThis->m_debug.outfile);
pThis->m_debug.outfile = NULL;
}
if (pThis->m_debug.ccoutfile) {
fclose(pThis->m_debug.ccoutfile);
pThis->m_debug.ccoutfile = NULL;
}
if (pThis->m_debug.out_ymeta_file) {
fclose(pThis->m_debug.out_ymeta_file);
pThis->m_debug.out_ymeta_file = NULL;
}
if (pThis->m_debug.out_uvmeta_file) {
fclose(pThis->m_debug.out_uvmeta_file);
pThis->m_debug.out_uvmeta_file = NULL;
}
pThis->m_debug.seq_count++;
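// Package the new crop or port geometry so the PortSettingsChanged callback
// carries the updated dimensions to the client.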
if (pThis->m_cb.EventHandler) {
void *frame_data = NULL;
reconfig_client_data port_data;
reconfig_client_crop_data crop_data;
if (p2 == OMX_IndexConfigCommonOutputCrop) {
crop_data.width = pThis->rectangle.nWidth;
crop_data.height = pThis->rectangle.nHeight;
crop_data.left = pThis->rectangle.nLeft;
crop_data.top = pThis->rectangle.nTop;
crop_data.isPortReconfigInsufficient = pThis->isPortReconfigInsufficient;
frame_data = (void*)&crop_data;
} else if (p2 == OMX_IndexParamPortDefinition){
port_data.width = pThis->m_reconfig_width;
port_data.height = pThis->m_reconfig_height;
port_data.dpb_bit_depth = pThis->dpb_bit_depth;
port_data.m_progressive = pThis->m_progressive;
port_data.isPortReconfigInsufficient = pThis->isPortReconfigInsufficient;
frame_data = (void*)&port_data;
}
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data,
OMX_EventPortSettingsChanged, p1, p2, (void*)frame_data);
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
break;
case OMX_COMPONENT_GENERATE_EOS_DONE:
DEBUG_PRINT_HIGH("Rxd OMX_COMPONENT_GENERATE_EOS_DONE");
if (pThis->m_cb.EventHandler) {
pThis->m_cb.EventHandler(&pThis->m_cmp, pThis->m_app_data, OMX_EventBufferFlag,
OMX_CORE_OUTPUT_PORT_INDEX, OMX_BUFFERFLAG_EOS, NULL );
} else {
DEBUG_PRINT_ERROR("ERROR: %s()::EventHandler is NULL", __func__);
}
pThis->prev_ts = LLONG_MAX;
pThis->rst_prev_ts = true;
break;
case OMX_COMPONENT_GENERATE_HARDWARE_ERROR:
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_HARDWARE_ERROR");
pThis->omx_report_error();
break;
case OMX_COMPONENT_GENERATE_UNSUPPORTED_SETTING:
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_UNSUPPORTED_SETTING");
pThis->omx_report_unsupported_setting();
break;
case OMX_COMPONENT_GENERATE_HARDWARE_OVERLOAD:
DEBUG_PRINT_ERROR("OMX_COMPONENT_GENERATE_HARDWARE_OVERLOAD");
pThis->omx_report_hw_overload();
break;
case OMX_COMPONENT_GENERATE_ION_PREFETCH_PIXEL:
DEBUG_PRINT_HIGH("OMX_COMPONENT_GENERATE_ION_PREFETCH_PIXEL");
pThis->m_prefetch_done |= pThis->prefetch_buffers(p1, p2, ION_IOC_PREFETCH, ION_FLAG_CP_PIXEL);
break;
case OMX_COMPONENT_GENERATE_ION_PREFETCH_NON_PIXEL:
DEBUG_PRINT_HIGH("OMX_COMPONENT_GENERATE_ION_PREFETCH_NON_PIXEL");
pThis->m_prefetch_done |= pThis->prefetch_buffers(p1, p2, ION_IOC_PREFETCH, ION_FLAG_CP_NON_PIXEL) << 1;
break;
default:
break;
}
}
pthread_mutex_lock(&pThis->m_lock);
qsize = pThis->m_cmd_q.m_size;
if (pThis->m_state != OMX_StatePause)
qsize += (pThis->m_ftb_q.m_size + pThis->m_etb_q.m_size);
pthread_mutex_unlock(&pThis->m_lock);
} while (qsize>0);
}
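/* Record the driver-reported width/height/stride/scan-lines and, when the
 * downscalar is disabled, refresh the crop rectangle from extradata.
 * Returns 1 if the frame dimensions changed. */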
int omx_vdec::update_resolution(int width, int height, int stride, int scan_lines)
{
int format_changed = 0;
if ((height != (int)drv_ctx.video_resolution.frame_height) ||
(width != (int)drv_ctx.video_resolution.frame_width)) {
DEBUG_PRINT_HIGH("NOTE_CIF: W/H %d (%d), %d (%d)",
width, drv_ctx.video_resolution.frame_width,
height,drv_ctx.video_resolution.frame_height);
format_changed = 1;
}
drv_ctx.video_resolution.frame_height = height;
drv_ctx.video_resolution.frame_width = width;
drv_ctx.video_resolution.scan_lines = scan_lines;
drv_ctx.video_resolution.stride = stride;
if (!is_down_scalar_enabled) {
rectangle.nLeft = m_extradata_info.output_crop_rect.nLeft;
rectangle.nTop = m_extradata_info.output_crop_rect.nTop;
rectangle.nWidth = m_extradata_info.output_crop_rect.nWidth;
rectangle.nHeight = m_extradata_info.output_crop_rect.nHeight;
}
return format_changed;
}
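/* Dump the compressed input bitstream to a per-session file when input buffer
 * logging is enabled. The file extension follows the codec (.mpg/.264/.265/.ivf/.bin)
 * and VP8/VP9 dumps are wrapped with IVF file/frame headers. */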
int omx_vdec::log_input_buffers(const char *buffer_addr, int buffer_len, uint64_t timeStamp, int fd)
{
if (!m_debug.in_buffer_log)
return 0;
#ifdef USE_ION
do_cache_operations(fd);
#else
(void)fd;
#endif
if (m_debug.in_buffer_log && !m_debug.infile) {
if(!strncmp(drv_ctx.kind,"OMX.qcom.video.decoder.mpeg2", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p_%" PRId64 ".mpg", m_debug.log_loc,
drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this, m_debug.session_id);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.avc", OMX_MAX_STRINGNAME_SIZE) ||
!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.mvc", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.264",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.hevc", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.265",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.ivf",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else if(!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", OMX_MAX_STRINGNAME_SIZE)) {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.ivf",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
} else {
snprintf(m_debug.infile_name, OMX_MAX_STRINGNAME_SIZE, "%s/input_dec_%d_%d_%p.bin",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
}
m_debug.infile = fopen (m_debug.infile_name, "ab");
if (!m_debug.infile) {
DEBUG_PRINT_HIGH("Failed to open input file: %s for logging (%d:%s)",
m_debug.infile_name, errno, strerror(errno));
m_debug.infile_name[0] = '\0';
#ifdef USE_ION
do_cache_operations(fd);
#endif
return -1;
}
if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", OMX_MAX_STRINGNAME_SIZE) ||
!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", OMX_MAX_STRINGNAME_SIZE)) {
bool isVp9 = drv_ctx.decoder_format == VDEC_CODECTYPE_VP9;
int width = drv_ctx.video_resolution.frame_width;
int height = drv_ctx.video_resolution.frame_height;
int fps = drv_ctx.frame_rate.fps_numerator;
IvfFileHeader ivfHeader(isVp9, width, height, 1, fps, 0);
fwrite((const char *)&ivfHeader,
sizeof(ivfHeader),1,m_debug.infile);
}
}
if (m_debug.infile && buffer_addr && buffer_len) {
if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", OMX_MAX_STRINGNAME_SIZE) ||
!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", OMX_MAX_STRINGNAME_SIZE)) {
IvfFrameHeader ivfFrameHeader(buffer_len, timeStamp);
fwrite(&ivfFrameHeader, sizeof(ivfFrameHeader), 1, m_debug.infile);
}
fwrite(buffer_addr, buffer_len, 1, m_debug.infile);
}
#ifdef USE_ION
do_cache_operations(fd);
#endif
return 0;
}
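/* Dump color-converted (C2D) output frames to a per-sequence YUV file when CC
 * output buffer logging is enabled. */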
int omx_vdec::log_cc_output_buffers(OMX_BUFFERHEADERTYPE *buffer) {
if (client_buffers.client_buffers_invalid() ||
!m_debug.out_cc_buffer_log || !buffer || !buffer->nFilledLen)
return 0;
if (m_debug.out_cc_buffer_log && !m_debug.ccoutfile) {
snprintf(m_debug.ccoutfile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_cc_%d_%d_%p_%" PRId64 "_%d.yuv",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this,
m_debug.session_id, m_debug.seq_count);
m_debug.ccoutfile = fopen (m_debug.ccoutfile_name, "ab");
if (!m_debug.ccoutfile) {
DEBUG_PRINT_HIGH("Failed to open output file: %s for logging", m_debug.log_loc);
m_debug.ccoutfile_name[0] = '\0';
return -1;
}
DEBUG_PRINT_HIGH("Opened CC output file: %s for logging", m_debug.ccoutfile_name);
}
fwrite(buffer->pBuffer, buffer->nFilledLen, 1, m_debug.ccoutfile);
return 0;
}
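/* Dump decoded output frames (and, when enabled, the UBWC Y/UV metadata planes)
 * to per-sequence files. Handles UBWC, NV12 and P010 layouts; in dynamic buffer
 * mode the output buffer is mapped on demand for the copy. */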
int omx_vdec::log_output_buffers(OMX_BUFFERHEADERTYPE *buffer) {
int buf_index = 0;
char *temp = NULL;
char *bufaddr = NULL;
if (!(m_debug.out_buffer_log || m_debug.out_meta_buffer_log) || !buffer || !buffer->nFilledLen)
return 0;
if (m_debug.out_buffer_log && !m_debug.outfile) {
snprintf(m_debug.outfile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_%d_%d_%p_%" PRId64 "_%d.yuv",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this,
m_debug.session_id, m_debug.seq_count);
m_debug.outfile = fopen (m_debug.outfile_name, "ab");
if (!m_debug.outfile) {
DEBUG_PRINT_HIGH("Failed to open output file: %s for logging", m_debug.log_loc);
m_debug.outfile_name[0] = '\0';
return -1;
}
DEBUG_PRINT_HIGH("Opened output file: %s for logging", m_debug.outfile_name);
}
if (m_debug.out_meta_buffer_log && !m_debug.out_ymeta_file && !m_debug.out_uvmeta_file) {
snprintf(m_debug.out_ymetafile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_%d_%d_%p.ymeta",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
snprintf(m_debug.out_uvmetafile_name, OMX_MAX_STRINGNAME_SIZE, "%s/output_%d_%d_%p.uvmeta",
m_debug.log_loc, drv_ctx.video_resolution.frame_width, drv_ctx.video_resolution.frame_height, this);
m_debug.out_ymeta_file = fopen (m_debug.out_ymetafile_name, "ab");
m_debug.out_uvmeta_file = fopen (m_debug.out_uvmetafile_name, "ab");
if (!m_debug.out_ymeta_file || !m_debug.out_uvmeta_file) {
DEBUG_PRINT_HIGH("Failed to open output y/uv meta files: %s / %s for logging",
m_debug.out_ymetafile_name, m_debug.out_uvmetafile_name);
// Close whichever of the pair did open so a later attempt starts clean.
if (m_debug.out_ymeta_file) fclose(m_debug.out_ymeta_file);
if (m_debug.out_uvmeta_file) fclose(m_debug.out_uvmeta_file);
m_debug.out_ymeta_file = m_debug.out_uvmeta_file = NULL;
m_debug.out_ymetafile_name[0] = '\0';
m_debug.out_uvmetafile_name[0] = '\0';
return -1;
}
}
buf_index = buffer - m_out_mem_ptr;
bufaddr = (char *)drv_ctx.ptr_outputbuffer[buf_index].bufferaddr;
if (dynamic_buf_mode && !secure_mode) {
bufaddr = ion_map(drv_ctx.ptr_outputbuffer[buf_index].pmem_fd,
drv_ctx.ptr_outputbuffer[buf_index].buffer_len);
//mmap returns (void *)-1 on failure and sets error code in errno.
if (bufaddr == MAP_FAILED) {
DEBUG_PRINT_ERROR("mmap failed - errno: %d", errno);
return -1;
}
}
temp = bufaddr;
if (drv_ctx.output_format == VDEC_YUV_FORMAT_NV12_UBWC ||
drv_ctx.output_format == VDEC_YUV_FORMAT_NV12_TP10_UBWC) {
DEBUG_PRINT_HIGH("Logging UBWC yuv width/height(%u/%u)",
drv_ctx.video_resolution.frame_width,
drv_ctx.video_resolution.frame_height);
if (m_debug.outfile)
fwrite(temp, buffer->nFilledLen, 1, m_debug.outfile);
if (m_debug.out_ymeta_file && m_debug.out_uvmeta_file) {
unsigned int width = 0, height = 0;
unsigned int y_plane, y_meta_plane;
int y_stride = 0, y_sclines = 0;
int y_meta_stride = 0, y_meta_scanlines = 0, uv_meta_stride = 0, uv_meta_scanlines = 0;
int color_fmt = (drv_ctx.output_format == VDEC_YUV_FORMAT_NV12_UBWC) ?
COLOR_FMT_NV12_UBWC : COLOR_FMT_NV12_BPP10_UBWC;
int i;
int bytes_written = 0;
width = drv_ctx.video_resolution.frame_width;
height = drv_ctx.video_resolution.frame_height;
y_meta_stride = VENUS_Y_META_STRIDE(color_fmt, width);
y_meta_scanlines = VENUS_Y_META_SCANLINES(color_fmt, height);
y_stride = VENUS_Y_STRIDE(color_fmt, width);
y_sclines = VENUS_Y_SCANLINES(color_fmt, height);
uv_meta_stride = VENUS_UV_META_STRIDE(color_fmt, width);
uv_meta_scanlines = VENUS_UV_META_SCANLINES(color_fmt, height);
y_meta_plane = MSM_MEDIA_ALIGN(y_meta_stride * y_meta_scanlines, 4096);
y_plane = MSM_MEDIA_ALIGN(y_stride * y_sclines, 4096);
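// UBWC buffers are laid out as Y-meta, Y, UV-meta (and UV) planes, each 4K-aligned:
// dump the Y-meta plane line by line, then jump past the Y plane to the UV-meta plane.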
for (i = 0; i < y_meta_scanlines; i++) {
bytes_written = fwrite(temp, y_meta_stride, 1, m_debug.out_ymeta_file);
temp += y_meta_stride;
}
temp = bufaddr + y_meta_plane + y_plane;
for(i = 0; i < uv_meta_scanlines; i++) {
bytes_written += fwrite(temp, uv_meta_stride, 1, m_debug.out_uvmeta_file);
temp += uv_meta_stride;
}
}
} else if (m_debug.outfile && drv_ctx.output_format == VDEC_YUV_FORMAT_NV12) {
int stride = drv_ctx.video_resolution.stride;
int scanlines = drv_ctx.video_resolution.scan_lines;
if (m_smoothstreaming_mode) {
stride = drv_ctx.video_resolution.frame_width;
scanlines = drv_ctx.video_resolution.frame_height;
stride = (stride + DEFAULT_WIDTH_ALIGNMENT - 1) & (~(DEFAULT_WIDTH_ALIGNMENT - 1));
scanlines = (scanlines + DEFAULT_HEIGHT_ALIGNMENT - 1) & (~(DEFAULT_HEIGHT_ALIGNMENT - 1));
}
unsigned i;
DEBUG_PRINT_HIGH("Logging width/height(%u/%u) stride/scanlines(%u/%u)",
drv_ctx.video_resolution.frame_width,
drv_ctx.video_resolution.frame_height, stride, scanlines);
int bytes_written = 0;
for (i = 0; i < drv_ctx.video_resolution.frame_height; i++) {
bytes_written = fwrite(temp, drv_ctx.video_resolution.frame_width, 1, m_debug.outfile);
temp += stride;
}
temp = bufaddr + stride * scanlines;
int stride_c = stride;
for(i = 0; i < drv_ctx.video_resolution.frame_height/2; i++) {
bytes_written += fwrite(temp, drv_ctx.video_resolution.frame_width, 1, m_debug.outfile);
temp += stride_c;
}
} else if (m_debug.outfile && drv_ctx.output_format == VDEC_YUV_FORMAT_P010_VENUS) {
int stride = drv_ctx.video_resolution.stride;
int scanlines = drv_ctx.video_resolution.scan_lines;
if (m_smoothstreaming_mode) {
stride = drv_ctx.video_resolution.frame_width * 2;
scanlines = drv_ctx.video_resolution.frame_height;
stride = (stride + DEFAULT_WIDTH_ALIGNMENT - 1) & (~(DEFAULT_WIDTH_ALIGNMENT - 1));
scanlines = (scanlines + DEFAULT_HEIGHT_ALIGNMENT - 1) & (~(DEFAULT_HEIGHT_ALIGNMENT - 1));
}
unsigned i;
DEBUG_PRINT_HIGH("Logging width/height(%u/%u) stride/scanlines(%u/%u)",
drv_ctx.video_resolution.frame_width,
drv_ctx.video_resolution.frame_height, stride, scanlines);
int bytes_written = 0;
for (i = 0; i < drv_ctx.video_resolution.frame_height; i++) {
bytes_written = fwrite(temp, drv_ctx.video_resolution.frame_width, 2, m_debug.outfile);
temp += stride;
}
temp = bufaddr + stride * scanlines;
int stride_c = stride;
for(i = 0; i < drv_ctx.video_resolution.frame_height/2; i++) {
bytes_written += fwrite(temp, drv_ctx.video_resolution.frame_width, 2, m_debug.outfile);
temp += stride_c;
}
}
if (dynamic_buf_mode && !secure_mode) {
ion_unmap(drv_ctx.ptr_outputbuffer[buf_index].pmem_fd, bufaddr,
drv_ctx.ptr_outputbuffer[buf_index].buffer_len);
}
return 0;
}
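/* Build the lookup tables that translate framework ColorAspects values
 * (primaries, transfer, matrix coefficients, range) into the corresponding
 * decoder/HAL enumerations. */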
void omx_vdec::init_color_aspects_map()
{
mPrimariesMap.insert({
{ColorAspects::PrimariesUnspecified, (ColorPrimaries)(2)},
{ColorAspects::PrimariesBT709_5, ColorPrimaries_BT709_5},
{ColorAspects::PrimariesBT470_6M, ColorPrimaries_BT470_6M},
{ColorAspects::PrimariesBT601_6_625, ColorPrimaries_BT601_6_625},
{ColorAspects::PrimariesBT601_6_525, ColorPrimaries_BT601_6_525},
{ColorAspects::PrimariesGenericFilm, ColorPrimaries_GenericFilm},
{ColorAspects::PrimariesBT2020, ColorPrimaries_BT2020},
});
mTransferMap.insert({
{ColorAspects::TransferUnspecified, (GammaTransfer)(2)},
{ColorAspects::TransferLinear, Transfer_Linear},
{ColorAspects::TransferSRGB, Transfer_sRGB},
{ColorAspects::TransferSMPTE170M, Transfer_SMPTE_170M},
{ColorAspects::TransferGamma22, Transfer_Gamma2_2},
{ColorAspects::TransferGamma28, Transfer_Gamma2_8},
{ColorAspects::TransferST2084, Transfer_SMPTE_ST2084},
{ColorAspects::TransferHLG, Transfer_HLG},
{ColorAspects::TransferSMPTE240M, Transfer_SMPTE_240M},
{ColorAspects::TransferXvYCC, Transfer_XvYCC},
{ColorAspects::TransferBT1361, Transfer_BT1361},
{ColorAspects::TransferST428, Transfer_ST_428},
});
mMatrixCoeffMap.insert({
{ColorAspects::MatrixUnspecified, (MatrixCoEfficients)(2)},
{ColorAspects::MatrixBT709_5, MatrixCoEff_BT709_5},
{ColorAspects::MatrixBT470_6M, MatrixCoeff_FCC_73_682},
{ColorAspects::MatrixBT601_6, MatrixCoEff_BT601_6_625},
{ColorAspects::MatrixSMPTE240M, MatrixCoEff_SMPTE240M},
{ColorAspects::MatrixBT2020, MatrixCoEff_BT2020},
{ColorAspects::MatrixBT2020Constant, MatrixCoEff_BT2020Constant},
});
mColorRangeMap.insert({
{ColorAspects::RangeUnspecified, (ColorRange)(2)},
{ColorAspects::RangeFull, Range_Full},
{ColorAspects::RangeLimited, Range_Limited},
});
}
/* ======================================================================
FUNCTION
omx_vdec::ComponentInit
DESCRIPTION
Initialize the component.
PARAMETERS
role -- Component role string (e.g. "OMX.qcom.video.decoder.avc") used to
select the codec and configure the decode session.
RETURN VALUE
OMX_ErrorNone on success, an OMX error code on failure.
========================================================================== */
OMX_ERRORTYPE omx_vdec::component_init(OMX_STRING role)
{
OMX_ERRORTYPE eRet = OMX_ErrorNone;
struct v4l2_fmtdesc fdesc;
struct v4l2_format fmt;
struct v4l2_requestbuffers bufreq;
struct v4l2_control control;
struct v4l2_frmsizeenum frmsize;
struct v4l2_queryctrl query;
unsigned int alignment = 0,buffer_size = 0;
int fds[2];
int r,ret=0;
bool codec_ambiguous = false;
OMX_STRING device_name = (OMX_STRING)"/dev/video32";
char property_value[PROPERTY_VALUE_MAX] = {0};
FILE *soc_file = NULL;
char buffer[10];
struct v4l2_ext_control ctrl[2];
struct v4l2_ext_controls controls;
int conceal_color_8bit = 0, conceal_color_10bit = 0;
property_get("ro.board.platform", m_platform_name, "0");
#ifdef _ANDROID_
if (!strncmp(m_platform_name, "msm8610", 7)) {
device_name = (OMX_STRING)"/dev/video/q6_dec";
is_q6_platform = true;
maxSmoothStreamingWidth = 1280;
maxSmoothStreamingHeight = 720;
}
#endif
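// The .secure role variants map to the same base role; remember secure_mode so
// the session is configured for secure playback further below.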
if (!strncmp(role, "OMX.qcom.video.decoder.avc.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
role = (OMX_STRING)"OMX.qcom.video.decoder.avc";
} else if (!strncmp(role, "OMX.qcom.video.decoder.mpeg2.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
role = (OMX_STRING)"OMX.qcom.video.decoder.mpeg2";
} else if (!strncmp(role, "OMX.qcom.video.decoder.hevc.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
role = (OMX_STRING)"OMX.qcom.video.decoder.hevc";
} else if (!strncmp(role, "OMX.qcom.video.decoder.vp9.secure",
OMX_MAX_STRINGNAME_SIZE)) {
secure_mode = true;
role = (OMX_STRING)"OMX.qcom.video.decoder.vp9";
}
#ifdef HYPERVISOR
drv_ctx.video_driver_fd = hypv_open(device_name, O_RDWR);
#else
drv_ctx.video_driver_fd = open(device_name, O_RDWR);
#endif
DEBUG_PRINT_INFO("component_init: %s : fd=%d", role, drv_ctx.video_driver_fd);
if (drv_ctx.video_driver_fd < 0) {
DEBUG_PRINT_ERROR("Omx_vdec::Comp Init Returning failure, errno %d", errno);
return OMX_ErrorInsufficientResources;
}
drv_ctx.frame_rate.fps_numerator = DEFAULT_FPS;
drv_ctx.frame_rate.fps_denominator = 1;
operating_frame_rate = DEFAULT_FPS;
m_poll_efd = eventfd(0, 0);
if (m_poll_efd < 0) {
DEBUG_PRINT_ERROR("Failed to create event fd(%s)", strerror(errno));
return OMX_ErrorInsufficientResources;
}
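// Subscribe to driver (V4L2) events and start the async thread that relays
// them back to the component as messages.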
ret = subscribe_to_events(drv_ctx.video_driver_fd);
if (!ret) {
async_thread_created = true;
ret = pthread_create(&async_thread_id,0,async_message_thread,this);
}
if (ret) {
DEBUG_PRINT_ERROR("Failed to create async_message_thread");
async_thread_created = false;
return OMX_ErrorInsufficientResources;
}
#ifdef OUTPUT_EXTRADATA_LOG
outputExtradataFile = fopen (output_extradata_filename, "ab");
#endif
// Copy the role information which provides the decoder kind
strlcpy(drv_ctx.kind,role,128);
if (!strncmp(drv_ctx.kind,"OMX.qcom.video.decoder.mpeg2",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.mpeg2",\
OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_MPEG2;
output_capability = V4L2_PIX_FMT_MPEG2;
eCompressionFormat = OMX_VIDEO_CodingMPEG2;
/*Initialize Start Code for MPEG2*/
codec_type_parse = CODEC_TYPE_MPEG2;
m_frame_parser.init_start_codes(codec_type_parse);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.avc",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.avc",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_H264;
output_capability=V4L2_PIX_FMT_H264;
eCompressionFormat = OMX_VIDEO_CodingAVC;
codec_type_parse = CODEC_TYPE_H264;
m_frame_parser.init_start_codes(codec_type_parse);
m_frame_parser.init_nal_length(nal_length);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.mvc",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.mvc", OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_MVC;
output_capability = V4L2_PIX_FMT_H264_MVC;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingMVC;
codec_type_parse = CODEC_TYPE_H264;
m_frame_parser.init_start_codes(codec_type_parse);
m_frame_parser.init_nal_length(nal_length);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.hevc",\
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.hevc",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_HEVC;
output_capability = V4L2_PIX_FMT_HEVC;
eCompressionFormat = (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingHevc;
codec_type_parse = CODEC_TYPE_HEVC;
m_frame_parser.init_start_codes(codec_type_parse);
m_frame_parser.init_nal_length(nal_length);
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp8", \
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.vp8",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_VP8;
output_capability = V4L2_PIX_FMT_VP8;
eCompressionFormat = OMX_VIDEO_CodingVP8;
codec_type_parse = CODEC_TYPE_VP8;
} else if (!strncmp(drv_ctx.kind, "OMX.qcom.video.decoder.vp9", \
OMX_MAX_STRINGNAME_SIZE)) {
strlcpy((char *)m_cRole, "video_decoder.vp9",OMX_MAX_STRINGNAME_SIZE);
drv_ctx.decoder_format = VDEC_CODECTYPE_VP9;
output_capability = V4L2_PIX_FMT_VP9;
eCompressionFormat = OMX_VIDEO_CodingVP9;
codec_type_parse = CODEC_TYPE_VP9;
} else {
DEBUG_PRINT_ERROR("ERROR:Unknown Component");
eRet = OMX_ErrorInvalidComponentName;
}
m_progressive = MSM_VIDC_PIC_STRUCT_PROGRESSIVE;
if (eRet == OMX_ErrorNone) {
OMX_COLOR_FORMATTYPE dest_color_format;
if (m_disable_ubwc_mode) {
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12;
} else {
drv_ctx.output_format = VDEC_YUV_FORMAT_NV12_UBWC;
}
if (eCompressionFormat == (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingMVC)
dest_color_format = (OMX_COLOR_FORMATTYPE)
QOMX_COLOR_FORMATYUV420PackedSemiPlanar32mMultiView;
else
dest_color_format = (OMX_COLOR_FORMATTYPE)
QOMX_COLOR_FORMATYUV420PackedSemiPlanar32m;
if (!client_buffers.set_color_format(dest_color_format)) {
DEBUG_PRINT_ERROR("Setting color format failed");
eRet = OMX_ErrorInsufficientResources;
}
dpb_bit_depth = MSM_VIDC_BIT_DEPTH_8;
is_flexible_format = FALSE;
is_mbaff = FALSE;
if (m_disable_ubwc_mode) {
capture_capability = V4L2_PIX_FMT_NV12;
} else {
capture_capability = V4L2_PIX_FMT_NV12_UBWC;
}
struct v4l2_capability cap;
ret = ioctl(drv_ctx.video_driver_fd, VIDIOC_QUERYCAP, &cap);
if (ret) {
DEBUG_PRINT_ERROR("Failed to query capabilities");
/*TODO: How to handle this case */
} else {
DEBUG_PRINT_LOW("Capabilities: driver_name = %s, card = %s, bus_info = %s,"
" version = %d, capabilities = %x", cap.driver, cap.card,
cap.bus_info, cap.version, cap.capabilities);
}
ret=0;
fdesc.type=V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fdesc.index=0;
while (ioctl(drv_ctx.video_driver_fd, VIDIOC_ENUM_FMT, &fdesc) == 0) {
DEBUG_PRINT_HIGH("fmt: description: %s, fmt: %x, flags = %x", fdesc.description,
fdesc.pixelformat, fdesc.flags);
fdesc.index++;
}
fdesc.type=V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
fdesc.index=0;
while (ioctl(drv_ctx.video_driver_fd, VIDIOC_ENUM_FMT, &fdesc) == 0) {
DEBUG_PRINT_HIGH("fmt: description: %s, fmt: %x, flags = %x", fdesc.description,
fdesc.pixelformat, fdesc.flags);
fdesc.index++;
}
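// Seed a nominal 320x240 resolution and crop; the real values are updated once
// the driver reports the stream dimensions.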
m_extradata_info.output_crop_rect.nLeft = 0;
m_extradata_info.output_crop_rect.nTop = 0;
m_extradata_info.output_crop_rect.nWidth = 320;
m_extradata_info.output_crop_rect.nHeight = 240;
update_resolution(320, 240, 320, 240);
fmt.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
fmt.fmt.pix_mp.height = drv_ctx.video_resolution.frame_height;
fmt.fmt.pix_mp.width = drv_ctx.video_resolution.frame_width;
fmt.fmt.pix_mp.pixelformat = output_capability;
ret = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_FMT, &fmt);
if (ret) {
/*TODO: How to handle this case */
DEBUG_PRINT_ERROR("Failed to set format on output port");
return OMX_ErrorInsufficientResources;
}
DEBUG_PRINT_HIGH("Set Format was successful");
/*
* refer macro DEFAULT_CONCEAL_COLOR to set conceal color values
*/
Platform::Config::getInt32(Platform::vidc_dec_conceal_color_8bit, &conceal_color_8bit, DEFAULT_VIDEO_CONCEAL_COLOR_BLACK);
Platform::Config::getInt32(Platform::vidc_dec_conceal_color_10bit, &conceal_color_10bit, DEFAULT_VIDEO_CONCEAL_COLOR_BLACK);
memset(&controls, 0, sizeof(controls));
memset(ctrl, 0, sizeof(ctrl));
ctrl[0].id = V4L2_CID_MPEG_VIDC_VIDEO_CONCEAL_COLOR_8BIT;
ctrl[0].value = conceal_color_8bit;
ctrl[1].id = V4L2_CID_MPEG_VIDC_VIDEO_CONCEAL_COLOR_10BIT;
ctrl[1].value = conceal_color_10bit;
controls.count = 2;
controls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
controls.controls = ctrl;
ret = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_EXT_CTRLS, &controls);
if (ret) {
DEBUG_PRINT_ERROR("Failed to set conceal color %d\n", ret);
}
//Get the hardware capabilities
memset((void *)&frmsize,0,sizeof(frmsize));
frmsize.index = 0;
frmsize.pixel_format = output_capability;
ret = ioctl(drv_ctx.video_driver_fd,
VIDIOC_ENUM_FRAMESIZES, &frmsize);
if (ret || frmsize.type != V4L2_FRMSIZE_TYPE_STEPWISE) {
DEBUG_PRINT_ERROR("Failed to get framesizes");
return OMX_ErrorHardware;
}
/* The early return above guarantees stepwise frame sizes here */
m_decoder_capability.min_width = frmsize.stepwise.min_width;
m_decoder_capability.max_width = frmsize.stepwise.max_width;
m_decoder_capability.min_height = frmsize.stepwise.min_height;
m_decoder_capability.max_height = frmsize.stepwise.max_height;
/* Based on UBWC enable, decide split mode to driver before calling S_FMT */
eRet = set_dpb(m_disable_ubwc_mode);
memset(&fmt, 0x0, sizeof(struct v4l2_format));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
fmt.fmt.pix_mp.height = drv_ctx.video_resolution.frame_height;
fmt.fmt.pix_mp.width = drv_ctx.video_resolution.frame_width;
fmt.fmt.pix_mp.pixelformat = capture_capability;
ret = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_FMT, &fmt);
if (ret) {
/*TODO: How to handle this case */
DEBUG_PRINT_ERROR("Failed to set format on capture port");
}
memset(&framesize, 0, sizeof(OMX_FRAMESIZETYPE));
framesize.nWidth = drv_ctx.video_resolution.frame_width;
framesize.nHeight = drv_ctx.video_resolution.frame_height;
memset(&rectangle, 0, sizeof(OMX_CONFIG_RECTTYPE));
rectangle.nWidth = drv_ctx.video_resolution.frame_width;
rectangle.nHeight = drv_ctx.video_resolution.frame_height;
DEBUG_PRINT_HIGH("Set Format was successful");
if (secure_mode) {
control.id = V4L2_CID_MPEG_VIDC_VIDEO_SECURE;
control.value = 1;
DEBUG_PRINT_LOW("Omx_vdec:: calling to open secure device %d", ret);
ret=ioctl(drv_ctx.video_driver_fd, VIDIOC_S_CTRL,&control);
if (ret) {
DEBUG_PRINT_ERROR("Omx_vdec:: Unable to open secure device %d", ret);
return OMX_ErrorInsufficientResources;
}
}
/*Get the Buffer requirements for input and output ports*/
drv_ctx.ip_buf.buffer_type = VDEC_BUFFER_TYPE_INPUT;
drv_ctx.op_buf.buffer_type = VDEC_BUFFER_TYPE_OUTPUT;
if (secure_mode) {
drv_ctx.op_buf.alignment = SECURE_ALIGN;
drv_ctx.ip_buf.alignment = SECURE_ALIGN;
} else {
drv_ctx.op_buf.alignment = SZ_4K;
drv_ctx.ip_buf.alignment = SZ_4K;
}
drv_ctx.interlace = VDEC_InterlaceFrameProgressive;
drv_ctx.extradata = 0;
drv_ctx.picture_order = VDEC_ORDER_DISPLAY;
control.id = V4L2_CID_MPEG_VIDC_VIDEO_OUTPUT_ORDER;
control.value = V4L2_MPEG_VIDC_VIDEO_OUTPUT_ORDER_DISPLAY;
ret = ioctl(drv_ctx.video_driver_fd, VIDIOC_S_CTRL, &control);
drv_ctx.idr_only_decoding = 0;
#ifdef _ANDROID_
if (m_dec_hfr_fps) {
memset(&query, 0, sizeof(struct v4l2_queryctrl));
query.id = V4L2_CID_MPEG_VIDC_VIDEO_FRAME_RATE;
ret = ioctl(drv_ctx.video_driver_fd, VIDIOC_QUERYCTRL, &query);
if (!ret)
m_dec_hfr_fps = MIN(query.maximum, m_dec_hfr_fps);
DEBUG_PRINT_HIGH("Updated HFR fps value = %d", m_dec_hfr_fps);
}
#endif
m_state = OMX_StateLoaded;
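// Enable the default extradata set; HEVC sessions additionally request HDR
// color-info and user extradata.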
unsigned long long extradata_mask = DEFAULT_EXTRADATA;
if (eCompressionFormat == (OMX_VIDEO_CODINGTYPE)QOMX_VIDEO_CodingHevc) {
extradata_mask |= OMX_HDR_COLOR_INFO_EXTRADATA | OMX_EXTNUSER_EXTRADATA;
}
enable_extradata(extradata_mask, true, true);
eRet = get_buffer_req(&drv_ctx.ip_buf);
DEBUG_PRINT_HIGH("Input Buffer Size =%u",(unsigned int)drv_ctx.ip_buf.buffer_size);
get_buffer_req(&drv_ctx.op_buf);
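// AVC/MVC/HEVC sessions allocate an input-buffer-sized scratch buffer for use
// by the frame parser.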
if (drv_ctx.decoder_format == VDEC_CODECTYPE_H264 ||
drv_ctx.decoder_format == VDEC_CODECTYPE_HEVC ||
drv_ctx.decoder_format == VDEC_CODECTYPE_MVC) {
h264_scratch.nAllocLen = drv_ctx.ip_buf.buffer_size;
h264_scratch.pBuffer = (OMX_U8 *)malloc (drv_ctx.ip_buf.buffer_size);
h264_scratch.nFilledLen = 0;
h264_scratch.nOffset = 0;
if (h264_scratch.pBuffer == NULL) {
DEBUG_PRINT_ERROR("h264_scratch.pBuffer Allocation failed ");
return OMX_ErrorInsufficientResources;
}
}
if (drv_ctx.decoder_format == VDEC_CODECTYPE_H264 ||
drv_ctx.decoder_format == VDEC_CODECTYPE_MVC) {
if (m_frame_parser.mutils == NULL) {
m_frame_parser.mutils = new H264_Utils();
if (m_frame_parser.mutils == NULL) {
DEBUG_PRINT_ERROR("parser utils Allocation failed ");
eRet = OMX_ErrorInsufficientResources;
} else {
m_frame_parser.mutils->initialize_frame_checking_environment();
m_frame_parser.mutils->allocate_rbsp_buffer (drv_ctx.ip_buf.buffer_size);
}
}
h264_parser = new h264_stream_parser();
if (!h264_parser) {
DEBUG_PRINT_ERROR("ERROR: H264 parser allocation failed!");
eRet = OMX_ErrorInsufficientResources;
}
}
msg_thread_created = true;
r = pthread_create(&msg_thread_id,0,message_thread_dec,this);
if (r) {
DEBUG_PRINT_ERROR("component_init(): message_thread_dec creation failed");
msg_thread_created = false;
eRet = OMX_ErrorInsufficientResources;
} else if (secure_mode) {
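// For secure sessions, queue ION prefetch requests for the secure pixel and
// non-pixel heaps.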
this->post_event(PREFETCH_PIXEL_BUFFER_COUNT, m_dec_secure_prefetch_size_output, OMX_COMPONENT_GENERATE_ION_PREFETCH_PIXEL);
this->post_event(PREFETCH_NON_PIXEL_BUFFER_COUNT, m_dec_secure_prefetch_size_internal, OMX_COMPONENT_GENERATE_ION_PREFETCH_NON_PIXEL);
}
}
{
VendorExtensionStore *extStore = const_cast<VendorExtensionStore *>(&mVendorExtensionStore);
init_vendor_extensions(*extStore);
mVendorExtensionStore.dumpExtensions((const char *)role);
}
if (eRet != OMX_ErrorNone) {
DEBUG_PRINT_ERROR("Component Init Failed");
} else {
DEBUG_PRINT_INFO("omx_vdec::component_init() success : fd=%d",
drv_ctx.video_driver_fd);
}
//memset(&h264_mv_buff,0,sizeof(struct h264_mv_buffer));
OMX_INIT_STRUCT(&m_sParamLowLatency, QOMX_EXTNINDEX_VIDEO_LOW_LATENCY_MODE);
m_sParamLowLatency.nNumFrames = 0;
m_sParamLowLatency.bEnableLowLatencyMode = OMX_FALSE;
return eRet;
}
/* ======================================================================
FUNCTION
omx_vdec::GetComponentVersion
DESCRIPTION
Returns the component version.
PARAMETERS
hComp -- component handle; componentName, componentVersion, specVersion and
componentUUID -- output parameters (only specVersion is filled in currently).
RETURN VALUE
OMX_ErrorNone, or OMX_ErrorInvalidState if called in the Invalid state.
========================================================================== */
OMX_ERRORTYPE omx_vdec::get_component_version
(
OMX_IN OMX_HANDLETYPE hComp,
OMX_OUT OMX_STRING componentName,
OMX_OUT OMX_VERSIONTYPE* componentVersion,
OMX_OUT OMX_VERSIONTYPE* specVersion,
OMX_OUT OMX_UUIDTYPE* componentUUID
)
{
(void) hComp;
(void) componentName;
(void) componentVersion;
(void) componentUUID;
if (m_state == OMX_StateInvalid) {
DEBUG_PRINT_ERROR("Get Comp Version in Invalid State");
return OMX_ErrorInvalidState;
}
/* TBD -- Return the proper version */
if (specVersion) {
specVersion->nVersion = OMX_SPEC_VERSION;
}
return OMX_ErrorNone;
}
/* ======================================================================
FUNCTION
omx_vdec::SendCommand
DESCRIPTION
Queues an OMX command (state change, port flush/enable/disable) for
asynchronous processing by the component.
PARAMETERS
hComp -- component handle; cmd, param1 and cmdData -- the OMX command and its arguments.
RETURN VALUE
OMX_ErrorNone on success, an OMX error code otherwise.
========================================================================== */
OMX_ERRORTYPE omx_vdec::send_command(OMX_IN OMX_HANDLETYPE hComp,