Remove bad LDLIBS
am: c73f5bb534
Change-Id: Id8ab7b3422cff716f4b734fd662a9d37db5bad22
diff --git a/videocodec/Android.mk b/videocodec/Android.mk
old mode 100644
new mode 100755
index 3cbbe36..2c5a9ef
--- a/videocodec/Android.mk
+++ b/videocodec/Android.mk
@@ -228,6 +228,8 @@
ifeq ($(TARGET_BOARD_PLATFORM),baytrail)
LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS)/ufo
+else
+ LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS)/pvr/hal
endif
LOCAL_SRC_FILES := \
@@ -461,7 +463,8 @@
libva_videodecoder \
liblog \
libva \
- libva-android
+ libva-android \
+ libcutils
LOCAL_C_INCLUDES := \
$(TARGET_OUT_HEADERS)/wrs_omxil_core \
diff --git a/videocodec/OMXVideoDecoderBase.cpp b/videocodec/OMXVideoDecoderBase.cpp
old mode 100644
new mode 100755
index 975ac4c..323739b
--- a/videocodec/OMXVideoDecoderBase.cpp
+++ b/videocodec/OMXVideoDecoderBase.cpp
@@ -22,6 +22,8 @@
#include <va/va_android.h>
#include "OMXVideoDecoderBase.h"
+#include "ProtectedDataBuffer.h"
+
static const char* VA_RAW_MIME_TYPE = "video/x-raw-va";
static const uint32_t VA_COLOR_FORMAT = 0x7FA00E00;
@@ -38,6 +40,7 @@
mWorkingMode(RAWDATA_MODE),
mErrorReportEnabled (false),
mAPMode(LEGACY_MODE),
+ mFlushMode(false),
mFormatChanged(false) {
mOMXBufferHeaderTypePtrNum = 0;
mMetaDataBuffersNum = 0;
@@ -350,10 +353,18 @@
HandleFormatChange();
}
- // Actually, if mAPMode is set, mWorkingMode should be GRAPHICBUFFER_MODE.
- if (((mAPMode == METADATA_MODE) && (mWorkingMode == GRAPHICBUFFER_MODE)) && mFormatChanged) {
- if (((*pBuffers[OUTPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS) || (mVideoDecoder->getOutputQueueLength() == 0)) {
- HandleFormatChange();
+ if (mFlushMode) {
+ LOGI("in mFlushMode, do HandleFormatChange.");
+ HandleFormatChange();
+ } else {
+ // Actually, if mAPMode is set, mWorkingMode should be GRAPHICBUFFER_MODE.
+ if (((mAPMode == METADATA_MODE) && (mWorkingMode == GRAPHICBUFFER_MODE)) && mFormatChanged) {
+ if (((*pBuffers[OUTPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS) || (mVideoDecoder->getOutputQueueLength() == 0)) {
+ // Format changed, set mFlushMode, clear eos
+ mFlushMode = true;
+ mFormatChanged = false;
+ (*pBuffers[OUTPORT_INDEX])->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ }
}
}
@@ -434,9 +445,17 @@
HandleFormatChange();
}
- if (((mAPMode == METADATA_MODE) && (mWorkingMode == GRAPHICBUFFER_MODE)) && mFormatChanged) {
- if (((*pBuffers[OUTPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS) || (mVideoDecoder->getOutputQueueLength() == 0)) {
- HandleFormatChange();
+ if (mFlushMode) {
+ LOGI("in mFlushMode, do HandleFormatChange.");
+ HandleFormatChange();
+ } else {
+ if (((mAPMode == METADATA_MODE) && (mWorkingMode == GRAPHICBUFFER_MODE)) && mFormatChanged) {
+ if (((*pBuffers[OUTPORT_INDEX])->nFlags & OMX_BUFFERFLAG_EOS) || (mVideoDecoder->getOutputQueueLength() == 0)) {
+ // Format changed, set mFlushMode, clear eos.
+ mFlushMode = true;
+ mFormatChanged = false;
+ (*pBuffers[OUTPORT_INDEX])->nFlags &= ~OMX_BUFFERFLAG_EOS;
+ }
}
}
@@ -497,9 +516,10 @@
mOMXBufferHeaderTypePtrNum = 0;
mGraphicBufferParam.graphicBufferColorFormat = def_output->format.video.eColorFormat;
- mGraphicBufferParam.graphicBufferStride = getStride(def_output->format.video.nFrameWidth);
+ mGraphicBufferParam.graphicBufferHStride = getStride(def_output->format.video.nFrameWidth);
+ mGraphicBufferParam.graphicBufferVStride = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
mGraphicBufferParam.graphicBufferWidth = def_output->format.video.nFrameWidth;
- mGraphicBufferParam.graphicBufferHeight = (def_output->format.video.nFrameHeight + 0xf) & ~0xf;
+ mGraphicBufferParam.graphicBufferHeight = def_output->format.video.nFrameHeight;
p->surfaceNumber = mMetaDataBuffersNum;
for (int i = 0; i < MAX_GRAPHIC_BUFFER_NUM; i++) {
@@ -515,10 +535,11 @@
}
}
p->flag |= USE_NATIVE_GRAPHIC_BUFFER;
- p->graphicBufferStride = mGraphicBufferParam.graphicBufferStride;
- p->graphicBufferColorFormat = mGraphicBufferParam.graphicBufferColorFormat;
+ p->graphicBufferHStride = mGraphicBufferParam.graphicBufferHStride;
+ p->graphicBufferVStride = mGraphicBufferParam.graphicBufferVStride;
p->graphicBufferWidth = mGraphicBufferParam.graphicBufferWidth;
p->graphicBufferHeight = mGraphicBufferParam.graphicBufferHeight;
+ p->graphicBufferColorFormat = mGraphicBufferParam.graphicBufferColorFormat;
if (p->graphicBufferColorFormat == OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar_Tiled
#ifdef USE_GEN_HW
|| p->graphicBufferColorFormat == HAL_PIXEL_FORMAT_NV12_X_TILED_INTEL
@@ -599,6 +620,61 @@
return OMX_ErrorNone;
}
+OMX_ERRORTYPE OMXVideoDecoderBase::PrepareDecodeNativeHandleBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p) {
+ // default decode buffer preparation
+
+ memset(p, 0, sizeof(VideoDecodeBuffer));
+ if (buffer->nFilledLen == 0) {
+ LOGW("Len of filled data to decode is 0.");
+ return OMX_ErrorNone; //OMX_ErrorBadParameter;
+ }
+
+ if (buffer->pBuffer == NULL) {
+ LOGE("Buffer to decode is empty.");
+ return OMX_ErrorBadParameter;
+ }
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
+ LOGI("Buffer has OMX_BUFFERFLAG_CODECCONFIG flag.");
+ }
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_DECODEONLY) {
+ // TODO: Handle OMX_BUFFERFLAG_DECODEONLY : drop the decoded frame without rendering it.
+ LOGW("Buffer has OMX_BUFFERFLAG_DECODEONLY flag.");
+ }
+ //Get data pointer from native_handle
+ native_handle_t *native_handle = (native_handle_t *)buffer->pBuffer;
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
+ p->data = dataBuffer->data + buffer->nOffset;
+
+
+
+ p->size = buffer->nFilledLen;
+ p->timeStamp = buffer->nTimeStamp;
+ if (buffer->nFlags & (OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_EOS)) {
+ // TODO: OMX_BUFFERFLAG_ENDOFFRAME can be used to indicate end of a NAL unit.
+ // setting this flag may cause corruption if buffer does not contain end-of-frame data.
+ p->flag = HAS_COMPLETE_FRAME;
+ }
+
+ if (buffer->nFlags & OMX_BUFFERFLAG_SYNCFRAME) {
+ p->flag |= IS_SYNC_FRAME;
+ }
+
+ if (buffer->pInputPortPrivate) {
+ uint32_t degree = 0;
+        memcpy ((void *) &degree, buffer->pInputPortPrivate, sizeof(uint32_t));
+ p->rotationDegrees = degree;
+ LOGV("rotationDegrees = %d", p->rotationDegrees);
+ } else {
+ p->rotationDegrees = mRotationDegrees;
+ }
+
+ *retain= BUFFER_RETAIN_NOT_RETAIN;
+ return OMX_ErrorNone;
+}
+
+
OMX_ERRORTYPE OMXVideoDecoderBase::FillRenderBuffer(OMX_BUFFERHEADERTYPE **pBuffer, buffer_retain_t *retain, OMX_U32 inportBufferFlags, OMX_BOOL *isResolutionChange) {
OMX_BUFFERHEADERTYPE *buffer = *pBuffer;
OMX_BUFFERHEADERTYPE *buffer_orign = buffer;
@@ -619,8 +695,15 @@
bool draining = (inportBufferFlags & OMX_BUFFERFLAG_EOS);
//pthread_mutex_lock(&mSerializationLock);
- const VideoRenderBuffer *renderBuffer;
+ const VideoRenderBuffer *renderBuffer = NULL;
//pthread_mutex_unlock(&mSerializationLock);
+
+ // in mFlushMode, provide empty buffer.
+ if (mFlushMode) {
+ buffer->nFilledLen = 0;
+ return OMX_ErrorNone;
+ }
+
if (((mAPMode == METADATA_MODE) && (mWorkingMode == GRAPHICBUFFER_MODE)) && mFormatChanged) {
renderBuffer = mVideoDecoder->getOutput(true, ErrBufPtr);
} else {
@@ -700,6 +783,10 @@
uint32_t widthCropped = formatInfo->width - formatInfo->cropLeft - formatInfo->cropRight;
uint32_t heightCropped = formatInfo->height - formatInfo->cropTop - formatInfo->cropBottom;
+ if (strcasecmp(formatInfo->mimeType,"video/avc") == 0 ||
+ strcasecmp(formatInfo->mimeType,"video/h264") == 0) {
+ heightCropped = formatInfo->height;
+ }
uint32_t strideCropped = widthCropped;
uint32_t sliceHeightCropped = heightCropped;
int force_realloc = 0;
@@ -721,7 +808,7 @@
paramPortDefinitionInput.format.video.nSliceHeight = sliceHeight;
// output port
paramPortDefinitionOutput.format.video.nFrameWidth = width;
- paramPortDefinitionOutput.format.video.nFrameHeight = (height + 0x1f) & ~0x1f;
+ paramPortDefinitionOutput.format.video.nFrameHeight = height;
paramPortDefinitionOutput.format.video.eColorFormat = GetOutputColorFormat(paramPortDefinitionOutput.format.video.nFrameWidth);
paramPortDefinitionOutput.format.video.nStride = stride;
paramPortDefinitionOutput.format.video.nSliceHeight = sliceHeight;
@@ -740,7 +827,7 @@
this->ports[OUTPORT_INDEX]->ReportPortSettingsChanged();
- mFormatChanged = false;
+ mFlushMode = false;
return OMX_ErrorNone;
}
@@ -970,7 +1057,9 @@
if (mOMXBufferHeaderTypePtrNum == 1) {
mGraphicBufferParam.graphicBufferColorFormat = param->nativeBuffer->format;
- mGraphicBufferParam.graphicBufferStride = param->nativeBuffer->stride;
+ mGraphicBufferParam.graphicBufferHStride = param->nativeBuffer->stride;
+ // FIXME: use IMG_native_handle_t->aiVStride[0] instead..
+ mGraphicBufferParam.graphicBufferVStride = param->nativeBuffer->height;
mGraphicBufferParam.graphicBufferWidth = param->nativeBuffer->width;
mGraphicBufferParam.graphicBufferHeight = param->nativeBuffer->height;
}
@@ -1027,7 +1116,8 @@
}
port_def.format.video.cMIMEType = (OMX_STRING)VA_VED_RAW_MIME_TYPE;
port_def.format.video.eColorFormat = OMX_INTEL_COLOR_FormatYUV420PackedSemiPlanar;
- port_def.format.video.nFrameHeight = (port_def.format.video.nFrameHeight + 0x1f) & ~0x1f;
+ port_def.format.video.nFrameHeight = port_def.format.video.nFrameHeight;
+
port_def.format.video.eColorFormat = GetOutputColorFormat(
port_def.format.video.nFrameWidth);
port->SetPortDefinition(&port_def,true);
@@ -1122,6 +1212,10 @@
rectParams->nTop = formatInfo->cropTop;
rectParams->nWidth = formatInfo->width - formatInfo->cropLeft - formatInfo->cropRight;
rectParams->nHeight = formatInfo->height - formatInfo->cropTop - formatInfo->cropBottom;
+ if (strcasecmp(formatInfo->mimeType,"video/avc") == 0 ||
+ strcasecmp(formatInfo->mimeType,"video/h264") == 0) {
+ rectParams->nHeight = formatInfo->height;
+ }
// if port width parsed from extractor is not as same as from SPS/PPS nalu header,
// align it.
diff --git a/videocodec/OMXVideoDecoderBase.h b/videocodec/OMXVideoDecoderBase.h
old mode 100644
new mode 100755
index 27c2939..8df593e
--- a/videocodec/OMXVideoDecoderBase.h
+++ b/videocodec/OMXVideoDecoderBase.h
@@ -60,6 +60,7 @@
virtual OMX_ERRORTYPE ProcessorPreFreeBuffer(OMX_U32 nPortIndex,OMX_BUFFERHEADERTYPE * pBuffer);
virtual OMX_ERRORTYPE PrepareConfigBuffer(VideoConfigBuffer *p);
virtual OMX_ERRORTYPE PrepareDecodeBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p);
+ virtual OMX_ERRORTYPE PrepareDecodeNativeHandleBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p);
virtual OMX_ERRORTYPE FillRenderBuffer(OMX_BUFFERHEADERTYPE **pBuffer, buffer_retain_t *retain,
OMX_U32 inportBufferFlags, OMX_BOOL *isResolutionChange);
virtual OMX_ERRORTYPE HandleFormatChange(void);
@@ -104,7 +105,8 @@
};
struct GraphicBufferParam {
- uint32_t graphicBufferStride;
+ uint32_t graphicBufferHStride;
+ uint32_t graphicBufferVStride;
uint32_t graphicBufferWidth;
uint32_t graphicBufferHeight;
uint32_t graphicBufferColorFormat;
@@ -143,6 +145,7 @@
};
AdaptivePlaybackMode mAPMode;
uint32_t mMetaDataBuffersNum;
+    bool mFlushMode;
bool mFormatChanged;
uint32_t getStride(uint32_t width);
};
diff --git a/videocodec/OMXVideoDecoderVP9HWR.cpp b/videocodec/OMXVideoDecoderVP9HWR.cpp
index 4482c37..684993d 100644
--- a/videocodec/OMXVideoDecoderVP9HWR.cpp
+++ b/videocodec/OMXVideoDecoderVP9HWR.cpp
@@ -256,11 +256,12 @@
int surfaceWidth = mGraphicBufferParam.graphicBufferWidth;
int surfaceHeight = mGraphicBufferParam.graphicBufferHeight;
- int surfaceStride = mGraphicBufferParam.graphicBufferStride;
- extNativeBufferSize = mGraphicBufferParam.graphicBufferStride *
- mGraphicBufferParam.graphicBufferHeight * 1.5;
- extActualBufferStride = surfaceStride;
- extActualBufferHeightStride = surfaceHeight;
+ int surfaceHStride = mGraphicBufferParam.graphicBufferHStride;
+ int surfaceVStride = mGraphicBufferParam.graphicBufferVStride;
+ extNativeBufferSize = mGraphicBufferParam.graphicBufferHStride *
+ mGraphicBufferParam.graphicBufferVStride * 1.5;
+ extActualBufferStride = surfaceHStride;
+ extActualBufferHeightStride = surfaceVStride;
for (i = 0; i < mOMXBufferHeaderTypePtrNum; i++) {
OMX_BUFFERHEADERTYPE *buf_hdr = mOMXBufferHeaderTypePtrArray[i];
@@ -280,14 +281,14 @@
surfExtBuf->pixel_format = VA_FOURCC_NV12;
surfExtBuf->width = surfaceWidth;
surfExtBuf->height = surfaceHeight;
- surfExtBuf->data_size = surfaceStride * surfaceHeight * 1.5;
+ surfExtBuf->data_size = surfaceHStride * surfaceVStride * 1.5;
surfExtBuf->num_planes = 2;
- surfExtBuf->pitches[0] = surfaceStride;
- surfExtBuf->pitches[1] = surfaceStride;
+ surfExtBuf->pitches[0] = surfaceHStride;
+ surfExtBuf->pitches[1] = surfaceHStride;
surfExtBuf->pitches[2] = 0;
surfExtBuf->pitches[3] = 0;
surfExtBuf->offsets[0] = 0;
- surfExtBuf->offsets[1] = surfaceStride * surfaceHeight;
+ surfExtBuf->offsets[1] = surfaceHStride * surfaceVStride;
surfExtBuf->offsets[2] = 0;
surfExtBuf->offsets[3] = 0;
surfExtBuf->flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
diff --git a/videocodec/OMXVideoDecoderVP9Hybrid.cpp b/videocodec/OMXVideoDecoderVP9Hybrid.cpp
index 1360ca2..20075ab 100644
--- a/videocodec/OMXVideoDecoderVP9Hybrid.cpp
+++ b/videocodec/OMXVideoDecoderVP9Hybrid.cpp
@@ -25,6 +25,8 @@
#include <hardware/gralloc.h>
#include <system/graphics.h>
+#include <hal_public.h>
+
#define VP9_YV12_ALIGN (128-1)
static const char* VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
@@ -70,12 +72,12 @@
uint32_t buff[MAX_GRAPHIC_BUFFER_NUM];
uint32_t i, bufferCount;
bool gralloc_mode = (mWorkingMode == GRAPHICBUFFER_MODE);
- uint32_t bufferSize, bufferStride, bufferHeight, bufferWidth;
+ uint32_t bufferSize, bufferHStride, bufferHeight, bufferVStride, bufferWidth;
if (!gralloc_mode) {
- bufferSize = 1920 * 1088 * 1.5;
- bufferStride = 1920;
+ bufferHStride = 1920;
+ bufferVStride = 1088;
bufferWidth = 1920;
- bufferHeight = 1088;
+ bufferHeight = 1080;
bufferCount = 12;
} else {
if (mAPMode == METADATA_MODE) {
@@ -87,9 +89,10 @@
mOMXBufferHeaderTypePtrNum = 0;
mGraphicBufferParam.graphicBufferColorFormat = def_output->format.video.eColorFormat;
- mGraphicBufferParam.graphicBufferStride = (def_output->format.video.nFrameWidth + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN;
+ mGraphicBufferParam.graphicBufferHStride = (def_output->format.video.nFrameWidth + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN;
+ mGraphicBufferParam.graphicBufferVStride = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
mGraphicBufferParam.graphicBufferWidth = def_output->format.video.nFrameWidth;
- mGraphicBufferParam.graphicBufferHeight = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
+ mGraphicBufferParam.graphicBufferHeight = def_output->format.video.nFrameHeight;
mDecodedImageWidth = def_output->format.video.nFrameWidth;
mDecodedImageHeight = def_output->format.video.nFrameHeight;
} else{
@@ -101,12 +104,14 @@
}
}
- bufferSize = mGraphicBufferParam.graphicBufferStride *
- mGraphicBufferParam.graphicBufferHeight * 1.5;
- bufferStride = mGraphicBufferParam.graphicBufferStride;
- bufferHeight = mGraphicBufferParam.graphicBufferHeight;
+ bufferHStride = mGraphicBufferParam.graphicBufferHStride;
+ bufferVStride = mGraphicBufferParam.graphicBufferVStride;
bufferWidth = mGraphicBufferParam.graphicBufferWidth;
+ bufferHeight = mGraphicBufferParam.graphicBufferHeight;
}
+
+ bufferSize = bufferHStride * bufferVStride * 1.5;
+
mLibHandle = dlopen("libDecoderVP9Hybrid.so", RTLD_NOW);
if (mLibHandle == NULL) {
LOGE("dlopen libDecoderVP9Hybrid.so fail\n");
@@ -137,7 +142,11 @@
return OMX_ErrorBadParameter;
}
- mInitDecoder(mHybridCtx,bufferSize,bufferStride,bufferWidth, bufferHeight,bufferCount,gralloc_mode, buff, (uint32_t)mAPMode);
+ // FIXME: The proprietary part of the vp9hybrid decoder should be updated
+ // to take VStride as well as Height. For now it's convenient to
+ // use VStride as that was effectively what was done before..
+ mInitDecoder(mHybridCtx, bufferSize, bufferHStride, bufferWidth,
+ bufferHeight, bufferCount, gralloc_mode, buff, (uint32_t)mAPMode);
return OMX_ErrorNone;
}
@@ -146,12 +155,13 @@
uint32_t buff[MAX_GRAPHIC_BUFFER_NUM];
uint32_t i, bufferCount;
bool gralloc_mode = (mWorkingMode == GRAPHICBUFFER_MODE);
- uint32_t bufferSize, bufferStride, bufferHeight, bufferWidth;
+ uint32_t bufferSize, bufferHStride, bufferHeight, bufferVStride, bufferWidth;
if (!gralloc_mode) {
- bufferSize = mDecodedImageWidth * mDecodedImageHeight * 1.5;
- bufferStride = mDecodedImageWidth;
+ bufferHStride = mDecodedImageWidth;
+ bufferVStride = mDecodedImageHeight;
bufferWidth = mDecodedImageWidth;
bufferHeight = mDecodedImageHeight;
+ bufferSize = bufferHStride * bufferVStride * 1.5;
bufferCount = 12;
} else {
if (mAPMode == METADATA_MODE) {
@@ -163,9 +173,10 @@
mOMXBufferHeaderTypePtrNum = 0;
mGraphicBufferParam.graphicBufferColorFormat = def_output->format.video.eColorFormat;
- mGraphicBufferParam.graphicBufferStride = (def_output->format.video.nFrameWidth + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN;
+ mGraphicBufferParam.graphicBufferHStride = (def_output->format.video.nFrameWidth + VP9_YV12_ALIGN) & ~VP9_YV12_ALIGN;
+ mGraphicBufferParam.graphicBufferVStride = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
mGraphicBufferParam.graphicBufferWidth = def_output->format.video.nFrameWidth;
- mGraphicBufferParam.graphicBufferHeight = (def_output->format.video.nFrameHeight + 0x1f) & ~0x1f;
+ mGraphicBufferParam.graphicBufferHeight = def_output->format.video.nFrameHeight;
} else{
bufferCount = mOMXBufferHeaderTypePtrNum;
@@ -174,14 +185,19 @@
buff[i] = (uint32_t)(buf_hdr->pBuffer);
}
}
- bufferSize = mGraphicBufferParam.graphicBufferStride *
- mGraphicBufferParam.graphicBufferHeight * 1.5;
- bufferStride = mGraphicBufferParam.graphicBufferStride;
- bufferHeight = mGraphicBufferParam.graphicBufferHeight;
+ bufferHStride = mGraphicBufferParam.graphicBufferHStride;
+ bufferVStride = mGraphicBufferParam.graphicBufferVStride;
bufferWidth = mGraphicBufferParam.graphicBufferWidth;
+ bufferHeight = mGraphicBufferParam.graphicBufferHeight;
}
- mInitDecoder(mHybridCtx,bufferSize,bufferStride,bufferWidth,bufferHeight,bufferCount,gralloc_mode, buff, (uint32_t)mAPMode);
+ bufferSize = bufferHStride * bufferVStride * 1.5;
+
+ // FIXME: The proprietary part of the vp9hybrid decoder should be updated
+ // to take VStride as well as Height. For now it's convenient to
+ // use VStride as that was effectively what was done before..
+ mInitDecoder(mHybridCtx, bufferSize, bufferHStride, bufferWidth,
+ bufferHeight, bufferCount, gralloc_mode, buff, (uint32_t)mAPMode);
mFormatChanged = false;
return OMX_ErrorNone;
}
@@ -193,6 +209,9 @@
bool ret = true;
if (gralloc_mode) {
ret = mGetFrameResolution(data,data_sz, &width, &height);
+ if (width == 0 || height == 0)
+ return false;
+
if (ret) {
if (mAPMode == METADATA_MODE) {
ret = (width != mDecodedImageWidth)
@@ -276,10 +295,11 @@
OMX_ERRORTYPE ret;
OMX_BUFFERHEADERTYPE *inBuffer = *pBuffers[INPORT_INDEX];
OMX_BUFFERHEADERTYPE *outBuffer = *pBuffers[OUTPORT_INDEX];
- bool eos = (inBuffer->nFlags & OMX_BUFFERFLAG_EOS)? true:false;
OMX_BOOL isResolutionChange = OMX_FALSE;
- bool formatChange = false;
+ bool eos = (inBuffer->nFlags & OMX_BUFFERFLAG_EOS)? true : false;
eos = eos && (inBuffer->nFilledLen == 0);
+ static unsigned char *firstFrame = NULL;
+ static uint32_t firstFrameSize = 0;
if (inBuffer->pBuffer == NULL) {
LOGE("Buffer to decode is empty.");
@@ -294,6 +314,52 @@
LOGW("Buffer has OMX_BUFFERFLAG_DECODEONLY flag.");
}
+ if (firstFrameSize == 0 && inBuffer->nFilledLen != 0 && inBuffer->nTimeStamp != 0) {
+ if (firstFrame != NULL) {
+ free(firstFrame);
+ firstFrame = NULL;
+ }
+
+ firstFrame = (unsigned char *)malloc(inBuffer->nFilledLen);
+ memcpy(firstFrame, inBuffer->pBuffer + inBuffer->nOffset, inBuffer->nFilledLen);
+ firstFrameSize = inBuffer->nFilledLen;
+ }
+
+ if ((mWorkingMode == GRAPHICBUFFER_MODE) && (mAPMode == METADATA_MODE) && (!mFormatChanged)) {
+ bool mRet = mGetFrameResolution(inBuffer->pBuffer + inBuffer->nOffset, inBuffer->nFilledLen,
+ &mDecodedImageNewWidth,&mDecodedImageNewHeight);
+
+ if (mRet && ((mDecodedImageNewWidth != 0) && (mDecodedImageNewHeight != 0)) &&
+ ((mDecodedImageWidth != 0) && (mDecodedImageHeight != 0)) &&
+ ((mDecodedImageNewWidth != mDecodedImageWidth || mDecodedImageNewHeight != mDecodedImageHeight))) {
+ if (mLastTimeStamp == 0) {
+ retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
+ HandleFormatChange();
+ return OMX_ErrorNone;
+ } else {
+ // Detected format change in time.
+ // drain the last frame, keep the current input buffer
+ mDecoderDecode(mCtx, mHybridCtx, firstFrame, firstFrameSize, false);
+ retains[INPORT_INDEX] = BUFFER_RETAIN_GETAGAIN;
+
+ mFormatChanged = true;
+
+ ret = FillRenderBuffer(pBuffers[OUTPORT_INDEX], &retains[OUTPORT_INDEX],
+ eos ? OMX_BUFFERFLAG_EOS : 0, &isResolutionChange);
+
+ if (ret == OMX_ErrorNone)
+ (*pBuffers[OUTPORT_INDEX])->nTimeStamp = mLastTimeStamp;
+
+ mLastTimeStamp = inBuffer->nTimeStamp;
+
+ free(firstFrame);
+ firstFrame = NULL;
+ firstFrameSize = 0;
+ return ret;
+ }
+ }
+ }
+
#if LOG_TIME == 1
struct timeval tv_start, tv_end;
int32_t time_ms;
@@ -475,7 +541,7 @@
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::HandleFormatChange(void)
{
- ALOGI("handle format change from %dx%d to %dx%d",
+ ALOGE("handle format change from %dx%d to %dx%d",
mDecodedImageWidth,mDecodedImageHeight,mDecodedImageNewWidth,mDecodedImageNewHeight);
mDecodedImageWidth = mDecodedImageNewWidth;
mDecodedImageHeight = mDecodedImageNewHeight;
@@ -540,10 +606,7 @@
// for graphic buffer reallocation
// when the width and height parsed from ES are larger than allocated graphic buffer in outport,
paramPortDefinitionOutput.format.video.nFrameWidth = width;
- if (mAPMode == METADATA_MODE)
- paramPortDefinitionOutput.format.video.nFrameHeight = (height + 0x1f) & ~0x1f;
- else
- paramPortDefinitionOutput.format.video.nFrameHeight = (height + 0x1f) & ~0x1f;
+ paramPortDefinitionOutput.format.video.nFrameHeight = height;
paramPortDefinitionOutput.format.video.eColorFormat = GetOutputColorFormat(
paramPortDefinitionOutput.format.video.nFrameWidth);
paramPortDefinitionOutput.format.video.nStride = stride;
@@ -566,8 +629,8 @@
OMX_COLOR_FORMATTYPE OMXVideoDecoderVP9Hybrid::GetOutputColorFormat(int) {
- LOGV("Output color format is HAL_PIXEL_FORMAT_YV12.");
- return (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12;
+ LOGV("Output color format is HAL_PIXEL_FORMAT_INTEL_YV12.");
+ return (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_INTEL_YV12;
}
OMX_ERRORTYPE OMXVideoDecoderVP9Hybrid::GetDecoderOutputCropSpecific(OMX_PTR pStructure) {
@@ -612,26 +675,33 @@
CHECK_PORT_INDEX_RANGE(param);
CHECK_SET_PARAM_STATE();
+ PortVideo *port = NULL;
+ port = static_cast<PortVideo *>(this->ports[OUTPORT_INDEX]);
+ OMX_PARAM_PORTDEFINITIONTYPE port_def;
+ memcpy(&port_def,port->GetPortDefinition(),sizeof(port_def));
+
if (!param->enable) {
mWorkingMode = RAWDATA_MODE;
LOGI("Raw data mode is used");
+ // If it is fallback from native mode the color format has been
+ // already set to INTEL format.
+ // We need to set back the default color format and Native stuff.
+ port_def.format.video.eColorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
+ port_def.format.video.pNativeRender = NULL;
+ port_def.format.video.pNativeWindow = NULL;
+ port->SetPortDefinition(&port_def,true);
return OMX_ErrorNone;
}
- mWorkingMode = GRAPHICBUFFER_MODE;
- PortVideo *port = NULL;
- port = static_cast<PortVideo *>(this->ports[OUTPORT_INDEX]);
- OMX_PARAM_PORTDEFINITIONTYPE port_def;
- memcpy(&port_def,port->GetPortDefinition(),sizeof(port_def));
+ mWorkingMode = GRAPHICBUFFER_MODE;
port_def.nBufferCountMin = mNativeBufferCount - 4;
port_def.nBufferCountActual = mNativeBufferCount;
port_def.format.video.cMIMEType = (OMX_STRING)VA_VED_RAW_MIME_TYPE;
// add borders for libvpx decode need.
port_def.format.video.nFrameWidth += VPX_DECODE_BORDER * 2;
+ port_def.format.video.nFrameHeight += VPX_DECODE_BORDER * 2;
mDecodedImageWidth = port_def.format.video.nFrameWidth;
mDecodedImageHeight = port_def.format.video.nFrameHeight;
- // make heigth 32bit align
- port_def.format.video.nFrameHeight = (port_def.format.video.nFrameHeight + 0x1f) & ~0x1f;
port_def.format.video.eColorFormat = GetOutputColorFormat(port_def.format.video.nFrameWidth);
port->SetPortDefinition(&port_def,true);
diff --git a/videocodec/OMXVideoEncoderBase.cpp b/videocodec/OMXVideoEncoderBase.cpp
index 37f53d2..ca4ad21 100644
--- a/videocodec/OMXVideoEncoderBase.cpp
+++ b/videocodec/OMXVideoEncoderBase.cpp
@@ -20,6 +20,7 @@
#include "IntelMetadataBuffer.h"
#include <cutils/properties.h>
#include <wrs_omxil_core/log.h>
+#include <media/stagefright/foundation/AUtils.h>
static const char *RAW_MIME_TYPE = "video/raw";
@@ -195,7 +196,13 @@
mParamVideoRefresh.nAirMBs = 0;
mParamVideoRefresh.nAirRef = 0;
mParamVideoRefresh.nCirMBs = 0;
-
+
+ // OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESH
+ memset(&mConfigAndroidIntraRefresh, 0, sizeof(mConfigAndroidIntraRefresh));
+ SetTypeHeader(&mConfigAndroidIntraRefresh, sizeof(mConfigAndroidIntraRefresh));
+ mConfigAndroidIntraRefresh.nPortIndex = OUTPORT_INDEX;
+    mConfigAndroidIntraRefresh.nRefreshPeriod = 0; // feature disabled by default
+
// OMX_CONFIG_FRAMERATETYPE
memset(&mConfigFramerate, 0, sizeof(mConfigFramerate));
SetTypeHeader(&mConfigFramerate, sizeof(mConfigFramerate));
@@ -406,6 +413,7 @@
AddHandler((OMX_INDEXTYPE)OMX_IndexExtTemporalLayer, GetTemporalLayer,SetTemporalLayer);
AddHandler((OMX_INDEXTYPE)OMX_IndexConfigVideoBitrate, GetConfigVideoBitrate, SetConfigVideoBitrate);
AddHandler((OMX_INDEXTYPE)OMX_IndexExtRequestBlackFramePointer, GetBlackFramePointer, GetBlackFramePointer);
+ AddHandler((OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, GetConfigAndroidIntraRefresh, SetConfigAndroidIntraRefresh);
return OMX_ErrorNone;
}
@@ -972,3 +980,55 @@
}
return OMX_ErrorNone;
}
+
+OMX_ERRORTYPE OMXVideoEncoderBase::GetConfigAndroidIntraRefresh(OMX_PTR pStructure) {
+ OMX_ERRORTYPE ret;
+ OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *p = (OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *)pStructure;
+
+ CHECK_TYPE_HEADER(p);
+ CHECK_PORT_INDEX(p, OUTPORT_INDEX);
+
+ memcpy(p, &mConfigAndroidIntraRefresh, sizeof(*p));
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OMXVideoEncoderBase::SetConfigAndroidIntraRefresh(OMX_PTR pStructure) {
+ OMX_ERRORTYPE ret;
+
+ OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *p = (OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE *)pStructure;
+
+ CHECK_TYPE_HEADER(p);
+ CHECK_PORT_INDEX(p, OUTPORT_INDEX);
+
+ // set in either Loaded state (ComponentSetParam) or Executing state (ComponentSetConfig)
+ mConfigAndroidIntraRefresh = *p;
+
+ // return OMX_ErrorNone if not in Executing state
+ // TODO: return OMX_ErrorIncorrectStateOperation?
+ CHECK_SET_PARAM_STATE();
+
+ OMX_VIDEO_PARAM_INTRAREFRESHTYPE intraRefresh;
+ memset(&intraRefresh, 0, sizeof(intraRefresh));
+ intraRefresh.nSize = sizeof(intraRefresh);
+ intraRefresh.nVersion = p->nVersion;
+ intraRefresh.nPortIndex = mConfigAndroidIntraRefresh.nPortIndex;
+ intraRefresh.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
+ intraRefresh.nAirMBs = 0;
+ intraRefresh.nAirRef = 0;
+
+ if (0 == mConfigAndroidIntraRefresh.nRefreshPeriod) {
+ intraRefresh.nCirMBs = 0;
+ } else {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+
+ if (intraRefresh.nPortIndex < nr_ports) {
+ memcpy(&def, ports[intraRefresh.nPortIndex]->GetPortDefinition(),sizeof(def));
+ } else {
+            LOGW("Failed to set AIR config, bad port index");
+ return OMX_ErrorBadPortIndex;
+ }
+
+ intraRefresh.nCirMBs = divUp((divUp(def.format.video.nFrameWidth, 16u) * divUp(def.format.video.nFrameHeight,16u)), mConfigAndroidIntraRefresh.nRefreshPeriod);
+ }
+ return SetParamVideoIntraRefresh(&intraRefresh);
+}
diff --git a/videocodec/OMXVideoEncoderBase.h b/videocodec/OMXVideoEncoderBase.h
index f2926b3..6ce0b30 100644
--- a/videocodec/OMXVideoEncoderBase.h
+++ b/videocodec/OMXVideoEncoderBase.h
@@ -24,6 +24,9 @@
#include <va/va_android.h>
#include <VideoEncoderHost.h>
+#include <OMX_VideoExt.h>
+#include <OMX_IndexExt.h>
+
#define LOGV(...) ALOGI_IF(mOmxLogLevel, __VA_ARGS__)
#define LOGI(...) ALOGI_IF(mOmxLogLevel, __VA_ARGS__)
#define LOGW(...) ALOGI_IF(mOmxLogLevel, __VA_ARGS__)
@@ -31,6 +34,7 @@
#define LOGE ALOGE
#define LOGV_IF ALOGV_IF
+
using android::sp;
class OMXVideoEncoderBase : public OMXComponentCodecBase {
@@ -74,6 +78,7 @@
DECLARE_HANDLER(OMXVideoEncoderBase, TemporalLayer);
DECLARE_HANDLER(OMXVideoEncoderBase, ConfigVideoBitrate);
DECLARE_HANDLER(OMXVideoEncoderBase, BlackFramePointer);
+ DECLARE_HANDLER(OMXVideoEncoderBase, ConfigAndroidIntraRefresh);
protected:
virtual OMX_ERRORTYPE SetVideoEncoderParam();
@@ -82,6 +87,7 @@
OMX_VIDEO_CONFIG_PRI_INFOTYPE mConfigPriInfo;
OMX_VIDEO_CONFIG_INTEL_BITRATETYPE mConfigIntelBitrate;
OMX_VIDEO_CONFIG_INTEL_AIR mConfigIntelAir;
+ OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE mConfigAndroidIntraRefresh;
OMX_VIDEO_PARAM_INTRAREFRESHTYPE mParamVideoRefresh;
OMX_CONFIG_FRAMERATETYPE mConfigFramerate;
OMX_VIDEO_PARAM_INTEL_ADAPTIVE_SLICE_CONTROL mParamIntelAdaptiveSliceControl;
diff --git a/videocodec/securevideo/moorefield/ProtectedDataBuffer.h b/videocodec/ProtectedDataBuffer.h
similarity index 100%
rename from videocodec/securevideo/moorefield/ProtectedDataBuffer.h
rename to videocodec/ProtectedDataBuffer.h
diff --git a/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.cpp b/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.cpp
index ea4460b..7ec2d48 100755
--- a/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.cpp
+++ b/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.cpp
@@ -16,12 +16,16 @@
//#define LOG_NDEBUG 0
-#define LOG_TAG "OMXVideoDecoder"
+#define LOG_TAG "OMXVideoDecoderAVCSecure"
#include <wrs_omxil_core/log.h>
#include "OMXVideoDecoderAVCSecure.h"
#include <time.h>
#include <signal.h>
#include <pthread.h>
+#include <sys/mman.h>
+#include <cutils/ashmem.h>
+#include <OMX_IntelIndexExt.h>
+#include <OMXComponentCodecBase.h>
#include "LogDumpHelper.h"
#include "VideoFrameInfo.h"
@@ -168,7 +172,9 @@
int ret_value;
OMX_BUFFERHEADERTYPE *pInput = *pBuffers[INPORT_INDEX];
- ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *)pInput->pBuffer;
+ native_handle_t *native_handle = (native_handle_t *)pInput->pBuffer;
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
+
// Check that we are dealing with the right buffer
if (dataBuffer->magic != PROTECTED_DATA_BUFFER_MAGIC)
{
@@ -270,8 +276,8 @@
if (buffer->nOffset != 0) {
ALOGW("buffer offset %u is not zero!!!", buffer->nOffset);
}
-
- ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *)buffer->pBuffer;
+ native_handle_t *native_handle = (native_handle_t *)buffer->pBuffer;
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
if (dataBuffer->clear) {
p->data = dataBuffer->data + buffer->nOffset;
p->size = buffer->nFilledLen;
@@ -346,7 +352,9 @@
ALOGW("buffer offset %u is not zero!!!", buffer->nOffset);
}
- ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *)buffer->pBuffer;
+ native_handle_t *native_handle = (native_handle_t *)buffer->pBuffer;
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
+
p->data = dataBuffer->data;
p->size = sizeof(frame_info_t);
p->flag |= IS_SECURE_DATA;
@@ -371,7 +379,8 @@
ALOGW("PR:buffer offset %u is not zero!!!", buffer->nOffset);
}
- ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *)buffer->pBuffer;
+ native_handle_t *native_handle = (native_handle_t *)buffer->pBuffer;
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
if (dataBuffer->clear) {
p->data = dataBuffer->data + buffer->nOffset;
p->size = buffer->nFilledLen;
@@ -433,14 +442,16 @@
OMX_ERRORTYPE OMXVideoDecoderAVCSecure::PrepareDecodeBuffer(OMX_BUFFERHEADERTYPE *buffer, buffer_retain_t *retain, VideoDecodeBuffer *p) {
OMX_ERRORTYPE ret;
- ret = OMXVideoDecoderBase::PrepareDecodeBuffer(buffer, retain, p);
+ ret = OMXVideoDecoderBase::PrepareDecodeNativeHandleBuffer(buffer, retain, p);
CHECK_RETURN_VALUE("OMXVideoDecoderBase::PrepareDecodeBuffer");
if (buffer->nFilledLen == 0) {
return OMX_ErrorNone;
}
+ native_handle_t *native_handle = (native_handle_t *)buffer->pBuffer;
- ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *)buffer->pBuffer;
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
+
// Check that we are dealing with the right buffer
if (dataBuffer->magic != PROTECTED_DATA_BUFFER_MAGIC)
{
@@ -487,6 +498,7 @@
OMXVideoDecoderBase::BuildHandlerList();
AddHandler(OMX_IndexParamVideoAvc, GetParamVideoAvc, SetParamVideoAvc);
AddHandler(OMX_IndexParamVideoProfileLevelQuerySupported, GetParamVideoAVCProfileLevel, SetParamVideoAVCProfileLevel);
+ AddHandler(static_cast<OMX_INDEXTYPE>(OMX_IndexExtAllocateNativeHandle), GetExtAllocateNativeHandle, SetExtAllocateNativeHandle);
return OMX_ErrorNone;
}
@@ -544,6 +556,22 @@
return OMX_ErrorUnsupportedSetting;
}
+
+OMX_ERRORTYPE OMXVideoDecoderAVCSecure::GetExtAllocateNativeHandle(OMX_PTR pStructure) {
+ (void) pStructure; // unused parameter
+
+ return OMX_ErrorNone;
+
+}
+
+OMX_ERRORTYPE OMXVideoDecoderAVCSecure::SetExtAllocateNativeHandle(OMX_PTR pStructure) {
+ OMX_ERRORTYPE ret;
+ android:: EnableAndroidNativeBuffersParams *p = (android::EnableAndroidNativeBuffersParams *)pStructure;
+ CHECK_TYPE_HEADER(p);
+ CHECK_SET_PARAM_STATE();
+
+ return OMX_ErrorNone;
+}
OMX_U8* OMXVideoDecoderAVCSecure::MemAllocDataBuffer(OMX_U32 nSizeBytes, OMX_PTR pUserData) {
OMXVideoDecoderAVCSecure* p = (OMXVideoDecoderAVCSecure *)pUserData;
if (p) {
@@ -574,22 +602,33 @@
__FUNCTION__, mNumInportBuffers);
return NULL;
}
-
- ProtectedDataBuffer *pBuffer = new ProtectedDataBuffer;
- if (pBuffer == NULL)
- {
- ALOGE("%s: failed to allocate memory.", __FUNCTION__);
+
+
+ int fd = ashmem_create_region("protected-content-buffer", sizeof(ProtectedDataBuffer));
+ if(fd < 0) {
+ ALOGE("Unable to create ashmem region");
return NULL;
}
+ native_handle_t *native = native_handle_create(1, 2);
+
+ native->data[0] = fd;
+ ProtectedDataBuffer *pBuffer =(ProtectedDataBuffer *) mmap(NULL, sizeof(ProtectedDataBuffer), PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+ if (pBuffer == MAP_FAILED) {
+ ALOGE("%s: mmap failed", __FUNCTION__);
+ return NULL;
+ }
+ native->data[1] = (int) pBuffer;
+ // Use a random value as the buffer id
+ native->data[2] = rand();
++mNumInportBuffers;
Init_ProtectedDataBuffer(pBuffer);
pBuffer->size = INPORT_BUFFER_SIZE;
- ALOGV("Allocating buffer = %#x, data = %#x", (uint32_t)pBuffer, (uint32_t)pBuffer->data);
- return (OMX_U8 *) pBuffer;
+ ALOGV("Allocating native=[%p] buffer = %#x, data = %#x data_end= %#x size=%d",(OMX_U8 *)native,(uint32_t)pBuffer, (uint32_t)pBuffer->data, (uint32_t)pBuffer->data + sizeof(ProtectedDataBuffer) ,sizeof(ProtectedDataBuffer));
+ return (OMX_U8 *) native;
}
void OMXVideoDecoderAVCSecure::MemFreeDataBuffer(OMX_U8 *pBuffer) {
@@ -607,15 +646,20 @@
return;
}
- ProtectedDataBuffer *p = (ProtectedDataBuffer*) pBuffer;
- if (p->magic != PROTECTED_DATA_BUFFER_MAGIC)
+ native_handle_t *native_handle = (native_handle_t *) pBuffer;
+
+ ProtectedDataBuffer *dataBuffer = (ProtectedDataBuffer *) native_handle->data[1];
+ if (dataBuffer->magic != PROTECTED_DATA_BUFFER_MAGIC)
{
- ALOGE("%s: attempting to free buffer with a wrong magic 0x%08x", __FUNCTION__, p->magic);
+ ALOGE("%s: attempting to free buffer with a wrong magic 0x%08x", __FUNCTION__, dataBuffer->magic);
return;
}
- ALOGV("Freeing Data buffer %p with data = %p", p, p->data);
- delete p;
+ if (munmap(dataBuffer, sizeof(ProtectedDataBuffer)) != 0) {
+ ALOGE("%s: Failed to munmap %p",__FUNCTION__, dataBuffer);
+ return;
+ }
+ ALOGV("Free databuffer %p with data = %p", dataBuffer, dataBuffer->data);
--mNumInportBuffers;
}
diff --git a/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.h b/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.h
index 2a99ef5..f119335 100755
--- a/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.h
+++ b/videocodec/securevideo/moorefield/OMXVideoDecoderAVCSecure.h
@@ -47,6 +47,7 @@
virtual OMX_ERRORTYPE BuildHandlerList(void);
virtual OMX_ERRORTYPE SetMaxOutputBufferCount(OMX_PARAM_PORTDEFINITIONTYPE *p);
+ DECLARE_HANDLER(OMXVideoDecoderAVCSecure, ExtAllocateNativeHandle);
DECLARE_HANDLER(OMXVideoDecoderAVCSecure, ParamVideoAvc);
DECLARE_HANDLER(OMXVideoDecoderAVCSecure, ParamVideoAVCProfileLevel);