/* INTEL CONFIDENTIAL
* Copyright (c) 2009 Intel Corporation. All rights reserved.
*
* The source code contained or described herein and all documents
* related to the source code ("Material") are owned by Intel
* Corporation or its suppliers or licensors. Title to the
* Material remains with Intel Corporation or its suppliers and
* licensors. The Material contains trade secrets and proprietary
* and confidential information of Intel or its suppliers and
* licensors. The Material is protected by worldwide copyright and
* trade secret laws and treaty provisions. No part of the Material
* may be used, copied, reproduced, modified, published, uploaded,
* posted, transmitted, distributed, or disclosed in any way without
* Intel's prior express written permission.
*
* No license under any patent, copyright, trade secret or other
* intellectual property right is granted to or conferred upon you
* by disclosure or delivery of the Materials, either expressly, by
* implication, inducement, estoppel or otherwise. Any license
* under such intellectual property rights must be express and
* approved by Intel in writing.
*
*/
#include "VideoDecoderBase.h"
#include "VideoDecoderTrace.h"
#include <string.h>
#include <va/va_android.h>
#include <va/va_tpi.h>
#define INVALID_PTS ((uint64_t)-1)
#define INVALID_POC ((uint32_t)-1)
#define ANDROID_DISPLAY_HANDLE 0x18C34078
// TODO: check what the best number is. It must be at least 2 to support one backward reference frame.
// Currently set to 3 to support 2 backward reference frames. This value is used for AVC frame reordering only.
// e.g. with decoded POCs 4P, 8P, 10P, 6B and mNextOutputPOC = 5, the window must hold
// 8P, 10P and 6B before 6B can be output.
#define OUTPUT_WINDOW_SIZE 3
VideoDecoderBase::VideoDecoderBase(const char *mimeType, _vbp_parser_type type)
: mDisplay(NULL),
mVADisplay(NULL),
mVAContext(VA_INVALID_ID),
mVAConfig(VA_INVALID_ID),
mVAStarted(false),
mCurrentPTS(INVALID_PTS),
mAcquiredBuffer(NULL),
mLastReference(NULL),
mForwardReference(NULL),
mDecodingFrame(false),
mSizeChanged(false),
// private member variables
mFirstFrame(true),
mLowDelay(false),
mRawOutput(false),
mManageReference(true),
mOutputMethod(OUTPUT_BY_PCT),
mOutputWindowSize(OUTPUT_WINDOW_SIZE),
mNumSurfaces(0),
mSurfaceBuffers(NULL),
mOutputHead(NULL),
mOutputTail(NULL),
mSurfaces(NULL),
mSurfaceUserPtr(NULL),
mSurfaceAcquirePos(0),
mNextOutputPOC(0),
mParserType(type),
mParserHandle(NULL) {
memset(&mVideoFormatInfo, 0, sizeof(VideoFormatInfo));
memset(&mConfigBuffer, 0, sizeof(mConfigBuffer));
mVideoFormatInfo.mimeType = strdup(mimeType);
}
VideoDecoderBase::~VideoDecoderBase() {
stop();
free(mVideoFormatInfo.mimeType);
}
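// Typical call sequence, as a sketch (the subclass name "VideoDecoderAVC" is
// illustrative; concrete subclasses implement the per-codec decode path on top
// of this base class):
//   VideoDecoderBase *decoder = new VideoDecoderAVC("video/avc");
//   decoder->start(&configBuffer);        // opens the parser, caches the config
//   // ... feed bitstream buffers; the subclass calls acquireSurfaceBuffer(),
//   // vaBeginPicture/vaRenderPicture, then endDecodingFrame() ...
//   const VideoRenderBuffer *frame = decoder->getOutput(false);
//   decoder->flush();
//   decoder->stop();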
Decode_Status VideoDecoderBase::start(VideoConfigBuffer *buffer) {
if (buffer == NULL) {
return DECODE_INVALID_DATA;
}
if (mParserHandle != NULL) {
WTRACE("Decoder has already started.");
return DECODE_SUCCESS;
}
if ((int32_t)mParserType != VBP_INVALID) {
if (vbp_open(mParserType, &mParserHandle) != VBP_OK) {
ETRACE("Failed to open VBP parser.");
return DECODE_NO_PARSER;
}
}
// keep a copy of the config buffer (metadata only); it can be used to override VA setup parameters.
mConfigBuffer = *buffer;
mConfigBuffer.data = NULL;
mConfigBuffer.size = 0;
mVideoFormatInfo.width = buffer->width;
mVideoFormatInfo.height = buffer->height;
mLowDelay = buffer->flag & WANT_LOW_DELAY;
mRawOutput = buffer->flag & WANT_RAW_OUTPUT;
if (mRawOutput) {
WTRACE("Output is raw data.");
}
return DECODE_SUCCESS;
}
void VideoDecoderBase::stop(void) {
terminateVA();
mCurrentPTS = INVALID_PTS;
mAcquiredBuffer = NULL;
mLastReference = NULL;
mForwardReference = NULL;
mDecodingFrame = false;
mSizeChanged = false;
// private variables
mFirstFrame = true;
mLowDelay = false;
mRawOutput = false;
mNumSurfaces = 0;
mSurfaceAcquirePos = 0;
mNextOutputPOC = 0;
mVideoFormatInfo.valid = false;
if (mParserHandle) {
vbp_close(mParserHandle);
mParserHandle = NULL;
}
}
void VideoDecoderBase::flush(void) {
if (mVAStarted == false) {
// nothing to flush at this stage
return;
}
endDecodingFrame(true);
// avoid setting mSurfaceAcquirePos to 0 as it may cause tearing
// (surface is still being rendered)
mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
mNextOutputPOC = 0;
mCurrentPTS = INVALID_PTS;
mAcquiredBuffer = NULL;
mLastReference = NULL;
mForwardReference = NULL;
mOutputHead = NULL;
mOutputTail = NULL;
mDecodingFrame = false;
mSizeChanged = false;
mFirstFrame = true;
// initialize surface buffer without resetting mapped/raw data
initSurfaceBuffer(false);
}
const VideoFormatInfo* VideoDecoderBase::getFormatInfo(void) {
return &mVideoFormatInfo;
}
const VideoRenderBuffer* VideoDecoderBase::getOutput(bool draining) {
if (mVAStarted == false) {
return NULL;
}
if (draining) {
// complete decoding of the last frame and ignore the return value
endDecodingFrame(false);
}
if (mOutputHead == NULL) {
return NULL;
}
// output by position (the first buffer)
VideoSurfaceBuffer *outputByPos = mOutputHead;
if (mLowDelay || mFirstFrame) {
mOutputHead = mOutputHead->next;
if (mOutputHead == NULL) {
mOutputTail = NULL;
}
mFirstFrame = false;
mNextOutputPOC = outputByPos->pictureOrder + 1;
//VTRACE("Output POC %u for display (pts = %.2f)", outputByPos->pictureOrder, outputByPos->renderBuffer.timeStamp/1E6);
return &(outputByPos->renderBuffer);
}
// output by presentation time stamp (the smallest PTS)
VideoSurfaceBuffer *outputByPts = NULL;
// output by picture coding type (PCT) or by picture order count (POC)
// for output by PCT:
//     if there is more than one reference frame, the first reference frame is output;
//     otherwise, output the first non-reference frame, if any.
// for output by POC:
//     output the picture whose POC matches mNextOutputPOC, or, once the output
//     window is full, the picture with the smallest POC at or above mNextOutputPOC.
VideoSurfaceBuffer *output = NULL;
VideoSurfaceBuffer *p = mOutputHead;
int32_t reference = 0;
int32_t count = 0;
uint64_t pts = INVALID_PTS;
uint32_t poc = INVALID_POC;
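// scan the whole queue: track the buffer with the smallest PTS (outputByPts) and,
// depending on the output method, either count reference frames (PCT) or search
// for the smallest POC at or above mNextOutputPOC (POC)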
do {
if ((uint64_t)(p->renderBuffer.timeStamp) <= pts) {
// find buffer with the smallest PTS
pts = p->renderBuffer.timeStamp;
outputByPts = p;
}
if (mOutputMethod == OUTPUT_BY_PCT) {
if (p->referenceFrame) {
reference++;
} else if (output == NULL) {
// first non-reference frame
output = p;
}
if (reference > 1 && output == NULL) {
// first reference frame
output = outputByPos;
}
} else if (mOutputMethod == OUTPUT_BY_POC) {
count++;
if (p->pictureOrder == 0) {
// any picture before this POC (new IDR) must be output
if (output == NULL) {
output = p;
mNextOutputPOC = 1;
} else {
mNextOutputPOC = output->pictureOrder + 1;
}
break;
}
if (p->pictureOrder < poc && p->pictureOrder >= mNextOutputPOC) {
// this POC meets the output criteria.
poc = p->pictureOrder;
output = p;
}
if (poc == mNextOutputPOC || count == mOutputWindowSize) {
if (output != NULL) {
// this indicates two cases:
// 1) the next output POC is found.
// 2) output queue is full and there is at least one buffer meeting the output criteria.
mNextOutputPOC = output->pictureOrder + 1;
break;
} else {
// the output queue is full and no buffer in it meets the output criteria;
// restart the scan with relaxed criteria (next output POC reset to 0)
mNextOutputPOC = 0;
count = 0;
reference = 0;
poc = INVALID_POC;
pts = INVALID_PTS;
p = mOutputHead;
continue;
}
}
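// reached the tail without meeting the output criteria and without filling
// the window: hold all buffers and wait for more decoded pictures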
if (p->next == NULL) {
output = NULL;
}
} else {
ETRACE("Invalid output method.");
return NULL;
}
p = p->next;
} while (p != NULL);
if (output != NULL) {
if (output != outputByPts) {
// swap time stamps: the picture chosen for output (display order) may not carry
// the smallest remaining PTS (decode order); swapping keeps output PTS monotonic
uint64_t ts = output->renderBuffer.timeStamp;
output->renderBuffer.timeStamp = outputByPts->renderBuffer.timeStamp;
outputByPts->renderBuffer.timeStamp = ts;
}
if (output != outputByPos) {
// remove this output from middle or end of the list
p = outputByPos;
while (p->next != output) {
p = p->next;
}
p->next = output->next;
if (mOutputTail == output) {
mOutputTail = p;
}
} else {
// remove this output from head of the list
mOutputHead = mOutputHead->next;
if (mOutputHead == NULL) {
mOutputTail = NULL;
}
}
//VTRACE("Output POC %u for display (pts = %.2f)", output->pictureOrder, output->renderBuffer.timeStamp/1E6);
return &(output->renderBuffer);
}
if (draining) {
// output buffer in the head of list
mOutputHead = mOutputHead->next;
if (mOutputHead == NULL) {
mOutputTail = NULL;
}
return &(outputByPos->renderBuffer);
}
return NULL;
}
Decode_Status VideoDecoderBase::acquireSurfaceBuffer(void) {
if (mVAStarted == false) {
return DECODE_FAIL;
}
if (mAcquiredBuffer != NULL) {
ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
return DECODE_FAIL;
}
int nextAcquire = mSurfaceAcquirePos;
VideoSurfaceBuffer *acquiredBuffer = NULL;
bool acquired = false;
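// round-robin scan starting from the last acquire position; a buffer is free
// when it is no longer held as a reference and its rendering is done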
while (acquired == false) {
acquiredBuffer = mSurfaceBuffers + nextAcquire;
if (acquiredBuffer->asReferernce == false && acquiredBuffer->renderBuffer.renderDone == true) {
// this is a potential buffer for acquisition; check whether it is referenced by another surface buffer (frame skipping)
VideoSurfaceBuffer *temp;
acquired = true;
for (int i = 0; i < mNumSurfaces; i++) {
if (i == nextAcquire) {
continue;
}
temp = mSurfaceBuffers + i;
// use mSurfaces[nextAcquire] instead of acquiredBuffer->renderBuffer.surface as it is the actual surface to use.
if (temp->renderBuffer.surface == mSurfaces[nextAcquire] &&
temp->renderBuffer.renderDone == false) {
ITRACE("Surface is referenced by other surface buffer.");
acquired = false;
break;
}
}
}
if (acquired) {
break;
}
nextAcquire++;
if (nextAcquire == mNumSurfaces) {
nextAcquire = 0;
}
if (nextAcquire == mSurfaceAcquirePos) {
return DECODE_NO_SURFACE;
}
}
if (acquired == false) {
return DECODE_NO_SURFACE;
}
mAcquiredBuffer = acquiredBuffer;
mSurfaceAcquirePos = nextAcquire;
// set the surface again as it may have been reset by a skipped frame.
// a skipped frame is a "non-coded frame": the decoder duplicates the previous reference frame as the output.
mAcquiredBuffer->renderBuffer.surface = mSurfaces[mSurfaceAcquirePos];
if (mSurfaceUserPtr && mAcquiredBuffer->mappedData) {
mAcquiredBuffer->mappedData->data = mSurfaceUserPtr[mSurfaceAcquirePos];
}
mAcquiredBuffer->renderBuffer.timeStamp = INVALID_PTS;
mAcquiredBuffer->renderBuffer.display = mVADisplay;
mAcquiredBuffer->renderBuffer.flag = 0;
mAcquiredBuffer->renderBuffer.renderDone = true;
mAcquiredBuffer->asReferernce = false;
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::outputSurfaceBuffer(void) {
Decode_Status status;
if (mAcquiredBuffer == NULL) {
ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
return DECODE_FAIL;
}
if (mRawOutput) {
status = getRawDataFromSurface();
CHECK_STATUS();
}
// frame is successfully decoded to the current surface; it is ready for output
mAcquiredBuffer->renderBuffer.renderDone = false;
// the decoder must set the asReferernce and referenceFrame flags properly
// update reference frames
if (mAcquiredBuffer->referenceFrame) {
if (mManageReference) {
// manage references for MPEG4/H.263/WMV;
// AVC manages reference frames in a different way
if (mForwardReference != NULL) {
// this forward reference is no longer needed
mForwardReference->asReferernce = false;
}
// forward reference for either P or B frame prediction
mForwardReference = mLastReference;
mAcquiredBuffer->asReferernce = true;
}
// the last reference frame.
mLastReference = mAcquiredBuffer;
}
// add to the output list
if (mOutputHead == NULL) {
mOutputHead = mAcquiredBuffer;
} else {
mOutputTail->next = mAcquiredBuffer;
}
mOutputTail = mAcquiredBuffer;
mOutputTail->next = NULL;
//VTRACE("Pushing POC %u to queue (pts = %.2f)", mAcquiredBuffer->pictureOrder, mAcquiredBuffer->renderBuffer.timeStamp/1E6);
mAcquiredBuffer = NULL;
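// advance the acquire position so the next acquisition starts from the following surface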
mSurfaceAcquirePos = (mSurfaceAcquirePos + 1) % mNumSurfaces;
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::releaseSurfaceBuffer(void) {
if (mAcquiredBuffer == NULL) {
// this is a harmless error
return DECODE_SUCCESS;
}
// frame is not decoded into the acquired buffer; the current surface is invalid and can't be output.
mAcquiredBuffer->asReferernce = false;
mAcquiredBuffer->renderBuffer.renderDone = true;
mAcquiredBuffer = NULL;
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::endDecodingFrame(bool dropFrame) {
Decode_Status status = DECODE_SUCCESS;
VAStatus vaStatus;
if (mDecodingFrame == false) {
if (mAcquiredBuffer != NULL) {
//ETRACE("mAcquiredBuffer is not NULL. Implementation bug.");
releaseSurfaceBuffer();
status = DECODE_FAIL;
}
return status;
}
// return through exit label to reset mDecodingFrame
if (mAcquiredBuffer == NULL) {
ETRACE("mAcquiredBuffer is NULL. Implementation bug.");
status = DECODE_FAIL;
goto exit;
}
vaStatus = vaEndPicture(mVADisplay, mVAContext);
if (vaStatus != VA_STATUS_SUCCESS) {
releaseSurfaceBuffer();
ETRACE("vaEndPicture failed. vaStatus = %d", vaStatus);
status = DECODE_DRIVER_FAIL;
goto exit;
}
if (dropFrame) {
// we are asked to drop this decoded picture
releaseSurfaceBuffer();
goto exit;
}
status = outputSurfaceBuffer();
// fall through
exit:
mDecodingFrame = false;
return status;
}
Decode_Status VideoDecoderBase::setupVA(int32_t numSurface, VAProfile profile) {
VAStatus vaStatus = VA_STATUS_SUCCESS;
Decode_Status status;
VAConfigAttrib attrib;
if (mVAStarted) {
return DECODE_SUCCESS;
}
// TODO: validate profile
if (numSurface == 0) {
return DECODE_FAIL;
}
if (mConfigBuffer.flag & HAS_MINIMUM_SURFACE_NUMBER) {
if (numSurface < mConfigBuffer.surfaceNumber) {
LOGW("surface to allocated %d is less than minimum number required %d",
numSurface, mConfigBuffer.surfaceNumber);
numSurface = mConfigBuffer.surfaceNumber;
}
}
if (mVADisplay != NULL) {
ETRACE("VA is partially started.");
return DECODE_FAIL;
}
// Display is defined as "unsigned int"
mDisplay = new Display;
*mDisplay = ANDROID_DISPLAY_HANDLE;
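// the "display" handed to libva is just a pointer to this Android magic number;
// vaGetDisplay (va_android) resolves it to a VADisplay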
mVADisplay = vaGetDisplay(mDisplay);
if (mVADisplay == NULL) {
ETRACE("vaGetDisplay failed.");
return DECODE_DRIVER_FAIL;
}
int majorVersion, minorVersion;
vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
CHECK_VA_STATUS("vaInitialize");
if ((int32_t)profile != VAProfileSoftwareDecoding) {
// request the render-target format attribute (YUV 4:2:0)
attrib.type = VAConfigAttribRTFormat;
attrib.value = VA_RT_FORMAT_YUV420;
vaStatus = vaCreateConfig(
mVADisplay,
profile,
VAEntrypointVLD,
&attrib,
1,
&mVAConfig);
CHECK_VA_STATUS("vaCreateConfig");
}
mNumSurfaces = numSurface;
mSurfaces = new VASurfaceID [mNumSurfaces];
if (mSurfaces == NULL) {
return DECODE_MEMORY_FAIL;
}
vaStatus = vaCreateSurfaces(
mVADisplay,
mVideoFormatInfo.width,
mVideoFormatInfo.height,
VA_RT_FORMAT_YUV420,
mNumSurfaces,
mSurfaces);
CHECK_VA_STATUS("vaCreateSurfaces");
mVideoFormatInfo.surfaceWidth = mVideoFormatInfo.width;
mVideoFormatInfo.surfaceHeight = mVideoFormatInfo.height;
mVideoFormatInfo.surfaceNumber = mNumSurfaces;
if ((int32_t)profile != VAProfileSoftwareDecoding) {
vaStatus = vaCreateContext(
mVADisplay,
mVAConfig,
mVideoFormatInfo.width,
mVideoFormatInfo.height,
0,
mSurfaces,
mNumSurfaces,
&mVAContext);
CHECK_VA_STATUS("vaCreateContext");
}
mSurfaceBuffers = new VideoSurfaceBuffer [mNumSurfaces];
if (mSurfaceBuffers == NULL) {
return DECODE_MEMORY_FAIL;
}
initSurfaceBuffer(true);
if ((int32_t)profile == VAProfileSoftwareDecoding) {
// derive user pointer from surface for direct access
status = mapSurface();
CHECK_STATUS("mapSurface")
}
mVAStarted = true;
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::terminateVA(void) {
if (mSurfaceBuffers) {
for (int32_t i = 0; i < mNumSurfaces; i++) {
if (mSurfaceBuffers[i].renderBuffer.rawData) {
if (mSurfaceBuffers[i].renderBuffer.rawData->data) {
delete [] mSurfaceBuffers[i].renderBuffer.rawData->data;
}
delete mSurfaceBuffers[i].renderBuffer.rawData;
}
if (mSurfaceBuffers[i].mappedData) {
// don't delete data pointer as it is mapped from surface
delete mSurfaceBuffers[i].mappedData;
}
}
delete [] mSurfaceBuffers;
mSurfaceBuffers = NULL;
}
if (mSurfaceUserPtr) {
delete [] mSurfaceUserPtr;
mSurfaceUserPtr = NULL;
}
if (mSurfaces) {
vaDestroySurfaces(mVADisplay, mSurfaces, mNumSurfaces);
delete [] mSurfaces;
mSurfaces = NULL;
}
if (mVAContext != VA_INVALID_ID) {
vaDestroyContext(mVADisplay, mVAContext);
mVAContext = VA_INVALID_ID;
}
if (mVAConfig != VA_INVALID_ID) {
vaDestroyConfig(mVADisplay, mVAConfig);
mVAConfig = VA_INVALID_ID;
}
if (mVADisplay) {
vaTerminate(mVADisplay);
mVADisplay = NULL;
}
if (mDisplay) {
delete mDisplay;
mDisplay = NULL;
}
mVAStarted = false;
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::parseBuffer(uint8_t *buffer, int32_t size, bool config, void** vbpData) {
// DON'T check if mVAStarted == true
if (mParserHandle == NULL) {
return DECODE_NO_PARSER;
}
uint32_t vbpStatus;
if (buffer == NULL || size <= 0) {
return DECODE_INVALID_DATA;
}
uint8_t configFlag = config ? 1 : 0;
vbpStatus = vbp_parse(mParserHandle, buffer, size, configFlag);
CHECK_VBP_STATUS("vbp_parse");
vbpStatus = vbp_query(mParserHandle, vbpData);
CHECK_VBP_STATUS("vbp_query");
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::mapSurface(void){
VAStatus vaStatus = VA_STATUS_SUCCESS;
VAImage image;
uint8_t *userPtr;
mSurfaceUserPtr = new uint8_t* [mNumSurfaces];
if (mSurfaceUserPtr == NULL) {
return DECODE_MEMORY_FAIL;
}
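// derive a VAImage from each surface and keep its mapped user pointer; a software
// decoder writes decoded frames through these pointers (see acquireSurfaceBuffer)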
for (int32_t i = 0; i< mNumSurfaces; i++) {
vaStatus = vaDeriveImage(mVADisplay, mSurfaces[i], &image);
CHECK_VA_STATUS("vaDeriveImage");
vaStatus = vaMapBuffer(mVADisplay, image.buf, (void**)&userPtr);
CHECK_VA_STATUS("vaMapBuffer");
mSurfaceUserPtr[i] = userPtr;
mSurfaceBuffers[i].mappedData = new VideoFrameRawData;
if (mSurfaceBuffers[i].mappedData == NULL) {
return DECODE_MEMORY_FAIL;
}
mSurfaceBuffers[i].mappedData->own = false; // derived from surface so can't be released
mSurfaceBuffers[i].mappedData->data = NULL; // specified during acquireSurfaceBuffer
mSurfaceBuffers[i].mappedData->fourcc = image.format.fourcc;
mSurfaceBuffers[i].mappedData->width = mVideoFormatInfo.width;
mSurfaceBuffers[i].mappedData->height = mVideoFormatInfo.height;
mSurfaceBuffers[i].mappedData->size = image.data_size;
for (int pi = 0; pi < 3; pi++) {
mSurfaceBuffers[i].mappedData->pitch[pi] = image.pitches[pi];
mSurfaceBuffers[i].mappedData->offset[pi] = image.offsets[pi];
}
// debug information
if (image.pitches[0] != image.pitches[1] ||
image.width != mVideoFormatInfo.width ||
image.height != mVideoFormatInfo.height ||
image.offsets[0] != 0) {
WTRACE("Unexpected VAImage format, w = %d, h = %d, offset = %d", image.width, image.height, image.offsets[0]);
}
// TODO: do we need to unmap the buffer?
//vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
//CHECK_VA_STATUS("vaUnmapBuffer");
vaStatus = vaDestroyImage(mVADisplay, image.image_id);
CHECK_VA_STATUS("vaDestroyImage");
}
return DECODE_SUCCESS;
}
Decode_Status VideoDecoderBase::getRawDataFromSurface(void) {
if (mAcquiredBuffer == NULL) {
return DECODE_FAIL;
}
VAStatus vaStatus;
VAImageFormat imageFormat;
VAImage vaImage;
vaStatus = vaSyncSurface(mVADisplay, mAcquiredBuffer->renderBuffer.surface);
CHECK_VA_STATUS("vaSyncSurface");
vaImage.image_id = VA_INVALID_ID;
// driver currently only supports NV12 and IYUV formats.
// byte_order information is from driver and hard-coded here
imageFormat.fourcc = VA_FOURCC_NV12;
imageFormat.byte_order = VA_LSB_FIRST;
imageFormat.bits_per_pixel = 16;
vaStatus = vaCreateImage(
mVADisplay,
&imageFormat,
mVideoFormatInfo.width,
mVideoFormatInfo.height,
&vaImage);
CHECK_VA_STATUS("vaCreateImage");
vaStatus = vaGetImage(
mVADisplay,
mAcquiredBuffer->renderBuffer.surface,
0,
0,
vaImage.width,
vaImage.height,
vaImage.image_id);
CHECK_VA_STATUS("vaGetImage");
void *pBuf = NULL;
vaStatus = vaMapBuffer(mVADisplay, vaImage.buf, &pBuf);
CHECK_VA_STATUS("vaMapBuffer");
VideoFrameRawData *rawData = NULL;
if (mAcquiredBuffer->renderBuffer.rawData == NULL) {
rawData = new VideoFrameRawData;
if (rawData == NULL) {
return DECODE_MEMORY_FAIL;
}
memset(rawData, 0, sizeof(VideoFrameRawData));
mAcquiredBuffer->renderBuffer.rawData = rawData;
} else {
rawData = mAcquiredBuffer->renderBuffer.rawData;
}
// size in NV12 format
int32_t size = mVideoFormatInfo.width * mVideoFormatInfo.height * 3/2;
if (rawData->data != NULL && rawData->size != size) {
delete [] rawData->data;
rawData->data = NULL;
rawData->size = 0;
}
if (rawData->data == NULL) {
rawData->data = new uint8_t [size];
if (rawData->data == NULL) {
return DECODE_MEMORY_FAIL;
}
}
rawData->own = true; // allocated by this library
rawData->width = mVideoFormatInfo.width;
rawData->height = mVideoFormatInfo.height;
rawData->pitch[0] = mVideoFormatInfo.width;
rawData->pitch[1] = mVideoFormatInfo.width;
rawData->pitch[2] = 0; // interleaved U/V, two planes
rawData->offset[0] = 0;
rawData->offset[1] = mVideoFormatInfo.width * mVideoFormatInfo.height;
rawData->offset[2] = mVideoFormatInfo.width * mVideoFormatInfo.height * 3/2;
rawData->size = size;;
rawData->fourcc = 'NV12';
if (size == (int32_t)vaImage.data_size) {
memcpy(rawData->data, pBuf, size);
} else {
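// the surface pitch is larger than the frame width (alignment padding), so copy
// plane by plane, row by row, skipping the padding at the end of each row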
// copy Y data
uint8_t *src = (uint8_t*)pBuf;
uint8_t *dst = rawData->data;
int32_t row = 0;
for (row = 0; row < mVideoFormatInfo.height; row++) {
memcpy(dst, src, mVideoFormatInfo.width);
dst += mVideoFormatInfo.width;
src += vaImage.pitches[0];
}
// copy interleaved U and V data
src = (uint8_t*)pBuf + vaImage.offsets[1];
for (row = 0; row < mVideoFormatInfo.height/2; row++) {
memcpy(dst, src, mVideoFormatInfo.width);
dst += mVideoFormatInfo.width;
src += vaImage.pitches[1];
}
}
// TODO: image may not get destroyed if error happens.
if (vaImage.image_id != VA_INVALID_ID) {
vaDestroyImage(mVADisplay, vaImage.image_id);
}
return DECODE_SUCCESS;
}
void VideoDecoderBase::initSurfaceBuffer(bool reset) {
for (int32_t i = 0; i < mNumSurfaces; i++) {
mSurfaceBuffers[i].renderBuffer.display = mVADisplay;
mSurfaceBuffers[i].renderBuffer.surface = VA_INVALID_SURFACE; // set in acquireSurfaceBuffer
mSurfaceBuffers[i].renderBuffer.flag = 0;
mSurfaceBuffers[i].renderBuffer.scanFormat = VA_FRAME_PICTURE;
mSurfaceBuffers[i].renderBuffer.timeStamp = 0;
mSurfaceBuffers[i].renderBuffer.renderDone = true;
mSurfaceBuffers[i].referenceFrame = false;
mSurfaceBuffers[i].asReferernce = false;
mSurfaceBuffers[i].pictureOrder = 0;
mSurfaceBuffers[i].next = NULL;
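// rawData and mappedData are cleared only on a full reset; flush() passes
// reset = false so raw output buffers and mapped pointers survive a flush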
if (reset == true) {
mSurfaceBuffers[i].renderBuffer.rawData = NULL;
mSurfaceBuffers[i].mappedData = NULL;
}
}
}