Refine multiple frame decoder support code
BZ: 26883
Add PACKED_FRAME_TYPE for VideoExtensionBuffer type.
Move next-frame info into PackedFrameData (from Andy)
Change-Id: I0964f122bb0eecc5f56355cdb47bd7561cc19251
Signed-off-by: xiao <fengx.xiao@intel.com>
Reviewed-on: http://android.intel.com:8080/37901
Reviewed-by: Qiu, Junhai <junhai.qiu@intel.com>
Reviewed-by: Ding, Haitao <haitao.ding@intel.com>
Tested-by: Ding, Haitao <haitao.ding@intel.com>
Reviewed-by: buildbot <buildbot@intel.com>
Tested-by: buildbot <buildbot@intel.com>
diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index 219a559..c13966e 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -30,12 +30,16 @@
// format specific data, for future extension.
-struct VideoFormatSpecificData {
- int32_t formatType;
- int32_t formatSize;
- uint8_t *formatData;
+struct VideoExtensionBuffer {
+ int32_t extType;
+ int32_t extSize;
+ uint8_t *extData;
};
+typedef enum {
+ PACKED_FRAME_TYPE,
+} VIDEO_EXTENSION_TYPE;
+
struct VideoFrameRawData {
int32_t width;
int32_t height;
@@ -47,6 +51,11 @@
bool own; // own data or derived from surface. If true, the library will release the memory during clearnup
};
+struct PackedFrameData {
+ int64_t timestamp;
+ int32_t offSet;
+};
+
// flags for VideoDecodeBuffer, VideoConfigBuffer and VideoRenderBuffer
typedef enum {
// indicates if sample has discontinuity in time stamp (happen after seeking usually)
@@ -98,10 +107,7 @@
int32_t size;
int64_t timeStamp;
uint32_t flag;
- VideoFormatSpecificData *ext;
- bool hasNext; // for multiple frame in a buffer
- int64_t nextTimeStamp; // next frame timestamp
- int32_t offSet; // next frame offset
+ VideoExtensionBuffer *ext;
};
@@ -118,7 +124,7 @@
void *graphicBufferHandler[ MAX_GRAPHIC_NUM ];
uint32_t graphicBufferStride;
uint32_t graphicBufferColorFormat;
- VideoFormatSpecificData *ext;
+ VideoExtensionBuffer *ext;
void* nativeWindow;
};
@@ -166,7 +172,7 @@
int32_t bitrate;
int32_t framerateNom;
int32_t framerateDenom;
- VideoFormatSpecificData *ext;
+ VideoExtensionBuffer *ext;
};
// TODO: categorize the follow errors as fatal and non-fatal.
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index 4bb73f4..d8243be 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -78,6 +78,7 @@
} else {
mIsSyncFrame = false;
}
+ buffer->ext = NULL;
status = VideoDecoderBase::parseBuffer(
buffer->data,
buffer->size,
@@ -125,14 +126,24 @@
CHECK_STATUS("endDecodingFrame");
// start decoding a new frame
- status = beginDecodingFrame(data, buffer);
- if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) {
+ status = beginDecodingFrame(data);
+ if (status == DECODE_MULTIPLE_FRAME) {
+ buffer->ext = &mExtensionBuffer;
+ mExtensionBuffer.extType = PACKED_FRAME_TYPE;
+ mExtensionBuffer.extSize = sizeof(mPackedFrame);
+ mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
+ } else if (status != DECODE_SUCCESS) {
endDecodingFrame(true);
}
CHECK_STATUS("beginDecodingFrame");
} else {
- status = continueDecodingFrame(data, buffer);
- if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) {
+ status = continueDecodingFrame(data);
+ if (status == DECODE_MULTIPLE_FRAME) {
+ buffer->ext = &mExtensionBuffer;
+ mExtensionBuffer.extType = PACKED_FRAME_TYPE;
+ mExtensionBuffer.extSize = sizeof(mPackedFrame);
+ mExtensionBuffer.extData = (uint8_t*)&mPackedFrame;
+ } else if (status != DECODE_SUCCESS) {
endDecodingFrame(true);
}
CHECK_STATUS("continueDecodingFrame");
@@ -147,7 +158,7 @@
return DECODE_SUCCESS;
}
-Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) {
+Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) {
Decode_Status status = DECODE_SUCCESS;
vbp_picture_data_mp42 *picData = data->picture_data;
@@ -224,12 +235,12 @@
}
}
// all sanity checks pass, continue decoding through continueDecodingFrame
- status = continueDecodingFrame(data, buffer);
+ status = continueDecodingFrame(data);
}
return status;
}
-Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) {
+Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
Decode_Status status = DECODE_SUCCESS;
VAStatus vaStatus = VA_STATUS_SUCCESS;
bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
@@ -285,7 +296,7 @@
increment = increment * 1e6 / picParam->vop_time_increment_resolution;
mAcquiredBuffer->renderBuffer.timeStamp += increment;
if (useGraphicBuffer){
- buffer->nextTimeStamp = mCurrentPTS;
+ mPackedFrame.timestamp = mCurrentPTS;
mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp;
}
}
@@ -298,7 +309,7 @@
//convert to micro-second
increment = increment * 1e6 / picParam->vop_time_increment_resolution;
if (useGraphicBuffer) {
- buffer->nextTimeStamp = mCurrentPTS + increment;
+ mPackedFrame.timestamp = mCurrentPTS + increment;
}
else {
mCurrentPTS += increment;
@@ -306,7 +317,7 @@
} else {
if (useGraphicBuffer) {
- buffer->nextTimeStamp = mCurrentPTS + 30000;
+ mPackedFrame.timestamp = mCurrentPTS + 30000;
}
else {
mCurrentPTS += 30000;
@@ -319,9 +330,8 @@
mExpectingNVOP = false;
}
if (useGraphicBuffer) {
- buffer->hasNext = true;
- buffer->offSet = data->frameSize;
- VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",data->frameSize,buffer->nextTimeStamp);
+ mPackedFrame.offSet = data->frameSize;
+ VTRACE("Report OMX to handle for Multiple frame offset=%d time=%lld",data->frameSize,mPackedFrame.timestamp);
return DECODE_MULTIPLE_FRAME;
}
}
diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h
index bfa8ca2..234eaac 100644
--- a/videodecoder/VideoDecoderMPEG4.h
+++ b/videodecoder/VideoDecoderMPEG4.h
@@ -40,8 +40,8 @@
private:
Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data);
- Decode_Status beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer);
- Decode_Status continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer);
+ Decode_Status beginDecodingFrame(vbp_data_mp42 *data);
+ Decode_Status continueDecodingFrame(vbp_data_mp42 *data);
Decode_Status decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData);
Decode_Status setReference(VAPictureParameterBufferMPEG4 *picParam);
Decode_Status startVA(vbp_data_mp42 *data);
@@ -65,6 +65,8 @@
bool mSendIQMatrixBuf; // indicate if iq_matrix_buffer is sent to driver
int32_t mLastVOPCodingType;
bool mIsSyncFrame; // indicate if it is SyncFrame in container
+ VideoExtensionBuffer mExtensionBuffer;
+ PackedFrameData mPackedFrame;
};