VideoDecoderMPEG4: Support multiple frames in decode buffer

BZ: 21585

Add mix handling for multiple frames in the decode buffer.
Return the next frame's timestamp, offset and DECODE_MULTIPLE_FRAME
to the OMX IL for handling.
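
For reference, a minimal sketch (not part of this patch) of how the OMX IL
caller might consume the new status. The retry loop, the decoder/buffer
variables, and the data/size fields of VideoDecodeBuffer used below are
assumptions for illustration, not the actual OMX component code:

    // Hypothetical caller-side loop; assumes 'decoder' points to a
    // VideoDecoderMPEG4 and 'buffer' is a filled VideoDecodeBuffer whose
    // data/size members describe the compressed input (assumption).
    Decode_Status status;
    do {
        status = decoder->decode(&buffer);
        if (status == DECODE_MULTIPLE_FRAME && buffer.hasNext) {
            // Only the first frame was consumed; resubmit the remainder
            // starting at the reported offset with the next timestamp.
            buffer.data += buffer.offSet;
            buffer.size -= buffer.offSet;
            buffer.timeStamp = buffer.nextTimeStamp;
        }
    } while (status == DECODE_MULTIPLE_FRAME);
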
Signed-off-by: xiao <fengx.xiao@intel.com>

Change-Id: Ic64e5c24cd5f72ab2a7007f8ab274af2993f8cb3
Reviewed-on: http://android.intel.com:8080/35189
Reviewed-by: Ding, Haitao <haitao.ding@intel.com>
Tested-by: Ding, Haitao <haitao.ding@intel.com>
Reviewed-by: buildbot <buildbot@intel.com>
Tested-by: buildbot <buildbot@intel.com>
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
index 38e2a05..7037fd0 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -113,6 +113,7 @@
     uint32 number_pictures;
 
     vbp_picture_data_mp42 *picture_data;
+    uint32 frameSize; // first frame size in the buffer; used when a buffer contains multiple frames
 
 } vbp_data_mp42;
 
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index 249a9f8..ccc0ab5 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -632,6 +632,13 @@
 
 void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
 {
+    if (list_index == 0) {
+        // for the first list item
+        viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+        vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+        query_data->frameSize = parent->list.total_bytes; // record the first frame's size
+    }
+
     vbp_fill_codec_data(pcontext);
     vbp_fill_picture_param(pcontext, 1);
     vbp_fill_iq_matrix_buffer(pcontext);
diff --git a/videodecoder/VideoDecoderBase.cpp b/videodecoder/VideoDecoderBase.cpp
index 1cf8e25..341a092 100644
--- a/videodecoder/VideoDecoderBase.cpp
+++ b/videodecoder/VideoDecoderBase.cpp
@@ -610,6 +610,7 @@
 
     if (dropFrame) {
         // we are asked to drop this decoded picture
+        LOGW("Frame dropped");
         releaseSurfaceBuffer();
         goto exit;
     }
diff --git a/videodecoder/VideoDecoderDefs.h b/videodecoder/VideoDecoderDefs.h
index f4cea28..561a1aa 100644
--- a/videodecoder/VideoDecoderDefs.h
+++ b/videodecoder/VideoDecoderDefs.h
@@ -99,6 +99,9 @@
     int64_t timeStamp;
     uint32_t flag;
     VideoFormatSpecificData *ext;
+    bool hasNext; // set when the buffer contains more than one frame
+    int64_t nextTimeStamp; // timestamp of the next frame in the buffer
+    int32_t offSet; // offset of the next frame in the buffer
 };
 
 
@@ -181,6 +184,7 @@
     DECODE_SUCCESS = 1,
     DECODE_FORMAT_CHANGE = 2,
     DECODE_FRAME_DROPPED = 3,
+    DECODE_MULTIPLE_FRAME = 4,
 } VIDEO_DECODE_STATUS;
 
 typedef int32_t Decode_Status;
diff --git a/videodecoder/VideoDecoderMPEG4.cpp b/videodecoder/VideoDecoderMPEG4.cpp
index c77f24c..4bb73f4 100644
--- a/videodecoder/VideoDecoderMPEG4.cpp
+++ b/videodecoder/VideoDecoderMPEG4.cpp
@@ -125,14 +125,14 @@
         CHECK_STATUS("endDecodingFrame");
 
         // start decoding a new frame
-        status = beginDecodingFrame(data);
-        if (status != DECODE_SUCCESS) {
+        status = beginDecodingFrame(data, buffer);
+        if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) {
             endDecodingFrame(true);
         }
         CHECK_STATUS("beginDecodingFrame");
     } else {
-        status = continueDecodingFrame(data);
-        if (status != DECODE_SUCCESS) {
+        status = continueDecodingFrame(data, buffer);
+        if (status != DECODE_SUCCESS && status != DECODE_MULTIPLE_FRAME) {
             endDecodingFrame(true);
         }
         CHECK_STATUS("continueDecodingFrame");
@@ -147,7 +147,8 @@
     return DECODE_SUCCESS;
 }
 
-Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data) {
+Decode_Status VideoDecoderMPEG4::beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) {
+
     Decode_Status status = DECODE_SUCCESS;
     vbp_picture_data_mp42 *picData = data->picture_data;
     VAPictureParameterBufferMPEG4 *picParam = &(picData->picture_param);
@@ -223,14 +224,15 @@
             }
         }
         // all sanity checks pass, continue decoding through continueDecodingFrame
-        status = continueDecodingFrame(data);
+        status = continueDecodingFrame(data, buffer);
     }
     return status;
 }
 
-Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data) {
+Decode_Status VideoDecoderMPEG4::continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer) {
     Decode_Status status = DECODE_SUCCESS;
     VAStatus vaStatus = VA_STATUS_SUCCESS;
+    bool useGraphicBuffer = mConfigBuffer.flag & USE_NATIVE_GRAPHIC_BUFFER;
 
     /*
          Packed Frame Assumption:
@@ -271,28 +273,59 @@
                 // TODO: handle this case
             }
             if (mDecodingFrame) {
-                // this indicates the start of a new frame in the packed frame
-                // Update timestamp for P frame in the packed frame as timestamp here is for the B frame!
-                if (picParam->vop_time_increment_resolution)
-                {
-                    uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
-                            picParam->vop_time_increment_resolution;
-                    increment = increment % picParam->vop_time_increment_resolution;
-                    // convert to nano-second
-                    // TODO: unit of time stamp varies on different frame work
-                    increment = increment * 1e9 / picParam->vop_time_increment_resolution;
-                    mAcquiredBuffer->renderBuffer.timeStamp += increment;
+                if (codingType == MP4_VOP_TYPE_B) {
+                    // this indicates the start of a new frame in the packed frame
+                    // Update timestamp for P frame in the packed frame as timestamp here is for the B frame!
+                    if (picParam->vop_time_increment_resolution) {
+                        uint64_t increment = mLastVOPTimeIncrement - picData->vop_time_increment +
+                                picParam->vop_time_increment_resolution;
+                        increment = increment % picParam->vop_time_increment_resolution;
+                        // convert to micro-second
+                        // TODO: unit of time stamp varies on different frame work
+                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
+                        mAcquiredBuffer->renderBuffer.timeStamp += increment;
+                        if (useGraphicBuffer) {
+                            buffer->nextTimeStamp = mCurrentPTS;
+                            mCurrentPTS = mAcquiredBuffer->renderBuffer.timeStamp;
+                        }
+                    }
+                } else {
+                    // this indicates the start of a new frame in the packed frame; no B frame in the packed frame
+                    // Update the timestamp according to the increment
+                    if (picParam->vop_time_increment_resolution) {
+                        int64_t increment = picData->vop_time_increment - mLastVOPTimeIncrement + picParam->vop_time_increment_resolution;
+                        increment = increment % picParam->vop_time_increment_resolution;
+                        // convert to micro-second
+                        increment = increment * 1e6 / picParam->vop_time_increment_resolution;
+                        if (useGraphicBuffer) {
+                            buffer->nextTimeStamp = mCurrentPTS + increment;
+                        }
+                        else {
+                            mCurrentPTS += increment;
+                        }
+
+                    } else {
+                        if (useGraphicBuffer) {
+                            buffer->nextTimeStamp = mCurrentPTS + 30000;
+                        }
+                        else {
+                            mCurrentPTS += 30000;
+                        }
+                    }
                 }
                 endDecodingFrame(false);
                 mExpectingNVOP = true;
+                if (codingType != MP4_VOP_TYPE_B) {
+                    mExpectingNVOP = false;
+                }
+                if (useGraphicBuffer) {
+                    buffer->hasNext = true;
+                    buffer->offSet = data->frameSize;
+                    VTRACE("Reporting a multiple-frame buffer to OMX IL: offset=%d time=%lld", data->frameSize, buffer->nextTimeStamp);
+                    return DECODE_MULTIPLE_FRAME;
+                }
             }
 
-            if (mExpectingNVOP == true && codingType != MP4_VOP_TYPE_B) {
-                ETRACE("The second frame in the packed frame is not B frame.");
-                mExpectingNVOP = false;
-                // TODO:  should be able to continue
-                return DECODE_FAIL;
-            }
             // acquire a new surface buffer
             status = acquireSurfaceBuffer();
             CHECK_STATUS("acquireSurfaceBuffer");
diff --git a/videodecoder/VideoDecoderMPEG4.h b/videodecoder/VideoDecoderMPEG4.h
index 300b583..bfa8ca2 100644
--- a/videodecoder/VideoDecoderMPEG4.h
+++ b/videodecoder/VideoDecoderMPEG4.h
@@ -40,8 +40,8 @@
 
 private:
     Decode_Status decodeFrame(VideoDecodeBuffer *buffer, vbp_data_mp42 *data);
-    Decode_Status beginDecodingFrame(vbp_data_mp42 *data);
-    Decode_Status continueDecodingFrame(vbp_data_mp42 *data);
+    Decode_Status beginDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer);
+    Decode_Status continueDecodingFrame(vbp_data_mp42 *data, VideoDecodeBuffer *buffer);
     Decode_Status decodeSlice(vbp_data_mp42 *data, vbp_picture_data_mp42 *picData);
     Decode_Status setReference(VAPictureParameterBufferMPEG4 *picParam);
     Decode_Status startVA(vbp_data_mp42 *data);