update the IpPeriod/IntraPeriod/IdrInterval rule and enable the auto-reference feature

BZ: 87331

1. Refine frame_type detection, covering the 11 cases across the {I(IDR), I(IDR)+P, I(IDR)+P+B} GOP patterns (see the sketch below).
2. Query the profile/level configuration supported by the driver (a caller-side usage sketch follows the review trailers).
3. Query the auto-reference configuration and enable the feature when the driver reports support.
4. Change the default profile level and the default ipPeriod value.
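
For reference, item 1 and the new parameter checks boil down to the sketch below. It condenses the validation and frame-type logic added to VideoEncoderAVC.cpp; the free-standing helper names (checkAvcGopParams, pickFrameType) and the local enum are illustrative only and do not exist in the library.

    #include <cstdint>

    enum SketchFrameType { FT_IDR, FT_I, FT_P, FT_B };

    // New rule from setParameters()/setConfig(): ipPeriod must be 1..4, and when
    // intraPeriod > 1 it must be a multiple of ipPeriod.
    bool checkAvcGopParams(uint32_t intraPeriod, uint32_t ipPeriod) {
        if (ipPeriod == 0 || ipPeriod > 4)
            return false;
        if (intraPeriod > 1 && (intraPeriod % ipPeriod) != 0)
            return false;
        return true;
    }

    // Resulting frame-type pattern: intraPeriod == 1 gives I-only, ipPeriod == 1
    // gives IDR/I + P, ipPeriod > 1 gives IDR/I + P + B, with
    // idrPeriod = intraPeriod * idrInterval.
    SketchFrameType pickFrameType(uint32_t frameNum, uint32_t intraPeriod,
                                  uint32_t ipPeriod, uint32_t idrInterval) {
        uint32_t idrPeriod = intraPeriod * idrInterval;
        if (idrPeriod != 0)
            frameNum %= (ipPeriod > 1) ? (idrPeriod + 1) : idrPeriod;
        else if (intraPeriod == 0)
            intraPeriod = 0xFFFFFFFF;   // no periodic I frames after the first IDR

        if (frameNum == 0)
            return FT_IDR;
        if (intraPeriod == 1)
            return FT_I;                // I-only stream
        if (ipPeriod == 1)              // no B frames
            return (frameNum > 1 && (frameNum - 1) % intraPeriod == 0) ? FT_I : FT_P;
        if ((frameNum - 1) % intraPeriod == 0 && frameNum > intraPeriod)
            return FT_I;
        frameNum %= intraPeriod;
        if (frameNum == 0)
            return FT_B;
        return ((frameNum - 1) % ipPeriod == 0) ? FT_P : FT_B;
    }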

Change-Id: Ia3dc3d376c557219c2382c1590c4624f4856a7e8
Signed-off-by: jiguoliang <guoliang.ji@intel.com>
Reviewed-on: http://android.intel.com:8080/85120
Reviewed-by: cactus <cactus@intel.com>
Reviewed-by: Yuan, Shengquan <shengquan.yuan@intel.com>
Reviewed-by: Zhao, Leo <leo.zhao@intel.com>
Reviewed-by: Shi, PingX <pingx.shi@intel.com>
Tested-by: Shi, PingX <pingx.shi@intel.com>
Reviewed-by: buildbot <buildbot@intel.com>
Tested-by: buildbot <buildbot@intel.com>
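
For items 2 and 3, a caller can probe the result through the encoder's parameter interface. The sketch below is a hypothetical usage example: it assumes the existing IVideoEncoder/getParameters() entry point and the VideoEncoderHost.h header, and only the VideoParamsProfileLevel fields come from this change. Auto-reference support itself is detected internally via vaGetConfigAttributes(VAConfigAttribEncAutoReference), so callers do not query it directly.

    #include "VideoEncoderHost.h"   // assumed header for IVideoEncoder

    // Sketch: probe whether a profile is supported and which default level the
    // encoder reports, via the new VideoParamsTypeProfileLevel query.
    bool probeProfileLevel(IVideoEncoder *encoder, VAProfile profile, uint32_t *levelOut) {
        VideoParamsProfileLevel query;      // ctor fills in type and size
        query.profile = profile;            // e.g. VAProfileH264High
        query.level = 0;
        query.isSupported = false;

        if (encoder->getParameters(&query) != ENCODE_SUCCESS)
            return false;
        if (levelOut)
            *levelOut = query.level;        // e.g. 42 for High, 41 for Baseline
        return query.isSupported;
    }
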
diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
index 4c2661a..ae0293b 100644
--- a/videoencoder/VideoEncoderAVC.cpp
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -15,6 +15,13 @@
 
 VideoEncoderAVC::VideoEncoderAVC()
     :VideoEncoderBase() {
+    if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264High) == ENCODE_SUCCESS){
+        mComParams.profile = VAProfileH264High;
+        mComParams.level = 42;
+    }else if(VideoEncoderBase::queryProfileLevelConfig(mVADisplay, VAProfileH264Main) == ENCODE_SUCCESS){
+        mComParams.profile = VAProfileH264Main;
+        mComParams.level = 41;
+    }
     mVideoParamsAVC.basicUnitSize = 0;
     mVideoParamsAVC.VUIFlag = 0;
     mVideoParamsAVC.sliceNum.iSliceNum = 2;
@@ -30,6 +37,7 @@
     mVideoParamsAVC.crop.BottomOffset = 0;
     mVideoParamsAVC.SAR.SarWidth = 0;
     mVideoParamsAVC.SAR.SarHeight = 0;
+    mAutoReferenceSurfaceNum = 4;
 }
 
 Encode_Status VideoEncoderAVC::start() {
@@ -60,6 +68,12 @@
         return ENCODE_INVALID_PARAMS;
     }
 
+    if(encParamsAVC->ipPeriod == 0 || encParamsAVC->ipPeriod >4)
+        return ENCODE_INVALID_PARAMS;
+
+    if((mComParams.intraPeriod >1)&&(mComParams.intraPeriod % encParamsAVC->ipPeriod !=0))
+        return ENCODE_INVALID_PARAMS;
+
     mVideoParamsAVC = *encParamsAVC;
     return ENCODE_SUCCESS;
 }
@@ -94,6 +108,11 @@
                 return ENCODE_INVALID_PARAMS;
             }
 
+            if(configAVCIntraPeriod->ipPeriod == 0 || configAVCIntraPeriod->ipPeriod >4)
+                return ENCODE_INVALID_PARAMS;
+            if((configAVCIntraPeriod->intraPeriod >1)&&(configAVCIntraPeriod->intraPeriod % configAVCIntraPeriod->ipPeriod !=0))
+                return ENCODE_INVALID_PARAMS;
+
             mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval;
             mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod;
             mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod;
@@ -199,29 +218,34 @@
     uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval;
     FrameType frametype;
     uint32_t frame_num = mFrameNum;
+    uint32_t intraPeriod = mComParams.intraPeriod;
 
-    if (mVideoParamsAVC.idrInterval != 0) {
+    if (idrPeroid != 0) {
         if(mVideoParamsAVC.ipPeriod > 1)
             frame_num = frame_num % (idrPeroid + 1);
-        else  if(mComParams.intraPeriod != 0)
+        else
             frame_num = frame_num % idrPeroid ;
+    }else{
+        if (mComParams.intraPeriod == 0)
+            intraPeriod = 0xFFFFFFFF;
     }
 
+
     if(frame_num ==0){
         frametype = FTYPE_IDR;
-    }else if(mComParams.intraPeriod ==0)
+    }else if(intraPeriod ==1)
         // only I frame need intraPeriod=idrInterval=ipPeriod=0
         frametype = FTYPE_I;
     else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame
-        if(mComParams.intraPeriod != 0 && (frame_num >  1) &&((frame_num -1)%mComParams.intraPeriod == 0))
+        if((frame_num >  1) &&((frame_num -1)%intraPeriod == 0))
             frametype = FTYPE_I;
         else
             frametype = FTYPE_P;
-    } else { 
-        if(mComParams.intraPeriod != 0 &&((frame_num-1)%mComParams.intraPeriod == 0)&&(frame_num >mComParams.intraPeriod))
+    } else {
+        if(((frame_num-1)%intraPeriod == 0)&&(frame_num >intraPeriod))
             frametype = FTYPE_I;
         else{
-            frame_num = frame_num%mComParams.intraPeriod;
+            frame_num = frame_num%intraPeriod;
             if(frame_num == 0)
                 frametype = FTYPE_B;
             else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0)
@@ -887,17 +911,19 @@
 
     LOG_V( "Begin\n\n");
     // set picture params for HW
-    avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface[0];
-    avcPicParams.CurrPic.picture_id= task->rec_surface;
+    if(mAutoReference == false){
+        avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface;
+        avcPicParams.CurrPic.picture_id= task->rec_surface;
+    }else {
+        for(int i =0; i< mAutoReferenceSurfaceNum; i++)
+            avcPicParams.ReferenceFrames[i].picture_id = mAutoRefSurfaces[i];
+    }
     avcPicParams.coded_buf = task->coded_buffer;
-    //avcPicParams.picture_width = mComParams.resolution.width;
-    //avcPicParams.picture_height = mComParams.resolution.height;
     avcPicParams.last_picture = 0;
 
     LOG_V("======h264 picture params======\n");
     LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id);
     LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id);
-//    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
     LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf);
     //LOG_I( "picture_width = %d\n", avcPicParams.picture_width);
     //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height);
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
index 0ea91c9..d0a4d89 100644
--- a/videoencoder/VideoEncoderBase.cpp
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -41,7 +41,6 @@
     ,mVAEntrypoint(VAEntrypointEncSlice)
     ,mCodedBufSize(0)
     ,mNewHeader(false)
-    //,mAutoReference(17 /*VAConfigAttribEncAutoReference*/)
     ,mRenderMaxSliceSize(false)
     ,mRenderQP (false)
     ,mRenderAIR(false)
@@ -53,7 +52,10 @@
     ,mSliceParamBuf(0)
     ,mRefSurface(VA_INVALID_SURFACE)
     ,mRecSurface(VA_INVALID_SURFACE)
+    ,mAutoRefSurfaces(NULL)
     ,mFrameNum(0)
+    ,mAutoReference(false)
+    ,mAutoReferenceSurfaceNum(4)
     ,mSliceSizeOverflow(false)
     ,mCurOutputTask(NULL)
     ,mOutCodedBuffer(0)
@@ -85,7 +87,6 @@
         LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
         mInitialized = false;
     }
-
 }
 
 VideoEncoderBase::~VideoEncoderBase() {
@@ -118,25 +119,32 @@
         return ENCODE_ALREADY_INIT;
     }
 
-    VAConfigAttrib vaAttrib[2];
+    queryAutoReferenceConfig(mComParams.profile);
+
+    VAConfigAttrib vaAttrib[3];
     vaAttrib[0].type = VAConfigAttribRTFormat;
     vaAttrib[1].type = VAConfigAttribRateControl;
+    vaAttrib[2].type = VAConfigAttribEncAutoReference;
     vaAttrib[0].value = VA_RT_FORMAT_YUV420;
     vaAttrib[1].value = mComParams.rcMode;
+    vaAttrib[2].value = mAutoReference ? 1 : VA_ATTRIB_NOT_SUPPORTED;
 
     LOG_V( "======VA Configuration======\n");
     LOG_I( "profile = %d\n", mComParams.profile);
     LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint);
     LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
     LOG_I( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
+    LOG_I( "vaAttrib[2].type = %d\n", vaAttrib[2].type);
     LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
     LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
+    LOG_I( "vaAttrib[2].value (AutoReference) = %d\n", vaAttrib[2].value);
 
     LOG_V( "vaCreateConfig\n");
 
     vaStatus = vaCreateConfig(
             mVADisplay, mComParams.profile, mVAEntrypoint,
             &vaAttrib[0], 2, &(mVAConfig));
+//            &vaAttrib[0], 3, &(mVAConfig));  //uncomment this after psb_video supports
     CHECK_VA_STATUS_RETURN("vaCreateConfig");
 
     if (mComParams.rcMode == VA_RC_VCM) {
@@ -150,7 +158,12 @@
 
     VASurfaceID surfaces[2];
     VASurfaceAttributeTPI attribute_tpi;
-    uint32_t stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16;
+    uint32_t stride_aligned;
+    if(mAutoReference == false)
+        stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16;
+    else
+        stride_aligned = ((mComParams.resolution.width + 63) / 64 ) * 64;  //on Merr, stride must be 64 aligned.
+
     uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16;
 
     attribute_tpi.size = stride_aligned * height_aligned * 3 / 2;
@@ -163,21 +176,25 @@
     attribute_tpi.pixel_format = VA_FOURCC_NV12;
     attribute_tpi.type = VAExternalMemoryNULL;
 
-#ifndef AUTO_REFERENCE
+    if(mAutoReference == false){
         vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned,
                 VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi);
         CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
         mRefSurface = surfaces[0];
         mRecSurface = surfaces[1];
-#endif
+    }else {
+        mAutoRefSurfaces = new VASurfaceID [mAutoReferenceSurfaceNum];
+        vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned,
+                VA_RT_FORMAT_YUV420, mAutoReferenceSurfaceNum, mAutoRefSurfaces, &attribute_tpi);
+        CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
+    }
 
     //Prepare all Surfaces to be added into Context
     uint32_t contextSurfaceCnt;
-#ifndef AUTO_REFERENCE
+    if(mAutoReference == false )
         contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
-#else
-        contextSurfaceCnt = mSrcSurfaceMapList.size();
-#endif
+    else
+        contextSurfaceCnt = mAutoReferenceSurfaceNum + mSrcSurfaceMapList.size();
 
     VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
     int32_t index = -1;
@@ -189,10 +206,13 @@
         (*map_node)->added = true;
     }
 
-#ifndef AUTO_REFERENCE
+    if(mAutoReference == false){
         contextSurfaces[++index] = mRefSurface;
         contextSurfaces[++index] = mRecSurface;
-#endif
+    } else {
+        for (int i=0; i < mAutoReferenceSurfaceNum; i++)
+            contextSurfaces[++index] = mAutoRefSurfaces[i];
+    }
 
     //Initialize and save the VA context ID
     LOG_V( "vaCreateContext\n");
@@ -288,14 +308,14 @@
     task->enc_surface = sid;
     task->coded_buffer = coded_buf;
     task->timestamp = inBuffer->timeStamp;
-    task->in_data = inBuffer->data;
+    task->priv = inBuffer->priv;
 
     //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
     task->type = inBuffer->type;
     task->flag = inBuffer->flag;
     PrepareFrameInfo(task);
 
-#ifndef AUTO_REFERENCE
+    if(mAutoReference == false){
         //Setup ref /rec frames
         //TODO: B frame support, temporary use same logic
         switch (inBuffer->type) {
@@ -310,8 +330,7 @@
                     mRefSurface = tmpSurface;
                 }
 
-                task->ref_surface[0] = mRefSurface;
-                task->ref_surface[1] = VA_INVALID_SURFACE;
+                task->ref_surface = mRefSurface;
                 task->rec_surface = mRecSurface;
 
                 break;
@@ -322,12 +341,10 @@
                 ret = ENCODE_NOT_SUPPORTED;
                 goto CLEAN_UP;
         }
-#else
-        task->ref_surface[0] = VA_INVALID_SURFACE;
-        task->ref_surface[1] = VA_INVALID_SURFACE;
+    }else {
+        task->ref_surface = VA_INVALID_SURFACE;
         task->rec_surface = VA_INVALID_SURFACE;
-#endif
-
+    }
     //======Start Encoding, add task to list======
     LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
 
@@ -464,6 +481,7 @@
     outBuffer->flag = mCurOutputTask->flag;
     outBuffer->type = mCurOutputTask->type;
     outBuffer->timeStamp = mCurOutputTask->timestamp;
+    outBuffer->priv = mCurOutputTask->priv;
 
     if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
         ret = outputAllData(outBuffer);
@@ -530,6 +548,10 @@
         LOG_V("Encoder has been stopped\n");
         return ENCODE_SUCCESS;
     }
+    if (mAutoRefSurfaces) {
+        delete[] mAutoRefSurfaces;
+        mAutoRefSurfaces = NULL;
+    }
 
     mCodedBuffer_Lock.lock();
     mVACodedBufferList.clear();
@@ -698,6 +720,42 @@
     return ENCODE_SUCCESS;
 }
 
+Encode_Status VideoEncoderBase::queryProfileLevelConfig(VADisplay dpy, VAProfile profile) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEntrypoint entryPtr[8];
+    int i, entryPtrNum;
+
+    if(profile ==  VAProfileH264Main) //need to be fixed
+        return ENCODE_NOT_SUPPORTED;
+
+    vaStatus = vaQueryConfigEntrypoints(dpy, profile, entryPtr, &entryPtrNum);
+    CHECK_VA_STATUS_RETURN("vaQueryConfigEntrypoints");
+
+    for(i=0; i<entryPtrNum; i++){
+        if(entryPtr[i] == VAEntrypointEncSlice)
+            return ENCODE_SUCCESS;
+    }
+
+    return ENCODE_NOT_SUPPORTED;
+}
+
+Encode_Status VideoEncoderBase::queryAutoReferenceConfig(VAProfile profile) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAConfigAttrib attrib_list;
+    attrib_list.type = VAConfigAttribEncAutoReference;
+    attrib_list.value = VA_ATTRIB_NOT_SUPPORTED;
+
+    vaStatus = vaGetConfigAttributes(mVADisplay, profile, VAEntrypointEncSlice, &attrib_list, 1);
+    if(attrib_list.value == VA_ATTRIB_NOT_SUPPORTED )
+        mAutoReference = false;
+    else
+        mAutoReference = true;
+
+    return ENCODE_SUCCESS;
+}
+
 Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
 
     // Data size been copied for every single call
@@ -753,7 +811,7 @@
 
     // Set default value for input parameters
     mComParams.profile = VAProfileH264Baseline;
-    mComParams.level = 40;
+    mComParams.level = 41;
     mComParams.rawFormat = RAW_FORMAT_NV12;
     mComParams.frameRate.frameRateNum = 30;
     mComParams.frameRate.frameRateDenom = 1;
@@ -943,6 +1001,30 @@
             break;
         }
 
+        case VideoParamsTypeProfileLevel: {
+            VideoParamsProfileLevel *profilelevel =
+                reinterpret_cast <VideoParamsProfileLevel *> (videoEncParams);
+
+            if (profilelevel->size != sizeof (VideoParamsProfileLevel)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            profilelevel->level = 0;
+            if(queryProfileLevelConfig(mVADisplay, profilelevel->profile) == ENCODE_SUCCESS){
+                profilelevel->isSupported = true;
+                if(profilelevel->profile == VAProfileH264High)
+                    profilelevel->level = 42;
+                else if(profilelevel->profile == VAProfileH264Main)
+                     profilelevel->level = 42;
+                else if(profilelevel->profile == VAProfileH264Baseline)
+                     profilelevel->level = 41;
+                else{
+                    profilelevel->level = 0;
+                    profilelevel->isSupported = false;
+                }
+            }
+        }
+
         case VideoParamsTypeAVC:
         case VideoParamsTypeH263:
         case VideoParamsTypeMP4:
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
index 924c4da..554173e 100644
--- a/videoencoder/VideoEncoderBase.h
+++ b/videoencoder/VideoEncoderBase.h
@@ -17,7 +17,6 @@
 #include <utils/List.h>
 #include <utils/threads.h>
 
-//#define AUTO_REFERENCE
 struct SurfaceMap {
     VASurfaceID surface;
     MetadataBufferType type;
@@ -28,14 +27,14 @@
 
 struct EncodeTask {
     VASurfaceID enc_surface;
-    VASurfaceID ref_surface[2];
+    VASurfaceID ref_surface;
     VASurfaceID rec_surface;
     VABufferID coded_buffer;
 
     FrameType type;
     int flag;
     int64_t timestamp;  //corresponding input frame timestamp
-    uint8_t *in_data;  //input buffer data
+    void *priv;  //input buffer data
 
     bool completed;   //if encode task is done complet by HW
 };
@@ -77,6 +76,7 @@
     Encode_Status renderDynamicFrameRate();
     Encode_Status renderDynamicBitrate();
     Encode_Status renderHrd();
+    Encode_Status queryProfileLevelConfig(VADisplay dpy, VAProfile profile);
 
 private:
     void setDefaultParams(void);
@@ -96,6 +96,7 @@
     Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
     Encode_Status cleanupForOutput();
     Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
+    Encode_Status queryAutoReferenceConfig(VAProfile profile);
 
 protected:
 
@@ -125,6 +126,7 @@
     VABufferID mFrameRateParamBuf;
     VABufferID mPicParamBuf;
     VABufferID mSliceParamBuf;
+    VASurfaceID* mAutoRefSurfaces;
 
     android::List <SurfaceMap *> mSrcSurfaceMapList;  //all mapped surface info list from input buffer
     android::List <EncodeTask *> mEncodeTaskList;  //all encode tasks list
@@ -134,6 +136,8 @@
     VASurfaceID mRecSurface;        //reconstructed surface, only used in base
     uint32_t mFrameNum;
     uint32_t mCodedBufSize;
+    bool mAutoReference;
+    uint32_t mAutoReferenceSurfaceNum;
 
     bool mSliceSizeOverflow;
 
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
index f5174aa..b1674be 100644
--- a/videoencoder/VideoEncoderDef.h
+++ b/videoencoder/VideoEncoderDef.h
@@ -142,7 +142,7 @@
     VideoOutputFormat format; //output format
     int64_t timeStamp; //reserved
     FrameType type;
-    uint8_t *in_data; //indicate corresponding input data
+    void *priv; //indicate corresponding input data
 } VideoEncOutputBuffer;
 
 typedef struct {
@@ -152,6 +152,7 @@
     int64_t timeStamp; //reserved
     FrameType type; //frame type expected to be encoded
     int flag; // flag to indicate buffer property
+    void *priv; //indicate corresponding input data
 } VideoEncRawBuffer;
 
 struct VideoEncSurfaceBuffer {
@@ -288,6 +289,7 @@
     VideoParamsTypeUsrptrBuffer,
     VideoParamsTypeHRD,
     VideoParamsTypeStoreMetaDataInBuffers,
+    VideoParamsTypeProfileLevel,
 
     VideoConfigTypeFrameRate,
     VideoConfigTypeBitRate,
@@ -476,6 +478,18 @@
     bool isEnabled;
 };
 
+struct VideoParamsProfileLevel : VideoParamConfigSet {
+
+    VideoParamsProfileLevel() {
+        type = VideoParamsTypeProfileLevel;
+        size = sizeof(VideoParamsProfileLevel);
+    }
+
+    VAProfile profile;
+    uint32_t level;
+    bool isSupported;
+};
+
 struct VideoConfigFrameRate : VideoParamConfigSet {
 
     VideoConfigFrameRate() {
diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp
index b9b9c99..8891c06 100644
--- a/videoencoder/VideoEncoderH263.cpp
+++ b/videoencoder/VideoEncoderH263.cpp
@@ -14,6 +14,7 @@
 
 VideoEncoderH263::VideoEncoderH263() {
     mComParams.profile = (VAProfile)PROFILE_H263BASELINE;
+    mAutoReferenceSurfaceNum = 2;
 }
 
 Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) {
@@ -86,8 +87,14 @@
     LOG_V( "Begin\n\n");
 
     // set picture params for HW
-    h263PictureParams.reference_picture = task->ref_surface[0];
-    h263PictureParams.reconstructed_picture = task->rec_surface;
+    if(mAutoReference == false){
+        h263PictureParams.reference_picture = task->ref_surface;
+        h263PictureParams.reconstructed_picture = task->rec_surface;
+    }else {
+        h263PictureParams.reference_picture = mAutoRefSurfaces[0];
+        h263PictureParams.reconstructed_picture = mAutoRefSurfaces[1];
+    }
+
     h263PictureParams.coded_buf = task->coded_buffer;
     h263PictureParams.picture_width = mComParams.resolution.width;
     h263PictureParams.picture_height = mComParams.resolution.height;
diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp
index 8afb215..50abc68 100644
--- a/videoencoder/VideoEncoderMP4.cpp
+++ b/videoencoder/VideoEncoderMP4.cpp
@@ -17,6 +17,7 @@
     :mProfileLevelIndication(3)
     ,mFixedVOPTimeIncrement(0) {
     mComParams.profile = (VAProfile)PROFILE_MPEG4SIMPLE;
+    mAutoReferenceSurfaceNum = 2;
 }
 
 Encode_Status VideoEncoderMP4::getHeaderPos(
@@ -175,8 +176,14 @@
     LOG_V( "Begin\n\n");
 
     // set picture params for HW
-    mpeg4_pic_param.reference_picture = task->ref_surface[0];
-    mpeg4_pic_param.reconstructed_picture = task->rec_surface;
+    if(mAutoReference == false){
+        mpeg4_pic_param.reference_picture = task->ref_surface;
+        mpeg4_pic_param.reconstructed_picture = task->rec_surface;
+    }else {
+        mpeg4_pic_param.reference_picture = mAutoRefSurfaces[0];
+        mpeg4_pic_param.reconstructed_picture = mAutoRefSurfaces[1];
+    }
+
     mpeg4_pic_param.coded_buf = task->coded_buffer;
     mpeg4_pic_param.picture_width = mComParams.resolution.width;
     mpeg4_pic_param.picture_height = mComParams.resolution.height;