initial version for HiP encoding support

BZ: 76823

1.  Support thread-safe Queue operations
2.  Support getOutput for different codec types
3.  Support multiple getOutput calls for some output formats
4.  Support non-block / timeout mode
5.  Support EOS
6.  Support B frame without reconstructed frame output, MRFLD done in driver, MFLD done in libMIX
7.  Support baseline / high profile selection
8.  Support HiP parameters
9.  Support CodedBuffer number setting
10. Support auto frame type detection for both HiP and baseline
11. Add the high profile parameter and change the type of timestamp
12. Refine the encode/getOutput block/non-block modes with a List container
13. Support auto reconstructed and reference frame management in driver, remove MFLD logic  (Done)
14. Support the new libva VAEncSliceParameterBufferH264 structure, substitute VAEncSliceParameterBuffer(BZ 75766)
15. Refine the naming style and the frame type detection; handle whether B frames impact the frame count of the GOP
16. Refine the slice_type assignment
17. Support frame skip on MFLD
18. Fix the Klocwork issues

Change-Id: Ifbc230d8d0985e4411ac5b79f04d29a6edcf501d
Signed-off-by: jiguoliang <guoliang.ji@intel.com>
Reviewed-on: http://android.intel.com:8080/87040
Reviewed-by: Yuan, Shengquan <shengquan.yuan@intel.com>
Reviewed-by: Shi, PingX <pingx.shi@intel.com>
Tested-by: Shi, PingX <pingx.shi@intel.com>
Reviewed-by: cactus <cactus@intel.com>
Tested-by: cactus <cactus@intel.com>
diff --git a/test/Android.mk b/test/Android.mk
index 2f4d6a8..abded5d 100644
--- a/test/Android.mk
+++ b/test/Android.mk
@@ -45,6 +45,10 @@
         libva-android           \
         libva-tpi		\
 	libgui			\
+	libui			\
+	libutils		\
+	libcutils		\
+	libhardware		\
 	libbinder
 
 LOCAL_MODULE_TAGS := optional
diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp
index 464b759..4662947 100644
--- a/test/mix_encoder.cpp
+++ b/test/mix_encoder.cpp
@@ -257,7 +257,7 @@
     memset(&tmpStoreMetaDataInBuffers,0x00,sizeof(VideoParamsStoreMetaDataInBuffers));
     gVideoEncoder->getParameters(&tmpStoreMetaDataInBuffers);
     gVideoEncoder->setParameters(&tmpStoreMetaDataInBuffers);
-
+#if 0
     VideoParamsUpstreamBuffer tmpVideoParamsUpstreamBuffer;
     tmpVideoParamsUpstreamBuffer.bufCnt = 0;
     gVideoEncoder->setParameters(&tmpVideoParamsUpstreamBuffer);
@@ -275,7 +275,7 @@
     VideoParamsUsrptrBuffer tmpVideoParamsUsrptrBuffer;
     tmpVideoParamsUsrptrBuffer.width = 0;
     gVideoEncoder->getParameters(&tmpVideoParamsUsrptrBuffer);
-
+#endif
     //---------------------add for libmix encode code coverage test
     // VideoEncodeBase.cpp file setConfig && getConfig code coverage test
     // only for VCM mode
@@ -346,6 +346,9 @@
         // for VideoConfigTypeAVCIntraPeriod derivedSetConfig && derivedGetConfig
         VideoConfigAVCIntraPeriod configAVCIntraPeriod;
         gVideoEncoder->getConfig(&configAVCIntraPeriod);
+        configAVCIntraPeriod.ipPeriod = 1;
+        configAVCIntraPeriod.intraPeriod = 30;
+        configAVCIntraPeriod.idrInterval = 1;
         gVideoEncoder->setConfig(&configAVCIntraPeriod);
         VideoConfigTypeIDRReq tmpVideoConfigTypeIDRReq;
         gVideoEncoder->setConfig(&tmpVideoConfigTypeIDRReq);
@@ -989,30 +992,24 @@
         InBuf.data = data;
         InBuf.size = size;
         InBuf.bufAvailable = true;
+        InBuf.type = FTYPE_UNKNOWN;
+        InBuf.flag = 0;
 
         ret = gVideoEncoder->encode(&InBuf);
         CHECK_ENCODE_STATUS("encode");
 
+        if (i > 0) {
         ret = gVideoEncoder->getOutput(&OutBuf);
         CHECK_ENCODE_STATUS("getOutput");
-        CHECK_ENCODE_STATUS_RETURN("getOutput");
-    //    printf("OutBuf.dataSize = %d  .........\n", OutBuf.dataSize);
+//        printf("OutBuf.dataSize = %d, flag=0x%08x  .........\n", OutBuf.dataSize, OutBuf.flag);
         fwrite(OutBuf.data, 1, OutBuf.dataSize, file);
-        
+        }
         printf("Encoding %d Frames \r", i+1);
         fflush(stdout);
     }	
+        ret = gVideoEncoder->getOutput(&OutBuf);
     fclose(file);
     
-    VideoStatistics stat;
-    if (gVideoEncoder->getStatistics(&stat) == ENCODE_SUCCESS)
-    {
-        printf("\nVideoStatistics\n");
-        printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", \
-		stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, \
-		stat.min_encode_time, stat.min_encode_frame );
-    }
-
     gVideoEncoder->stop();
     releaseVideoEncoder(gVideoEncoder);
     gVideoEncoder = NULL;
diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
index 7c8314a..0b13e7e 100644
--- a/videoencoder/Android.mk
+++ b/videoencoder/Android.mk
@@ -2,7 +2,6 @@
 include $(CLEAR_VARS)
 
 #VIDEO_ENC_LOG_ENABLE := true
-#VIDEO_ENC_STATISTICS_ENABLE := true
 
 LOCAL_SRC_FILES :=              \
     VideoEncoderBase.cpp        \
@@ -16,11 +15,13 @@
 LOCAL_C_INCLUDES :=             \
     $(LOCAL_PATH)               \
     $(TARGET_OUT_HEADERS)/libva \
+    $(TOPDIR)/frameworks/native/include \
 
 #LOCAL_LDLIBS += -lpthread
 
 LOCAL_SHARED_LIBRARIES :=       \
         libcutils               \
+        libutils               \
         libva                   \
         libva-android           \
         libva-tpi		\
diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
index c4bf805..4c2661a 100644
--- a/videoencoder/VideoEncoderAVC.cpp
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -20,6 +20,7 @@
     mVideoParamsAVC.sliceNum.iSliceNum = 2;
     mVideoParamsAVC.sliceNum.pSliceNum = 2;
     mVideoParamsAVC.idrInterval = 2;
+    mVideoParamsAVC.ipPeriod = 1;
     mVideoParamsAVC.maxSliceSize = 0;
     mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB;
     mSliceNum = 2;
@@ -94,6 +95,7 @@
             }
 
             mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval;
+            mVideoParamsAVC.ipPeriod = configAVCIntraPeriod->ipPeriod;
             mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod;
             mNewHeader = true;
             break;
@@ -154,6 +156,7 @@
 
             configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval;
             configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod;
+            configAVCIntraPeriod->ipPeriod = mVideoParamsAVC.ipPeriod;
 
             break;
         }
@@ -192,30 +195,67 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) {
+Encode_Status VideoEncoderAVC::updateFrameInfo(EncodeTask* task) {
+    uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval;
+    FrameType frametype;
+    uint32_t frame_num = mFrameNum;
+
+    if (mVideoParamsAVC.idrInterval != 0) {
+        if(mVideoParamsAVC.ipPeriod > 1)
+            frame_num = frame_num % (idrPeroid + 1);
+        else  if(mComParams.intraPeriod != 0)
+            frame_num = frame_num % idrPeroid ;
+    }
+
+    if(frame_num ==0){
+        frametype = FTYPE_IDR;
+    }else if(mComParams.intraPeriod ==0)
+        // only I frame need intraPeriod=idrInterval=ipPeriod=0
+        frametype = FTYPE_I;
+    else if(mVideoParamsAVC.ipPeriod == 1){ // no B frame
+        if(mComParams.intraPeriod != 0 && (frame_num >  1) &&((frame_num -1)%mComParams.intraPeriod == 0))
+            frametype = FTYPE_I;
+        else
+            frametype = FTYPE_P;
+    } else { 
+        if(mComParams.intraPeriod != 0 &&((frame_num-1)%mComParams.intraPeriod == 0)&&(frame_num >mComParams.intraPeriod))
+            frametype = FTYPE_I;
+        else{
+            frame_num = frame_num%mComParams.intraPeriod;
+            if(frame_num == 0)
+                frametype = FTYPE_B;
+            else if((frame_num-1)%mVideoParamsAVC.ipPeriod == 0)
+                frametype = FTYPE_P;
+            else
+                frametype = FTYPE_B;
+        }
+    }
+
+    if (frametype == FTYPE_IDR || frametype == FTYPE_I)
+        task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+
+    if (frametype != task->type) {
+        const char* FrameTypeStr[10] = {"UNKNOWN", "I", "P", "B", "SI", "SP", "EI", "EP", "S", "IDR"};
+        if ((uint32_t) task->type < 9)
+            LOG_V("libMIX thinks it is %s Frame, the input is %s Frame", FrameTypeStr[frametype], FrameTypeStr[task->type]);
+        else
+            LOG_V("Wrong Frame type %d, type may not be initialized ?\n", task->type);
+    }
+
+//temparily comment out to avoid uninitialize error
+//    if (task->type == FTYPE_UNKNOWN || (uint32_t) task->type > 9)
+        task->type = frametype;
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
 
     Encode_Status ret = ENCODE_SUCCESS;
-    VAStatus vaStatus = VA_STATUS_SUCCESS;
-    bool useLocalBuffer = false;
-    uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval;
 
     LOG_V("Begin\n");
-    CHECK_NULL_RETURN_IFFAIL(outBuffer);
-
-    setKeyFrame(idrPeroid);
-
-    // prepare for output, map the coded buffer
-    ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer);
-    CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
 
     switch (outBuffer->format) {
-        case OUTPUT_EVERYTHING:
-        case OUTPUT_FRAME_DATA: {
-            // Output whatever we have
-            ret = VideoEncoderBase::outputAllData(outBuffer);
-            CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
-            break;
-        }
         case OUTPUT_CODEC_DATA: {
             // Output the codec data
             ret = outputCodecData(outBuffer);
@@ -251,26 +291,10 @@
 
     LOG_I("out size is = %d\n", outBuffer->dataSize);
 
-    // cleanup, unmap the coded buffer if all
-    // data has been copied out
-    ret = VideoEncoderBase::cleanupForOutput();
 
 CLEAN_UP:
 
-    if (ret < ENCODE_SUCCESS) {
-        if (outBuffer->data && (useLocalBuffer == true)) {
-            delete[] outBuffer->data;
-            outBuffer->data = NULL;
-            useLocalBuffer = false;
-        }
 
-        // error happens, unmap the buffer
-        if (mCodedBufferMapped) {
-            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
-            mCodedBufferMapped = false;
-            mCurSegment = NULL;
-        }
-    }
     LOG_V("End\n");
     return ret;
 }
@@ -481,7 +505,6 @@
         mOffsetInSeg += (nalSize + nalOffset);
         outBuffer->dataSize = sizeToBeCopied;
         outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
-        if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
         outBuffer->remainingSize = 0;
     } else {
         // if nothing to be copied out, set flag to invalid
@@ -500,7 +523,6 @@
         } else {
             LOG_V("End of stream\n");
             outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
-            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
             mCurSegment = NULL;
         }
     }
@@ -554,7 +576,6 @@
             // so the remainingSize size may larger than the remaining data size
             outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
             outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
-            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
             LOG_E("Buffer size too small\n");
             return ENCODE_BUFFER_TOO_SMALL;
         }
@@ -569,7 +590,6 @@
                 outBuffer->dataSize = sizeCopiedHere;
                 outBuffer->remainingSize = 0;
                 outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
-                if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
                 mCurSegment = NULL;
                 break;
             }
@@ -579,7 +599,7 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderAVC::sendEncodeCommand(void) {
+Encode_Status VideoEncoderAVC::sendEncodeCommand(EncodeTask *task) {
     Encode_Status ret = ENCODE_SUCCESS;
 
     LOG_V( "Begin\n");
@@ -592,7 +612,7 @@
             CHECK_ENCODE_STATUS_RETURN("renderHrd");
         }
 
-        ret = renderSequenceParams();
+        ret = renderSequenceParams(task);
         CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
         mNewHeader = false; //Set to require new header filed to false
     }
@@ -628,10 +648,10 @@
         mRenderFrameRate = false;
     }
 
-    ret = renderPictureParams();
+    ret = renderPictureParams(task);
     CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
 
-    ret = renderSliceParams();
+    ret = renderSliceParams(task);
     CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
 
     LOG_V( "End\n");
@@ -745,7 +765,7 @@
     return level;
 }
 
-Encode_Status VideoEncoderAVC::renderSequenceParams() {
+Encode_Status VideoEncoderAVC::renderSequenceParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncSequenceParameterBufferH264 avcSeqParams = {};
@@ -756,7 +776,6 @@
     int level;
     uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
     uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
-    const char* device_info;
 
     LOG_V( "Begin\n\n");
     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
@@ -767,7 +786,7 @@
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
     vaStatus = vaMapBuffer(mVADisplay, mRcParamBuf, (void **)&miscEncRCParamBuf);
     CHECK_VA_STATUS_RETURN("vaMapBuffer");
-    
+
     vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
             VAEncMiscParameterBufferType,
             sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterFrameRate),
@@ -776,7 +795,7 @@
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
     vaStatus = vaMapBuffer(mVADisplay, mFrameRateParamBuf, (void **)&miscEncFrameRateParamBuf);
     CHECK_VA_STATUS_RETURN("vaMapBuffer");
-	
+
     miscEncRCParamBuf->type = VAEncMiscParameterTypeRateControl;
     rcMiscParam = (VAEncMiscParameterRateControl  *)miscEncRCParamBuf->data;
     miscEncFrameRateParamBuf->type = VAEncMiscParameterTypeFrameRate;
@@ -785,6 +804,7 @@
     // avcSeqParams.level_idc = mLevel;
     avcSeqParams.intra_period = mComParams.intraPeriod;
     avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval;
+    avcSeqParams.ip_period = mVideoParamsAVC.ipPeriod;
     avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16;
     avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16;
 
@@ -822,7 +842,9 @@
     }
 
     // This is a temporary fix suggested by Binglin for bad encoding quality issue
-    avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field
+    avcSeqParams.max_num_ref_frames = 1; 
+    if(avcSeqParams.ip_period > 1)
+        avcSeqParams.max_num_ref_frames = 2; 
 
     LOG_V("===h264 sequence params===\n");
     LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id);
@@ -847,28 +869,27 @@
             sizeof(avcSeqParams), 1, &avcSeqParams,
             &mSeqParamBuf);
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
-	
-    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1);
-    CHECK_VA_STATUS_RETURN("vaRenderPicture");
     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mFrameRateParamBuf, 1);
     CHECK_VA_STATUS_RETURN("vaRenderPicture");
     vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
     CHECK_VA_STATUS_RETURN("vaRenderPicture");
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mRcParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
 
     return ENCODE_SUCCESS;
 }
 
 
-Encode_Status VideoEncoderAVC::renderPictureParams() {
+Encode_Status VideoEncoderAVC::renderPictureParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncPictureParameterBufferH264 avcPicParams = {};
 
     LOG_V( "Begin\n\n");
     // set picture params for HW
-    avcPicParams.ReferenceFrames[0].picture_id= mRefSurface;
-    avcPicParams.CurrPic.picture_id= mRecSurface;
-    avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex];
+    avcPicParams.ReferenceFrames[0].picture_id= task->ref_surface[0];
+    avcPicParams.CurrPic.picture_id= task->rec_surface;
+    avcPicParams.coded_buf = task->coded_buffer;
     //avcPicParams.picture_width = mComParams.resolution.width;
     //avcPicParams.picture_height = mComParams.resolution.height;
     avcPicParams.last_picture = 0;
@@ -876,7 +897,7 @@
     LOG_V("======h264 picture params======\n");
     LOG_I( "reference_picture = 0x%08x\n", avcPicParams.ReferenceFrames[0].picture_id);
     LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.CurrPic.picture_id);
-    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
+//    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
     LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf);
     //LOG_I( "picture_width = %d\n", avcPicParams.picture_width);
     //LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height);
@@ -897,7 +918,7 @@
 }
 
 
-Encode_Status VideoEncoderAVC::renderSliceParams() {
+Encode_Status VideoEncoderAVC::renderSliceParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
 
@@ -906,8 +927,8 @@
     uint32_t sliceHeightInMB = 0;
     uint32_t maxSliceNum = 0;
     uint32_t minSliceNum = 0;
-    int actualSliceHeightInMB = 0;
-    int startRowInMB = 0;
+    uint32_t actualSliceHeightInMB = 0;
+    uint32_t startRowInMB = 0;
     uint32_t modulus = 0;
 
     LOG_V( "Begin\n\n");
@@ -915,7 +936,7 @@
     maxSliceNum = (mComParams.resolution.height + 15) / 16;
     minSliceNum = 1;
 
-    if (mIsIntra) {
+    if (task->type == FTYPE_I || task->type == FTYPE_IDR) {
         sliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
     } else {
         sliceNum = mVideoParamsAVC.sliceNum.pSliceNum;
@@ -938,14 +959,20 @@
     vaStatus = vaCreateBuffer(
             mVADisplay, mVAContext,
             VAEncSliceParameterBufferType,
-            sizeof(VAEncSliceParameterBuffer),
+            sizeof(VAEncSliceParameterBufferH264),
             sliceNum, NULL,
             &mSliceParamBuf);
     CHECK_VA_STATUS_RETURN("vaCreateBuffer");
 
-    VAEncSliceParameterBuffer *sliceParams, *currentSlice;
+    VAEncSliceParameterBufferH264 *sliceParams, *currentSlice;
+
     vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
     CHECK_VA_STATUS_RETURN("vaMapBuffer");
+    if(!sliceParams)
+        return ENCODE_NULL_PTR;
+    memset(sliceParams, 0 , sizeof(VAEncSliceParameterBufferH264));
+    if(!sliceParams)
+        return ENCODE_NULL_PTR;
 
     currentSlice = sliceParams;
     startRowInMB = 0;
@@ -956,25 +983,29 @@
             actualSliceHeightInMB ++;
         }
 
-        // starting MB row number for this slice
-        currentSlice->start_row_number = startRowInMB;
+        // starting MB row number for this slice, suppose macroblock 16x16
+        currentSlice->macroblock_address = startRowInMB * mComParams.resolution.width /16;
         // slice height measured in MB
-        currentSlice->slice_height = actualSliceHeightInMB;
-        currentSlice->slice_flags.bits.is_intra = mIsIntra;
-        currentSlice->slice_flags.bits.disable_deblocking_filter_idc
-        = mComParams.disableDeblocking;
+        currentSlice->num_macroblocks = actualSliceHeightInMB * mComParams.resolution.width /16;
+        if(task->type == FTYPE_I||task->type == FTYPE_IDR)
+            currentSlice->slice_type = 2;
+        else if(task->type == FTYPE_P)
+            currentSlice->slice_type = 0;
+        else if(task->type == FTYPE_B)
+            currentSlice->slice_type = 1;
+        currentSlice->disable_deblocking_filter_idc = mComParams.disableDeblocking;
 
         // This is a temporary fix suggested by Binglin for bad encoding quality issue
         // TODO: We need a long term design for this field
-        currentSlice->slice_flags.bits.uses_long_term_ref = 0;
-        currentSlice->slice_flags.bits.is_long_term_ref = 0;
+        //currentSlice->slice_flags.bits.uses_long_term_ref = 0;
+        //currentSlice->slice_flags.bits.is_long_term_ref = 0;
 
         LOG_V("======AVC slice params======\n");
         LOG_I( "slice_index = %d\n", (int) sliceIndex);
-        LOG_I( "start_row_number = %d\n", (int) currentSlice->start_row_number);
-        LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->slice_height);
-        LOG_I( "slice.is_intra = %d\n", (int) currentSlice->slice_flags.bits.is_intra);
-        LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->slice_flags.bits.disable_deblocking_filter_idc);
+        LOG_I( "macroblock_address = %d\n", (int) currentSlice->macroblock_address);
+        LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->num_macroblocks);
+        LOG_I( "slice.type = %d\n", (int) currentSlice->slice_type);
+        LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->disable_deblocking_filter_idc);
 
         startRowInMB += actualSliceHeightInMB;
     }
diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h
index b57ef67..1248a3e 100644
--- a/videoencoder/VideoEncoderAVC.h
+++ b/videoencoder/VideoEncoderAVC.h
@@ -18,7 +18,6 @@
     ~VideoEncoderAVC() {};
 
     virtual Encode_Status start();
-    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
 
     virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams);
     virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams);
@@ -27,8 +26,9 @@
 
 protected:
 
-    virtual Encode_Status sendEncodeCommand(void);
-
+    virtual Encode_Status sendEncodeCommand(EncodeTask *task);
+    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer);
+    virtual Encode_Status updateFrameInfo(EncodeTask* task);
 private:
     // Local Methods
 
@@ -40,9 +40,9 @@
 
     Encode_Status renderMaxSliceSize();
     Encode_Status renderAIR();
-    Encode_Status renderSequenceParams();
-    Encode_Status renderPictureParams();
-    Encode_Status renderSliceParams();
+    Encode_Status renderSequenceParams(EncodeTask *task);
+    Encode_Status renderPictureParams(EncodeTask *task);
+    Encode_Status renderSliceParams(EncodeTask *task);
     int calcLevel(int numMbs);
 
 public:
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
index 55012d7..83126c6 100644
--- a/videoencoder/VideoEncoderBase.cpp
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -12,7 +12,6 @@
 #include <va/va_tpi.h>
 #include <va/va_android.h>
 
-#undef DUMP_SRC_DATA // To dump source data
 // API declaration
 extern "C" {
 VAStatus vaLockSurface(VADisplay dpy,
@@ -33,48 +32,36 @@
 );
 }
 VideoEncoderBase::VideoEncoderBase()
-    :mInitialized(false)
+    :mInitialized(true)
+    ,mStarted(false)
     ,mVADisplay(NULL)
-    ,mVAContext(0)
-    ,mVAConfig(0)
+    ,mVAContext(VA_INVALID_ID)
+    ,mVAConfig(VA_INVALID_ID)
     ,mVAEntrypoint(VAEntrypointEncSlice)
-    ,mCurSegment(NULL)
-    ,mOffsetInSeg(0)
-    ,mTotalSize(0)
-    ,mTotalSizeCopied(0)
-    ,mForceKeyFrame(false)
+    ,mCodedBufSize(0)
     ,mNewHeader(false)
-    ,mFirstFrame (true)
+    //,mAutoReference(17 /*VAConfigAttribEncAutoReference*/)
     ,mRenderMaxSliceSize(false)
     ,mRenderQP (false)
     ,mRenderAIR(false)
     ,mRenderFrameRate(false)
     ,mRenderBitRate(false)
     ,mRenderHrd(false)
-    ,mLastCodedBuffer(0)
-    ,mOutCodedBuffer(0)
     ,mSeqParamBuf(0)
     ,mPicParamBuf(0)
     ,mSliceParamBuf(0)
-    ,mSurfaces(NULL)
-    ,mSurfaceCnt(0)
-    ,mSrcSurfaceMapList(NULL)
-    ,mCurSurface(VA_INVALID_SURFACE)
     ,mRefSurface(VA_INVALID_SURFACE)
     ,mRecSurface(VA_INVALID_SURFACE)
-    ,mLastSurface(VA_INVALID_SURFACE)
-    ,mLastInputRawBuffer(NULL)
-    ,mEncodedFrames(0)
     ,mFrameNum(0)
-    ,mCodedBufSize(0)
-    ,mCodedBufIndex(0)
-    ,mPicSkipped(false)
-    ,mIsIntra(true)
     ,mSliceSizeOverflow(false)
+    ,mCurOutputTask(NULL)
+    ,mOutCodedBuffer(0)
     ,mCodedBufferMapped(false)
-    ,mDataCopiedOut(false)
-    ,mKeyFrame(true)
-    ,mInitCheck(true) {
+    ,mCurSegment(NULL)
+    ,mOffsetInSeg(0)
+    ,mTotalSize(0)
+    ,mTotalSizeCopied(0)
+    ,mFrameSkipped(false){
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     // here the display can be any value, use following one
@@ -84,8 +71,6 @@
     int minorVersion = -1;
 
     setDefaultParams();
-    mVACodedBuffer [0] = 0;
-    mVACodedBuffer [1] = 0;
 
     LOG_V("vaGetDisplay \n");
     mVADisplay = vaGetDisplay(&display);
@@ -97,19 +82,17 @@
     LOG_V("vaInitialize \n");
     if (vaStatus != VA_STATUS_SUCCESS) {
         LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
-        mInitCheck = false;
+        mInitialized = false;
     }
 
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    memset(&mVideoStat, 0, sizeof(VideoStatistics));
-    mVideoStat.min_encode_time = 0xFFFFFFFF;
-#endif
-
 }
 
 VideoEncoderBase::~VideoEncoderBase() {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    stop();
+
     vaStatus = vaTerminate(mVADisplay);
     LOG_V( "vaTerminate\n");
     if (vaStatus != VA_STATUS_SUCCESS) {
@@ -123,32 +106,24 @@
 
     Encode_Status ret = ENCODE_SUCCESS;
     VAStatus vaStatus = VA_STATUS_SUCCESS;
-    VASurfaceID surfaces[2];
-    int32_t index = -1;
-    SurfaceMap *map = mSrcSurfaceMapList;
-    uint32_t stride_aligned = 0;
-    uint32_t height_aligned = 0;
 
-    VAConfigAttrib vaAttrib[2];
-    uint32_t maxSize = 0;
-
-    if (mInitialized) {
-        LOG_V("Encoder has been started\n");
-        return ENCODE_ALREADY_INIT;
-    }
-
-    if (!mInitCheck) {
+    if (!mInitialized) {
         LOGE("Encoder Initialize fail can not start");
         return ENCODE_DRIVER_FAIL;
     }
 
+    if (mStarted) {
+        LOG_V("Encoder has been started\n");
+        return ENCODE_ALREADY_INIT;
+    }
+
+    VAConfigAttrib vaAttrib[2];
     vaAttrib[0].type = VAConfigAttribRTFormat;
     vaAttrib[1].type = VAConfigAttribRateControl;
     vaAttrib[0].value = VA_RT_FORMAT_YUV420;
     vaAttrib[1].value = mComParams.rcMode;
 
     LOG_V( "======VA Configuration======\n");
-
     LOG_I( "profile = %d\n", mComParams.profile);
     LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint);
     LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
@@ -161,10 +136,9 @@
     vaStatus = vaCreateConfig(
             mVADisplay, mComParams.profile, mVAEntrypoint,
             &vaAttrib[0], 2, &(mVAConfig));
-    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateConfig");
+    CHECK_VA_STATUS_RETURN("vaCreateConfig");
 
     if (mComParams.rcMode == VA_RC_VCM) {
-
         // Following three features are only enabled in VCM mode
         mRenderMaxSliceSize = true;
         mRenderAIR = true;
@@ -173,10 +147,10 @@
 
     LOG_V( "======VA Create Surfaces for Rec/Ref frames ======\n");
 
+    VASurfaceID surfaces[2];
     VASurfaceAttributeTPI attribute_tpi;
-
-    stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16;
-    height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16;
+    uint32_t stride_aligned = ((mComParams.resolution.width + 15) / 16 ) * 16;
+    uint32_t height_aligned = ((mComParams.resolution.height + 15) / 16 ) * 16;
 
     attribute_tpi.size = stride_aligned * height_aligned * 3 / 2;
     attribute_tpi.luma_stride = stride_aligned;
@@ -188,358 +162,217 @@
     attribute_tpi.pixel_format = VA_FOURCC_NV12;
     attribute_tpi.type = VAExternalMemoryNULL;
 
-    vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned,
-            VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi);
-    CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
+#ifndef AUTO_REFERENCE
+        vaCreateSurfacesWithAttribute(mVADisplay, stride_aligned, height_aligned,
+                VA_RT_FORMAT_YUV420, 2, surfaces, &attribute_tpi);
+        CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
+        mRefSurface = surfaces[0];
+        mRecSurface = surfaces[1];
+#endif
 
-    mRefSurface = surfaces[0];
-    mRecSurface = surfaces[1];
+    //Prepare all Surfaces to be added into Context
+    uint32_t contextSurfaceCnt;
+#ifndef AUTO_REFERENCE
+        contextSurfaceCnt = 2 + mSrcSurfaceMapList.size();
+#else
+        contextSurfaceCnt = mSrcSurfaceMapList.size();
+#endif
 
-    //count total surface id already allocated
-    mSurfaceCnt = 2;
-    
-    while(map) {
-        mSurfaceCnt ++;
-        map = map->next;
+    VASurfaceID *contextSurfaces = new VASurfaceID[contextSurfaceCnt];
+    int32_t index = -1;
+    android::List<SurfaceMap *>::iterator map_node;
+
+    for(map_node = mSrcSurfaceMapList.begin(); map_node !=  mSrcSurfaceMapList.end(); map_node++)
+    {
+        contextSurfaces[++index] = (*map_node)->surface;
+        (*map_node)->added = true;
     }
 
-    mSurfaces = new VASurfaceID[mSurfaceCnt];
-    map = mSrcSurfaceMapList;
-    while(map) {
-        mSurfaces[++index] = map->surface;
-        map->added = true;
-        map = map->next;
-    }
-    mSurfaces[++index] = mRefSurface;
-    mSurfaces[++index] = mRecSurface;
+#ifndef AUTO_REFERENCE
+        contextSurfaces[++index] = mRefSurface;
+        contextSurfaces[++index] = mRecSurface;
+#endif
 
     //Initialize and save the VA context ID
     LOG_V( "vaCreateContext\n");
-
     vaStatus = vaCreateContext(mVADisplay, mVAConfig,
             mComParams.resolution.width,
             mComParams.resolution.height,
-            0, mSurfaces, mSurfaceCnt,
+            0, contextSurfaces, contextSurfaceCnt,
             &(mVAContext));
-    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateContext");
 
-    LOG_I("Created libva context width %d, height %d\n",
+    delete [] contextSurfaces;
+
+    CHECK_VA_STATUS_RETURN("vaCreateContext");
+
+    LOG_I("Success to create libva context width %d, height %d\n",
           mComParams.resolution.width, mComParams.resolution.height);
 
+    uint32_t maxSize = 0;
     ret = getMaxOutSize(&maxSize);
-    CHECK_ENCODE_STATUS_CLEANUP("getMaxOutSize");
+    CHECK_ENCODE_STATUS_RETURN("getMaxOutSize");
 
-    // Create coded buffer for output
-    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
-            VAEncCodedBufferType,
-            mCodedBufSize,
-            1, NULL,
-            &(mVACodedBuffer[0]));
+    // Create CodedBuffer for output
+    VABufferID VACodedBuffer;
 
-    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType");
+    for(uint32_t i = 0; i <mComParams.codedBufNum; i++) {
+            vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+                    VAEncCodedBufferType,
+                    mCodedBufSize,
+                    1, NULL,
+                    &VACodedBuffer);
+            CHECK_VA_STATUS_RETURN("vaCreateBuffer::VAEncCodedBufferType");
 
-    // Create coded buffer for output
-    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
-            VAEncCodedBufferType,
-            mCodedBufSize,
-            1, NULL,
-            &(mVACodedBuffer[1]));
-
-    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType");
-
-    mFirstFrame = true;
-
-CLEAN_UP:
-
-    if (ret == ENCODE_SUCCESS) {
-        mInitialized = true;
+            mVACodedBufferList.push_back(VACodedBuffer);
     }
 
+    if (ret == ENCODE_SUCCESS)
+        mStarted = true;
+
     LOG_V( "end\n");
     return ret;
 }
 
-Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) {
+Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer, uint32_t timeout) {
 
-    if (!mInitialized) {
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    if (!mStarted) {
         LOG_E("Encoder has not initialized yet\n");
         return ENCODE_NOT_INIT;
     }
 
     CHECK_NULL_RETURN_IFFAIL(inBuffer);
 
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    struct timespec ts1;
-    clock_gettime(CLOCK_MONOTONIC, &ts1);
+    //======Prepare all resources encoder needed=====.
 
-#endif
-
-    Encode_Status status;
-
-    if (mComParams.syncEncMode) {
-        LOG_I("Sync Enocde Mode, no optimization, no one frame delay\n");
-        status = syncEncode(inBuffer);
-    } else {
-        LOG_I("Async Enocde Mode, HW/SW works in parallel, introduce one frame delay\n");
-        status = asyncEncode(inBuffer);
-    }
-
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    struct timespec ts2;
-    clock_gettime(CLOCK_MONOTONIC, &ts2);
-
-    uint32_t encode_time = (ts2.tv_sec - ts1.tv_sec) * 1000000 + (ts2.tv_nsec - ts1.tv_nsec) / 1000;
-    if (encode_time > mVideoStat.max_encode_time) {
-        mVideoStat.max_encode_time = encode_time;
-        mVideoStat.max_encode_frame = mFrameNum;
-    }
-
-    if (encode_time <  mVideoStat.min_encode_time) {
-        mVideoStat.min_encode_time = encode_time;
-        mVideoStat.min_encode_frame = mFrameNum;
-    }
-
-    mVideoStat.average_encode_time += encode_time;
-#endif
-
-    return status;
-}
-
-Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) {
-
-    Encode_Status ret = ENCODE_SUCCESS;
-    VAStatus vaStatus = VA_STATUS_SUCCESS;
-    uint8_t *buf = NULL;
-
-    inBuffer->bufAvailable = false;
-    if (mNewHeader) mFrameNum = 0;
-
-    // current we use one surface for source data,
-    // one for reference and one for reconstructed
-    decideFrameType();
-    ret = manageSrcSurface(inBuffer);
+    //Prepare encode vaSurface
+    VASurfaceID sid = VA_INVALID_SURFACE;
+    ret = manageSrcSurface(inBuffer, &sid);
     CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
 
-    // Start encoding process
-    LOG_V( "vaBeginPicture\n");
-    LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext);
-    LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface);
-    LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay);
-
-#ifdef DUMP_SRC_DATA
-
-    if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){
-
-        FILE *fp = fopen("/data/data/dump_encoder.yuv", "wb");
-        VAImage image;
-        uint8_t *usrptr = NULL;
-        uint32_t stride = 0;
-        uint32_t frameSize = 0;
-
-        vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &image);
-        CHECK_VA_STATUS_RETURN("vaDeriveImage");
-
-        LOG_V( "vaDeriveImage Done\n");
-
-        frameSize = image.data_size;
-        stride = image.pitches[0];
-
-        LOG_I("Source Surface/Image information --- start ---- :");
-        LOG_I("surface = 0x%08x\n",(uint32_t)mCurFrame->surface);
-        LOG_I("image->pitches[0] = %d\n", image.pitches[0]);
-        LOG_I("image->pitches[1] = %d\n", image.pitches[1]);
-        LOG_I("image->offsets[0] = %d\n", image.offsets[0]);
-        LOG_I("image->offsets[1] = %d\n", image.offsets[1]);
-        LOG_I("image->num_planes = %d\n", image.num_planes);
-        LOG_I("image->width = %d\n", image.width);
-        LOG_I("image->height = %d\n", image.height);
-        LOG_I ("frameSize= %d\n", image.data_size);
-        LOG_I("Source Surface/Image information ----end ----");
-
-        vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) &usrptr);
-        CHECK_VA_STATUS_RETURN("vaMapBuffer");
-
-        fwrite(usrptr, frameSize, 1, fp);
-        fflush(fp);
-        fclose(fp);
-
-        vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
-        CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
-
-        vaStatus = vaDestroyImage(mVADisplay, image.image_id);
-        CHECK_VA_STATUS_RETURN("vaDestroyImage");
+    //Prepare CodedBuffer
+    mCodedBuffer_Lock.lock();
+    if(mVACodedBufferList.empty()){
+        if(timeout == FUNC_BLOCK)
+            mCodedBuffer_Cond.wait(mCodedBuffer_Lock);
+        else if (timeout > 0) {
+            if(NO_ERROR != mCodedBuffer_Cond.waitRelative(mCodedBuffer_Lock, 1000000LL*timeout)){
+                mCodedBuffer_Lock.unlock();
+                LOG_E("Time out wait for Coded buffer.\n");
+                return ENCODE_DEVICE_BUSY;
+            }
+        } else {//Nonblock
+            mCodedBuffer_Lock.unlock();
+            LOG_E("Coded buffer is not ready now.\n");
+            return ENCODE_DEVICE_BUSY;
+        }
     }
+
+    if(mVACodedBufferList.empty()){
+        mCodedBuffer_Lock.unlock();
+        return ENCODE_DEVICE_BUSY;
+    }
+    VABufferID coded_buf = (VABufferID) *(mVACodedBufferList.begin());
+    mVACodedBufferList.erase(mVACodedBufferList.begin());
+    mCodedBuffer_Lock.unlock();
+
+    LOG_V("CodedBuffer ID 0x%08x\n", coded_buf);
+
+    //All resources are ready, start to assemble EncodeTask
+    EncodeTask* task = new EncodeTask();
+
+    task->completed = false;
+    task->enc_surface = sid;
+    task->coded_buffer = coded_buf;
+    task->timestamp = inBuffer->timeStamp;
+    task->in_data = inBuffer->data;
+
+    //Setup frame info, like flag ( SYNCFRAME), frame number, type etc
+    task->type = inBuffer->type;
+    task->flag = inBuffer->flag;
+    PrepareFrameInfo(task);
+
+#ifndef AUTO_REFERENCE
+        //Setup ref /rec frames
+        //TODO: B frame support, temporary use same logic
+        switch (inBuffer->type) {
+            case FTYPE_UNKNOWN:
+            case FTYPE_IDR:
+            case FTYPE_I:
+            case FTYPE_P:
+            {
+                if(!mFrameSkipped) {
+                    VASurfaceID tmpSurface = mRecSurface;
+                    mRecSurface = mRefSurface;
+                    mRefSurface = tmpSurface;
+                }
+
+                task->ref_surface[0] = mRefSurface;
+                task->ref_surface[1] = VA_INVALID_SURFACE;
+                task->rec_surface = mRecSurface;
+
+                break;
+            }
+            case FTYPE_B:
+            default:
+                LOG_V("Something wrong, B frame may not be supported in this mode\n");
+                ret = ENCODE_NOT_SUPPORTED;
+                goto CLEAN_UP;
+        }
+#else
+        task->ref_surface[0] = VA_INVALID_SURFACE;
+        task->ref_surface[1] = VA_INVALID_SURFACE;
+        task->rec_surface = VA_INVALID_SURFACE;
 #endif
 
-    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
-    CHECK_VA_STATUS_RETURN("vaBeginPicture");
+    //======Start Encoding, add task to list======
+    LOG_V("Start Encoding vaSurface=0x%08x\n", task->enc_surface);
 
-    ret = sendEncodeCommand();
-    CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand");
+    vaStatus = vaBeginPicture(mVADisplay, mVAContext, task->enc_surface);
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaBeginPicture");
+
+    ret = sendEncodeCommand(task);
+    CHECK_ENCODE_STATUS_CLEANUP("sendEncodeCommand");
 
     vaStatus = vaEndPicture(mVADisplay, mVAContext);
-    CHECK_VA_STATUS_RETURN("vaEndPicture");
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaEndPicture");
 
-    LOG_V( "vaEndPicture\n");
+    LOG_V("Add Task %p into Encode Task list\n", task);
+    mEncodeTask_Lock.lock();
+    mEncodeTaskList.push_back(task);
+    mEncodeTask_Cond.signal();
+    mEncodeTask_Lock.unlock();
 
-    if (mFirstFrame) {
-        updateProperities();
-        decideFrameType();
-    }
-
-    LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastSurface);
-    vaStatus = vaSyncSurface(mVADisplay, mLastSurface);
-    if (vaStatus != VA_STATUS_SUCCESS) {
-        LOG_W( "Failed vaSyncSurface\n");
-    }
-
-    mOutCodedBuffer = mLastCodedBuffer;
-
-    // Need map buffer before calling query surface below to get
-    // the right skip frame flag for current frame
-    // It is a requirement of video driver
-    vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf);
-    vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
-
-    if (mFirstFrame) {
-        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
-        CHECK_VA_STATUS_RETURN("vaBeginPicture");
-
-        ret = sendEncodeCommand();
-        CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand");
-
-        vaStatus = vaEndPicture(mVADisplay, mVAContext);
-        CHECK_VA_STATUS_RETURN("vaEndPicture");
-
-        mKeyFrame = true;
-    }
-
-    // Query the status of last surface to check if its next frame is skipped
-    VASurfaceStatus vaSurfaceStatus;
-    vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus);
-    CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
-
-    mPicSkipped = vaSurfaceStatus & VASurfaceSkipped;
-
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    if (mPicSkipped)
-        mVideoStat.skipped_frames ++;
-#endif
-
-    mLastSurface = VA_INVALID_SURFACE;
-    updateProperities();
-    mCurSurface = VA_INVALID_SURFACE;
-
-    if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true;
-
-    LOG_V("ref the current inBuffer\n");
-
-    mLastInputRawBuffer = inBuffer;
-    mFirstFrame = false;
-
-    return ENCODE_SUCCESS;
-}
-
-Encode_Status VideoEncoderBase::syncEncode(VideoEncRawBuffer *inBuffer) {
-
-    Encode_Status ret = ENCODE_SUCCESS;
-    VAStatus vaStatus = VA_STATUS_SUCCESS;
-    uint8_t *buf = NULL;
-    VASurfaceID tmpSurface = VA_INVALID_SURFACE;
-
-    inBuffer->bufAvailable = false;
-    if (mNewHeader) mFrameNum = 0;
-
-    // current we use one surface for source data,
-    // one for reference and one for reconstructed
-    decideFrameType();
-    ret = manageSrcSurface(inBuffer);
-    CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
-
-    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
-    CHECK_VA_STATUS_RETURN("vaBeginPicture");
-
-    ret = sendEncodeCommand();
-    CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand");
-
-    vaStatus = vaEndPicture(mVADisplay, mVAContext);
-    CHECK_VA_STATUS_RETURN("vaEndPicture");
-
-    LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurSurface);
-    vaStatus = vaSyncSurface(mVADisplay, mCurSurface);
-    if (vaStatus != VA_STATUS_SUCCESS) {
-        LOG_W( "Failed vaSyncSurface\n");
-    }
-
-    mOutCodedBuffer = mVACodedBuffer[mCodedBufIndex];
-
-    // Need map buffer before calling query surface below to get
-    // the right skip frame flag for current frame
-    // It is a requirement of video driver
-    vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf);
-    vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
-
-    mPicSkipped = false;
-    if (!mFirstFrame) {
-        // Query the status of last surface to check if its next frame is skipped
-        VASurfaceStatus vaSurfaceStatus;
-        vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface,  &vaSurfaceStatus);
-        CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
-        mPicSkipped = vaSurfaceStatus & VASurfaceSkipped;
-    }
-
-    mLastSurface = mCurSurface;
-    mCurSurface = VA_INVALID_SURFACE;
-
-    mEncodedFrames ++;
     mFrameNum ++;
 
-    if (!mPicSkipped) {
-        tmpSurface = mRecSurface;
-        mRecSurface = mRefSurface;
-        mRefSurface = tmpSurface;
-    }
+    LOG_V("encode return Success\n");
 
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    if (mPicSkipped)
-        mVideoStat.skipped_frames ++;
-#endif
-
-    inBuffer->bufAvailable = true;
     return ENCODE_SUCCESS;
+
+CLEAN_UP:
+
+    delete task;
+    mCodedBuffer_Lock.lock();
+    mVACodedBufferList.push_back(coded_buf); //push to CodedBuffer pool again since it is not used
+    mCodedBuffer_Cond.signal();
+    mCodedBuffer_Lock.unlock();
+
+    LOG_V("encode return error=%x\n", ret);
+
+    return ret;
 }
 
-void VideoEncoderBase::setKeyFrame(int32_t keyFramePeriod) {
-
-    // For first getOutput async mode, the mFrameNum already increased to 2, and of course is key frame
-    // frame 0 is already encoded and will be outputed here
-    // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call
-    if (!mComParams.syncEncMode) {
-        if (mFrameNum > 2) {
-            if (keyFramePeriod != 0 &&
-                    (((mFrameNum - 2) % keyFramePeriod) == 0)) {
-                mKeyFrame = true;
-            } else {
-                mKeyFrame = false;
-            }
-        } else if (mFrameNum == 2) {
-            mKeyFrame = true;
-        }
-    } else {
-        if (mFrameNum > 1) {
-            if (keyFramePeriod != 0 &&
-                    (((mFrameNum - 1) % keyFramePeriod) == 0)) {
-                mKeyFrame = true;
-            } else {
-                mKeyFrame = false;
-            }
-        } else if (mFrameNum == 1) {
-            mKeyFrame = true;
-        }
-    }
-}
-
-Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) {
+/*
+  1. First check whether one task is already outputting data; if yes, continue outputting, if not, try to get one from the list.
+  2. Three modes are supported: block, non-block and block-with-timeout. If the task is not yet completed, sync the surface
+    first, then start outputting data.
+  3. mCurOutputTask records the task that getOutput() is working on, so a task is not pushed back to the list again after a
+    failure in non-block or block-with-timeout mode.
+  4. Once all output data has been returned, mCurOutputTask is reset to NULL.
+*/
+Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout) {
 
     Encode_Status ret = ENCODE_SUCCESS;
     VAStatus vaStatus = VA_STATUS_SUCCESS;
@@ -547,56 +380,138 @@
 
     CHECK_NULL_RETURN_IFFAIL(outBuffer);
 
-    LOG_V("Begin\n");
+    if (mCurOutputTask == NULL) {
+        mEncodeTask_Lock.lock();
+        if(mEncodeTaskList.empty()) {
+            LOG_V("getOutput CurrentTask is NULL\n");
+            if(timeout == FUNC_BLOCK) {
+                LOG_V("waiting for task....\n");
+                mEncodeTask_Cond.wait(mEncodeTask_Lock);
+            } else if (timeout > 0) {
+                LOG_V("waiting for task in %d ms....\n", timeout);
+                if(NO_ERROR != mEncodeTask_Cond.waitRelative(mEncodeTask_Lock, 1000000LL*timeout)) {
+                    mEncodeTask_Lock.unlock();
+                    LOG_E("Time out wait for encode task.\n");
+                    return ENCODE_DATA_NOT_READY;
+                }
+            } else {//Nonblock
+                mEncodeTask_Lock.unlock();
+                return ENCODE_DATA_NOT_READY;
+            }
+        }
 
-    if (outBuffer->format != OUTPUT_EVERYTHING && outBuffer->format != OUTPUT_FRAME_DATA) {
-        LOG_E("Output buffer mode not supported\n");
-        goto CLEAN_UP;
+        if(mEncodeTaskList.empty()){
+            mEncodeTask_Lock.unlock();
+            return ENCODE_DATA_NOT_READY;
+        }
+        mCurOutputTask =  *(mEncodeTaskList.begin());
+        mEncodeTaskList.erase(mEncodeTaskList.begin());
+        mEncodeTask_Lock.unlock();
     }
 
-    setKeyFrame(mComParams.intraPeriod);
+    //sync/query/wait task if not completed
+    if (mCurOutputTask->completed == false) {
+        uint8_t *buf = NULL;
+        VASurfaceStatus vaSurfaceStatus;
 
+        if (timeout == FUNC_BLOCK) {
+            //block mode, direct sync surface to output data
+
+            LOG_I ("block mode, vaSyncSurface ID = 0x%08x\n", mCurOutputTask->enc_surface);
+            vaStatus = vaSyncSurface(mVADisplay, mCurOutputTask->enc_surface);
+            CHECK_VA_STATUS_GOTO_CLEANUP("vaSyncSurface");
+
+            mOutCodedBuffer = mCurOutputTask->coded_buffer;
+
+            // Check frame skip
+            // Need map buffer before calling query surface below to get the right skip frame flag for current frame
+            // It is a requirement of video driver
+            vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf);
+            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+
+            vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface,  &vaSurfaceStatus);
+            CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
+            mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
+
+            mCurOutputTask->completed = true;
+
+        } else {
+            //For both block with timeout and non-block mode, query surface, if ready, output data
+            LOG_I ("non-block mode, vaQuerySurfaceStatus ID = 0x%08x\n", mCurOutputTask->enc_surface);
+
+            vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurOutputTask->enc_surface,  &vaSurfaceStatus);
+            if (vaSurfaceStatus & VASurfaceReady) {
+                mOutCodedBuffer = mCurOutputTask->coded_buffer;
+                mFrameSkipped = vaSurfaceStatus & VASurfaceSkipped;
+                mCurOutputTask->completed = true;
+                //if need to call SyncSurface again ?
+
+            }	else {//not ready yet
+                //keep mCurOutputTask for the next call; goto CLEAN_UP would wrongly free it and recycle a stale coded buffer
+                return ENCODE_DATA_NOT_READY;
+            }
+
+        }
+
+    }
+
+    //start to output data
     ret = prepareForOutput(outBuffer, &useLocalBuffer);
     CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
 
-    ret = outputAllData(outBuffer);
-    CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+    //copy all flags to outBuffer
+    outBuffer->flag = mCurOutputTask->flag;
+    outBuffer->type = mCurOutputTask->type;
+    outBuffer->timeStamp = mCurOutputTask->timestamp;
+
+    if (outBuffer->format == OUTPUT_EVERYTHING || outBuffer->format == OUTPUT_FRAME_DATA) {
+        ret = outputAllData(outBuffer);
+        CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+    }else {
+        ret = getExtFormatOutput(outBuffer);
+        CHECK_ENCODE_STATUS_CLEANUP("getExtFormatOutput");
+    }
 
     LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize);
 
     ret = cleanupForOutput();
     CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
 
+    LOG_V("getOutput return Success, Frame skip is %d\n", mFrameSkipped);
+
+    return ENCODE_SUCCESS;
+
 CLEAN_UP:
 
-    if (ret < ENCODE_SUCCESS) {
-        if (outBuffer->data && (useLocalBuffer == true)) {
-            delete[] outBuffer->data;
-            outBuffer->data = NULL;
-            useLocalBuffer = false;
-        }
-
-        if (mCodedBufferMapped) {
-            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
-            mCodedBufferMapped = false;
-            mCurSegment = NULL;
-        }
+    if (outBuffer->data && (useLocalBuffer == true)) {
+        delete[] outBuffer->data;
+        outBuffer->data = NULL;
+        useLocalBuffer = false;
     }
 
-    LOG_V("End\n");
+    if (mCodedBufferMapped) {
+        vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+        mCodedBufferMapped = false;
+        mCurSegment = NULL;
+    }
+
+    delete mCurOutputTask;
+    mCurOutputTask = NULL;
+    mCodedBuffer_Lock.lock();
+    mVACodedBufferList.push_back(mOutCodedBuffer);
+    mCodedBuffer_Cond.signal();
+    mCodedBuffer_Lock.unlock();
+
+    LOG_V("getOutput return error=%x\n", ret);
     return ret;
 }
 
-
 void VideoEncoderBase::flush() {
 
     LOG_V( "Begin\n");
 
     // reset the properities
-    mEncodedFrames = 0;
     mFrameNum = 0;
-    mPicSkipped = false;
-    mIsIntra = true;
 
     LOG_V( "end\n");
 }
@@ -605,58 +520,68 @@
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     Encode_Status ret = ENCODE_SUCCESS;
-    SurfaceMap *map = NULL;
 
     LOG_V( "Begin\n");
 
-    if (mSurfaces) {
-        delete [] mSurfaces;
-        mSurfaces = NULL;
-    }
-
     // It is possible that above pointers have been allocated
-    // before we set mInitialized to true
-    if (!mInitialized) {
+    // before we set mStarted to true
+    if (!mStarted) {
         LOG_V("Encoder has been stopped\n");
         return ENCODE_SUCCESS;
     }
 
+    mCodedBuffer_Lock.lock();
+    mVACodedBufferList.clear();
+    mCodedBuffer_Lock.unlock();
+    mCodedBuffer_Cond.broadcast();
+
+    //Delete all uncompleted tasks
+    mEncodeTask_Lock.lock();
+    while(! mEncodeTaskList.empty())
+    {
+        delete *mEncodeTaskList.begin();
+        mEncodeTaskList.erase(mEncodeTaskList.begin());
+    }
+    mEncodeTask_Lock.unlock();
+    mEncodeTask_Cond.broadcast();
+
+    //Release Src Surface Buffer Map, destroy surface manually since it is not added into context
+    LOG_V( "Release Src Surface Map\n");
+    while(! mSrcSurfaceMapList.empty())
+    {
+        if (! (*mSrcSurfaceMapList.begin())->added) {
+            LOG_V( "Release the Src Surface Buffer not added into vaContext\n");
+            vaDestroySurfaces(mVADisplay, &((*mSrcSurfaceMapList.begin())->surface), 1);
+        }
+        delete (*mSrcSurfaceMapList.begin());
+        mSrcSurfaceMapList.erase(mSrcSurfaceMapList.begin());
+    }
+
     LOG_V( "vaDestroyContext\n");
-    vaStatus = vaDestroyContext(mVADisplay, mVAContext);
-    CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
+    if (mVAContext != VA_INVALID_ID) {
+        vaStatus = vaDestroyContext(mVADisplay, mVAContext);
+        CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
+    }
 
     LOG_V( "vaDestroyConfig\n");
-    vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
-    CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
-
-    // Release Src Surface Buffer Map 
-    LOG_V( "Rlease Src Surface Map\n");
-
-    map = mSrcSurfaceMapList;
-    while(map) {
-        if (! map->added) {
-            //destroy surface by itself
-            LOG_V( "Rlease Src Surface Buffer not added into vaContext\n");
-            vaDestroySurfaces(mVADisplay, &map->surface, 1);
-        }
-        SurfaceMap *tmp = map;
-        map = map->next;
-        delete tmp;
+    if (mVAConfig != VA_INVALID_ID) {
+        vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
+        CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
     }
 
 CLEAN_UP:
-    mInitialized = false;
 
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    LOG_V("Encoder Statistics:\n");
-    LOG_V("    %d frames Encoded, %d frames Skipped\n", mEncodedFrames, mVideoStat.skipped_frames);
-    LOG_V("    Encode time: Average(%d us), Max(%d us @Frame No.%d), Min(%d us @Frame No.%d)\n", \
-           mVideoStat.average_encode_time / mEncodedFrames, mVideoStat.max_encode_time, \
-           mVideoStat.max_encode_frame, mVideoStat.min_encode_time, mVideoStat.min_encode_frame);
+    mStarted = false;
+    mSliceSizeOverflow = false;
+    mCurOutputTask= NULL;
+    mOutCodedBuffer = 0;
+    mCodedBufferMapped = false;
+    mCurSegment = NULL;
+    mOffsetInSeg =0;
+    mTotalSize = 0;
+    mTotalSizeCopied = 0;
+    mFrameSkipped = false;
 
-    memset(&mVideoStat, 0, sizeof(VideoStatistics));
-    mVideoStat.min_encode_time = 0xFFFFFFFF;
-#endif
     LOG_V( "end\n");
     return ret;
 }
@@ -721,6 +646,9 @@
     outBuffer->flag = 0;
     if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
 
+    if (!mCurSegment)
+        return ENCODE_FAIL;
+
     if (mCurSegment->size < mOffsetInSeg) {
         LOG_E("mCurSegment->size < mOffsetInSeg\n");
         return ENCODE_FAIL;
@@ -753,13 +681,23 @@
         vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
         CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
         mCodedBufferMapped = false;
+        mTotalSize = 0;
+        mOffsetInSeg = 0;
+        mTotalSizeCopied = 0;
+
+        delete mCurOutputTask;
+        mCurOutputTask = NULL;
+        mCodedBuffer_Lock.lock();
+        mVACodedBufferList.push_back(mOutCodedBuffer);
+        mCodedBuffer_Cond.signal();
+        mCodedBuffer_Lock.unlock();
+
+        LOG_V("All data has been outputted, return CodedBuffer 0x%08x to pool\n", mOutCodedBuffer);
     }
     return ENCODE_SUCCESS;
 }
 
-
-Encode_Status VideoEncoderBase::outputAllData(
-        VideoEncOutputBuffer *outBuffer) {
+Encode_Status VideoEncoderBase::outputAllData(VideoEncOutputBuffer *outBuffer) {
 
     // Data size been copied for every single call
     uint32_t sizeCopiedHere = 0;
@@ -794,7 +732,6 @@
             outBuffer->dataSize = outBuffer->bufferSize;
             outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
             outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
-            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
             return ENCODE_BUFFER_TOO_SMALL;
         }
 
@@ -802,7 +739,6 @@
             outBuffer->dataSize = sizeCopiedHere;
             outBuffer->remainingSize = 0;
             outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
-            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
             mCurSegment = NULL;
             return ENCODE_SUCCESS;
         }
@@ -838,6 +774,7 @@
     mComParams.airParams.airAuto = 1;
     mComParams.disableDeblocking = 2;
     mComParams.syncEncMode = false;
+    mComParams.codedBufNum = 2;
 
     mHrdParam.bufferSize = 0;
     mHrdParam.initBufferFullness = 0;
@@ -852,7 +789,7 @@
     CHECK_NULL_RETURN_IFFAIL(videoEncParams);
     LOG_I("Config type = %d\n", (int)videoEncParams->type);
 
-    if (mInitialized) {
+    if (mStarted) {
         LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
         return ENCODE_ALREADY_INIT;
     }
@@ -862,10 +799,11 @@
 
             VideoParamsCommon *paramsCommon =
                     reinterpret_cast <VideoParamsCommon *> (videoEncParams);
-
             if (paramsCommon->size != sizeof (VideoParamsCommon)) {
                 return ENCODE_INVALID_PARAMS;
             }
+            if(paramsCommon->codedBufNum < 2)
+                paramsCommon->codedBufNum =2;
             mComParams = *paramsCommon;
             break;
         }
@@ -1029,7 +967,7 @@
 
    // workaround
 #if 0
-    if (!mInitialized) {
+    if (!mStarted) {
         LOG_E("Encoder has not initialized yet, can't call setConfig\n");
         return ENCODE_NOT_INIT;
     }
@@ -1200,51 +1138,29 @@
     return ret;
 }
 
-void VideoEncoderBase:: decideFrameType () {
+void VideoEncoderBase:: PrepareFrameInfo (EncodeTask* task) {
+    if (mNewHeader) mFrameNum = 0;
+    LOG_I( "mFrameNum = %d   ", mFrameNum);
 
-    LOG_I( "mEncodedFrames = %d\n", mEncodedFrames);
-    LOG_I( "mFrameNum = %d\n", mFrameNum);
-    LOG_I( "mIsIntra = %d\n", mIsIntra);
+    updateFrameInfo(task) ;
+}
+
+Encode_Status VideoEncoderBase:: updateFrameInfo (EncodeTask* task) {
+
+    task->type = FTYPE_P;
 
     // determine the picture type
-    if (mComParams.intraPeriod == 0) {
-        if (mFrameNum == 0)
-            mIsIntra = true;
-        else
-            mIsIntra = false;
-    } else if ((mFrameNum % mComParams.intraPeriod) == 0) {
-        mIsIntra = true;
-    } else {
-        mIsIntra = false;
-    }
+    if (mFrameNum == 0)
+        task->type = FTYPE_I;
+    if (mComParams.intraPeriod != 0 && ((mFrameNum % mComParams.intraPeriod) == 0))
+        task->type = FTYPE_I;
 
-    LOG_I( "mIsIntra = %d\n",mIsIntra);
+    if (task->type == FTYPE_I)
+        task->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+
+    return ENCODE_SUCCESS;
 }
 
-
-void VideoEncoderBase:: updateProperities () {
-
-    VASurfaceID tmp = VA_INVALID_SURFACE;
-    LOG_V( "Begin\n");
-
-    mEncodedFrames ++;
-    mFrameNum ++;
-    mLastCodedBuffer = mVACodedBuffer[mCodedBufIndex];
-    mCodedBufIndex ++;
-    mCodedBufIndex %=2;
-
-    mLastSurface = mCurSurface;
-
-    if (!mPicSkipped) {
-        tmp = mRecSurface;
-        mRecSurface = mRefSurface;
-        mRefSurface = tmp;
-    }
-
-    LOG_V( "End\n");
-}
-
-
 Encode_Status  VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) {
 
     uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
@@ -1282,25 +1198,6 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderBase::getStatistics (VideoStatistics *videoStat) {
-
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    if (videoStat != NULL) {
-        videoStat->total_frames = mEncodedFrames;
-        videoStat->skipped_frames = mVideoStat.skipped_frames;     
-        videoStat->average_encode_time = mVideoStat.average_encode_time / mEncodedFrames;
-        videoStat->max_encode_time = mVideoStat.max_encode_time;
-        videoStat->max_encode_frame = mVideoStat.max_encode_frame;
-        videoStat->min_encode_time = mVideoStat.min_encode_time;
-        videoStat->min_encode_frame = mVideoStat.min_encode_frame;
-    }
-    
-    return ENCODE_SUCCESS;
-#else
-    return ENCODE_NOT_SUPPORTED;
-#endif
-}
-
 Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
     uint32_t width, uint32_t height, uint32_t format,
     uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
@@ -1317,7 +1214,7 @@
     LOG_V( "Begin\n");
 
     // If encode session has been configured, we can not request surface creation anymore
-    if (mInitialized) {
+    if (mStarted) {
         LOG_E( "Already Initialized, can not request VA surface anymore\n");
         return ENCODE_WRONG_STATE;
     }
@@ -1387,9 +1284,8 @@
     map->vinfo.format = VA_FOURCC_NV12;
     map->vinfo.s3dformat = 0xffffffff;
     map->added = false;
-    map->next = NULL;
 
-    mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+    mSrcSurfaceMapList.push_back(map);
 
     LOG_I( "surface = 0x%08x\n",(uint32_t)surface);
     LOG_I("image->pitches[0] = %d\n", image.pitches[0]);
@@ -1436,7 +1332,7 @@
     }
 
     for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
-        if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL)  //already mapped
+        if (findSurfaceMapByValue(upStreamBuffer->bufList[i]) != NULL)  //already mapped
             continue;
 
         //wrap upstream buffer into vaSurface
@@ -1456,18 +1352,12 @@
         }
         map->vinfo.s3dformat = 0xFFFFFFFF;
         map->added = false;
-        map->next = NULL;
         status = surfaceMapping(map);
 
         if (status == ENCODE_SUCCESS)
-            mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+            mSrcSurfaceMapList.push_back(map);
         else
            delete map;
-    
-        if (mSrcSurfaceMapList == NULL) {
-            LOG_E ("mSrcSurfaceMapList should not be NULL now, maybe meet mapping error\n");
-            return ENCODE_NO_MEMORY;
-        }
     }
 
     return status;
@@ -1493,7 +1383,7 @@
 
     VASurfaceAttributeTPI vaSurfaceAttrib;
     uint32_t buf;
-    
+
     vaSurfaceAttrib.buffers = &buf;
 
     vaStatus = vaLockSurface(
@@ -1536,7 +1426,7 @@
     LOG_I("Surface ID created from Kbuf = 0x%08x", surface);
 
     map->surface = surface;
-    
+
     return ret;
 }
 
@@ -1608,12 +1498,12 @@
     uint32_t lumaOffset = 0;
     uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride;
     uint32_t chromaVOffset = chromaUOffset + 1;
-    
+
     VASurfaceAttributeTPI vaSurfaceAttrib;
     uint32_t buf;
 
     vaSurfaceAttrib.buffers = &buf;
-    
+
     vaSurfaceAttrib.count = 1;
     vaSurfaceAttrib.size = map->vinfo.lumaStride * map->vinfo.height * 3 / 2;
     vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride;
@@ -1635,7 +1525,7 @@
     LOG_I("Surface ID created from Kbuf = 0x%08x", map->value);
 
     map->surface = surface;
-    
+
     return ret;
 }
 
@@ -1667,7 +1557,7 @@
     CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
 
     map->surface = surface;
-   
+
     return ret;
 }
 
@@ -1745,9 +1635,9 @@
     return status;
 }
 
-Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) {
+Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid) {
 
-    Encode_Status ret = ENCODE_SUCCESS;        
+    Encode_Status ret = ENCODE_SUCCESS;
     MetadataBufferType type;
     int32_t value;
     ValueInfo vinfo;
@@ -1757,13 +1647,13 @@
 
     IntelMetadataBuffer imb;
     SurfaceMap *map = NULL;
-  
-    if (mStoreMetaDataInBuffers.isEnabled) {        
+
+    if (mStoreMetaDataInBuffers.isEnabled) {
         //metadatabuffer mode
         LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
         if (imb.UnSerialize(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
             //fail to parse buffer
-            return ENCODE_NO_REQUEST_DATA; 
+            return ENCODE_NO_REQUEST_DATA;
         }
 
         imb.GetType(type);
@@ -1772,20 +1662,21 @@
         //raw mode
         LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
         if (! inBuffer->data || inBuffer->size == 0) {
-            return ENCODE_NULL_PTR; 
+            return ENCODE_NULL_PTR;
         }
 
         type = MetadataBufferTypeUser;
         value = (int32_t)inBuffer->data;
     }
-   
-    //find if mapped
-    map = findSurfaceMapByValue(mSrcSurfaceMapList, value);
 
-    if (map) {  	
+
+    //find if mapped
+    map = (SurfaceMap*) findSurfaceMapByValue(value);
+
+    if (map) {
         //has mapped, get surfaceID directly
         LOG_I("direct find surface %d from value %x\n", map->surface, value);
-        mCurSurface = map->surface;
+        *sid = map->surface;
 
         return ret;
     }
@@ -1793,8 +1684,8 @@
     //if no found from list, then try to map value with parameters
     LOG_I("not find surface from cache with value %x, start mapping if enough information\n", value);
 
-    if (mStoreMetaDataInBuffers.isEnabled) {  
-    	
+    if (mStoreMetaDataInBuffers.isEnabled) {
+
         //if type is MetadataBufferTypeGrallocSource, use default parameters
         if (type == MetadataBufferTypeGrallocSource) {
             vinfo.mode = MEM_MODE_GFXHANDLE;
@@ -1806,15 +1697,15 @@
             vinfo.chromStride = mComParams.resolution.width;
             vinfo.format = VA_FOURCC_NV12;
             vinfo.s3dformat = 0xFFFFFFFF;
-        } else {            
+        } else {
             //get all info mapping needs
             imb.GetValueInfo(pvinfo);
             imb.GetExtraValues(extravalues, extravalues_count);
   	}
-        
+
     } else {
 
-        //raw mode           
+        //raw mode
         vinfo.mode = MEM_MODE_MALLOC;
         vinfo.handle = 0;
         vinfo.size = inBuffer->size;
@@ -1836,26 +1727,25 @@
         map->value = value;
         memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo));
         map->added = false;
-        map->next = NULL;
 
         ret = surfaceMapping(map);
         if (ret == ENCODE_SUCCESS) {
             LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface);
-            mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+            mSrcSurfaceMapList.push_back(map);
         } else {
             delete map;
             LOG_E("surface mapping failed, wrong info or meet serious error\n");
             return ret;
-        } 
+        }
 
-        mCurSurface = map->surface;
+        *sid = map->surface;
 
     } else {
         //can't map due to no info
         LOG_E("surface mapping failed,  missing information\n");
         return ENCODE_NO_REQUEST_DATA;
     }
-            
+
     if (extravalues) {
         //map more using same ValueInfo
         for(unsigned int i=0; i<extravalues_count; i++) {
@@ -1864,12 +1754,11 @@
             map->value = extravalues[i];
             memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo));
             map->added = false;
-            map->next = NULL;
 
             ret = surfaceMapping(map);
             if (ret == ENCODE_SUCCESS) {
                 LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface);
-                mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+                mSrcSurfaceMapList.push_back(map);
             } else {
                 delete map;
                 map = NULL;
@@ -1877,69 +1766,10 @@
             }
         }
     }
-   
+
     return ret;
 }
 
-SurfaceMap *VideoEncoderBase::appendSurfaceMap(
-        SurfaceMap *head, SurfaceMap *map) {
-
-    if (head == NULL) {
-        return map;
-    }
-
-    SurfaceMap *node = head;
-    SurfaceMap *tail = NULL;
-
-    while (node != NULL) {
-        tail = node;
-        node = node->next;
-    }
-    tail->next = map;
-
-    return head;
-}
-
-SurfaceMap *VideoEncoderBase::removeSurfaceMap(
-        SurfaceMap *head, SurfaceMap *map) {
-
-    SurfaceMap *node = head;
-    SurfaceMap *tmpNode = NULL;
-
-    if (head == map) {
-        tmpNode = head->next;
-        map->next = NULL;
-        return tmpNode;
-    }
-
-    while (node != NULL) {
-        if (node->next == map)
-            break;
-        node = node->next;
-    }
-
-    if (node != NULL) {
-        node->next = map->next;
-    }
-
-    map->next = NULL;
-    return head;
-}
-
-SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(
-        SurfaceMap *head, int32_t value) {
-
-    SurfaceMap *node = head;
-
-    while (node != NULL) {
-        if (node->value == value)
-            break;
-        node = node->next;
-    }
-
-    return node;
-}
-
 Encode_Status VideoEncoderBase::renderDynamicBitrate() {
     VAStatus vaStatus = VA_STATUS_SUCCESS;
 
@@ -2063,3 +1893,17 @@
 
     return ENCODE_SUCCESS;
 }
+
+SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(int32_t value) {
+    android::List<SurfaceMap *>::iterator node;
+
+    for(node = mSrcSurfaceMapList.begin(); node !=  mSrcSurfaceMapList.end(); node++)
+    {
+        if ((*node)->value == value)
+            return *node;
+        else
+            continue;
+    }
+
+    return NULL;
+}
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
index 9ab7bc6..924c4da 100644
--- a/videoencoder/VideoEncoderBase.h
+++ b/videoencoder/VideoEncoderBase.h
@@ -14,15 +14,30 @@
 #include "VideoEncoderDef.h"
 #include "VideoEncoderInterface.h"
 #include "IntelMetadataBuffer.h"
+#include <utils/List.h>
+#include <utils/threads.h>
 
+//#define AUTO_REFERENCE
 struct SurfaceMap {
     VASurfaceID surface;
     MetadataBufferType type;
     int32_t value;
     ValueInfo vinfo;
-    uint32_t index;
     bool added;
-    SurfaceMap *next;
+};
+
+struct EncodeTask {
+    VASurfaceID enc_surface;
+    VASurfaceID ref_surface[2];
+    VASurfaceID rec_surface;
+    VABufferID coded_buffer;
+
+    FrameType type;
+    int flag;
+    int64_t timestamp;  //corresponding input frame timestamp
+    uint8_t *in_data;  //input buffer data
+
+    bool completed;   //if encode task is done completely by HW
 };
 
 class VideoEncoderBase : IVideoEncoder {
@@ -34,7 +49,7 @@
     virtual Encode_Status start(void);
     virtual void flush(void);
     virtual Encode_Status stop(void);
-    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer);
+    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout);
 
     /*
     * getOutput can be called several time for a frame (such as first time  codec data, and second time others)
@@ -42,30 +57,26 @@
     * If the buffer passed to encoded is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL
     * and caller should provide a big enough buffer and call again
     */
-    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout);
 
     virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
     virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
     virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig);
     virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig);
-
     virtual Encode_Status getMaxOutSize(uint32_t *maxSize);
-    virtual Encode_Status getStatistics(VideoStatistics *videoStat);
 
 protected:
-    virtual Encode_Status sendEncodeCommand(void) = 0;
+    virtual Encode_Status sendEncodeCommand(EncodeTask* task) = 0;
     virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0;
     virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0;
     virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0;
     virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) = 0;
+    virtual Encode_Status updateFrameInfo(EncodeTask* task) ;
 
-    Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
-    Encode_Status cleanupForOutput();
-    Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
     Encode_Status renderDynamicFrameRate();
     Encode_Status renderDynamicBitrate();
     Encode_Status renderHrd();
-    void setKeyFrame(int32_t keyFramePeriod);
 
 private:
     void setDefaultParams(void);
@@ -78,41 +89,29 @@
     Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map);
     Encode_Status surfaceMappingForMalloc(SurfaceMap *map);
     Encode_Status surfaceMapping(SurfaceMap *map);
+    SurfaceMap *findSurfaceMapByValue(int32_t value);
+    Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer, VASurfaceID *sid);
+    void PrepareFrameInfo(EncodeTask* task);
 
-    SurfaceMap *appendSurfaceMap(
-            SurfaceMap *head, SurfaceMap *map);
-    SurfaceMap *removeSurfaceMap(
-            SurfaceMap *head, SurfaceMap *map);
-    SurfaceMap *findSurfaceMapByValue(
-            SurfaceMap *head, int32_t value);
-
-    Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer);
-    void updateProperities(void);
-    void decideFrameType(void);
-//    Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer);
-    Encode_Status syncEncode(VideoEncRawBuffer *inBuffer);
-    Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer);
+    Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
+    Encode_Status cleanupForOutput();
+    Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
 
 protected:
 
     bool mInitialized;
+    bool mStarted;
     VADisplay mVADisplay;
     VAContextID mVAContext;
     VAConfigID mVAConfig;
     VAEntrypoint mVAEntrypoint;
 
-    VACodedBufferSegment *mCurSegment;
-    uint32_t mOffsetInSeg;
-    uint32_t mTotalSize;
-    uint32_t mTotalSizeCopied;
 
     VideoParamsCommon mComParams;
     VideoParamsHRD mHrdParam;
     VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers;
 
-    bool mForceKeyFrame;
     bool mNewHeader;
-    bool mFirstFrame;
 
     bool mRenderMaxSliceSize; //Max Slice Size
     bool mRenderQP;
@@ -121,50 +120,36 @@
     bool mRenderBitRate;
     bool mRenderHrd;
 
-    VABufferID mVACodedBuffer[2];
-    VABufferID mLastCodedBuffer;
-    VABufferID mOutCodedBuffer;
     VABufferID mSeqParamBuf;
     VABufferID mRcParamBuf;
     VABufferID mFrameRateParamBuf;
     VABufferID mPicParamBuf;
     VABufferID mSliceParamBuf;
 
-    VASurfaceID *mSurfaces;
-    uint32_t mSurfaceCnt;
+    android::List <SurfaceMap *> mSrcSurfaceMapList;  //list of all surfaces mapped from input buffers
+    android::List <EncodeTask *> mEncodeTaskList;  //list of all encode tasks
+    android::List <VABufferID> mVACodedBufferList;  //list of all available coded buffers
 
-    SurfaceMap *mSrcSurfaceMapList;
-
-    //for new design
-    VASurfaceID mCurSurface;        //current input surface to be encoded 
-    VASurfaceID mRefSurface;        //reference surface
-    VASurfaceID mRecSurface;        //reconstructed surface
-    VASurfaceID mLastSurface;       //last surface
-
-    VideoEncRawBuffer *mLastInputRawBuffer;
-
-    uint32_t mEncodedFrames;
+    VASurfaceID mRefSurface;        //reference surface, only used in base
+    VASurfaceID mRecSurface;        //reconstructed surface, only used in base
     uint32_t mFrameNum;
     uint32_t mCodedBufSize;
-    uint32_t mCodedBufIndex;
 
-    bool mPicSkipped;
-    bool mIsIntra;
     bool mSliceSizeOverflow;
+
+    //task currently being output
+    EncodeTask *mCurOutputTask;
+
+    //status of the coded buffer currently being output
+    VABufferID mOutCodedBuffer;
     bool mCodedBufferMapped;
-    bool mDataCopiedOut;
-    bool mKeyFrame;
+    VACodedBufferSegment *mCurSegment;
+    uint32_t mOffsetInSeg;
+    uint32_t mTotalSize;
+    uint32_t mTotalSizeCopied;
+    android::Mutex               mCodedBuffer_Lock, mEncodeTask_Lock;
+    android::Condition           mCodedBuffer_Cond, mEncodeTask_Cond;
 
-    int32_t  mInitCheck;
-
-#ifdef VIDEO_ENC_STATISTICS_ENABLE
-    VideoStatistics mVideoStat;
-#endif
-
-    // Constants
-    static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2;
-    static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8;
+    bool mFrameSkipped;
 };
-
-
 #endif /* __VIDEO_ENCODER_BASE_H__ */
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
index b9feca2..f5174aa 100644
--- a/videoencoder/VideoEncoderDef.h
+++ b/videoencoder/VideoEncoderDef.h
@@ -32,7 +32,9 @@
     ENCODE_SUCCESS = 0,
     ENCODE_ALREADY_INIT = 1,
     ENCODE_SLICESIZE_OVERFLOW = 2,
-    ENCODE_BUFFER_TOO_SMALL = 3 // The buffer passed to encode is too small to contain encoded data
+    ENCODE_BUFFER_TOO_SMALL = 3, // The buffer passed to encode is too small to contain encoded data
+    ENCODE_DEVICE_BUSY = 4,
+    ENCODE_DATA_NOT_READY = 5,
 };
 
 typedef enum {
@@ -42,6 +44,7 @@
     OUTPUT_ONE_NAL = 4,
     OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8,
     OUTPUT_LENGTH_PREFIXED = 16,
+    OUTPUT_CODEDBUFFER = 32,
     OUTPUT_BUFFER_LAST
 } VideoOutputFormat;
 
@@ -102,6 +105,23 @@
     BUFFER_LAST
 };
 
+typedef enum {
+    FTYPE_UNKNOWN = 0, // Unknown
+    FTYPE_I = 1, // General I-frame type
+    FTYPE_P = 2, // General P-frame type
+    FTYPE_B = 3, // General B-frame type
+    FTYPE_SI = 4, // H.263 SI-frame type
+    FTYPE_SP = 5, // H.263 SP-frame type
+    FTYPE_EI = 6, // H.264 EI-frame type
+    FTYPE_EP = 7, // H.264 EP-frame type
+    FTYPE_S = 8, // MPEG-4 S-frame type
+    FTYPE_IDR = 9, // IDR-frame type
+}FrameType;
+
+//function call mode
+#define FUNC_BLOCK        0xFFFFFFFF
+#define FUNC_NONBLOCK        0
+
 // Output buffer flag
 #define ENCODE_BUFFERFLAG_ENDOFFRAME       0x00000001
 #define ENCODE_BUFFERFLAG_PARTIALFRAME     0x00000002
@@ -110,6 +130,8 @@
 #define ENCODE_BUFFERFLAG_DATACORRUPT      0x00000010
 #define ENCODE_BUFFERFLAG_DATAINVALID      0x00000020
 #define ENCODE_BUFFERFLAG_SLICEOVERFOLOW   0x00000040
+#define ENCODE_BUFFERFLAG_ENDOFSTREAM     0x00000080
+#define ENCODE_BUFFERFLAG_NSTOPFRAME        0x00000100
 
 typedef struct {
     uint8_t *data;
@@ -118,14 +140,18 @@
     uint32_t remainingSize;
     int flag; //Key frame, Codec Data etc
     VideoOutputFormat format; //output format
-    uint64_t timeStamp; //reserved
+    int64_t timeStamp; //reserved
+    FrameType type;
+    uint8_t *in_data; //indicate corresponding input data
 } VideoEncOutputBuffer;
 
 typedef struct {
     uint8_t *data;
     uint32_t size;
     bool bufAvailable; //To indicate whether this buffer can be reused
-    uint64_t timeStamp; //reserved
+    int64_t timeStamp; //reserved
+    FrameType type; //frame type expected to be encoded
+    int flag; // flag to indicate buffer property
 } VideoEncRawBuffer;
 
 struct VideoEncSurfaceBuffer {
@@ -304,6 +330,8 @@
     AirParams airParams;
     uint32_t disableDeblocking;
     bool syncEncMode;
+    //CodedBuffer properties
+    uint32_t codedBufNum;
 
     VideoParamsCommon() {
         type = VideoParamsTypeCommon;
@@ -327,6 +355,7 @@
         this->airParams = other.airParams;
         this->disableDeblocking = other.disableDeblocking;
         this->syncEncMode = other.syncEncMode;
+        this->codedBufNum = other.codedBufNum;
         return *this;
     }
 };
@@ -336,10 +365,23 @@
     uint8_t VUIFlag;
     int32_t maxSliceSize;
     uint32_t idrInterval;
+    uint32_t ipPeriod;
+    uint32_t refFrames;
     SliceNum sliceNum;
     AVCDelimiterType delimiterType;
     Cropping crop;
     SamplingAspectRatio SAR;
+    uint32_t refIdx10ActiveMinus1;
+    uint32_t refIdx11ActiveMinus1;
+    bool bFrameMBsOnly;
+    bool bMBAFF;
+    bool bEntropyCodingCABAC;
+    bool bWeightedPPrediction;
+    uint32_t weightedBipredicitonMode;
+    bool bConstIpred ;
+    bool bDirect8x8Inference;
+    bool bDirectSpatialTemporal;
+    uint32_t cabacInitIdc;
 
     VideoParamsAVC() {
         type = VideoParamsTypeAVC;
@@ -354,6 +396,8 @@
         this->VUIFlag = other.VUIFlag;
         this->maxSliceSize = other.maxSliceSize;
         this->idrInterval = other.idrInterval;
+        this->ipPeriod = other.ipPeriod;
+        this->refFrames = other.refFrames;
         this->sliceNum = other.sliceNum;
         this->delimiterType = other.delimiterType;
         this->crop.LeftOffset = other.crop.LeftOffset;
@@ -363,6 +407,17 @@
         this->SAR.SarWidth = other.SAR.SarWidth;
         this->SAR.SarHeight = other.SAR.SarHeight;
 
+        this->refIdx10ActiveMinus1 = other.refIdx10ActiveMinus1;
+        this->refIdx11ActiveMinus1 = other.refIdx11ActiveMinus1;
+        this->bFrameMBsOnly = other.bFrameMBsOnly;
+        this->bMBAFF = other.bMBAFF;
+        this->bEntropyCodingCABAC = other.bEntropyCodingCABAC;
+        this->bWeightedPPrediction = other.bWeightedPPrediction;
+        this->weightedBipredicitonMode = other.weightedBipredicitonMode;
+        this->bConstIpred = other.bConstIpred;
+        this->bDirect8x8Inference = other.bDirect8x8Inference;
+        this->bDirectSpatialTemporal = other.bDirectSpatialTemporal;
+        this->cabacInitIdc = other.cabacInitIdc;
         return *this;
     }
 };
@@ -450,6 +505,7 @@
 
     uint32_t idrInterval;  //How many Intra frame will have a IDR frame
     uint32_t intraPeriod;
+    uint32_t ipPeriod;
 };
 
 struct VideoConfigNALSize : VideoParamConfigSet {
@@ -512,14 +568,4 @@
     SliceNum sliceNum;
 };
 
-typedef struct {
-    uint32_t total_frames; 
-    uint32_t skipped_frames;
-    uint32_t average_encode_time;
-    uint32_t max_encode_time;
-    uint32_t max_encode_frame;
-    uint32_t min_encode_time;
-    uint32_t min_encode_frame;
-}VideoStatistics;
-
 #endif /*  __VIDEO_ENCODER_DEF_H__ */
diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp
index 7371d7a..b9b9c99 100644
--- a/videoencoder/VideoEncoderH263.cpp
+++ b/videoencoder/VideoEncoderH263.cpp
@@ -16,20 +16,20 @@
     mComParams.profile = (VAProfile)PROFILE_H263BASELINE;
 }
 
-Encode_Status VideoEncoderH263::sendEncodeCommand(void) {
+Encode_Status VideoEncoderH263::sendEncodeCommand(EncodeTask *task) {
 
     Encode_Status ret = ENCODE_SUCCESS;
     LOG_V( "Begin\n");
 
     if (mFrameNum == 0) {
-        ret = renderSequenceParams();
+        ret = renderSequenceParams(task);
         CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
     }
 
-    ret = renderPictureParams();
+    ret = renderPictureParams(task);
     CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
 
-    ret = renderSliceParams();
+    ret = renderSliceParams(task);
     CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
 
     LOG_V( "End\n");
@@ -37,7 +37,7 @@
 }
 
 
-Encode_Status VideoEncoderH263::renderSequenceParams() {
+Encode_Status VideoEncoderH263::renderSequenceParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncSequenceParameterBufferH263 h263SequenceParam = {};
@@ -78,7 +78,7 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderH263::renderPictureParams() {
+Encode_Status VideoEncoderH263::renderPictureParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncPictureParameterBufferH263 h263PictureParams = {};
@@ -86,18 +86,18 @@
     LOG_V( "Begin\n\n");
 
     // set picture params for HW
-    h263PictureParams.reference_picture = mRefSurface;
-    h263PictureParams.reconstructed_picture = mRecSurface;
-    h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex];
+    h263PictureParams.reference_picture = task->ref_surface[0];
+    h263PictureParams.reconstructed_picture = task->rec_surface;
+    h263PictureParams.coded_buf = task->coded_buffer;
     h263PictureParams.picture_width = mComParams.resolution.width;
     h263PictureParams.picture_height = mComParams.resolution.height;
-    h263PictureParams.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+    h263PictureParams.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
 
     LOG_V("======h263 picture params======\n");
     LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture);
     LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture);
     LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf);
-    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
+//    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
     LOG_I( "picture_width = %d\n", h263PictureParams.picture_width);
     LOG_I( "picture_height = %d\n",h263PictureParams.picture_height);
     LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type);
@@ -117,7 +117,7 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderH263::renderSliceParams() {
+Encode_Status VideoEncoderH263::renderSliceParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     uint32_t sliceHeight;
@@ -145,7 +145,7 @@
     sliceParams->start_row_number = 0;
     // slice height measured in MB
     sliceParams->slice_height = sliceHeightInMB;
-    sliceParams->slice_flags.bits.is_intra = mIsIntra;
+    sliceParams->slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
     sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0;
 
     LOG_V("======h263 slice params======\n");
diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h
index 2113e2f..a8578dd 100644
--- a/videoencoder/VideoEncoderH263.h
+++ b/videoencoder/VideoEncoderH263.h
@@ -20,7 +20,7 @@
     virtual ~VideoEncoderH263() {};
 
 protected:
-    virtual Encode_Status sendEncodeCommand(void);
+    virtual Encode_Status sendEncodeCommand(EncodeTask *task);
     virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) {
         return ENCODE_SUCCESS;
     }
@@ -33,12 +33,16 @@
     virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
         return ENCODE_SUCCESS;
     }
+    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
+        return ENCODE_NOT_SUPPORTED;
+    }
+    //virtual Encode_Status updateFrameInfo(EncodeTask* task);
 
     // Local Methods
 private:
-    Encode_Status renderSequenceParams();
-    Encode_Status renderPictureParams();
-    Encode_Status renderSliceParams();
+    Encode_Status renderSequenceParams(EncodeTask *task);
+    Encode_Status renderPictureParams(EncodeTask *task);
+    Encode_Status renderSliceParams(EncodeTask *task);
 };
 
 #endif /* __VIDEO_ENCODER_H263_H__ */
diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h
index 243e4a1..da1c6ec 100644
--- a/videoencoder/VideoEncoderInterface.h
+++ b/videoencoder/VideoEncoderInterface.h
@@ -17,14 +17,13 @@
     virtual Encode_Status start(void) = 0;
     virtual Encode_Status stop(void) = 0;
     virtual void flush(void) = 0;
-    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer) = 0;
-    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer) = 0;
+    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer, uint32_t timeout = FUNC_BLOCK) = 0;
+    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer, uint32_t timeout = FUNC_BLOCK) = 0;
     virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0;
     virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0;
     virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0;
     virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0;
     virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0;
-    virtual Encode_Status getStatistics(VideoStatistics *videoStat) = 0;
 };
 
 #endif /* VIDEO_ENCODER_INTERFACE_H_ */
diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h
index 49c34df..3b8910d 100644
--- a/videoencoder/VideoEncoderLog.h
+++ b/videoencoder/VideoEncoderLog.h
@@ -23,7 +23,8 @@
     __android_log_print(level, comp, "%s():%d: "format, \
     __FUNCTION__, __LINE__, ##__VA_ARGS__)
 
-#if 1
+//#define VIDEO_ENC_LOG_ENABLE
+#if  1
 #ifdef VIDEO_ENC_LOG_ENABLE
 #define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
 #define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp
index 6e0263b..8afb215 100644
--- a/videoencoder/VideoEncoderMP4.cpp
+++ b/videoencoder/VideoEncoderMP4.cpp
@@ -85,30 +85,14 @@
     return ret;
 }
 
-
-Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) {
+Encode_Status VideoEncoderMP4::getExtFormatOutput(VideoEncOutputBuffer *outBuffer) {
 
     Encode_Status ret = ENCODE_SUCCESS;
-    VAStatus vaStatus = VA_STATUS_SUCCESS;
-    bool useLocalBuffer = false;
 
     LOG_V("Begin\n");
     CHECK_NULL_RETURN_IFFAIL(outBuffer);
 
-     setKeyFrame(mComParams.intraPeriod);
-
-    // prepare for output, map the coded buffer
-    ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer);
-    CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
-
     switch (outBuffer->format) {
-        case OUTPUT_EVERYTHING:
-        case OUTPUT_FRAME_DATA: {
-            // Output whatever we have
-            ret = VideoEncoderBase::outputAllData(outBuffer);
-            CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
-            break;
-        }
         case OUTPUT_CODEC_DATA: {
             // Output the codec config data
             ret = outputConfigData(outBuffer);
@@ -123,32 +107,14 @@
 
     LOG_I("out size is = %d\n", outBuffer->dataSize);
 
-    // cleanup, unmap the coded buffer if all
-    // data has been copied out
-    ret = VideoEncoderBase::cleanupForOutput();
 
 CLEAN_UP:
 
-    if (ret < ENCODE_SUCCESS) {
-        if (outBuffer->data && (useLocalBuffer == true)) {
-            delete[] outBuffer->data;
-            outBuffer->data = NULL;
-            useLocalBuffer = false;
-        }
-
-        // error happens, unmap the buffer
-        if (mCodedBufferMapped) {
-            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
-            mCodedBufferMapped = false;
-            mCurSegment = NULL;
-        }
-    }
     LOG_V("End\n");
     return ret;
 }
 
-
-Encode_Status VideoEncoderMP4::renderSequenceParams() {
+Encode_Status VideoEncoderMP4::renderSequenceParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = {};
@@ -202,26 +168,26 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderMP4::renderPictureParams() {
+Encode_Status VideoEncoderMP4::renderPictureParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = {};
     LOG_V( "Begin\n\n");
 
     // set picture params for HW
-    mpeg4_pic_param.reference_picture = mRefSurface;
-    mpeg4_pic_param.reconstructed_picture = mRecSurface;
-    mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex];
+    mpeg4_pic_param.reference_picture = task->ref_surface[0];
+    mpeg4_pic_param.reconstructed_picture = task->rec_surface;
+    mpeg4_pic_param.coded_buf = task->coded_buffer;
     mpeg4_pic_param.picture_width = mComParams.resolution.width;
     mpeg4_pic_param.picture_height = mComParams.resolution.height;
     mpeg4_pic_param.vop_time_increment= mFrameNum;
-    mpeg4_pic_param.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+    mpeg4_pic_param.picture_type = (task->type == FTYPE_I) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
 
     LOG_V("======mpeg4 picture params======\n");
     LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture);
     LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture);
     LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf);
-    LOG_I("coded_buf_index = %d\n", mCodedBufIndex);
+//    LOG_I("coded_buf_index = %d\n", mCodedBufIndex);
     LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width);
     LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height);
     LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment);
@@ -242,7 +208,7 @@
 }
 
 
-Encode_Status VideoEncoderMP4::renderSliceParams() {
+Encode_Status VideoEncoderMP4::renderSliceParams(EncodeTask *task) {
 
     VAStatus vaStatus = VA_STATUS_SUCCESS;
     uint32_t sliceHeight;
@@ -259,7 +225,7 @@
 
     sliceParams.start_row_number = 0;
     sliceParams.slice_height = sliceHeightInMB;
-    sliceParams.slice_flags.bits.is_intra = mIsIntra;
+    sliceParams.slice_flags.bits.is_intra = (task->type == FTYPE_I)?1:0;
     sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0;
 
     LOG_V("======mpeg4 slice params======\n");
@@ -282,19 +248,19 @@
     return ENCODE_SUCCESS;
 }
 
-Encode_Status VideoEncoderMP4::sendEncodeCommand(void) {
+Encode_Status VideoEncoderMP4::sendEncodeCommand(EncodeTask *task) {
     Encode_Status ret = ENCODE_SUCCESS;
     LOG_V( "Begin\n");
 
     if (mFrameNum == 0) {
-        ret = renderSequenceParams();
+        ret = renderSequenceParams(task);
         CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
     }
 
-    ret = renderPictureParams();
+    ret = renderPictureParams(task);
     CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
 
-    ret = renderSliceParams();
+    ret = renderSliceParams(task);
-    CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+    CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
 
     LOG_V( "End\n");
diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h
index b453023..7e579c0 100644
--- a/videoencoder/VideoEncoderMP4.h
+++ b/videoencoder/VideoEncoderMP4.h
@@ -19,11 +19,10 @@
     VideoEncoderMP4();
     virtual ~VideoEncoderMP4() {};
 
-    Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+//    Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
 
 protected:
-    virtual Encode_Status sendEncodeCommand(void);
-
+    virtual Encode_Status sendEncodeCommand(EncodeTask *task);
     virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) {
         return ENCODE_SUCCESS;
     }
@@ -36,13 +35,16 @@
     virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
         return ENCODE_SUCCESS;
     }
+    virtual Encode_Status getExtFormatOutput(VideoEncOutputBuffer *outBuffer);
+    //virtual Encode_Status updateFrameInfo(EncodeTask* task);
+
     // Local Methods
 private:
     Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize);
     Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer);
-    Encode_Status renderSequenceParams();
-    Encode_Status renderPictureParams();
-    Encode_Status renderSliceParams();
+    Encode_Status renderSequenceParams(EncodeTask *task);
+    Encode_Status renderPictureParams(EncodeTask *task);
+    Encode_Status renderSliceParams(EncodeTask *task);
 
     unsigned char mProfileLevelIndication;
     uint32_t mFixedVOPTimeIncrement;