[PORT FROM R1][libmix] new encoder library

BZ: 5809

New encoder common library

Change-Id: I33a1896ac3efaf8697d9ee9e32b02932d9c2c896
Orig-Change-Id: I1c402c8d4b468011d892538f229d28f6f5abdcb0
Signed-off-by: Weian Chen <weian.chen@intel.com>
Reviewed-on: http://android.intel.com:8080/18318
Tested-by: Sang, Shuduo <shuduo.sang@intel.com>
Reviewed-by: Monnier, OlivierX <olivierx.monnier@intel.com>
Reviewed-by: buildbot <buildbot@intel.com>
Tested-by: buildbot <buildbot@intel.com>
diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
new file mode 100644
index 0000000..9ee2e42
--- /dev/null
+++ b/videoencoder/Android.mk
@@ -0,0 +1,43 @@
# Build libva_videoencoder: common video encoder library on top of libva.
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)

# Set to true to build with verbose encoder logging (adds -DVIDEO_ENC_LOG_ENABLE).
VIDEO_ENC_LOG_ENABLE := false

# Base encoder plus the per-codec implementations and the host factory.
LOCAL_SRC_FILES :=              \
    VideoEncoderBase.cpp        \
    VideoEncoderAVC.cpp         \
    VideoEncoderH263.cpp        \
    VideoEncoderMP4.cpp         \
    VideoEncoderHost.cpp

# LOCAL_CFLAGS :=

LOCAL_C_INCLUDES :=             \
    $(LOCAL_PATH)               \
    $(TARGET_OUT_HEADERS)/libva \

#LOCAL_LDLIBS += -lpthread

LOCAL_SHARED_LIBRARIES :=       \
        libcutils               \
        libva                   \
        libva-android           \
        libva-tpi

#LOCAL_CFLAGS += -DANDROID

# Public headers exported for clients of the encoder library.
LOCAL_COPY_HEADERS_TO  := libmix_videoencoder

LOCAL_COPY_HEADERS := \
    VideoEncoderHost.h \
    VideoEncoderInterface.h \
    VideoEncoderDef.h

ifeq ($(VIDEO_ENC_LOG_ENABLE),true)
LOCAL_CPPFLAGS += -DVIDEO_ENC_LOG_ENABLE
endif

LOCAL_MODULE_TAGS := optional
LOCAL_MODULE := libva_videoencoder

include $(BUILD_SHARED_LIBRARY)
diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
new file mode 100644
index 0000000..93951c6
--- /dev/null
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -0,0 +1,890 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderAVC.h"
+#include <va/va_tpi.h>
+
+VideoEncoderAVC::VideoEncoderAVC()
+    :VideoEncoderBase() {
+    mVideoParamsAVC.basicUnitSize = 0;
+    mVideoParamsAVC.VUIFlag = 0;
+    mVideoParamsAVC.sliceNum.iSliceNum = 2;
+    mVideoParamsAVC.sliceNum.pSliceNum = 2;
+    mVideoParamsAVC.idrInterval = 2;
+    mVideoParamsAVC.maxSliceSize = 0;
+    mVideoParamsAVC.delimiterType = AVC_DELIMITER_ANNEXB;
+    mSliceNum = 2;
+}
+
+Encode_Status VideoEncoderAVC::start() {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    LOG_V( "Begin\n");
+
+    if (mComParams.rcMode == VA_RC_VCM) {
+        // If we are in VCM, we will set slice num to max value
+        mVideoParamsAVC.sliceNum.iSliceNum = (mComParams.resolution.height + 15) / 16;
+        mVideoParamsAVC.sliceNum.pSliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
+    }
+
+    ret = VideoEncoderBase::start ();
+    CHECK_ENCODE_STATUS_RETURN("VideoEncoderBase::start");
+
+    LOG_V( "end\n");
+    return ret;
+}
+
+Encode_Status VideoEncoderAVC::derivedSetParams(VideoParamConfigSet *videoEncParams) {
+
+    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+    VideoParamsAVC *encParamsAVC = reinterpret_cast <VideoParamsAVC *> (videoEncParams);
+
+    // AVC parames
+    if (encParamsAVC->size != sizeof (VideoParamsAVC)) {
+        return ENCODE_INVALID_PARAMS;
+    }
+
+    mVideoParamsAVC = *encParamsAVC;
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC:: derivedGetParams(VideoParamConfigSet *videoEncParams) {
+
+    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+    VideoParamsAVC *encParamsAVC = reinterpret_cast <VideoParamsAVC *> (videoEncParams);
+
+    // AVC parames
+    if (encParamsAVC->size != sizeof (VideoParamsAVC)) {
+        return ENCODE_INVALID_PARAMS;
+    }
+
+    *encParamsAVC = mVideoParamsAVC;
+    return ENCODE_SUCCESS;
+
+}
+
+Encode_Status VideoEncoderAVC::derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+
+    CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+    LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+    switch (videoEncConfig->type) {
+        case VideoConfigTypeAVCIntraPeriod: {
+
+            VideoConfigAVCIntraPeriod *configAVCIntraPeriod =
+                    reinterpret_cast <VideoConfigAVCIntraPeriod *> (videoEncConfig);
+            // Config Intra Peroid
+            if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mVideoParamsAVC.idrInterval = configAVCIntraPeriod->idrInterval;
+            mComParams.intraPeriod = configAVCIntraPeriod->intraPeriod;
+            mNewHeader = true;
+            break;
+        }
+        case VideoConfigTypeNALSize: {
+            // Config MTU
+            VideoConfigNALSize *configNALSize =
+                    reinterpret_cast <VideoConfigNALSize *> (videoEncConfig);
+            if (configNALSize->size != sizeof (VideoConfigNALSize)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mVideoParamsAVC.maxSliceSize = configNALSize->maxSliceSize;
+            mRenderMaxSliceSize = true;
+            break;
+        }
+        case VideoConfigTypeIDRRequest: {
+
+            mNewHeader = true;
+            break;
+        }
+        case VideoConfigTypeSliceNum: {
+
+            VideoConfigSliceNum *configSliceNum =
+                    reinterpret_cast <VideoConfigSliceNum *> (videoEncConfig);
+            // Config Slice size
+            if (configSliceNum->size != sizeof (VideoConfigSliceNum)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mVideoParamsAVC.sliceNum = configSliceNum->sliceNum;
+            break;
+        }
+        default: {
+            LOG_E ("Invalid Config Type");
+            break;
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC:: derivedGetConfig(
+        VideoParamConfigSet *videoEncConfig) {
+
+    CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+    LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+    switch (videoEncConfig->type) {
+
+        case VideoConfigTypeAVCIntraPeriod: {
+
+            VideoConfigAVCIntraPeriod *configAVCIntraPeriod =
+                    reinterpret_cast <VideoConfigAVCIntraPeriod *> (videoEncConfig);
+            if (configAVCIntraPeriod->size != sizeof (VideoConfigAVCIntraPeriod)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            configAVCIntraPeriod->idrInterval = mVideoParamsAVC.idrInterval;
+            configAVCIntraPeriod->intraPeriod = mComParams.intraPeriod;
+
+            break;
+        }
+        case VideoConfigTypeNALSize: {
+
+            VideoConfigNALSize *configNALSize =
+                    reinterpret_cast <VideoConfigNALSize *> (videoEncConfig);
+            if (configNALSize->size != sizeof (VideoConfigNALSize)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            configNALSize->maxSliceSize = mVideoParamsAVC.maxSliceSize;
+            break;
+        }
+        case VideoConfigTypeIDRRequest: {
+            break;
+
+        }
+        case VideoConfigTypeSliceNum: {
+
+            VideoConfigSliceNum *configSliceNum =
+                    reinterpret_cast <VideoConfigSliceNum *> (videoEncConfig);
+            if (configSliceNum->size != sizeof (VideoConfigSliceNum)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            configSliceNum->sliceNum = mVideoParamsAVC.sliceNum;
+            break;
+        }
+        default: {
+            LOG_E ("Invalid Config Type");
+            break;
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::getOutput(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    bool useLocalBuffer = false;
+    uint32_t nalType = 0;
+    uint32_t nalSize = 0;
+    uint32_t nalOffset = 0;
+    uint32_t idrPeroid = mComParams.intraPeriod * mVideoParamsAVC.idrInterval;
+
+    LOG_V("Begin\n");
+    CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+    if (mFrameNum > 2) {
+        if (idrPeroid != 0 && (((mFrameNum - 2) % idrPeroid) == 0)) {
+            mKeyFrame = true;
+        } else {
+            mKeyFrame = false;
+        }
+    }
+
+    // prepare for output, map the coded buffer
+    ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer);
+    CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
+
+    switch (outBuffer->format) {
+        case OUTPUT_EVERYTHING:
+        case OUTPUT_FRAME_DATA: {
+            // Output whatever we have
+            ret = VideoEncoderBase::outputAllData(outBuffer);
+            CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+            break;
+        }
+        case OUTPUT_CODEC_DATA: {
+            // Output the codec data
+            ret = outputCodecData(outBuffer);
+            CHECK_ENCODE_STATUS_CLEANUP("outputCodecData");
+            break;
+        }
+
+        case OUTPUT_ONE_NAL: {
+            // Output only one NAL unit
+            ret = outputOneNALU(outBuffer, true);
+            CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU");
+            break;
+        }
+
+        case OUTPUT_ONE_NAL_WITHOUT_STARTCODE: {
+            ret = outputOneNALU(outBuffer, false);
+            CHECK_ENCODE_STATUS_CLEANUP("outputOneNALU");
+            break;
+        }
+
+        case OUTPUT_LENGTH_PREFIXED: {
+            // Output length prefixed
+            ret = outputLengthPrefixed(outBuffer);
+            CHECK_ENCODE_STATUS_CLEANUP("outputLengthPrefixed");
+            break;
+        }
+
+        default:
+            LOG_E("Invalid buffer mode\n");
+            ret = ENCODE_FAIL;
+            break;
+    }
+
+    LOG_I("out size is = %d\n", outBuffer->dataSize);
+
+    // cleanup, unmap the coded buffer if all
+    // data has been copied out
+    ret = VideoEncoderBase::cleanupForOutput();
+
+CLEAN_UP:
+
+    if (ret < ENCODE_SUCCESS) {
+        if (outBuffer->data && (useLocalBuffer == true)) {
+            delete[] outBuffer->data;
+            outBuffer->data = NULL;
+            useLocalBuffer = false;
+        }
+
+        // error happens, unmap the buffer
+        if (mCodedBufferMapped) {
+            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+            mCodedBufferMapped = false;
+            mCurSegment = NULL;
+        }
+    }
+    LOG_V("End\n");
+    return ret;
+}
+
// Locate the first NAL unit in an Annex B bitstream.
// On success fills in:
//   *nalOffset - offset of the NAL payload (just past the start code)
//   *nalType   - nal_unit_type (low 5 bits of the first payload byte)
//   *nalSize   - payload size up to the next start code (or buffer end)
// Returns ENCODE_FAIL if the buffer does not begin with an Annex B start
// code (00 00 01 or 00 00 00 01).
Encode_Status VideoEncoderAVC::getOneNALUnit(
        uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize,
        uint32_t *nalType, uint32_t *nalOffset) {
    uint32_t pos = 0;
    uint32_t zeroByteCount = 0;
    uint32_t prefixLength = 0;
    uint32_t leadingZeroCnt = 0;

    // Don't need to check parameters here as we just checked by caller
    // Consume the run of zero bytes that opens the start code.
    while ((inBuffer[pos++] == 0x00)) {
        zeroByteCount ++;
        if (pos >= bufSize)  //to make sure the buffer to be accessed is valid
            break;
    }

    // A valid start code is at least two zero bytes followed by 0x01.
    if (inBuffer[pos - 1] != 0x01 || zeroByteCount < 2) {
        LOG_E("The stream is not AnnexB format \n");
        return ENCODE_FAIL; //not AnnexB, we won't process it
    }

    // nal_unit_type lives in the low 5 bits of the byte after the start code.
    *nalType = (*(inBuffer + pos)) & 0x1F;
    LOG_I ("NAL type = 0x%x\n", *nalType);

    zeroByteCount = 0;
    *nalOffset = pos;

    // Scan forward to the next start code (or the end of the buffer) to
    // determine where this NAL unit ends.
    while (pos < bufSize) {

        while (inBuffer[pos++] == 0) {
            zeroByteCount ++;
            if (pos >= bufSize) //to make sure the buffer to be accessed is valid
                break;
        }

        if (inBuffer[pos - 1] == 0x01 && zeroByteCount >= 2) {
            // Next start code found: exclude its prefix (3 or 4 bytes) and
            // any extra leading zeros from this NAL unit's size.
            if (zeroByteCount == 2) {
                prefixLength = 3;
            } else {
                prefixLength = 4;
                leadingZeroCnt = zeroByteCount - 3;
            }

            LOG_V("leading_zero_count = %d\n", leadingZeroCnt);
            *nalSize = pos - *nalOffset - prefixLength - leadingZeroCnt;
            break;
        } else if (pos == bufSize) {
            LOG_V ("The last NALU\n");
            *nalSize = pos - *nalOffset;
        } else {
            // Not a start code; reset the zero-run counters and keep going.
            zeroByteCount = 0;
            leadingZeroCnt = 0;
        }
    }

    return ENCODE_SUCCESS;
}
+
+Encode_Status VideoEncoderAVC::getHeader(
+        uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) {
+
+    uint32_t nalType = 0;
+    uint32_t nalSize = 0;
+    uint32_t nalOffset = 0;
+    uint32_t size = 0;
+    uint8_t *buf = inBuffer;
+    Encode_Status ret = ENCODE_SUCCESS;
+
+    *headerSize = 0;
+    CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+    if (bufSize == 0) {
+        //bufSize shoule not be 0, error happens
+        LOG_E("Buffer size is 0\n");
+        return ENCODE_FAIL;
+    }
+
+    while (1) {
+        nalType = nalSize = nalOffset = 0;
+        ret = getOneNALUnit(buf, bufSize, &nalSize, &nalType, &nalOffset);
+        CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+        LOG_I("NAL type = %d, NAL size = %d, offset = %d\n", nalType, nalSize, nalOffset);
+        size = nalSize + nalOffset;
+
+        // Codec_data should be SPS or PPS
+        if (nalType == 7 || nalType == 8) {
+            *headerSize += size;
+            buf += size;
+            bufSize -= size;
+        } else {
+            LOG_V("No header found or no header anymore\n");
+            break;
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::outputCodecData(
+        VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    uint32_t headerSize = 0;
+
+    ret = getHeader((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+            mCurSegment->size - mOffsetInSeg, &headerSize);
+    CHECK_ENCODE_STATUS_RETURN("getHeader");
+    if (headerSize == 0) {
+        outBuffer->dataSize = 0;
+        mCurSegment = NULL;
+        return ENCODE_NO_REQUEST_DATA;
+    }
+
+    if (headerSize <= outBuffer->bufferSize) {
+        memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize);
+        mTotalSizeCopied += headerSize;
+        mOffsetInSeg += headerSize;
+        outBuffer->dataSize = headerSize;
+        outBuffer->remainingSize = 0;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+    } else {
+        // we need a big enough buffer, otherwise we won't output anything
+        outBuffer->dataSize = 0;
+        outBuffer->remainingSize = headerSize;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+        LOG_E("Buffer size too small\n");
+        return ENCODE_BUFFER_TOO_SMALL;
+    }
+
+    return ret;
+}
+
// Copy a single NAL unit from the current coded segment into outBuffer.
//   startCode - when true, the Annex B start code is copied along with
//               the payload; when false, only the payload is copied.
// Sets PARTIALFRAME (plus SYNCFRAME on key frames), and ENDOFFRAME once
// the final segment is exhausted. Returns ENCODE_BUFFER_TOO_SMALL with
// remainingSize filled in when outBuffer cannot hold the NAL unit.
Encode_Status VideoEncoderAVC::outputOneNALU(
        VideoEncOutputBuffer *outBuffer, bool startCode) {

    uint32_t nalType = 0;
    uint32_t nalSize = 0;
    uint32_t nalOffset = 0;
    uint32_t sizeToBeCopied = 0;

    Encode_Status ret = ENCODE_SUCCESS;
    CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);

    // Parse the next NAL unit at the current read offset.
    ret = getOneNALUnit((uint8_t *)mCurSegment->buf + mOffsetInSeg,
            mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset);
    CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");

    // check if we need startcode along with the payload
    if (startCode) {
        sizeToBeCopied = nalSize + nalOffset;
    } else {
        sizeToBeCopied = nalSize;
    }

    if (sizeToBeCopied <= outBuffer->bufferSize) {
        if (startCode) {
            memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
        } else {
            memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset,
                   sizeToBeCopied);
        }
        // Advance past the whole NAL (prefix + payload) regardless of
        // whether the prefix was copied out.
        mTotalSizeCopied += sizeToBeCopied;
        mOffsetInSeg += (nalSize + nalOffset);
        outBuffer->dataSize = sizeToBeCopied;
        outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
        if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
        outBuffer->remainingSize = 0;
    } else {
        // if nothing to be copied out, set flag to invalid
        outBuffer->dataSize = 0;
        outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
        outBuffer->remainingSize = sizeToBeCopied;
        LOG_W("Buffer size too small\n");
        return ENCODE_BUFFER_TOO_SMALL;
    }

    // check if all data in current segment has been copied out
    if (mCurSegment->size == mOffsetInSeg) {
        if (mCurSegment->next != NULL) {
            mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
            mOffsetInSeg = 0;
        } else {
            LOG_V("End of stream\n");
            outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
            mCurSegment = NULL;
        }
    }

    return ENCODE_SUCCESS;
}
+
+Encode_Status VideoEncoderAVC::outputLengthPrefixed(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    uint32_t nalType = 0;
+    uint32_t nalSize = 0;
+    uint32_t nalOffset = 0;
+    uint32_t sizeCopiedHere = 0;
+    uint32_t sizeToBeCopied = 0;
+
+    CHECK_NULL_RETURN_IFFAIL(mCurSegment->buf);
+
+    while (1) {
+
+        if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+            LOG_E("mCurSegment->size < mOffsetInSeg  || outBuffer->bufferSize < sizeCopiedHere\n");
+            return ENCODE_FAIL;
+        }
+
+        // we need to handle the whole bitstream NAL by NAL
+        ret = getOneNALUnit(
+                (uint8_t *)mCurSegment->buf + mOffsetInSeg,
+                mCurSegment->size - mOffsetInSeg, &nalSize, &nalType, &nalOffset);
+        CHECK_ENCODE_STATUS_RETURN("getOneNALUnit");
+
+        if (nalSize + 4 <= outBuffer->bufferSize - sizeCopiedHere) {
+            // write the NAL length to bit stream
+            outBuffer->data[sizeCopiedHere] = (nalSize >> 24) & 0xff;
+            outBuffer->data[sizeCopiedHere + 1] = (nalSize >> 16) & 0xff;
+            outBuffer->data[sizeCopiedHere + 2] = (nalSize >> 8)  & 0xff;
+            outBuffer->data[sizeCopiedHere + 3] = nalSize   & 0xff;
+
+            sizeCopiedHere += 4;
+            mTotalSizeCopied += 4;
+
+            memcpy(outBuffer->data + sizeCopiedHere,
+                   (uint8_t *)mCurSegment->buf + mOffsetInSeg + nalOffset, sizeToBeCopied);
+
+            sizeCopiedHere += nalSize;
+            mTotalSizeCopied += nalSize;
+            mOffsetInSeg += (nalSize + nalOffset);
+
+        } else {
+            outBuffer->dataSize = sizeCopiedHere;
+            // In case the start code is 3-byte length but we use 4-byte for length prefixed
+            // so the remainingSize size may larger than the remaining data size
+            outBuffer->remainingSize = mTotalSize - mTotalSizeCopied + 100;
+            outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+            LOG_E("Buffer size too small\n");
+            return ENCODE_BUFFER_TOO_SMALL;
+        }
+
+        // check if all data in current segment has been copied out
+        if (mCurSegment->size == mOffsetInSeg) {
+            if (mCurSegment->next != NULL) {
+                mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+                mOffsetInSeg = 0;
+            } else {
+                LOG_V("End of stream\n");
+                outBuffer->dataSize = sizeCopiedHere;
+                outBuffer->remainingSize = 0;
+                outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+                if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+                mCurSegment = NULL;
+                break;
+            }
+        }
+    }
+
+    return ENCODE_SUCCESS;
+}
+
// Queue all parameter buffers for the current frame, in order: sequence
// header (first frame or when a header refresh is pending), then any
// pending dynamic configuration (max slice size, bitrate, AIR, frame
// rate), and finally the picture and slice parameters. Called once per
// frame before submission to the hardware.
Encode_Status VideoEncoderAVC::sendEncodeCommand(void) {
    Encode_Status ret = ENCODE_SUCCESS;

    LOG_V( "Begin\n");

    // Emit SPS/PPS on the very first frame, or when a config change
    // (intra period update / IDR request) demands a fresh header.
    if (mFrameNum == 0 || mNewHeader) {
        ret = renderSequenceParams();
        CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
        mNewHeader = false; //Set to require new header filed to false
    }

    // One-shot dynamic updates: each flag is cleared after rendering.
    if (mRenderMaxSliceSize && mVideoParamsAVC.maxSliceSize != 0) {
        ret = renderMaxSliceSize();
        CHECK_ENCODE_STATUS_RETURN("renderMaxSliceSize");
        mRenderMaxSliceSize = false;
    }

    if (mRenderBitRate) {
        ret = VideoEncoderBase::renderDynamicBitrate();
        CHECK_ENCODE_STATUS_RETURN("renderDynamicBitrate");

        mRenderBitRate = false;
    }

    // AIR only applies when the refresh type includes adaptive intra refresh.
    if (mRenderAIR &&
        (mComParams.refreshType == VIDEO_ENC_AIR ||
        mComParams.refreshType == VIDEO_ENC_BOTH)) {

        ret = renderAIR();
        CHECK_ENCODE_STATUS_RETURN("renderAIR");

        mRenderAIR = false;
    }

    if (mRenderFrameRate) {

        ret = VideoEncoderBase::renderDynamicFrameRate();
        CHECK_ENCODE_STATUS_RETURN("renderDynamicFrameRate");

        mRenderFrameRate = false;
    }

    // Per-frame picture and slice parameters always go last.
    ret = renderPictureParams();
    CHECK_ENCODE_STATUS_RETURN("renderPictureParams");

    ret = renderSliceParams();
    CHECK_ENCODE_STATUS_RETURN("renderSliceParams");

    LOG_V( "End\n");
    return ENCODE_SUCCESS;
}
+
+
+Encode_Status VideoEncoderAVC::renderMaxSliceSize() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    Encode_Status ret = ENCODE_SUCCESS;
+    LOG_V( "Begin\n\n");
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+        LOG_W ("Not in VCM mode, but call send_max_slice_size\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *miscEncParamBuf;
+    VAEncMiscParameterMaxSliceSize *maxSliceSizeParam;
+    VABufferID miscParamBufferID;
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeMaxSliceSize;
+    maxSliceSizeParam = (VAEncMiscParameterMaxSliceSize *)miscEncParamBuf->data;
+
+    maxSliceSizeParam->max_slice_size = mVideoParamsAVC.maxSliceSize;
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    LOG_I( "max slice size = %d\n", maxSliceSizeParam->max_slice_size);
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderAVC::renderAIR() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    LOG_V( "Begin\n\n");
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+
+        LOG_W("Not in VCM mode, but call send_AIR\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer   *miscEncParamBuf;
+    VAEncMiscParameterAIR *airParams;
+    VABufferID miscParamBufferID;
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(miscEncParamBuf) + sizeof(VAEncMiscParameterAIR),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeAIR;
+    airParams = (VAEncMiscParameterAIR *)miscEncParamBuf->data;
+
+    airParams->air_num_mbs = mComParams.airParams.airMBs;
+    airParams->air_threshold= mComParams.airParams.airThreshold;
+    airParams->air_auto = mComParams.airParams.airAuto;
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_I( "airThreshold = %d\n", airParams->air_threshold);
+    return ENCODE_SUCCESS;
+}
+
+int VideoEncoderAVC::calcLevel(int numMbs) {
+    int level = 30;
+
+    if (numMbs < 3600) {
+        level = 30;
+    } else if (numMbs < 5120) {
+        level = 31;
+    } else if (numMbs < 8192) {
+        level = 32;
+    } else if (numMbs < 8704) {
+        level = 40;
+    } else if (numMbs < 22080) {
+        level = 42;
+    } else if (numMbs < 36864) {
+        level = 50;
+    } else {
+        level = 51;
+    }
+    return level;
+}
+
+Encode_Status VideoEncoderAVC::renderSequenceParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncSequenceParameterBufferH264 avcSeqParams;
+    int level;
+    uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+    uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+
+    LOG_V( "Begin\n\n");
+
+    // set up the sequence params for HW
+    // avcSeqParams.level_idc = mLevel;
+    avcSeqParams.intra_period = mComParams.intraPeriod;
+    avcSeqParams.intra_idr_period = mVideoParamsAVC.idrInterval;
+    avcSeqParams.picture_width_in_mbs = (mComParams.resolution.width + 15) / 16;
+    avcSeqParams.picture_height_in_mbs = (mComParams.resolution.height + 15) / 16;
+
+    level = calcLevel (avcSeqParams.picture_width_in_mbs * avcSeqParams.picture_height_in_mbs);
+    avcSeqParams.level_idc = level;
+    avcSeqParams.bits_per_second = mComParams.rcParams.bitRate;
+    avcSeqParams.frame_rate =
+            (unsigned int) (frameRateNum + frameRateDenom /2 ) / frameRateDenom;
+    avcSeqParams.initial_qp = mComParams.rcParams.initQP;
+    avcSeqParams.min_qp = mComParams.rcParams.minQP;
+    avcSeqParams.basic_unit_size = mVideoParamsAVC.basicUnitSize; //for rate control usage
+    avcSeqParams.intra_period = mComParams.intraPeriod;
+    //avcSeqParams.vui_flag = 248;
+    avcSeqParams.seq_parameter_set_id = 8;
+
+    // This is a temporary fix suggested by Binglin for bad encoding quality issue
+    avcSeqParams.max_num_ref_frames = 1; // TODO: We need a long term design for this field
+
+    LOG_V("===h264 sequence params===\n");
+    LOG_I( "seq_parameter_set_id = %d\n", (uint32_t)avcSeqParams.seq_parameter_set_id);
+    LOG_I( "level_idc = %d\n", (uint32_t)avcSeqParams.level_idc);
+    LOG_I( "intra_period = %d\n", avcSeqParams.intra_period);
+    LOG_I( "idr_interval = %d\n", avcSeqParams.intra_idr_period);
+    LOG_I( "picture_width_in_mbs = %d\n", avcSeqParams.picture_width_in_mbs);
+    LOG_I( "picture_height_in_mbs = %d\n", avcSeqParams.picture_height_in_mbs);
+    LOG_I( "bitrate = %d\n", avcSeqParams.bits_per_second);
+    LOG_I( "frame_rate = %d\n", avcSeqParams.frame_rate);
+    LOG_I( "initial_qp = %d\n", avcSeqParams.initial_qp);
+    LOG_I( "min_qp = %d\n", avcSeqParams.min_qp);
+    LOG_I( "basic_unit_size = %d\n", avcSeqParams.basic_unit_size);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSequenceParameterBufferType,
+            sizeof(avcSeqParams), 1, &avcSeqParams,
+            &mSeqParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderAVC::renderPictureParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncPictureParameterBufferH264 avcPicParams;
+
+    LOG_V( "Begin\n\n");
+    // set picture params for HW
+    avcPicParams.reference_picture = mRefFrame->surface;
+    avcPicParams.reconstructed_picture = mRecFrame->surface;
+    avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex];
+    avcPicParams.picture_width = mComParams.resolution.width;
+    avcPicParams.picture_height = mComParams.resolution.height;
+    avcPicParams.last_picture = 0;
+
+    LOG_V("======h264 picture params======\n");
+    LOG_I( "reference_picture = 0x%08x\n", avcPicParams.reference_picture);
+    LOG_I( "reconstructed_picture = 0x%08x\n", avcPicParams.reconstructed_picture);
+    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
+    LOG_I( "coded_buf = 0x%08x\n", avcPicParams.coded_buf);
+    LOG_I( "picture_width = %d\n", avcPicParams.picture_width);
+    LOG_I( "picture_height = %d\n\n", avcPicParams.picture_height);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncPictureParameterBufferType,
+            sizeof(avcPicParams),
+            1,&avcPicParams,
+            &mPicParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+
// Build and submit the slice parameter array for the current frame. The
// frame is divided into sliceNum horizontal slice bands (clamped to
// [1, MB rows]); MB rows are distributed as evenly as possible, with the
// first (maxSliceNum % sliceNum) slices getting one extra row each.
Encode_Status VideoEncoderAVC::renderSliceParams() {

    VAStatus vaStatus = VA_STATUS_SUCCESS;

    uint32_t sliceNum = 0;
    uint32_t sliceHeight = 0;
    uint32_t sliceIndex = 0;
    uint32_t sliceHeightInMB = 0;
    uint32_t maxSliceNum = 0;
    uint32_t minSliceNum = 0;
    int actualSliceHeightInMB = 0;
    int startRowInMB = 0;
    uint32_t modulus = 0;

    LOG_V( "Begin\n\n");

    // At most one slice per macroblock row.
    maxSliceNum = (mComParams.resolution.height + 15) / 16;
    minSliceNum = 1;

    // I and P frames may be configured with different slice counts.
    if (mIsIntra) {
        sliceNum = mVideoParamsAVC.sliceNum.iSliceNum;
    } else {
        sliceNum = mVideoParamsAVC.sliceNum.pSliceNum;
    }

    if (sliceNum < minSliceNum) {
        LOG_W("Slice Number is too small");
        sliceNum = minSliceNum;
    }

    if (sliceNum > maxSliceNum) {
        LOG_W("Slice Number is too big");
        sliceNum = maxSliceNum;
    }

    // Base rows per slice; the remainder is spread over the first slices.
    mSliceNum= sliceNum;
    modulus = maxSliceNum % sliceNum;
    sliceHeightInMB = (maxSliceNum - modulus) / sliceNum ;

    // One VAEncSliceParameterBuffer element per slice.
    vaStatus = vaCreateBuffer(
            mVADisplay, mVAContext,
            VAEncSliceParameterBufferType,
            sizeof(VAEncSliceParameterBuffer),
            sliceNum, NULL,
            &mSliceParamBuf);
    CHECK_VA_STATUS_RETURN("vaCreateBuffer");

    VAEncSliceParameterBuffer *sliceParams, *currentSlice;
    vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
    CHECK_VA_STATUS_RETURN("vaMapBuffer");

    currentSlice = sliceParams;
    startRowInMB = 0;
    for (sliceIndex = 0; sliceIndex < sliceNum; sliceIndex++) {
        currentSlice = sliceParams + sliceIndex;
        actualSliceHeightInMB = sliceHeightInMB;
        // The first `modulus` slices absorb the leftover MB rows.
        if (sliceIndex < modulus) {
            actualSliceHeightInMB ++;
        }

        // starting MB row number for this slice
        currentSlice->start_row_number = startRowInMB;
        // slice height measured in MB
        currentSlice->slice_height = actualSliceHeightInMB;
        currentSlice->slice_flags.bits.is_intra = mIsIntra;
        currentSlice->slice_flags.bits.disable_deblocking_filter_idc
        = mComParams.disableDeblocking;

        // This is a temporary fix suggested by Binglin for bad encoding quality issue
        // TODO: We need a long term design for this field
        currentSlice->slice_flags.bits.uses_long_term_ref = 0;
        currentSlice->slice_flags.bits.is_long_term_ref = 0;

        LOG_V("======AVC slice params======\n");
        LOG_I( "slice_index = %d\n", (int) sliceIndex);
        LOG_I( "start_row_number = %d\n", (int) currentSlice->start_row_number);
        LOG_I( "slice_height_in_mb = %d\n", (int) currentSlice->slice_height);
        LOG_I( "slice.is_intra = %d\n", (int) currentSlice->slice_flags.bits.is_intra);
        LOG_I("disable_deblocking_filter_idc = %d\n\n", (int) currentSlice->slice_flags.bits.disable_deblocking_filter_idc);

        startRowInMB += actualSliceHeightInMB;
    }

    vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");

    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
    CHECK_VA_STATUS_RETURN("vaRenderPicture");
    LOG_V( "end\n");
    return ENCODE_SUCCESS;
}
diff --git a/videoencoder/VideoEncoderAVC.h b/videoencoder/VideoEncoderAVC.h
new file mode 100644
index 0000000..c86b0b4
--- /dev/null
+++ b/videoencoder/VideoEncoderAVC.h
@@ -0,0 +1,55 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __VIDEO_ENCODER_AVC_H__
+#define __VIDEO_ENCODER_AVC_H__
+
+#include "VideoEncoderBase.h"
+
+// H.264/AVC specialization of VideoEncoderBase.  Adds AVC-specific
+// parameter handling, NAL-unit based output packing, and the AVC
+// sequence/picture/slice parameter rendering steps.
+class VideoEncoderAVC : public VideoEncoderBase {
+
+public:
+    VideoEncoderAVC();
+    ~VideoEncoderAVC() {};
+
+    virtual Encode_Status start();
+    // AVC-specific output retrieval; backed by the private output*
+    // helpers below (codec data / one NALU / length-prefixed).
+    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+
+    // Codec-specific hooks invoked by VideoEncoderBase::set/getParameters
+    // and set/getConfig for the VideoParamsTypeAVC cases.
+    virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams);
+    virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams);
+    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig);
+    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig);
+
+protected:
+
+    // Per-frame encode submission hook called from VideoEncoderBase::encode()
+    virtual Encode_Status sendEncodeCommand(void);
+
+private:
+    // Local Methods
+
+    // Bitstream helpers: locate/extract NAL units from an encoded buffer
+    // and pack them into the caller's output buffer.
+    Encode_Status getOneNALUnit(uint8_t *inBuffer, uint32_t bufSize, uint32_t *nalSize, uint32_t *nalType, uint32_t *nalOffset);
+    Encode_Status getHeader(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize);
+    Encode_Status outputCodecData(VideoEncOutputBuffer *outBuffer);
+    Encode_Status outputOneNALU(VideoEncOutputBuffer *outBuffer, bool startCode);
+    Encode_Status outputLengthPrefixed(VideoEncOutputBuffer *outBuffer);
+
+    // VA parameter-buffer rendering steps for one AVC frame
+    Encode_Status renderMaxSliceSize();
+    Encode_Status renderAIR();
+    Encode_Status renderSequenceParams();
+    Encode_Status renderPictureParams();
+    Encode_Status renderSliceParams();
+    int calcLevel(int numMbs);
+
+public:
+
+    // NOTE(review): public data members, apparently set directly by
+    // callers -- consider accessors in a follow-up.
+    VideoParamsAVC mVideoParamsAVC;
+    uint32_t mSliceNum;
+
+};
+
+#endif /* __VIDEO_ENCODER_AVC_H__ */
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
new file mode 100644
index 0000000..4c6fe62
--- /dev/null
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -0,0 +1,1694 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+#include <string.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderBase.h"
+#include <va/va_tpi.h>
+#include <va/va_android.h>
+
+// Constructor: false/zero-initializes all encoder state, loads the default
+// common parameters, then opens and initializes the VA display.
+// Failures from vaGetDisplay/vaInitialize are only logged here (no
+// exceptions); an unusable display will surface as errors in start().
+VideoEncoderBase::VideoEncoderBase()
+    :mInitialized(false)
+    ,mVADisplay(NULL)
+    ,mVAContext(0)
+    ,mVAConfig(0)
+    ,mVAEntrypoint(VAEntrypointEncSlice)
+    ,mCurSegment(NULL)
+    ,mOffsetInSeg(0)
+    ,mTotalSize(0)
+    ,mTotalSizeCopied(0)
+    ,mBufferMode(BUFFER_SHARING_NONE)
+    ,mUpstreamBufferList(NULL)
+    ,mUpstreamBufferCnt(0)
+    ,mForceKeyFrame(false)
+    ,mNewHeader(false)
+    ,mFirstFrame (true)
+    ,mRenderMaxSliceSize(false)
+    ,mRenderQP (false)
+    ,mRenderAIR(false)
+    ,mRenderFrameRate(false)
+    ,mRenderBitRate(false)
+    ,mLastCodedBuffer(0)
+    ,mOutCodedBuffer(0)
+    ,mSeqParamBuf(0)
+    ,mPicParamBuf(0)
+    ,mSliceParamBuf(0)
+    ,mSharedSurfaces(NULL)
+    ,mSurfaces(NULL)
+    ,mSurfaceCnt(0)
+    ,mSharedSurfacesCnt(0)
+    ,mReqSurfacesCnt(0)
+    ,mUsrPtr(NULL)
+    ,mVideoSrcBufferList(NULL)
+    ,mCurFrame(NULL)
+    ,mRefFrame(NULL)
+    ,mRecFrame(NULL)
+    ,mLastFrame(NULL)
+    ,mLastInputRawBuffer(NULL)
+    ,mEncodedFrames(0)
+    ,mFrameNum(0)
+    ,mCodedBufSize(0)
+    ,mCodedBufIndex(0)
+    ,mPicSkipped(false)
+    ,mIsIntra(true)
+    ,mSliceSizeOverflow(false)
+    ,mCodedBufferMapped(false)
+    ,mDataCopiedOut(false)
+    ,mKeyFrame(true) {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    // here the display can be any value, use following one
+    // just for consistence purpose, so don't define it
+    unsigned int display = 0x18C34078;
+    int majorVersion = -1;
+    int minorVersion = -1;
+
+    setDefaultParams();
+    mVACodedBuffer [0] = 0;
+    mVACodedBuffer [1] = 0;
+
+    LOG_V("vaGetDisplay \n");
+    mVADisplay = vaGetDisplay(&display);
+    if (mVADisplay == NULL) {
+        LOG_E("vaGetDisplay failed.");
+    }
+
+    // NOTE(review): vaInitialize is attempted even when mVADisplay is NULL
+    // above -- confirm the VA implementation tolerates that.
+    vaStatus = vaInitialize(mVADisplay, &majorVersion, &minorVersion);
+    LOG_V("vaInitialize \n");
+    if (vaStatus != VA_STATUS_SUCCESS) {
+        LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
+    }
+}
+
+// Destructor: terminates the VA display connection.  The display handle
+// is cleared only when vaTerminate reports success, so a failure leaves
+// the stale handle visible for debugging.
+VideoEncoderBase::~VideoEncoderBase() {
+
+    const VAStatus vaStatus = vaTerminate(mVADisplay);
+    LOG_V( "vaTerminate\n");
+    if (vaStatus == VA_STATUS_SUCCESS) {
+        mVADisplay = NULL;
+    } else {
+        LOG_W( "Failed vaTerminate, vaStatus = %d\n", vaStatus);
+    }
+}
+
+// Allocates all VA resources and moves the encoder to the started state:
+//   1. create the VA config from profile + rate-control attributes
+//   2. pick the surface strategy from mReqSurfacesCnt / mBufferMode
+//   3. create normal (and shared) surfaces, registering each one in
+//      mVideoSrcBufferList
+//   4. create the VA context and the two ping-pong coded buffers
+// Every failure path funnels through CLEAN_UP so the local "surfaces"
+// array is always released and mInitialized becomes true only on success.
+Encode_Status VideoEncoderBase::start() {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VASurfaceID *surfaces = NULL;
+
+    VAConfigAttrib vaAttrib[2];
+    uint32_t index;
+    uint32_t maxSize = 0;
+
+    VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
+    uint32_t normalSurfacesCnt = 2;
+
+    if (mInitialized) {
+        LOG_V("Encoder has been started\n");
+        return ENCODE_ALREADY_INIT;
+    }
+
+    // For upstream allocates buffer, it is mandatory to set buffer mode
+    // and for other stuff, it is optional
+    // Different buffer mode will have different surface handling approach
+
+    // mSharedSurfacesCnt is for upstream buffer allocation case
+    mSharedSurfacesCnt = 0;
+
+    vaAttrib[0].type = VAConfigAttribRTFormat;
+    vaAttrib[1].type = VAConfigAttribRateControl;
+    vaAttrib[0].value = VA_RT_FORMAT_YUV420;
+    vaAttrib[1].value = mComParams.rcMode;
+
+    LOG_V( "======VA Configuration======\n");
+
+    LOG_I( "profile = %d\n", mComParams.profile);
+    LOG_I( "mVAEntrypoint = %d\n", mVAEntrypoint);
+    LOG_I( "vaAttrib[0].type = %d\n", vaAttrib[0].type);
+    LOG_I( "vaAttrib[1].type = %d\n", vaAttrib[1].type);
+    LOG_I( "vaAttrib[0].value (Format) = %d\n", vaAttrib[0].value);
+    LOG_I( "vaAttrib[1].value (RC mode) = %d\n", vaAttrib[1].value);
+
+    LOG_V( "vaCreateConfig\n");
+
+    vaStatus = vaCreateConfig(
+            mVADisplay, mComParams.profile, mVAEntrypoint,
+            &vaAttrib[0], 2, &(mVAConfig));
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateConfig");
+
+    if (mComParams.rcMode == VA_RC_VCM) {
+
+        // Following three features are only enabled in VCM mode
+        mRenderMaxSliceSize = true;
+        mRenderAIR = true;
+        mRenderBitRate = true;
+    }
+
+    LOG_I("mReqSurfacesCnt = %d\n", mReqSurfacesCnt);
+
+    if (mReqSurfacesCnt == 0) {
+        switch (mBufferMode) {
+            case BUFFER_SHARING_CI: {
+                mSharedSurfacesCnt = mUpstreamBufferCnt;
+                normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE;
+
+                if (mSharedSurfacesCnt != 0) {
+                    mSharedSurfaces = new VASurfaceID[mSharedSurfacesCnt];
+
+                    if (mSharedSurfaces == NULL) {
+                        LOG_E("Failed allocate shared surface\n");
+                        ret = ENCODE_NO_MEMORY;
+                        goto CLEAN_UP;
+                    }
+                }
+            }
+            break;
+            case BUFFER_SHARING_V4L2:
+            case BUFFER_SHARING_SURFACE:
+                // To be developed
+                break;
+            default:
+                mBufferMode = BUFFER_SHARING_NONE;
+                normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE;
+                break;
+        }
+    } else if (mReqSurfacesCnt == 1) {
+        // TODO: Un-normal case,
+        mBufferMode = BUFFER_SHARING_NONE;
+        normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE;
+    } else {
+        mBufferMode = BUFFER_SHARING_USRPTR;
+        mUsrPtr = new  uint8_t *[mReqSurfacesCnt];
+        if (mUsrPtr == NULL) {
+            LOG_E("Failed allocate memory\n");
+            ret = ENCODE_NO_MEMORY;
+            goto CLEAN_UP;
+        }
+    }
+
+    LOG_I("mBufferMode = %d\n", mBufferMode);
+
+    mSurfaceCnt = normalSurfacesCnt + mSharedSurfacesCnt + mReqSurfacesCnt;
+
+    surfaces = new VASurfaceID[normalSurfacesCnt];
+    if (surfaces == NULL) {
+        LOG_E("Failed allocate surface\n");
+        ret = ENCODE_NO_MEMORY;
+        goto CLEAN_UP;
+    }
+
+    mSurfaces = new VASurfaceID[mSurfaceCnt] ;
+    if (mSurfaces == NULL) {
+        LOG_E("Failed allocate private surface\n");
+        ret = ENCODE_NO_MEMORY;
+        goto CLEAN_UP;
+    }
+
+    vaStatus = vaCreateSurfaces(mVADisplay, mComParams.resolution.width,
+            mComParams.resolution.height, VA_RT_FORMAT_YUV420,
+            normalSurfacesCnt, surfaces);
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces");
+
+    switch (mBufferMode) {
+        case BUFFER_SHARING_CI: {
+            for (index = 0; index < mSharedSurfacesCnt; index++) {
+
+                // NOTE(review): this passes mUpstreamBufferCnt on every
+                // iteration; it looks like the per-index upstream buffer
+                // (e.g. mUpstreamBufferList[index]) was intended --
+                // confirm against the vaCreateSurfaceFromCIFrame contract.
+                vaStatus = vaCreateSurfaceFromCIFrame(
+                        mVADisplay, (uint32_t)mUpstreamBufferCnt, &mSharedSurfaces[index]);
+
+                CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaceFromCIFrame");
+
+                mSurfaces[index] = mSharedSurfaces[index];
+
+                videoSurfaceBuffer = new VideoEncSurfaceBuffer;
+                if (videoSurfaceBuffer == NULL) {
+                    LOG_E( "new VideoEncSurfaceBuffer failed\n");
+                    // Fixed: route through CLEAN_UP instead of returning
+                    // directly, which leaked the local "surfaces" array.
+                    ret = ENCODE_NO_MEMORY;
+                    goto CLEAN_UP;
+                }
+
+                videoSurfaceBuffer->surface = mSharedSurfaces[index];
+                videoSurfaceBuffer->usrptr = NULL;
+                videoSurfaceBuffer->index = index;
+                videoSurfaceBuffer->bufAvailable = true;
+                videoSurfaceBuffer->next = NULL;
+
+                mVideoSrcBufferList = appendVideoSurfaceBuffer
+                        (mVideoSrcBufferList, videoSurfaceBuffer);
+                videoSurfaceBuffer = NULL;
+            }
+        }
+        break;
+        case BUFFER_SHARING_V4L2:
+        case BUFFER_SHARING_SURFACE:
+            // To be developed
+            break;
+        case BUFFER_SHARING_NONE:
+            break;
+        case BUFFER_SHARING_USRPTR: {
+            // Surfaces were presumably already created via
+            // getNewUsrptrFromSurface() (see getParameters); collect the
+            // surface IDs and user pointers from the list.
+            videoSurfaceBuffer = mVideoSrcBufferList;
+            index = 0;
+            while (videoSurfaceBuffer != NULL) {
+                mSurfaces[index] = videoSurfaceBuffer->surface;
+                mUsrPtr [index] = videoSurfaceBuffer->usrptr;
+                videoSurfaceBuffer = videoSurfaceBuffer->next;
+                index ++;
+            }
+        }
+        break;
+        default:
+            break;
+    }
+
+    // Register the "normal" surfaces after any shared/usrptr ones.
+    for (index = 0; index < normalSurfacesCnt; index++) {
+        mSurfaces[mReqSurfacesCnt + mSharedSurfacesCnt + index] = surfaces[index];
+
+        videoSurfaceBuffer = new VideoEncSurfaceBuffer;
+        if (videoSurfaceBuffer == NULL) {
+            LOG_E( "new VideoEncSurfaceBuffer failed\n");
+            // Fixed: route through CLEAN_UP instead of returning directly,
+            // which leaked the local "surfaces" array.
+            ret = ENCODE_NO_MEMORY;
+            goto CLEAN_UP;
+        }
+
+        videoSurfaceBuffer->surface = surfaces[index];
+        videoSurfaceBuffer->usrptr = NULL;
+        videoSurfaceBuffer->index = mReqSurfacesCnt + mSharedSurfacesCnt + index;
+        videoSurfaceBuffer->bufAvailable = true;
+        videoSurfaceBuffer->next = NULL;
+
+        mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer);
+
+        videoSurfaceBuffer = NULL;
+    }
+
+    LOG_V( "assign surface Done\n");
+    LOG_I( "Created %d libva surfaces\n", mSurfaceCnt);
+
+    //Initialize and save the VA context ID
+    LOG_V( "vaCreateContext\n");
+
+    vaStatus = vaCreateContext(mVADisplay, mVAConfig,
+            mComParams.resolution.width,
+            mComParams.resolution.height,
+            0, mSurfaces, mSurfaceCnt,
+            &(mVAContext));
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateContext");
+
+    LOG_I("Created libva context width %d, height %d\n",
+          mComParams.resolution.width, mComParams.resolution.height);
+
+    // getMaxOutSize is expected to also set mCodedBufSize, which sizes the
+    // coded buffers below -- TODO(review): confirm.
+    ret = getMaxOutSize(&maxSize);
+    CHECK_ENCODE_STATUS_CLEANUP("getMaxOutSize");
+
+    // Create coded buffer for output (ping)
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncCodedBufferType,
+            mCodedBufSize,
+            1, NULL,
+            &(mVACodedBuffer[0]));
+
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType");
+
+    // Create coded buffer for output (pong)
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncCodedBufferType,
+            mCodedBufSize,
+            1, NULL,
+            &(mVACodedBuffer[1]));
+
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateBuffer::VAEncCodedBufferType");
+
+    mFirstFrame = true;
+
+CLEAN_UP:
+
+    if (ret == ENCODE_SUCCESS) {
+        mInitialized = true;
+    }
+
+    // The local surface ID array is only needed during setup
+    if (surfaces) delete []surfaces;
+
+    LOG_V( "end\n");
+    return ret;
+}
+
+// Submits one raw input frame for encoding.
+// NOTE(review): the begin/end-picture sequencing below appears to
+// implement a one-frame-deep pipeline -- the previous frame's surface is
+// synced and its coded buffer selected for getOutput() while the current
+// frame is in flight, and the first frame is submitted twice to prime the
+// pipeline.  Confirm against the driver's requirements before reordering
+// any of these calls.
+Encode_Status VideoEncoderBase::encode(VideoEncRawBuffer *inBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    if (!mInitialized) {
+        LOG_E("Encoder has not initialized yet\n");
+        return ENCODE_NOT_INIT;
+    }
+
+    CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+    // The raw buffer belongs to the encoder until the next encode() call
+    inBuffer->bufAvailable = false;
+    if (mNewHeader) mFrameNum = 0;
+
+    // current we use one surface for source data,
+    // one for reference and one for reconstructed
+    decideFrameType();
+    ret = manageSrcSurface(inBuffer);
+    CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
+
+    // Start encoding process
+    LOG_V( "vaBeginPicture\n");
+    LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext);
+    LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurFrame->surface);
+    LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay);
+
+    vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface);
+    CHECK_VA_STATUS_RETURN("vaBeginPicture");
+
+    ret = sendEncodeCommand();
+    CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand");
+
+    // With no rate control (or for the very first frame) the picture is
+    // ended immediately; otherwise vaEndPicture is deferred until after
+    // the previous frame's surface has been synced below.
+    if ((mComParams.rcMode == VA_RC_NONE) || mFirstFrame) {
+        vaStatus = vaEndPicture(mVADisplay, mVAContext);
+        CHECK_VA_STATUS_RETURN("vaEndPicture");
+    }
+
+    LOG_V( "vaEndPicture\n");
+
+    if (mFirstFrame) {
+        updateProperities();
+        decideFrameType();
+    }
+
+    // Wait for the previous frame to finish; its coded buffer becomes the
+    // one handed out by getOutput() (mOutCodedBuffer below).
+    LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastFrame->surface);
+    vaStatus = vaSyncSurface(mVADisplay, mLastFrame->surface);
+    if (vaStatus != VA_STATUS_SUCCESS) {
+        LOG_W( "Failed vaSyncSurface\n");
+    }
+
+    mOutCodedBuffer = mLastCodedBuffer;
+
+    if (!((mComParams.rcMode == VA_RC_NONE) || mFirstFrame)) {
+        vaStatus = vaEndPicture(mVADisplay, mVAContext);
+        CHECK_VA_STATUS_RETURN("vaEndPicture");
+
+    }
+
+    // First frame is submitted a second time so there is always one frame
+    // in flight; it is always a key frame.
+    if (mFirstFrame) {
+        vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface);
+        CHECK_VA_STATUS_RETURN("vaBeginPicture");
+
+        ret = sendEncodeCommand();
+        CHECK_ENCODE_STATUS_RETURN("sendEncodeCommand");
+
+        vaStatus = vaEndPicture(mVADisplay, mVAContext);
+        CHECK_VA_STATUS_RETURN("vaEndPicture");
+
+        mKeyFrame = true;
+    }
+
+    // Query the status of current surface
+    VASurfaceStatus vaSurfaceStatus;
+    vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface,  &vaSurfaceStatus);
+    CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
+
+    mPicSkipped = vaSurfaceStatus & VASurfaceSkipped;
+
+    // Return the consumed source surface to the free list
+    if (!mFirstFrame) {
+        VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame);
+    }
+
+    mLastFrame = NULL;
+    updateProperities();
+    mCurFrame = NULL;
+
+    // The previous raw buffer is now fully consumed; release it to caller
+    if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true;
+
+    LOG_V("ref the current inBuffer\n");
+
+    mLastInputRawBuffer = inBuffer;
+    mFirstFrame = false;
+
+    return ENCODE_SUCCESS;
+}
+
+// Copies one frame's encoded bitstream into outBuffer.  The base class
+// supports only OUTPUT_EVERYTHING and OUTPUT_FRAME_DATA; codec subclasses
+// override getOutput() for their specific formats.  May be called several
+// times per frame until the ENDOFFRAME flag is set.
+Encode_Status VideoEncoderBase::getOutput(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    bool useLocalBuffer = false;
+
+    CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+    LOG_V("Begin\n");
+
+    if (outBuffer->format != OUTPUT_EVERYTHING && outBuffer->format != OUTPUT_FRAME_DATA) {
+        LOG_E("Output buffer mode not supported\n");
+        // Fixed: previously ret stayed ENCODE_SUCCESS here, so an
+        // unsupported format was silently reported as success.
+        ret = ENCODE_INVALID_PARAMS;
+        goto CLEAN_UP;
+    }
+
+    // For first getOutput, the mFrameNum already increased to 2, and of course is key frame
+    // frame 0 is already encoded and will be outputed here
+    // frame 1 is encoding now, frame 2 will be sent to encoder for next encode() call
+    if (mFrameNum > 2) {
+        if (mComParams.intraPeriod != 0 &&
+                (((mFrameNum - 2) % mComParams.intraPeriod) == 0)) {
+            mKeyFrame = true;
+        } else {
+            mKeyFrame = false;
+        }
+    }
+
+    ret = prepareForOutput(outBuffer, &useLocalBuffer);
+    CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
+
+    ret = outputAllData(outBuffer);
+    CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+
+    LOG_I("out size for this getOutput call = %d\n", outBuffer->dataSize);
+
+    ret = cleanupForOutput();
+    CHECK_ENCODE_STATUS_CLEANUP("cleanupForOutput");
+
+CLEAN_UP:
+
+    if (ret < ENCODE_SUCCESS) {
+        // On failure release any locally allocated buffer and make sure the
+        // coded buffer does not stay mapped.
+        if (outBuffer->data && (useLocalBuffer == true)) {
+            delete[] outBuffer->data;
+            outBuffer->data = NULL;
+            useLocalBuffer = false;
+        }
+
+        if (mCodedBufferMapped) {
+            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+            mCodedBufferMapped = false;
+            mCurSegment = NULL;
+        }
+    }
+
+    LOG_V("End\n");
+    return ret;
+}
+
+
+// Returns any in-flight surfaces to the free list and resets per-stream
+// counters so a new stream can start cleanly.  VA resources are kept; the
+// encoder remains in the started state.
+void VideoEncoderBase::flush() {
+
+    LOG_V( "Begin\n");
+
+    // put reconstructed surface back to list
+    if (mRecFrame != NULL) {
+        appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
+        mRecFrame = NULL;
+    }
+
+    // put reference surface back to list
+    if (mRefFrame != NULL) {
+        appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
+        mRefFrame = NULL;
+    }
+
+    // Here this raw buffer means the surface being encoding;
+    // hand it back to the caller
+    if (mLastInputRawBuffer) {
+        mLastInputRawBuffer->bufAvailable = true;
+        mLastInputRawBuffer = NULL;
+    }
+
+    // reset the properities
+    mEncodedFrames = 0;
+    mFrameNum = 0;
+    mPicSkipped = false;
+    mIsIntra = true;
+
+    LOG_V( "end\n");
+}
+
+// Releases everything start() (and the buffer-mode setup) allocated and
+// returns the encoder to the un-started state.  Safe to call after a
+// partial start(): the array members are freed unconditionally before the
+// mInitialized check.
+Encode_Status VideoEncoderBase::stop() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    Encode_Status ret = ENCODE_SUCCESS;
+    VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
+    VideoEncSurfaceBuffer *tmpBuffer = NULL;
+
+
+    LOG_V( "Begin\n");
+
+    if (mSharedSurfaces) {
+        delete [] mSharedSurfaces;
+        mSharedSurfaces = NULL;
+    }
+
+    // NOTE(review): no vaDestroySurfaces call anywhere in stop(); the
+    // VASurfaceIDs appear to be leaked once the array below is freed --
+    // confirm against start()/manageSrcSurface ownership.
+    if (mSurfaces) {
+        delete [] mSurfaces;
+        mSurfaces = NULL;
+    }
+
+    if (mUsrPtr) {
+        delete [] mUsrPtr;
+        mUsrPtr = NULL;
+    }
+
+    // NOTE(review): assumes mUpstreamBufferList was allocated with new[]
+    // in setUpstreamBuffer (not visible here) -- confirm, otherwise this
+    // delete [] is mismatched.
+    if (mUpstreamBufferList) {
+        delete [] mUpstreamBufferList;
+        mUpstreamBufferList = NULL;
+    }
+
+    // It is possible that above pointers have been allocated
+    // before we set mInitialized to true
+    if (!mInitialized) {
+        LOG_V("Encoder has been stopped\n");
+        return ENCODE_SUCCESS;
+    }
+
+    LOG_V( "Release frames\n");
+
+    // put reconstructed surface back to list
+    if (mRecFrame != NULL) {
+        appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
+        mRecFrame = NULL;
+    }
+
+    // put reference surface back to list
+    if (mRefFrame != NULL) {
+        appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
+        mRefFrame = NULL;
+    }
+
+    // put Source surface back to list
+    if (mLastFrame != NULL) {
+        appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame);
+        mLastFrame = NULL;
+    }
+
+    LOG_V( "Release surfaces\n");
+
+
+    LOG_V( "vaDestroyContext\n");
+    vaStatus = vaDestroyContext(mVADisplay, mVAContext);
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
+
+    LOG_V( "vaDestroyConfig\n");
+    vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
+    CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
+
+    // Release Src Surface Buffer List
+    LOG_V( "Rlease Src Surface Buffer \n");
+
+    videoSurfaceBuffer = mVideoSrcBufferList;
+
+    while (videoSurfaceBuffer != NULL) {
+        tmpBuffer = videoSurfaceBuffer;
+        videoSurfaceBuffer = videoSurfaceBuffer->next;
+        delete tmpBuffer;
+    }
+
+    // Fixed: reset the list head after freeing the nodes; a later start()
+    // would otherwise append to a dangling list.
+    mVideoSrcBufferList = NULL;
+
+CLEAN_UP:
+    mInitialized = false;
+    LOG_V( "end\n");
+    return ret;
+}
+
+
+// Maps the coded buffer on the first call for a frame, computes the total
+// coded size, allocates a local output buffer when the caller did not
+// provide one, and positions (mCurSegment, mOffsetInSeg) at the next data
+// to copy out.
+Encode_Status VideoEncoderBase::prepareForOutput(
+        VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VACodedBufferSegment *vaCodedSeg = NULL;
+    uint32_t status = 0;
+    uint8_t *buf = NULL;
+
+    LOG_V( "begin\n");
+    // Won't check parameters here as the caller already checked them
+    // mCurSegment is NULL means it is first time to be here after finishing encoding a frame
+    if (mCurSegment == NULL && !mCodedBufferMapped) {
+        LOG_I ("Coded Buffer ID been mapped = 0x%08x\n", mOutCodedBuffer);
+        vaStatus = vaMapBuffer (mVADisplay, mOutCodedBuffer, (void **)&buf);
+        CHECK_VA_STATUS_RETURN("vaMapBuffer");
+        CHECK_NULL_RETURN_IFFAIL(buf);
+
+        mCodedBufferMapped = true;
+        mTotalSize = 0;
+        mOffsetInSeg = 0;
+        mTotalSizeCopied = 0;
+        vaCodedSeg = (VACodedBufferSegment *)buf;
+        mCurSegment = (VACodedBufferSegment *)buf;
+
+        // Walk the whole segment chain once to learn the total coded size
+        // and whether any slice overflowed its size limit.
+        while (1) {
+
+            mTotalSize += vaCodedSeg->size;
+            status = vaCodedSeg->status;
+
+            if (!mSliceSizeOverflow) {
+                mSliceSizeOverflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
+            }
+
+            if (vaCodedSeg->next == NULL)
+                break;
+
+            vaCodedSeg = (VACodedBufferSegment *)vaCodedSeg->next;
+        }
+    }
+
+
+    // We will support two buffer allocation mode,
+    // one is application allocates the buffer and passes to encode,
+    // the other is encode allocate memory
+
+    //means  app doesn't allocate the buffer, so _encode will allocate it.
+    if (outBuffer->data == NULL) {
+        *useLocalBuffer = true;
+        // +100 bytes of slack; NOTE(review): confirm why 100 is sufficient
+        outBuffer->data = new  uint8_t[mTotalSize - mTotalSizeCopied + 100];
+        if (outBuffer->data == NULL) {
+            LOG_E( "outBuffer->data == NULL\n");
+            return ENCODE_NO_MEMORY;
+        }
+        // Fixed: bufferSize must match the allocation above.  It previously
+        // used (mTotalSize + 100), overstating the buffer whenever data had
+        // already been copied out (mTotalSizeCopied > 0) and allowing
+        // outputAllData to write past the allocation.
+        outBuffer->bufferSize = mTotalSize - mTotalSizeCopied + 100;
+        outBuffer->dataSize = 0;
+    }
+
+    // Clear all flag for every call
+    outBuffer->flag = 0;
+    if (mSliceSizeOverflow) outBuffer->flag |= ENCODE_BUFFERFLAG_SLICEOVERFOLOW;
+
+    if (mCurSegment->size < mOffsetInSeg) {
+        LOG_E("mCurSegment->size < mOffsetInSeg\n");
+        return ENCODE_FAIL;
+    }
+
+    // Make sure we have data in current segment
+    if (mCurSegment->size == mOffsetInSeg) {
+        if (mCurSegment->next != NULL) {
+            mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+            mOffsetInSeg = 0;
+        } else {
+            LOG_V("No more data available\n");
+            outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+            outBuffer->dataSize = 0;
+            mCurSegment = NULL;
+            return ENCODE_NO_REQUEST_DATA;
+        }
+    }
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Unmaps the coded buffer once every segment has been consumed
+// (mCurSegment == NULL marks "all data copied out").  No-op while data
+// remains so repeated getOutput() calls can keep reading the mapping.
+Encode_Status VideoEncoderBase::cleanupForOutput() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    //mCurSegment is NULL means all data has been copied out
+    if (mCurSegment == NULL && mCodedBufferMapped) {
+        vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+        CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+        mCodedBufferMapped = false;
+    }
+    // Removed the unused local "ret"; this function reports failures only
+    // through the CHECK_VA_STATUS_RETURN macro above.
+    return ENCODE_SUCCESS;
+}
+
+
+// Copies coded data from the mapped segment chain into outBuffer, resuming
+// at (mCurSegment, mOffsetInSeg).  Returns ENCODE_SUCCESS with ENDOFFRAME
+// set once the frame is fully copied, or ENCODE_BUFFER_TOO_SMALL with
+// PARTIALFRAME set when outBuffer fills first (remainingSize then reports
+// what is left for the next call).
+Encode_Status VideoEncoderBase::outputAllData(
+        VideoEncOutputBuffer *outBuffer) {
+
+    // Data size been copied for every single call
+    uint32_t sizeCopiedHere = 0;
+    uint32_t sizeToBeCopied = 0;
+
+    CHECK_NULL_RETURN_IFFAIL(outBuffer->data);
+
+    while (1) {
+
+        LOG_I("mCurSegment->size = %d, mOffsetInSeg = %d\n", mCurSegment->size, mOffsetInSeg);
+        LOG_I("outBuffer->bufferSize = %d, sizeCopiedHere = %d, mTotalSizeCopied = %d\n",
+              outBuffer->bufferSize, sizeCopiedHere, mTotalSizeCopied);
+
+        // Internal consistency check; should never trigger
+        if (mCurSegment->size < mOffsetInSeg || outBuffer->bufferSize < sizeCopiedHere) {
+            LOG_E("mCurSegment->size < mOffsetInSeg  || outBuffer->bufferSize < sizeCopiedHere\n");
+            return ENCODE_FAIL;
+        }
+
+        // Remainder of this segment fits: copy it all and move on
+        if ((mCurSegment->size - mOffsetInSeg) <= outBuffer->bufferSize - sizeCopiedHere) {
+            sizeToBeCopied = mCurSegment->size - mOffsetInSeg;
+            memcpy(outBuffer->data + sizeCopiedHere,
+                   (uint8_t *)mCurSegment->buf + mOffsetInSeg, sizeToBeCopied);
+            sizeCopiedHere += sizeToBeCopied;
+            mTotalSizeCopied += sizeToBeCopied;
+            mOffsetInSeg = 0;
+        } else {
+            // Output buffer full: copy what fits, remember resume offset
+            sizeToBeCopied = outBuffer->bufferSize - sizeCopiedHere;
+            memcpy(outBuffer->data + sizeCopiedHere,
+                   (uint8_t *)mCurSegment->buf + mOffsetInSeg, outBuffer->bufferSize - sizeCopiedHere);
+            mTotalSizeCopied += sizeToBeCopied;
+            mOffsetInSeg += sizeToBeCopied;
+            outBuffer->dataSize = outBuffer->bufferSize;
+            outBuffer->remainingSize = mTotalSize - mTotalSizeCopied;
+            outBuffer->flag |= ENCODE_BUFFERFLAG_PARTIALFRAME;
+            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+            return ENCODE_BUFFER_TOO_SMALL;
+        }
+
+        // Last segment consumed: frame complete
+        if (mCurSegment->next == NULL) {
+            outBuffer->dataSize = sizeCopiedHere;
+            outBuffer->remainingSize = 0;
+            outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+            if (mKeyFrame) outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+            mCurSegment = NULL;
+            return ENCODE_SUCCESS;
+        }
+
+        mCurSegment = (VACodedBufferSegment *)mCurSegment->next;
+        mOffsetInSeg = 0;
+    }
+}
+
+// Populates mComParams with conservative defaults (H.264 Baseline, level
+// 4.0, NV12 input, 30 fps, rate control off).  Called once from the
+// constructor; callers override via setParameters() before start().
+void VideoEncoderBase::setDefaultParams() {
+
+    // Set default value for input parameters
+    mComParams.profile = VAProfileH264Baseline;
+    mComParams.level = 40;
+    mComParams.rawFormat = RAW_FORMAT_NV12;
+    mComParams.frameRate.frameRateNum = 30;
+    mComParams.frameRate.frameRateDenom = 1;
+    // Resolution must be supplied by the caller before start()
+    mComParams.resolution.width = 0;
+    mComParams.resolution.height = 0;
+    mComParams.intraPeriod = 30;
+    mComParams.rcMode = RATE_CONTROL_NONE;
+    // Rate-control parameters (used when an RC mode is selected)
+    mComParams.rcParams.initQP = 15;
+    mComParams.rcParams.minQP = 1;
+    mComParams.rcParams.bitRate = 640000;
+    mComParams.rcParams.targetPercentage= 95;
+    mComParams.rcParams.windowSize = 500;
+    // Intra-refresh defaults (disabled)
+    mComParams.cyclicFrameInterval = 30;
+    mComParams.refreshType = VIDEO_ENC_NONIR;
+    mComParams.airParams.airMBs = 0;
+    mComParams.airParams.airThreshold = 0;
+    mComParams.airParams.airAuto = 1;
+    mComParams.disableDeblocking = 2;
+}
+
+// Applies caller-supplied parameters.  Only legal before start(); once the
+// encoder is running, configuration changes must go through setConfig().
+// Each parameter struct's size field is validated as a version/sanity
+// check before the cast is trusted.
+Encode_Status VideoEncoderBase::setParameters(
+        VideoParamConfigSet *videoEncParams) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+    LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+    if (mInitialized) {
+        LOG_E("Encoder has been initialized, should use setConfig to change configurations\n");
+        return ENCODE_ALREADY_INIT;
+    }
+
+    switch (videoEncParams->type) {
+        case VideoParamsTypeCommon: {
+
+            VideoParamsCommon *paramsCommon =
+                    reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+
+            if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            mComParams = *paramsCommon;
+            break;
+        }
+
+        case VideoParamsTypeUpSteamBuffer: {
+
+            VideoParamsUpstreamBuffer *upStreamBuffer =
+                    reinterpret_cast <VideoParamsUpstreamBuffer *> (videoEncParams);
+
+            if (upStreamBuffer->size != sizeof (VideoParamsUpstreamBuffer)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            ret = setUpstreamBuffer(
+                    upStreamBuffer->bufferMode, upStreamBuffer->bufList, upStreamBuffer->bufCnt);
+            break;
+        }
+
+        case VideoParamsTypeUsrptrBuffer: {
+
+            // usrptr only can be get
+            // this case should not happen
+            break;
+        }
+
+        // Codec-specific parameter sets are delegated to the subclass
+        case VideoParamsTypeAVC:
+        case VideoParamsTypeH263:
+        case VideoParamsTypeMP4:
+        case VideoParamsTypeVC1: {
+            ret = derivedSetParams(videoEncParams);
+            break;
+        }
+
+        default: {
+            LOG_E ("Wrong ParamType here\n");
+            break;
+        }
+
+    }
+
+    return ret;
+
+}
+
+
+// Queries encoder parameters.  Common parameters are copied out directly;
+// a usrptr-buffer query allocates a new surface and returns its user-space
+// pointer; codec-specific types are delegated to the subclass.  Each
+// struct's size field is validated as a version/sanity check.
+Encode_Status VideoEncoderBase::getParameters(
+        VideoParamConfigSet *videoEncParams) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncParams);
+    LOG_I("Config type = %d\n", (int)videoEncParams->type);
+
+    switch (videoEncParams->type) {
+        case VideoParamsTypeCommon: {
+
+            VideoParamsCommon *paramsCommon =
+                    reinterpret_cast <VideoParamsCommon *> (videoEncParams);
+
+            if (paramsCommon->size != sizeof (VideoParamsCommon)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            *paramsCommon = mComParams;
+            break;
+        }
+
+        case VideoParamsTypeUpSteamBuffer: {
+
+            // Get upstream buffer could happen
+            // but not meaningful a lot
+            break;
+        }
+
+        case VideoParamsTypeUsrptrBuffer: {
+            VideoParamsUsrptrBuffer *usrptrBuffer =
+                    reinterpret_cast <VideoParamsUsrptrBuffer *> (videoEncParams);
+
+            if (usrptrBuffer->size != sizeof (VideoParamsUsrptrBuffer)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            ret = getNewUsrptrFromSurface(
+                    usrptrBuffer->width, usrptrBuffer->height, usrptrBuffer->format,
+                    usrptrBuffer->expectedSize, &(usrptrBuffer->actualSize),
+                    &(usrptrBuffer->stride), &(usrptrBuffer->usrPtr));
+
+            break;
+        }
+
+        case VideoParamsTypeAVC:
+        case VideoParamsTypeH263:
+        case VideoParamsTypeMP4:
+        case VideoParamsTypeVC1: {
+            // Fixed: capture the subclass result instead of discarding it
+            ret = derivedGetParams(videoEncParams);
+            break;
+        }
+
+        default: {
+            LOG_E ("Wrong ParamType here\n");
+            break;
+        }
+
+    }
+    // Fixed: propagate ret.  Previously this always returned
+    // ENCODE_SUCCESS, hiding failures from getNewUsrptrFromSurface and
+    // derivedGetParams (the sibling setParameters() already returns ret).
+    return ret;
+}
+
+Encode_Status VideoEncoderBase::setConfig(VideoParamConfigSet *videoEncConfig) {
+
+    // Apply a runtime (per-stream) configuration change. Unlike
+    // setParameters(), this is only legal after the encoder is initialized.
+    // Each case validates the caller's struct size before copying the new
+    // values into mComParams; the mRender* flags tell the encode path to
+    // push the change to the driver on the next frame.
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+    LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+    if (!mInitialized) {
+        LOG_E("Encoder has not initialized yet, can't call setConfig\n");
+        return ENCODE_NOT_INIT;
+    }
+
+    switch (videoEncConfig->type) {
+        case VideoConfigTypeFrameRate: {
+            VideoConfigFrameRate *configFrameRate =
+                    reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
+
+            // Size check guards against struct-version mismatch
+            if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            mComParams.frameRate = configFrameRate->frameRate;
+            mRenderFrameRate = true;
+            break;
+        }
+
+        case VideoConfigTypeBitRate: {
+            VideoConfigBitRate *configBitRate =
+                    reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
+
+            if (configBitRate->size != sizeof (VideoConfigBitRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            mComParams.rcParams = configBitRate->rcParams;
+
+            mRenderBitRate = true;
+
+            break;
+        }
+        case VideoConfigTypeResolution: {
+
+            // Not Implemented
+            break;
+        }
+        case VideoConfigTypeIntraRefreshType: {
+
+            VideoConfigIntraRefreshType *configIntraRefreshType =
+                    reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
+
+            if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            mComParams.refreshType = configIntraRefreshType->refreshType;
+
+            break;
+        }
+
+        case VideoConfigTypeCyclicFrameInterval: {
+            VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
+                    reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
+            if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mComParams.cyclicFrameInterval = configCyclicFrameInterval->cyclicFrameInterval;
+            break;
+        }
+
+        case VideoConfigTypeAIR: {
+
+            VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
+
+            if (configAIR->size != sizeof (VideoConfigAIR)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            mComParams.airParams = configAIR->airParams;
+
+            mRenderAIR = true;
+
+            break;
+        }
+        case VideoConfigTypeAVCIntraPeriod:
+        case VideoConfigTypeNALSize:
+        case VideoConfigTypeIDRRequest:
+        case VideoConfigTypeSliceNum: {
+
+            // Codec-specific configuration handled by the derived encoder
+            ret = derivedSetConfig(videoEncConfig);
+
+            break;
+        }
+        default: {
+            LOG_E ("Wrong Config Type here\n");
+            break;
+        }
+    }
+
+
+    return ret;
+}
+
+Encode_Status VideoEncoderBase::getConfig(VideoParamConfigSet *videoEncConfig) {
+
+    // Read back a runtime configuration value. The caller pre-fills
+    // videoEncConfig->type and ->size; the matching struct is filled from
+    // mComParams. Codec-specific types are delegated to derivedGetConfig().
+    // NOTE(review): unlike setConfig(), no mInitialized guard here --
+    // presumably intentional since this only reads cached state; confirm.
+    Encode_Status ret = ENCODE_SUCCESS;
+    CHECK_NULL_RETURN_IFFAIL(videoEncConfig);
+    LOG_I("Config type = %d\n", (int)videoEncConfig->type);
+
+    switch (videoEncConfig->type) {
+        case VideoConfigTypeFrameRate: {
+            VideoConfigFrameRate *configFrameRate =
+                    reinterpret_cast <VideoConfigFrameRate *> (videoEncConfig);
+
+            // Size check guards against struct-version mismatch
+            if (configFrameRate->size != sizeof (VideoConfigFrameRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            configFrameRate->frameRate = mComParams.frameRate;
+
+            break;
+        }
+
+        case VideoConfigTypeBitRate: {
+            VideoConfigBitRate *configBitRate =
+                    reinterpret_cast <VideoConfigBitRate *> (videoEncConfig);
+
+            if (configBitRate->size != sizeof (VideoConfigBitRate)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            configBitRate->rcParams = mComParams.rcParams;
+
+
+            break;
+        }
+        case VideoConfigTypeResolution: {
+            // Not Implemented
+            break;
+        }
+        case VideoConfigTypeIntraRefreshType: {
+
+            VideoConfigIntraRefreshType *configIntraRefreshType =
+                    reinterpret_cast <VideoConfigIntraRefreshType *> (videoEncConfig);
+
+            if (configIntraRefreshType->size != sizeof (VideoConfigIntraRefreshType)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+            configIntraRefreshType->refreshType = mComParams.refreshType;
+
+            break;
+        }
+
+        case VideoConfigTypeCyclicFrameInterval: {
+            VideoConfigCyclicFrameInterval *configCyclicFrameInterval =
+                    reinterpret_cast <VideoConfigCyclicFrameInterval *> (videoEncConfig);
+            if (configCyclicFrameInterval->size != sizeof (VideoConfigCyclicFrameInterval)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            configCyclicFrameInterval->cyclicFrameInterval = mComParams.cyclicFrameInterval;
+
+            break;
+        }
+
+        case VideoConfigTypeAIR: {
+
+            VideoConfigAIR *configAIR = reinterpret_cast <VideoConfigAIR *> (videoEncConfig);
+
+            if (configAIR->size != sizeof (VideoConfigAIR)) {
+                return ENCODE_INVALID_PARAMS;
+            }
+
+            configAIR->airParams = mComParams.airParams;
+
+            break;
+        }
+        case VideoConfigTypeAVCIntraPeriod:
+        case VideoConfigTypeNALSize:
+        case VideoConfigTypeIDRRequest:
+        case VideoConfigTypeSliceNum: {
+
+            // Codec-specific configuration handled by the derived encoder
+            ret = derivedGetConfig(videoEncConfig);
+
+            break;
+        }
+        default: {
+            LOG_E ("Wrong ParamType here\n");
+            break;
+        }
+    }
+
+    return ret;
+}
+
+void VideoEncoderBase:: decideFrameType () {
+
+    // Decide whether the next frame is coded as an intra (key) frame.
+    // intraPeriod == 0 means "only the very first frame is intra";
+    // otherwise every intraPeriod-th frame (by mFrameNum) is intra.
+    LOG_I( "mEncodedFrames = %d\n", mEncodedFrames);
+    LOG_I( "mFrameNum = %d\n", mFrameNum);
+    LOG_I( "mIsIntra = %d\n", mIsIntra);
+
+    if (mComParams.intraPeriod == 0) {
+        mIsIntra = (mFrameNum == 0);
+    } else {
+        mIsIntra = ((mFrameNum % mComParams.intraPeriod) == 0);
+    }
+
+    LOG_I( "mIsIntra = %d\n",mIsIntra);
+}
+
+
+void VideoEncoderBase:: updateProperities () {
+
+    // Book-keeping after a frame has been submitted for encode:
+    // advance the frame counters, flip the ping-pong coded buffer, and
+    // rotate the reconstructed/reference surface pair.
+    LOG_V( "Begin\n");
+
+    mEncodedFrames ++;
+    mFrameNum ++;
+
+    // Remember which coded buffer the just-submitted frame used, then
+    // switch to the other of the two ping-pong buffers.
+    mLastCodedBuffer = mVACodedBuffer[mCodedBufIndex];
+    mCodedBufIndex = (mCodedBufIndex + 1) % 2;
+
+    mLastFrame = mCurFrame;
+
+    // A skipped picture produced no new reconstruction, so the
+    // reference/reconstructed pair is only swapped for coded frames.
+    if (!mPicSkipped) {
+        VideoEncSurfaceBuffer *swapTmp = mRecFrame;
+        mRecFrame = mRefFrame;
+        mRefFrame = swapTmp;
+    }
+
+    LOG_V( "End\n");
+}
+
+
+Encode_Status  VideoEncoderBase::getMaxOutSize (uint32_t *maxSize) {
+
+    // Compute (and cache in mCodedBufSize) the worst-case size in bytes of
+    // one encoded frame, so callers can size their output buffers.
+    if (maxSize == NULL) {
+        LOG_E("maxSize == NULL\n");
+        return ENCODE_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n");
+
+    // Cached from a previous call; resolution can not change afterwards,
+    // so the value stays valid.
+    if (mCodedBufSize > 0) {
+        *maxSize = mCodedBufSize;
+        LOG_V ("Already calculate the max encoded size, get the value directly");
+        return ENCODE_SUCCESS;
+    }
+
+    uint32_t size = mComParams.resolution.width * mComParams.resolution.height;
+
+    // Base the default encoded buffer size on the rate control mode
+    if (mComParams.rcMode == VA_RC_NONE) {
+        // No rate control: assume up to 400 bytes per 16x16 macroblock
+        mCodedBufSize = (size * 400) / (16 * 16);
+        // set to value according to QP
+    } else {
+        mCodedBufSize = mComParams.rcParams.bitRate / 4;
+    }
+
+    mCodedBufSize =
+        max (mCodedBufSize , (size * 400) / (16 * 16));
+
+    // Clamp in case the user passed a very large bit rate value.
+    // size * 12 == size * 1.5 * 8, kept in integer arithmetic to avoid the
+    // implicit double conversion and narrowing of the original expression.
+    mCodedBufSize =
+        min(mCodedBufSize, (size * 12));
+    mCodedBufSize =  (mCodedBufSize + 15) &(~15);  // round up to a 16-byte multiple
+
+    *maxSize = mCodedBufSize;
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
+    uint32_t width, uint32_t height, uint32_t format,
+    uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
+
+    // Create a VA surface whose backing memory is exposed to the caller as
+    // a user pointer (zero-copy upstream buffer sharing). On success the
+    // surface is queued on mVideoSrcBufferList and *usrptr / *outsize /
+    // *stride describe the mapped memory. Must be called before start().
+    Encode_Status ret = ENCODE_FAIL;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    VASurfaceID surface = VA_INVALID_SURFACE;
+    VAImage image;
+    uint32_t index = 0;
+
+    VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
+
+    LOG_V( "Begin\n");
+
+    // If encode session has been configured, we can not request surface creation anymore
+    if (mInitialized) {
+        LOG_E( "Already Initialized, can not request VA surface anymore\n");
+        return ENCODE_WRONG_STATE;
+    }
+
+    if (width<=0 || height<=0 ||outsize == NULL ||stride == NULL || usrptr == NULL) {
+        LOG_E("width<=0 || height<=0 || outsize == NULL || stride == NULL ||usrptr == NULL\n");
+        return ENCODE_NULL_PTR;
+    }
+
+    // Currently only NV12 is supported in the VA API;
+    // the format determines the number of planes.
+    if (format != STRING_TO_FOURCC("NV12")) {
+
+        LOG_W ("Format is not supported\n");
+        return ENCODE_NOT_SUPPORTED;
+    }
+
+    vaStatus = vaCreateSurfacesForUserPtr(mVADisplay, width, height, VA_RT_FORMAT_YUV420, 1,
+            &surface, expectedSize, VA_FOURCC_NV12, width, width, width,
+            0, width * height, width * height);
+
+    CHECK_VA_STATUS_RETURN("vaCreateSurfacesForUserPtr");
+
+    vaStatus = vaDeriveImage(mVADisplay, surface, &image);
+    CHECK_VA_STATUS_RETURN("vaDeriveImage");
+
+    LOG_V( "vaDeriveImage Done\n");
+
+    vaStatus = vaMapBuffer(mVADisplay, image.buf, (void **) usrptr);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    // Touch one byte in every 4K page so the physical pages are actually
+    // allocated before the pointer is handed out to the caller.
+    for (index = 0; index < image.data_size; index = index + 4096) {
+        unsigned char tmp =  *(*usrptr + index);
+        if (tmp == 0)
+            *(*usrptr + index) = 0;
+    }
+
+    *outsize = image.data_size;
+    *stride = image.pitches[0];
+
+    LOG_I( "surface = 0x%08x\n",(uint32_t)surface);
+    LOG_I("image->pitches[0] = %d\n", image.pitches[0]);
+    LOG_I("image->pitches[1] = %d\n", image.pitches[1]);
+    LOG_I("image->offsets[0] = %d\n", image.offsets[0]);
+    LOG_I("image->offsets[1] = %d\n", image.offsets[1]);
+    LOG_I("image->num_planes = %d\n", image.num_planes);
+    LOG_I("image->width = %d\n", image.width);
+    LOG_I("image->height = %d\n", image.height);
+
+    LOG_I ("data_size = %d\n", image.data_size);
+    LOG_I ("usrptr = 0x%p\n", *usrptr);
+    LOG_I ("mReqSurfacesCnt = %d\n", mReqSurfacesCnt);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaDestroyImage(mVADisplay, image.image_id);
+    CHECK_VA_STATUS_RETURN("vaDestroyImage");
+
+    // Bug fix: validate the allocation size BEFORE the surface is queued on
+    // mVideoSrcBufferList. The original appended first, so this failure
+    // path left a list node pointing at a destroyed surface (and leaked
+    // the node itself).
+    if (*outsize < expectedSize) {
+        LOG_E ("Allocated buffer size is small than the expected size, destroy the surface");
+        LOG_I ("Allocated size is %d, expected size is %d\n", *outsize, expectedSize);
+        vaStatus = vaDestroySurfaces(mVADisplay, &surface, 1);
+        CHECK_VA_STATUS_RETURN("vaDestroySurfaces");
+        return ENCODE_FAIL;
+    }
+
+    videoSurfaceBuffer = new VideoEncSurfaceBuffer;
+    if (videoSurfaceBuffer == NULL) {
+        LOG_E( "new VideoEncSurfaceBuffer failed\n");
+        return ENCODE_NO_MEMORY;
+    }
+
+    videoSurfaceBuffer->surface = surface;
+    videoSurfaceBuffer->usrptr = *usrptr;
+    videoSurfaceBuffer->index = mReqSurfacesCnt;
+    videoSurfaceBuffer->bufAvailable = true;
+    videoSurfaceBuffer->next = NULL;
+
+    mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer);
+
+    LOG_I ("videoSurfaceBuffer->usrptr = 0x%p\n ", videoSurfaceBuffer->usrptr);
+
+    videoSurfaceBuffer = NULL;  // ownership transferred to the list
+
+    mReqSurfacesCnt ++;
+    ret = ENCODE_SUCCESS;
+
+    return ret;
+}
+
+
+Encode_Status VideoEncoderBase::setUpstreamBuffer(
+        VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt) {
+
+    // Record the caller-provided list of shared upstream buffer handles.
+    // A private copy is kept, so the caller may free bufList afterwards.
+    // (bufferMode is currently unused here; the mode is tracked elsewhere.)
+    CHECK_NULL_RETURN_IFFAIL(bufList);
+    if (bufCnt == 0) {
+        LOG_E("bufCnt == 0\n");
+        return ENCODE_FAIL;
+    }
+
+    // Release a previously installed list before replacing it; clear the
+    // pointer so a failed allocation below can not leave it dangling.
+    if (mUpstreamBufferList) {
+        delete [] mUpstreamBufferList;
+        mUpstreamBufferList = NULL;
+    }
+
+    mUpstreamBufferList = new uint32_t [bufCnt];
+    if (!mUpstreamBufferList) {
+        LOG_E ("mUpstreamBufferList NULL\n");
+        return ENCODE_NO_MEMORY;
+    }
+
+    // Publish the count only once the allocation has succeeded, keeping
+    // (mUpstreamBufferList, mUpstreamBufferCnt) consistent on failure.
+    mUpstreamBufferCnt = bufCnt;
+    memcpy(mUpstreamBufferList, bufList, bufCnt * sizeof (uint32_t));
+    return ENCODE_SUCCESS;
+
+}
+
+
+Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) {
+
+    // Select the current / reference / reconstructed surfaces for this
+    // frame according to the buffer-sharing mode:
+    //  - BUFFER_SHARING_CI / USRPTR: the input buffer maps onto one of our
+    //    pre-created surfaces (located via bufIndex), so no data copy.
+    //  - BUFFER_SHARING_NONE: take free surfaces from the list and copy
+    //    the raw frame in via uploadDataToSurface().
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;  // currently unused in this function
+
+    uint32_t idx = 0;
+    uint32_t bufIndex = 0;
+
+    if (mBufferMode == BUFFER_SHARING_CI) {
+
+        // In CI mode the raw buffer's data field carries the CI frame index
+        memcpy(&bufIndex, inBuffer->data, sizeof(unsigned int));
+        // bufIndex = *(uint32_t*)inBuffer->data;
+
+        LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt);
+        LOG_I("bufIndex = %d\n", bufIndex);
+
+        // The last two surfaces are reserved for ref/rec frames.
+        // NOTE(review): mSurfaceCnt - 2 underflows (unsigned) when
+        // mSurfaceCnt < 2 -- confirm mSurfaceCnt >= 2 is guaranteed here.
+        if (bufIndex > mSurfaceCnt - 2) {
+            LOG_E("the CI frame idx is bigger than total CI frame count\n");
+            ret = ENCODE_FAIL;
+            return ret;
+
+        }
+
+    } else if (mBufferMode == BUFFER_SHARING_USRPTR) {
+
+        bufIndex = (uint32_t) -1; // sentinel: "not found" until matched below
+
+        LOG_I("bufin->data = 0x%p	\n", inBuffer->data);
+
+        // Match the incoming user pointer against the surfaces we handed
+        // out in getNewUsrptrFromSurface()
+        for (idx = 0; idx < mReqSurfacesCnt; idx++) {
+            LOG_I("mUsrPtr[%d] = 0x%p\n", idx, mUsrPtr[idx]);
+
+            if (inBuffer->data == mUsrPtr[idx])
+                bufIndex = idx;
+        }
+
+        LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt);
+        LOG_I("bufIndex = %d\n", bufIndex);
+
+        // Unknown pointer: fall back to copy mode (jumps into the
+        // BUFFER_SHARING_NONE case below via the no_share_mode label)
+        if (bufIndex > mSurfaceCnt - 2) {
+            LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n");
+            ret = ENCODE_FAIL;
+            goto no_share_mode;
+
+        }
+    }
+
+
+    switch (mBufferMode) {
+
+        case BUFFER_SHARING_CI:
+        case BUFFER_SHARING_USRPTR: {
+
+            // Lazily claim the reference frame surface (last in the pool)
+            if (mRefFrame== NULL) {
+                mRefFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt -1 );
+                if (mRefFrame == NULL) {
+                    LOG_E ("No Surface buffer available, something should be wrong\n");
+                    return ENCODE_FAIL;
+                }
+                mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
+
+            }
+
+            // Lazily claim the reconstructed frame surface (second to last)
+            if (mRecFrame== NULL) {
+                mRecFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt - 2);
+                if (mRecFrame == NULL) {
+                    LOG_E ("No Surface buffer available, something should be wrong\n");
+                    return ENCODE_FAIL;
+                }
+                mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
+
+            }
+
+            // The current frame is the shared surface the input maps onto
+            if (mCurFrame== NULL) {
+                mCurFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, bufIndex);
+                if (mCurFrame == NULL) {
+                    LOG_E ("No Surface buffer available, something should be wrong\n");
+                    return ENCODE_FAIL;
+                }
+                mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame);
+            }
+        }
+
+        break;
+        case BUFFER_SHARING_V4L2:
+        case BUFFER_SHARING_SURFACE:
+            LOG_E("Not Implemented\n");
+            break;
+
+        case BUFFER_SHARING_NONE: {
+no_share_mode:
+
+            // Copy mode: any free surface from the head of the list will do
+            if (mRefFrame== NULL) {
+                mRefFrame = mVideoSrcBufferList;
+                if (mRefFrame == NULL) {
+                    LOG_E("No Surface buffer available, something should be wrong\n");
+                    return ENCODE_FAIL;
+                }
+                mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
+
+            }
+
+            if (mRecFrame== NULL) {
+                mRecFrame = mVideoSrcBufferList;
+                if (mRecFrame == NULL) {
+                    LOG_E ("No Surface buffer available, something should be wrong\n");
+                    return ENCODE_FAIL;
+                }
+                mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
+
+            }
+
+            if (mCurFrame== NULL) {
+                mCurFrame = mVideoSrcBufferList;
+                if (mCurFrame == NULL) {
+                    LOG_E ("No Surface buffer available, something should be wrong\n");
+                    return ENCODE_FAIL;
+                }
+                mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame);
+            }
+
+            LOG_V( "Get Surface Done\n");
+            // Raw data must be copied into the claimed surface in this mode
+            ret = uploadDataToSurface (inBuffer);
+            CHECK_ENCODE_STATUS_RETURN("uploadDataToSurface");
+        }
+        break;
+        default:
+            break;
+
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+VideoEncSurfaceBuffer *VideoEncoderBase::appendVideoSurfaceBuffer(
+        VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) {
+
+    // Append `buffer` at the tail of the singly linked list rooted at
+    // `head`; returns the (possibly new) head of the list.
+    if (head == NULL) {
+        return buffer;
+    }
+
+    VideoEncSurfaceBuffer *cursor = head;
+    while (cursor->next != NULL) {
+        cursor = cursor->next;
+    }
+    cursor->next = buffer;
+
+    return head;
+}
+
+VideoEncSurfaceBuffer *VideoEncoderBase::removeVideoSurfaceBuffer(
+        VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) {
+
+    // Unlink `buffer` from the list rooted at `head` and return the new
+    // head. If `buffer` is not on the list the list is left untouched;
+    // in every case buffer->next is reset to NULL.
+    if (head == buffer) {
+        VideoEncSurfaceBuffer *newHead = head->next;
+        buffer->next = NULL;
+        return newHead;
+    }
+
+    // Find the predecessor of `buffer`, if any
+    VideoEncSurfaceBuffer *prev = head;
+    while (prev != NULL && prev->next != buffer) {
+        prev = prev->next;
+    }
+
+    if (prev != NULL) {
+        prev->next = buffer->next;
+    }
+
+    buffer->next = NULL;
+    return head;
+
+}
+
+VideoEncSurfaceBuffer *VideoEncoderBase::getVideoSurfaceBufferByIndex(
+        VideoEncSurfaceBuffer *head, uint32_t index) {
+
+    // Linear scan for the node whose `index` field matches; returns NULL
+    // when no node matches.
+    for (VideoEncSurfaceBuffer *node = head; node != NULL; node = node->next) {
+        if (node->index == index) {
+            return node;
+        }
+    }
+
+    return NULL;
+}
+
+Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) {
+
+    // Copy one raw input frame (planar YUV420 or NV12) into the VA surface
+    // of mCurFrame, honoring the surface's plane pitches. Used only in
+    // non-shared buffer mode.
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    uint32_t width = mComParams.resolution.width;
+    uint32_t height = mComParams.resolution.height;
+
+    VAImage srcImage;
+    uint8_t *pvBuffer;
+    uint8_t *dstY;
+    uint8_t *dstUV;
+    uint32_t i,j;
+
+    uint8_t *inBuf = inBuffer->data;
+    VAImage *image = NULL;
+
+    int uvOffset = width * height;
+    uint8_t *uvBufIn = inBuf + uvOffset;
+    uint32_t uvHeight = height / 2;
+    uint32_t uvWidth = width;
+
+    // Bug fix: reject unsupported formats up front. The original checked
+    // the format only after vaDeriveImage/vaMapBuffer and returned without
+    // unmapping, leaking the mapped buffer and the derived image.
+    if (mComParams.rawFormat != RAW_FORMAT_YUV420 &&
+        mComParams.rawFormat != RAW_FORMAT_NV12) {
+        LOG_E("Raw format not supoort\n");
+        return ENCODE_FAIL;
+    }
+
+    LOG_V("map source data to surface\n");
+    LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurFrame->surface);
+
+    vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &srcImage);
+    CHECK_VA_STATUS_RETURN("vaDeriveImage");
+
+    LOG_V( "vaDeriveImage Done\n");
+
+    image = &srcImage;
+
+    vaStatus = vaMapBuffer(mVADisplay, image->buf, (void **)&pvBuffer);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    LOG_V("vaImage information\n");
+    LOG_I("image->pitches[0] = %d\n", image->pitches[0]);
+    LOG_I("image->pitches[1] = %d\n", image->pitches[1]);
+    LOG_I("image->offsets[0] = %d\n", image->offsets[0]);
+    LOG_I("image->offsets[1] = %d\n", image->offsets[1]);
+    LOG_I("image->num_planes = %d\n", image->num_planes);
+    LOG_I("image->width = %d\n", image->width);
+    LOG_I("image->height = %d\n", image->height);
+
+    LOG_I("input buf size = %d\n", inBuffer->size);
+
+    if (mComParams.rawFormat == RAW_FORMAT_YUV420) {
+        // Copy luma rows one by one (destination pitch may exceed width)
+        dstY = pvBuffer +image->offsets[0];
+
+        for (i = 0; i < height; i ++) {
+            memcpy(dstY, inBuf + i * width, width);
+            dstY += image->pitches[0];
+        }
+
+        // Interleave the separate U and V planes into the NV12 UV plane
+        dstUV = pvBuffer + image->offsets[1];
+
+        for (i = 0; i < height / 2; i ++) {
+            for (j = 0; j < width; j+=2) {
+                dstUV [j] = inBuf [width * height + i * width / 2 + j / 2];
+                dstUV [j + 1] =
+                    inBuf [width * height * 5 / 4 + i * width / 2 + j / 2];
+            }
+            dstUV += image->pitches[1];
+        }
+    } else {
+        // RAW_FORMAT_NV12: plane layouts already match; copy row by row
+        dstY = pvBuffer + image->offsets[0];
+        for (i = 0; i < height; i++) {
+            memcpy(dstY, inBuf + i * width, width);
+            dstY += image->pitches[0];
+        }
+
+        dstUV = pvBuffer + image->offsets[1];
+        for (i = 0; i < uvHeight; i++) {
+            memcpy(dstUV, uvBufIn + i * uvWidth, uvWidth);
+            dstUV += image->pitches[1];
+        }
+    }
+
+    vaStatus = vaUnmapBuffer(mVADisplay, image->buf);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaDestroyImage(mVADisplay, srcImage.image_id);
+    CHECK_VA_STATUS_RETURN("vaDestroyImage");
+
+    return ENCODE_SUCCESS;
+}
+
+Encode_Status VideoEncoderBase::renderDynamicBitrate() {
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    // Push updated rate-control parameters (bit rate, QP bounds, window)
+    // to the driver as a VAEncMiscParameterBuffer. Only meaningful in VCM
+    // rate-control mode; silently ignored otherwise.
+    LOG_V( "Begin\n\n");
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+
+        // Not an error: other rate-control modes do not take this update
+        LOG_W("Not in VCM mode, but call renderDynamicBitrate\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer   *miscEncParamBuf;
+    VAEncMiscParameterRateControl *bitrateControlParam;
+    VABufferID miscParamBufferID;
+
+    // The buffer holds the generic misc header followed by the
+    // rate-control payload (accessed through miscEncParamBuf->data)
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof (VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+            1, NULL,
+            &miscParamBufferID);
+
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeRateControl;
+    bitrateControlParam = (VAEncMiscParameterRateControl *)miscEncParamBuf->data;
+
+    bitrateControlParam->bits_per_second = mComParams.rcParams.bitRate;
+    bitrateControlParam->initial_qp = mComParams.rcParams.initQP;
+    bitrateControlParam->min_qp = mComParams.rcParams.minQP;
+    bitrateControlParam->target_percentage = mComParams.rcParams.targetPercentage;
+    bitrateControlParam->window_size = mComParams.rcParams.windowSize;
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext,
+            &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+
+Encode_Status VideoEncoderBase::renderDynamicFrameRate() {
+
+    // Push an updated frame rate to the driver as a VAEncMiscParameterBuffer.
+    // Only meaningful in VCM rate-control mode; silently ignored otherwise.
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+
+    if (mComParams.rcMode != RATE_CONTROL_VCM) {
+
+        LOG_W("Not in VCM mode, but call SendDynamicFramerate\n");
+        return ENCODE_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer   *miscEncParamBuf;
+    VAEncMiscParameterFrameRate *frameRateParam;
+    VABufferID miscParamBufferID;
+
+    // Round the rational frame rate to the nearest integer fps
+    uint32_t framerate =
+            (unsigned int) (mComParams.frameRate.frameRateNum + mComParams.frameRate.frameRateDenom/2)
+            / mComParams.frameRate.frameRateDenom;
+
+    // Bug fix: the buffer must hold the misc header struct plus the payload;
+    // the original passed sizeof(miscEncParamBuf) -- the size of a pointer --
+    // instead of sizeof(VAEncMiscParameterBuffer), undersizing the buffer.
+    vaStatus = vaCreateBuffer(mVADisplay, mVAContext,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+            1, NULL, &miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaMapBuffer(mVADisplay, miscParamBufferID, (void **)&miscEncParamBuf);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    miscEncParamBuf->type = VAEncMiscParameterTypeFrameRate;
+    frameRateParam = (VAEncMiscParameterFrameRate *)miscEncParamBuf->data;
+    frameRateParam->framerate = framerate;
+
+    vaStatus = vaUnmapBuffer(mVADisplay, miscParamBufferID);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &miscParamBufferID, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    // Bug fix: log the local copy -- frameRateParam points into the buffer
+    // unmapped above and must not be dereferenced any more.
+    LOG_I( "frame rate = %d\n", framerate);
+    return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
new file mode 100644
index 0000000..8ea052c
--- /dev/null
+++ b/videoencoder/VideoEncoderBase.h
@@ -0,0 +1,144 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __VIDEO_ENCODER_BASE_H__
+#define __VIDEO_ENCODER_BASE_H__
+
+#include <va/va.h>
+#include "VideoEncoderDef.h"
+#include "VideoEncoderInterface.h"
+
+class VideoEncoderBase : IVideoEncoder {
+    // Common base for all libva encoders (AVC/H263/MP4): owns the VA
+    // session, the surface pool and rate-control state; derived classes
+    // supply codec-specific parameter buffers via the derived* hooks.
+    // NOTE(review): `class` inheritance defaults to private, so this type
+    // is not convertible to IVideoEncoder* outside its own scope --
+    // confirm whether `: public IVideoEncoder` was intended.
+
+public:
+    VideoEncoderBase();
+    virtual ~VideoEncoderBase();
+
+    virtual Encode_Status start(void);
+    virtual void flush(void);
+    virtual Encode_Status stop(void);
+    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer);
+
+    /*
+    * getOutput can be called several time for a frame (such as first time  codec data, and second time others)
+    * encoder will provide encoded data according to the format (whole frame, codec_data, sigle NAL etc)
+    * If the buffer passed to encoded is not big enough, this API call will return ENCODE_BUFFER_TOO_SMALL
+    * and caller should provide a big enough buffer and call again
+    */
+    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+
+
+    virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams);
+    virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams);
+    virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig);
+    virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig);
+
+    // Worst-case encoded-frame size, for sizing caller output buffers
+    virtual Encode_Status getMaxOutSize(uint32_t *maxSize);
+
+
+protected:
+    // Codec-specific hooks implemented by VideoEncoderAVC/H263/MP4
+    virtual Encode_Status sendEncodeCommand(void) = 0;
+    virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) = 0;
+    virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) = 0;
+    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) = 0;
+
+    Encode_Status prepareForOutput(VideoEncOutputBuffer *outBuffer, bool *useLocalBuffer);
+    Encode_Status cleanupForOutput();
+    Encode_Status outputAllData(VideoEncOutputBuffer *outBuffer);
+    // VCM-mode dynamic parameter updates pushed via misc parameter buffers
+    Encode_Status renderDynamicFrameRate();
+    Encode_Status renderDynamicBitrate();
+
+private:
+    void setDefaultParams(void);
+    Encode_Status setUpstreamBuffer(VideoBufferSharingMode bufferMode, uint32_t *bufList, uint32_t bufCnt);
+    Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format,
+            uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr);
+
+    // Singly-linked-list helpers for the free-surface pool
+    VideoEncSurfaceBuffer *appendVideoSurfaceBuffer(
+            VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer);
+    VideoEncSurfaceBuffer *removeVideoSurfaceBuffer(
+            VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer);
+    VideoEncSurfaceBuffer *getVideoSurfaceBufferByIndex(
+            VideoEncSurfaceBuffer *head, uint32_t index);
+
+    Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer);
+    void updateProperities(void);
+    void decideFrameType(void);
+    Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer);
+
+protected:
+
+    // --- VA session state ---
+    bool mInitialized;
+    VADisplay mVADisplay;
+    VAContextID mVAContext;
+    VAConfigID mVAConfig;
+    VAEntrypoint mVAEntrypoint;
+
+    // --- coded-data readout state (getOutput) ---
+    VACodedBufferSegment *mCurSegment;
+    uint32_t mOffsetInSeg;
+    uint32_t mTotalSize;
+    uint32_t mTotalSizeCopied;
+
+    // Common encoder parameters shared by all codecs
+    VideoParamsCommon mComParams;
+
+    // --- upstream buffer sharing ---
+    VideoBufferSharingMode mBufferMode;
+    uint32_t *mUpstreamBufferList;
+    uint32_t mUpstreamBufferCnt;
+
+    bool mForceKeyFrame;
+    bool mNewHeader;
+    bool mFirstFrame;
+
+    // Flags: parameter changed, push to driver on the next frame
+    bool mRenderMaxSliceSize; //Max Slice Size
+    bool mRenderQP;
+    bool mRenderAIR;
+    bool mRenderFrameRate;
+    bool mRenderBitRate;
+
+    VABufferID mVACodedBuffer[2];   // ping-pong coded buffers
+    VABufferID mLastCodedBuffer;
+    VABufferID mOutCodedBuffer;
+    VABufferID mSeqParamBuf;
+    VABufferID mPicParamBuf;
+    VABufferID mSliceParamBuf;
+
+    // --- surface pool ---
+    VASurfaceID *mSharedSurfaces;
+    VASurfaceID *mSurfaces;
+    uint32_t mSurfaceCnt;
+    uint32_t mSharedSurfacesCnt;
+    uint32_t mReqSurfacesCnt;
+    uint8_t **mUsrPtr;              // user pointers handed out per surface
+
+    VideoEncSurfaceBuffer *mVideoSrcBufferList;
+    VideoEncSurfaceBuffer *mCurFrame;	//current input frame to be encoded;
+    VideoEncSurfaceBuffer *mRefFrame;   //reference frame
+    VideoEncSurfaceBuffer *mRecFrame;	//reconstructed frame;
+    VideoEncSurfaceBuffer *mLastFrame;	//last frame;
+
+    VideoEncRawBuffer *mLastInputRawBuffer;
+
+    uint32_t mEncodedFrames;
+    uint32_t mFrameNum;
+    uint32_t mCodedBufSize;         // cached worst-case output size (getMaxOutSize)
+    uint32_t mCodedBufIndex;
+
+    bool mPicSkipped;
+    bool mIsIntra;
+    bool mSliceSizeOverflow;
+    bool mCodedBufferMapped;
+    bool mDataCopiedOut;
+    bool mKeyFrame;
+
+    // Constants
+    static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2;
+    static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8;
+};
+
+
+#endif /* __VIDEO_ENCODER_BASE_H__ */
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
new file mode 100644
index 0000000..1e90094
--- /dev/null
+++ b/videoencoder/VideoEncoderDef.h
@@ -0,0 +1,435 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __VIDEO_ENCODER_DEF_H__
+#define __VIDEO_ENCODER_DEF_H__
+
+#include <stdint.h>
+
+#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
+#define min(X,Y) (((X) < (Y)) ? (X) : (Y))
+#define max(X,Y) (((X) > (Y)) ? (X) : (Y))
+
+typedef int32_t Encode_Status;
+
+// Video encode error code
+enum {
+    ENCODE_NO_REQUEST_DATA = -10,
+    ENCODE_WRONG_STATE = -9,
+    ENCODE_NOTIMPL = -8,
+    ENCODE_NO_MEMORY = -7,
+    ENCODE_NOT_INIT = -6,
+    ENCODE_DRIVER_FAIL = -5,
+    ENCODE_INVALID_PARAMS = -4,
+    ENCODE_NOT_SUPPORTED = -3,
+    ENCODE_NULL_PTR = -2,
+    ENCODE_FAIL = -1,
+    ENCODE_SUCCESS = 0,
+    ENCODE_ALREADY_INIT = 1,
+    ENCODE_SLICESIZE_OVERFLOW = 2,
+    ENCODE_BUFFER_TOO_SMALL = 3 // The buffer passed to encode is too small to contain encoded data
+};
+
+typedef enum {
+    OUTPUT_EVERYTHING = 0,  //Output whatever driver generates
+    OUTPUT_CODEC_DATA = 1,
+    OUTPUT_FRAME_DATA = 2, //Equal to OUTPUT_EVERYTHING when no header along with the frame data
+    OUTPUT_ONE_NAL = 4,
+    OUTPUT_ONE_NAL_WITHOUT_STARTCODE = 8,
+    OUTPUT_LENGTH_PREFIXED = 16,
+    OUTPUT_BUFFER_LAST
+} VideoOutputFormat;
+
+typedef enum {
+    RAW_FORMAT_NONE = 0,
+    RAW_FORMAT_YUV420 = 1,
+    RAW_FORMAT_YUV422 = 2,
+    RAW_FORMAT_YUV444 = 4,
+    RAW_FORMAT_NV12 = 8,
+    RAW_FORMAT_PROTECTED = 0x80000000,
+    RAW_FORMAT_LAST
+} VideoRawFormat;
+
+typedef enum {
+    RATE_CONTROL_NONE = 1,
+    RATE_CONTROL_CBR = 2,
+    RATE_CONTROL_VBR = 4,
+    RATE_CONTROL_VCM = 8,
+    RATE_CONTROL_LAST
+} VideoRateControl;
+
+typedef enum {
+    PROFILE_MPEG2SIMPLE = 0,
+    PROFILE_MPEG2MAIN,
+    PROFILE_MPEG4SIMPLE,
+    PROFILE_MPEG4ADVANCEDSIMPLE,
+    PROFILE_MPEG4MAIN,
+    PROFILE_H264BASELINE,
+    PROFILE_H264MAIN,
+    PROFILE_H264HIGH,
+    PROFILE_VC1SIMPLE,
+    PROFILE_VC1MAIN,
+    PROFILE_VC1ADVANCED,
+    PROFILE_H263BASELINE
+} VideoProfile;
+
+typedef enum {
+    AVC_DELIMITER_LENGTHPREFIX = 0,
+    AVC_DELIMITER_ANNEXB
+} AVCDelimiterType;
+
+typedef enum {
+    VIDEO_ENC_NONIR,       // Non intra refresh
+    VIDEO_ENC_CIR, 		// Cyclic intra refresh
+    VIDEO_ENC_AIR, 		// Adaptive intra refresh
+    VIDEO_ENC_BOTH,
+    VIDEO_ENC_LAST
+} VideoIntraRefreshType;
+
+enum VideoBufferSharingMode {
+    BUFFER_SHARING_NONE = 1, //Means non shared buffer mode
+    BUFFER_SHARING_CI = 2,
+    BUFFER_SHARING_V4L2 = 4,
+    BUFFER_SHARING_SURFACE = 8,
+    BUFFER_SHARING_USRPTR = 16,
+    BUFFER_LAST
+};
+
+// Output buffer flag
+#define ENCODE_BUFFERFLAG_ENDOFFRAME       0x00000001
+#define ENCODE_BUFFERFLAG_PARTIALFRAME     0x00000002
+#define ENCODE_BUFFERFLAG_SYNCFRAME        0x00000004
+#define ENCODE_BUFFERFLAG_CODECCONFIG      0x00000008
+#define ENCODE_BUFFERFLAG_DATACORRUPT      0x00000010
+#define ENCODE_BUFFERFLAG_DATAINVALID      0x00000020
+#define ENCODE_BUFFERFLAG_SLICEOVERFOLOW   0x00000040
+
+// Compressed-output buffer handed to IVideoEncoder::getOutput().
+// The client provides data/bufferSize; the encoder fills dataSize and flag.
+typedef struct {
+    uint8_t *data;
+    uint32_t bufferSize; //buffer size
+    uint32_t dataSize; //actual size of encoded data placed in 'data'
+    uint32_t remainingSize; //bytes still pending beyond this buffer
+    int flag; //Key frame, Codec Data etc (ENCODE_BUFFERFLAG_* bits)
+    VideoOutputFormat format; //output format
+    uint64_t timeStamp; //reserved
+} VideoEncOutputBuffer;
+
+// Raw (uncompressed) input buffer passed to IVideoEncoder::encode().
+typedef struct {
+    uint8_t *data;
+    uint32_t size;
+    bool bufAvailable; //To indicate whether this buffer can be reused
+    uint64_t timeStamp; //reserved
+} VideoEncRawBuffer;
+
+// Node of a singly linked list of encode surfaces (see 'next'); pairs a
+// VA surface with the optional user-space pointer backing it.
+struct VideoEncSurfaceBuffer {
+    VASurfaceID surface;
+    uint8_t *usrptr;
+    uint32_t index;
+    bool bufAvailable; //true when the surface is free for a new frame
+    VideoEncSurfaceBuffer *next;
+};
+
+// Adaptive Intra Refresh (AIR) tuning parameters.
+// Field semantics follow the libva AIR controls -- confirm against the
+// driver headers; names suggest: MBs refreshed per frame, selection
+// threshold, and an auto mode switch.
+struct AirParams {
+    uint32_t airMBs;
+    uint32_t airThreshold;
+    uint32_t airAuto;
+
+    // Self-assignment-safe memberwise copy.
+    AirParams &operator=(const AirParams &other) {
+        if (this == &other) return *this;
+
+        this->airMBs= other.airMBs;
+        this->airThreshold= other.airThreshold;
+        this->airAuto = other.airAuto;
+        return *this;
+    }
+};
+
+// Frame rate expressed as the rational frameRateNum / frameRateDenom.
+struct VideoFrameRate {
+    uint32_t frameRateNum;
+    uint32_t frameRateDenom;
+
+    // Self-assignment-safe memberwise copy.
+    VideoFrameRate &operator=(const VideoFrameRate &other) {
+        if (this == &other) return *this;
+
+        this->frameRateNum = other.frameRateNum;
+        this->frameRateDenom = other.frameRateDenom;
+        return *this;
+    }
+};
+
+// Picture dimensions in pixels.
+struct VideoResolution {
+    uint32_t width;
+    uint32_t height;
+
+    // Self-assignment-safe memberwise copy.
+    VideoResolution &operator=(const VideoResolution &other) {
+        if (this == &other) return *this;
+
+        this->width = other.width;
+        this->height = other.height;
+        return *this;
+    }
+};
+
+// Rate-control knobs shared by all codecs (bit rate, QP bounds, RC window).
+// Exact units of windowSize/targetPercentage follow the libva RC misc
+// parameters -- verify against va.h before changing.
+struct VideoRateControlParams {
+    uint32_t bitRate;
+    uint32_t initQP;
+    uint32_t minQP;
+    uint32_t windowSize;
+    uint32_t targetPercentage;
+
+    // Self-assignment-safe memberwise copy.
+    VideoRateControlParams &operator=(const VideoRateControlParams &other) {
+        if (this == &other) return *this;
+
+        this->bitRate = other.bitRate;
+        this->initQP = other.initQP;
+        this->minQP = other.minQP;
+        this->windowSize = other.windowSize;
+        this->targetPercentage = other.targetPercentage;
+        return *this;
+    }
+};
+
+// Per-picture-type slice counts: iSliceNum for intra, pSliceNum for
+// predicted pictures.
+struct SliceNum {
+    uint32_t iSliceNum;
+    uint32_t pSliceNum;
+
+    // Self-assignment-safe memberwise copy.
+    SliceNum &operator=(const SliceNum &other) {
+        if (this == &other) return *this;
+
+        this->iSliceNum = other.iSliceNum;
+        this->pSliceNum= other.pSliceNum;
+        return *this;
+    }
+};
+
+enum VideoParamConfigType {
+    VideoParamsTypeStartUnused = 0x01000000,
+    VideoParamsTypeCommon,
+    VideoParamsTypeAVC,
+    VideoParamsTypeH263,
+    VideoParamsTypeMP4,
+    VideoParamsTypeVC1,
+    VideoParamsTypeUpSteamBuffer,
+    VideoParamsTypeUsrptrBuffer,
+
+    VideoConfigTypeFrameRate,
+    VideoConfigTypeBitRate,
+    VideoConfigTypeResolution,
+    VideoConfigTypeIntraRefreshType,
+    VideoConfigTypeAIR,
+    VideoConfigTypeCyclicFrameInterval,
+    VideoConfigTypeAVCIntraPeriod,
+    VideoConfigTypeNALSize,
+    VideoConfigTypeIDRRequest,
+    VideoConfigTypeSliceNum,
+
+    VideoParamsConfigExtension
+};
+
+// Base of every parameter/config payload passed through
+// set/getParameters and set/getConfig. Each derived struct's constructor
+// stamps 'type' (VideoParamConfigType) and 'size' (sizeof itself) so the
+// receiver can identify and sanity-check the payload at runtime.
+struct VideoParamConfigSet {
+    VideoParamConfigType type;
+    uint32_t size;
+
+    VideoParamConfigSet &operator=(const VideoParamConfigSet &other) {
+        if (this == &other) return *this;
+        this->type = other.type;
+        this->size = other.size;
+        return *this;
+    }
+};
+
+// Codec-independent encode parameters (profile, resolution, frame rate,
+// GOP/intra period, rate control, intra refresh). Tagged
+// VideoParamsTypeCommon by the constructor.
+struct VideoParamsCommon : VideoParamConfigSet {
+
+    VAProfile profile;
+    uint8_t level;
+    VideoRawFormat rawFormat;
+    VideoResolution resolution;
+    VideoFrameRate frameRate;
+    int32_t intraPeriod;
+    VideoRateControl rcMode;
+    VideoRateControlParams rcParams;
+    VideoIntraRefreshType refreshType;
+    int32_t cyclicFrameInterval;
+    AirParams airParams;
+    uint32_t disableDeblocking;
+
+    VideoParamsCommon() {
+        type = VideoParamsTypeCommon;
+        size = sizeof(VideoParamsCommon);
+    }
+
+    // Memberwise copy; also copies the base's type/size tag.
+    VideoParamsCommon &operator=(const VideoParamsCommon &other) {
+        if (this == &other) return *this;
+
+        VideoParamConfigSet::operator=(other);
+        this->profile = other.profile;
+        this->level = other.level;
+        this->rawFormat = other.rawFormat;
+        this->resolution = other.resolution;
+        this->frameRate = other.frameRate;
+        this->intraPeriod = other.intraPeriod;
+        this->rcMode = other.rcMode;
+        this->rcParams = other.rcParams;
+        this->refreshType = other.refreshType;
+        this->cyclicFrameInterval = other.cyclicFrameInterval;
+        this->airParams = other.airParams;
+        this->disableDeblocking = other.disableDeblocking;
+        return *this;
+    }
+};
+
+// H.264/AVC-specific encode parameters. Tagged VideoParamsTypeAVC.
+struct VideoParamsAVC : VideoParamConfigSet {
+    uint32_t basicUnitSize;  //for rate control
+    uint8_t VUIFlag;
+    int32_t maxSliceSize;
+    uint32_t idrInterval;
+    SliceNum sliceNum;
+    AVCDelimiterType delimiterType; //length-prefixed vs Annex B NAL output
+
+    VideoParamsAVC() {
+        type = VideoParamsTypeAVC;
+        size = sizeof(VideoParamsAVC);
+    }
+
+    // Memberwise copy; also copies the base's type/size tag.
+    VideoParamsAVC &operator=(const VideoParamsAVC &other) {
+        if (this == &other) return *this;
+
+        VideoParamConfigSet::operator=(other);
+        this->basicUnitSize = other.basicUnitSize;
+        this->VUIFlag = other.VUIFlag;
+        this->maxSliceSize = other.maxSliceSize;
+        this->idrInterval = other.idrInterval;
+        this->sliceNum = other.sliceNum;
+        this->delimiterType = other.delimiterType;
+
+        return *this;
+    }
+};
+
+// Describes buffers allocated upstream that the encoder should share
+// (bufferMode selects CI/V4L2/surface/usrptr sharing; bufList holds
+// bufCnt entries). NOTE(review): the tag enumerator is spelled
+// "VideoParamsTypeUpSteamBuffer" (sic) -- the typo is part of the public
+// enum and must not be "fixed" independently of its users.
+struct VideoParamsUpstreamBuffer : VideoParamConfigSet {
+
+    VideoParamsUpstreamBuffer() {
+        type = VideoParamsTypeUpSteamBuffer;
+        size = sizeof(VideoParamsUpstreamBuffer);
+    }
+
+    VideoBufferSharingMode bufferMode;
+    uint32_t *bufList;
+    uint32_t bufCnt;
+};
+
+// Request/response payload for user-pointer buffer allocation: the caller
+// fills the "input" fields, the encoder returns the "output" fields.
+struct VideoParamsUsrptrBuffer : VideoParamConfigSet {
+
+    VideoParamsUsrptrBuffer() {
+        type = VideoParamsTypeUsrptrBuffer;
+        size = sizeof(VideoParamsUsrptrBuffer);
+    }
+
+    //input
+    uint32_t width;
+    uint32_t height;
+    uint32_t format;
+    uint32_t expectedSize;
+
+    //output
+    uint32_t actualSize;
+    uint32_t stride;
+    uint8_t *usrPtr;
+};
+
+// Runtime config payload carrying a new frame rate; self-tagged.
+struct VideoConfigFrameRate : VideoParamConfigSet {
+
+    VideoConfigFrameRate() {
+        type = VideoConfigTypeFrameRate;
+        size = sizeof(VideoConfigFrameRate);
+    }
+
+    VideoFrameRate frameRate;
+};
+
+// Runtime config payload carrying new rate-control parameters; self-tagged.
+struct VideoConfigBitRate : VideoParamConfigSet {
+
+    VideoConfigBitRate() {
+        type = VideoConfigTypeBitRate;
+        size = sizeof(VideoConfigBitRate);
+    }
+
+    VideoRateControlParams rcParams;
+};
+
+// Runtime config payload for AVC intra/IDR cadence; self-tagged.
+struct VideoConfigAVCIntraPeriod : VideoParamConfigSet {
+
+    VideoConfigAVCIntraPeriod() {
+        type = VideoConfigTypeAVCIntraPeriod;
+        size = sizeof(VideoConfigAVCIntraPeriod);
+    }
+
+    uint32_t idrInterval;  //How many Intra frame will have a IDR frame
+    uint32_t intraPeriod;
+};
+
+// Runtime config payload carrying the max slice/NAL size; self-tagged.
+struct VideoConfigNALSize : VideoParamConfigSet {
+
+    VideoConfigNALSize() {
+        type = VideoConfigTypeNALSize;
+        size = sizeof(VideoConfigNALSize);
+    }
+
+    uint32_t maxSliceSize;
+};
+
+// Runtime config payload carrying a new resolution; self-tagged.
+// NOTE(review): the struct name is misspelled ("Resoltuion") but is part
+// of the public header -- renaming would break callers; fix only in a
+// coordinated API change.
+struct VideoConfigResoltuion : VideoParamConfigSet {
+
+    VideoConfigResoltuion() {
+        type = VideoConfigTypeResolution;
+        size = sizeof(VideoConfigResoltuion);
+    }
+
+    VideoResolution resolution;
+};
+
+// Runtime config payload selecting the intra-refresh mode; self-tagged.
+struct VideoConfigIntraRefreshType : VideoParamConfigSet {
+
+    VideoConfigIntraRefreshType() {
+        type = VideoConfigTypeIntraRefreshType;
+        size = sizeof(VideoConfigIntraRefreshType);
+    }
+
+    VideoIntraRefreshType refreshType;
+};
+
+// Runtime config payload for the cyclic intra-refresh interval; self-tagged.
+struct VideoConfigCyclicFrameInterval : VideoParamConfigSet {
+
+    VideoConfigCyclicFrameInterval() {
+        type = VideoConfigTypeCyclicFrameInterval;
+        size = sizeof(VideoConfigCyclicFrameInterval);
+    }
+
+    int32_t cyclicFrameInterval;
+};
+
+// Runtime config payload carrying AIR parameters; self-tagged.
+struct VideoConfigAIR : VideoParamConfigSet {
+
+    VideoConfigAIR() {
+        type = VideoConfigTypeAIR;
+        size = sizeof(VideoConfigAIR);
+    }
+
+    AirParams airParams;
+};
+
+// Runtime config payload carrying per-picture-type slice counts; self-tagged.
+struct VideoConfigSliceNum : VideoParamConfigSet {
+
+    VideoConfigSliceNum() {
+        type = VideoConfigTypeSliceNum;
+        size = sizeof(VideoConfigSliceNum);
+    }
+
+    SliceNum sliceNum;
+};
+#endif /*  __VIDEO_ENCODER_DEF_H__ */
diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp
new file mode 100644
index 0000000..6fb510b
--- /dev/null
+++ b/videoencoder/VideoEncoderH263.cpp
@@ -0,0 +1,162 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <string.h>
+#include <stdlib.h>
+#include "VideoEncoderLog.h"
+#include "VideoEncoderH263.h"
+#include <va/va_tpi.h>
+
+// Seed the common params with the H.263 baseline profile.
+// NOTE(review): mComParams.profile is a VAProfile but is assigned the
+// library's own VideoProfile enumerator via a C-style cast; the two enums
+// are not guaranteed to share values -- presumably VideoEncoderBase remaps
+// this before calling libva. TODO confirm.
+VideoEncoderH263::VideoEncoderH263() {
+    mComParams.profile = (VAProfile)PROFILE_H263BASELINE;
+}
+
+// Queue the per-frame parameter buffers for the current picture.
+// Sequence parameters are sent only once, before the first frame
+// (mFrameNum == 0); picture and slice parameters are sent every frame.
+// Returns ENCODE_SUCCESS or the first failing step's status.
+Encode_Status VideoEncoderH263::sendEncodeCommand(void) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    LOG_V( "Begin\n");
+
+    if (mFrameNum == 0) {
+        ret = renderSequenceParams();
+        CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+    }
+
+    ret = renderPictureParams();
+    CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+    ret = renderSliceParams();
+    CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+    LOG_V( "End\n");
+    return ENCODE_SUCCESS;
+}
+
+
+// Build the H.263 sequence parameter buffer from mComParams and submit it
+// via vaCreateBuffer + vaRenderPicture.
+// NOTE(review): h263SequenceParam is a stack object and only the five
+// fields below are assigned; any other VAEncSequenceParameterBufferH263
+// members reach the driver uninitialized -- consider zero-initializing
+// the struct first. Also frame_rate is hard-coded to 30 (see comment),
+// ignoring mComParams.frameRate -- TODO confirm the driver requires this.
+Encode_Status VideoEncoderH263::renderSequenceParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncSequenceParameterBufferH263 h263SequenceParam;
+
+    LOG_V( "Begin\n\n");
+
+    //set up the sequence params for HW
+    h263SequenceParam.bits_per_second= mComParams.rcParams.bitRate;
+    h263SequenceParam.frame_rate = 30; //hard-coded, driver need;
+    h263SequenceParam.initial_qp = mComParams.rcParams.initQP;
+    h263SequenceParam.min_qp = mComParams.rcParams.minQP;
+    h263SequenceParam.intra_period = mComParams.intraPeriod;
+
+    //h263_seq_param.fixed_vop_rate = 30;
+
+    LOG_V("===h263 sequence params===\n");
+    LOG_I( "bitrate = %d\n", h263SequenceParam.bits_per_second);
+    LOG_I( "frame_rate = %d\n", h263SequenceParam.frame_rate);
+    LOG_I( "initial_qp = %d\n", h263SequenceParam.initial_qp);
+    LOG_I( "min_qp = %d\n", h263SequenceParam.min_qp);
+    LOG_I( "intra_period = %d\n\n", h263SequenceParam.intra_period);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSequenceParameterBufferType,
+            sizeof(h263SequenceParam),
+            1, &h263SequenceParam,
+            &mSeqParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Build the per-picture parameter buffer (reference/reconstructed
+// surfaces, coded buffer, dimensions, intra vs predictive type) and
+// submit it via vaCreateBuffer + vaRenderPicture.
+// NOTE(review): h263PictureParams is stack-allocated and only partially
+// assigned; any remaining VAEncPictureParameterBufferH263 members are
+// passed to the driver uninitialized -- consider zero-initializing.
+Encode_Status VideoEncoderH263::renderPictureParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    VAEncPictureParameterBufferH263 h263PictureParams;
+
+    LOG_V( "Begin\n\n");
+
+    // set picture params for HW
+    h263PictureParams.reference_picture = mRefFrame->surface;
+    h263PictureParams.reconstructed_picture = mRecFrame->surface;
+    h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex];
+    h263PictureParams.picture_width = mComParams.resolution.width;
+    h263PictureParams.picture_height = mComParams.resolution.height;
+    h263PictureParams.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+    LOG_V("======h263 picture params======\n");
+    LOG_I( "reference_picture = 0x%08x\n", h263PictureParams.reference_picture);
+    LOG_I( "reconstructed_picture = 0x%08x\n", h263PictureParams.reconstructed_picture);
+    LOG_I( "coded_buf = 0x%08x\n", h263PictureParams.coded_buf);
+    LOG_I( "coded_buf_index = %d\n", mCodedBufIndex);
+    LOG_I( "picture_width = %d\n", h263PictureParams.picture_width);
+    LOG_I( "picture_height = %d\n",h263PictureParams.picture_height);
+    LOG_I( "picture_type = %d\n\n",h263PictureParams.picture_type);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncPictureParameterBufferType,
+            sizeof(h263PictureParams),
+            1,&h263PictureParams,
+            &mPicParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf , 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Submit a single slice covering the whole picture: the slice parameter
+// buffer is created empty (NULL data), then mapped and filled in place
+// before being unmapped and rendered. Slice height is the frame height
+// rounded up to a multiple of 16, expressed in macroblocks.
+// NOTE(review): the local 'sliceNum' is never used -- candidate for
+// removal in a follow-up code change.
+Encode_Status VideoEncoderH263::renderSliceParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    uint32_t sliceNum;
+    uint32_t sliceHeight;
+    uint32_t sliceHeightInMB;
+
+    LOG_V("Begin\n\n");
+
+    // round height up to the next multiple of 16 (one macroblock row)
+    sliceHeight = mComParams.resolution.height;
+    sliceHeight += 15;
+    sliceHeight &= (~15);
+    sliceHeightInMB = sliceHeight / 16;
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSliceParameterBufferType,
+            sizeof(VAEncSliceParameterBuffer),
+            1, NULL, &mSliceParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    VAEncSliceParameterBuffer *sliceParams;
+    vaStatus = vaMapBuffer(mVADisplay, mSliceParamBuf, (void **)&sliceParams);
+    CHECK_VA_STATUS_RETURN("vaMapBuffer");
+
+    // starting MB row number for this slice
+    sliceParams->start_row_number = 0;
+    // slice height measured in MB
+    sliceParams->slice_height = sliceHeightInMB;
+    sliceParams->slice_flags.bits.is_intra = mIsIntra;
+    sliceParams->slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+    LOG_V("======h263 slice params======\n");
+    LOG_I("start_row_number = %d\n", (int) sliceParams->start_row_number);
+    LOG_I("slice_height_in_mb = %d\n", (int) sliceParams->slice_height);
+    LOG_I("slice.is_intra = %d\n", (int) sliceParams->slice_flags.bits.is_intra);
+
+    vaStatus = vaUnmapBuffer(mVADisplay, mSliceParamBuf);
+    CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V("end\n");
+    return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderH263.h b/videoencoder/VideoEncoderH263.h
new file mode 100644
index 0000000..2113e2f
--- /dev/null
+++ b/videoencoder/VideoEncoderH263.h
@@ -0,0 +1,45 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __VIDEO_ENCODER_H263_H__
+#define __VIDEO_ENCODER_H263_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+  * H.263 Encoder class, derived from VideoEncoderBase
+  */
+/**
+  * H.263 Encoder class, derived from VideoEncoderBase.
+  * Only sendEncodeCommand() is specialized; H.263 has no codec-specific
+  * parameters or configs, so the derived*Params/Config hooks are no-ops
+  * that return ENCODE_SUCCESS.
+  */
+class VideoEncoderH263: public VideoEncoderBase {
+public:
+    VideoEncoderH263();
+    virtual ~VideoEncoderH263() {};
+
+protected:
+    // Renders sequence (first frame only), picture and slice parameters.
+    virtual Encode_Status sendEncodeCommand(void);
+    virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+        return ENCODE_SUCCESS;
+    }
+
+    // Local Methods
+private:
+    Encode_Status renderSequenceParams();
+    Encode_Status renderPictureParams();
+    Encode_Status renderSliceParams();
+};
+
+#endif /* __VIDEO_ENCODER_H263_H__ */
+
diff --git a/videoencoder/VideoEncoderHost.cpp b/videoencoder/VideoEncoderHost.cpp
new file mode 100644
index 0000000..aed2bb9
--- /dev/null
+++ b/videoencoder/VideoEncoderHost.cpp
@@ -0,0 +1,43 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include "VideoEncoderMP4.h"
+#include "VideoEncoderH263.h"
+#include "VideoEncoderAVC.h"
+#include "VideoEncoderHost.h"
+#include "VideoEncoderLog.h"
+#include <string.h>
+
+// Factory: map a MIME type string (case-insensitive) to a concrete
+// encoder. Supported: video/avc, video/h264 -> AVC; video/h263 -> H.263;
+// video/mpeg4, video/mp4v-es -> MPEG-4. Returns NULL for a NULL or
+// unknown mime type. The caller owns the returned object and must free
+// it with releaseVideoEncoder().
+IVideoEncoder *createVideoEncoder(const char *mimeType) {
+
+    if (mimeType == NULL) {
+        LOG_E("NULL mime type");
+        return NULL;
+    }
+
+    if (strcasecmp(mimeType, "video/avc") == 0 ||
+            strcasecmp(mimeType, "video/h264") == 0) {
+        VideoEncoderAVC *p = new VideoEncoderAVC();
+        return (IVideoEncoder *)p;
+    } else if (strcasecmp(mimeType, "video/h263") == 0) {
+        VideoEncoderH263 *p = new VideoEncoderH263();
+        return (IVideoEncoder *)p;
+    } else if (strcasecmp(mimeType, "video/mpeg4") == 0 ||
+            strcasecmp(mimeType, "video/mp4v-es") == 0) {
+        VideoEncoderMP4 *p = new VideoEncoderMP4();
+        return (IVideoEncoder *)p;
+    } else {
+        LOG_E ("Unknown mime type: %s", mimeType);
+    }
+    return NULL;
+}
+
+// Destroy an encoder returned by createVideoEncoder(). Safe to call with
+// NULL. Deletion through the interface is valid because IVideoEncoder
+// declares a virtual destructor.
+void releaseVideoEncoder(IVideoEncoder *p) {
+    if (p) delete p;
+}
+
diff --git a/videoencoder/VideoEncoderHost.h b/videoencoder/VideoEncoderHost.h
new file mode 100644
index 0000000..cd39dc3
--- /dev/null
+++ b/videoencoder/VideoEncoderHost.h
@@ -0,0 +1,17 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VIDEO_ENCODER_HOST_H_
+#define VIDEO_ENCODER_HOST_H_
+
+#include "VideoEncoderInterface.h"
+
+IVideoEncoder *createVideoEncoder(const char *mimeType);
+void releaseVideoEncoder(IVideoEncoder *p);
+
+#endif /* VIDEO_ENCODER_HOST_H_ */
\ No newline at end of file
diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h
new file mode 100644
index 0000000..416c29d
--- /dev/null
+++ b/videoencoder/VideoEncoderInterface.h
@@ -0,0 +1,29 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef VIDEO_ENCODER_INTERFACE_H_
+#define VIDEO_ENCODER_INTERFACE_H_
+
+#include "VideoEncoderDef.h"
+
+// Abstract encoder interface implemented by the codec-specific encoders
+// (obtained via createVideoEncoder, destroyed via releaseVideoEncoder).
+class IVideoEncoder {
+public:
+    virtual ~IVideoEncoder() {};
+    virtual Encode_Status start(void) = 0;
+    virtual Encode_Status stop(void) = 0;
+    virtual void flush(void) = 0;
+    // Submit one raw input buffer for encoding.
+    virtual Encode_Status encode(VideoEncRawBuffer *inBuffer) = 0;
+    // Retrieve encoded data into the caller-provided output buffer.
+    virtual Encode_Status getOutput(VideoEncOutputBuffer *outBuffer) = 0;
+    // Parameter/config payloads are identified by their type/size tag
+    // (see VideoParamConfigSet).
+    virtual Encode_Status getParameters(VideoParamConfigSet *videoEncParams) = 0;
+    virtual Encode_Status setParameters(VideoParamConfigSet *videoEncParams) = 0;
+    virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0;
+    virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0;
+    // Upper bound on the size of one encoded output buffer.
+    virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0;
+};
+
+#endif /* VIDEO_ENCODER_INTERFACE_H_ */
diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h
new file mode 100644
index 0000000..4c1e982
--- /dev/null
+++ b/videoencoder/VideoEncoderLog.h
@@ -0,0 +1,68 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __VIDEO_ENCODER_LOG_H__
+#define __VIDEO_ENCODER_LOG_H__
+
+// Components
+#define VIDEO_ENC_COMP 		"VideoEncoder"
+
+#include <utils/Log.h>
+
+#define VIDEO_ENC_LOG_LEVEL_ERROR     ANDROID_LOG_ERROR
+#define VIDEO_ENC_LOG_LEVEL_WARNING   ANDROID_LOG_WARN
+#define VIDEO_ENC_LOG_LEVEL_INFO	ANDROID_LOG_INFO
+#define VIDEO_ENC_LOG_LEVEL_VERBOSE   ANDROID_LOG_VERBOSE
+
+#define mix_log(comp, level, format, ...) \
+    __android_log_print(level, comp, "%s():%d: "format, \
+    __FUNCTION__, __LINE__, ##__VA_ARGS__)
+
+#ifdef VIDEO_ENC_LOG_ENABLE
+#define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
+#define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
+#define LOG_W(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_WARNING, format, ##__VA_ARGS__)
+#else
+#define LOG_V(format, ...)
+#define LOG_I(format, ...)
+#define LOG_W(format, ...)
+#endif
+
+#define LOG_E(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_ERROR, format, ##__VA_ARGS__)
+
+#define CHECK_VA_STATUS_RETURN(FUNC)\
+    if (vaStatus != VA_STATUS_SUCCESS) {\
+        LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\
+        return ENCODE_DRIVER_FAIL;\
+    }
+
+#define CHECK_VA_STATUS_GOTO_CLEANUP(FUNC)\
+    if (vaStatus != VA_STATUS_SUCCESS) {\
+        LOG_E(FUNC" failed. vaStatus = %d\n", vaStatus);\
+        ret = ENCODE_DRIVER_FAIL; \
+        goto CLEAN_UP;\
+    }
+
+#define CHECK_ENCODE_STATUS_RETURN(FUNC)\
+    if (ret != ENCODE_SUCCESS) { \
+        LOG_E(FUNC"Failed. ret = 0x%08x\n", ret); \
+        return ret; \
+    }
+
+#define CHECK_ENCODE_STATUS_CLEANUP(FUNC)\
+    if (ret != ENCODE_SUCCESS) { \
+        LOG_E(FUNC"Failed, ret = 0x%08x\n", ret); \
+        goto CLEAN_UP;\
+    }
+
+#define CHECK_NULL_RETURN_IFFAIL(POINTER)\
+    if (POINTER == NULL) { \
+        LOG_E("Invalid pointer\n"); \
+        return ENCODE_NULL_PTR;\
+    }
+#endif /*  __VIDEO_ENCODER_LOG_H__ */
diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp
new file mode 100644
index 0000000..37dce53
--- /dev/null
+++ b/videoencoder/VideoEncoderMP4.cpp
@@ -0,0 +1,311 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "VideoEncoderLog.h"
+#include "VideoEncoderMP4.h"
+#include <va/va_tpi.h>
+
+// Construct an MPEG-4:2 encoder with the defaults used by this library:
+// profile_and_level_indication = 3 and a fixed VOP time increment of 0.
+VideoEncoderMP4::VideoEncoderMP4()
+        : mProfileLevelIndication(3),
+          mFixedVOPTimeIncrement(0) {
+    // Select the MPEG-4 Simple Profile in the common parameter block.
+    mComParams.profile = (VAProfile)PROFILE_MPEG4SIMPLE;
+}
+
+// Scan the coded bitstream for the first start code that terminates the
+// stream headers: VOP start code 00 00 01 B6 or GOV start code 00 00 01 B3.
+//
+// @param inBuffer   coded bitstream produced by the driver
+// @param bufSize    number of valid bytes in inBuffer
+// @param headerSize out: number of header bytes preceding the start code;
+//                   0 when no start code was found
+// @return ENCODE_SUCCESS (a missing header is reported via *headerSize == 0,
+//         not as an error), ENCODE_NULL_PTR on a NULL argument,
+//         ENCODE_FAIL when the buffer is too short to hold a start code
+Encode_Status VideoEncoderMP4::getHeaderPos(
+        uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize) {
+
+    uint32_t bytesLeft = bufSize;
+
+    CHECK_NULL_RETURN_IFFAIL(inBuffer);
+    CHECK_NULL_RETURN_IFFAIL(headerSize);
+    *headerSize = 0;
+
+    if (bufSize < 4) {
+        // A start code is 4 bytes; a shorter buffer cannot contain one.
+        LOG_E("Buffer size too small\n");
+        return ENCODE_FAIL;
+    }
+
+    // Walk forward until a VOP or GOV start code is found. The loop must
+    // keep running while bytesLeft >= 4 so that a start code occupying the
+    // final 4 bytes is still detected (the previous '> 4' condition missed
+    // that position).
+    while (bytesLeft >= 4 &&
+            (memcmp("\x00\x00\x01\xB6", &inBuffer[bufSize - bytesLeft], 4) &&
+             memcmp("\x00\x00\x01\xB3", &inBuffer[bufSize - bytesLeft], 4))) {
+        --bytesLeft;
+    }
+
+    if (bytesLeft < 4) {
+        // Reached the end without finding a start code.
+        LOG_E("NO header found\n");
+        *headerSize = 0;
+    } else {
+        // Header is everything before the start code.
+        *headerSize = bufSize - bytesLeft;
+    }
+
+    return ENCODE_SUCCESS;
+}
+
+// Copy the MPEG-4 stream headers (the bytes preceding the first VOP/GOV
+// start code in the current coded-buffer segment) into outBuffer as codec
+// configuration data. On success the CODECCONFIG/SYNCFRAME/ENDOFFRAME
+// flags are set; if the caller's buffer is too small, the required size is
+// reported through outBuffer->remainingSize and ENCODE_BUFFER_TOO_SMALL is
+// returned without copying anything.
+Encode_Status VideoEncoderMP4::outputConfigData(
+        VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    uint32_t headerSize = 0;
+
+    // Locate the end of the header data within the current segment,
+    // starting from the read offset mOffsetInSeg.
+    ret = getHeaderPos((uint8_t *)mCurSegment->buf + mOffsetInSeg,
+            mCurSegment->size - mOffsetInSeg, &headerSize);
+    CHECK_ENCODE_STATUS_RETURN("getHeaderPos");
+    if (headerSize == 0) {
+        // No start code found: there is no config data to hand out.
+        outBuffer->dataSize = 0;
+        mCurSegment = NULL;
+        return ENCODE_NO_REQUEST_DATA;
+    }
+
+    if (headerSize <= outBuffer->bufferSize) {
+        // Copy the headers out and advance the segment read position.
+        memcpy(outBuffer->data, (uint8_t *)mCurSegment->buf + mOffsetInSeg, headerSize);
+        mTotalSizeCopied += headerSize;
+        mOffsetInSeg += headerSize;
+        outBuffer->dataSize = headerSize;
+        outBuffer->remainingSize = 0;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_ENDOFFRAME;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_CODECCONFIG;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_SYNCFRAME;
+    } else {
+        // we need a big enough buffer, otherwise we won't output anything
+        outBuffer->dataSize = 0;
+        outBuffer->remainingSize = headerSize;
+        outBuffer->flag |= ENCODE_BUFFERFLAG_DATAINVALID;
+        LOG_E("Buffer size too small\n");
+        return ENCODE_BUFFER_TOO_SMALL;
+    }
+
+    return ret;
+}
+
+
+// Retrieve encoded output for the current frame.
+//
+// Depending on outBuffer->format this returns either all available data,
+// the frame data, or only the codec configuration headers. The coded
+// buffer is mapped via prepareForOutput() before copying and unmapped by
+// cleanupForOutput() (or by the error path below).
+//
+// @param outBuffer caller-provided descriptor: data/bufferSize/format in;
+//                  dataSize/remainingSize/flag out
+// @return ENCODE_SUCCESS, or the first error encountered
+Encode_Status VideoEncoderMP4::getOutput(VideoEncOutputBuffer *outBuffer) {
+
+    Encode_Status ret = ENCODE_SUCCESS;
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    bool useLocalBuffer = false;
+
+    LOG_V("Begin\n");
+    CHECK_NULL_RETURN_IFFAIL(outBuffer);
+
+    // Key-frame cadence: after the first two frames, force an intra frame
+    // every intraPeriod frames (intraPeriod == 0 disables forced keyframes).
+    if (mFrameNum > 2) {
+        if (mComParams.intraPeriod != 0 &&
+                (((mFrameNum - 2) % mComParams.intraPeriod) == 0)) {
+            mKeyFrame = true;
+        } else {
+            mKeyFrame = false;
+        }
+    }
+
+    // Prepare for output: map the coded buffer (may fall back to a locally
+    // allocated buffer, reported through useLocalBuffer).
+    ret = VideoEncoderBase::prepareForOutput(outBuffer, &useLocalBuffer);
+    CHECK_ENCODE_STATUS_CLEANUP("prepareForOutput");
+
+    switch (outBuffer->format) {
+        case OUTPUT_EVERYTHING:
+        case OUTPUT_FRAME_DATA: {
+            // Output whatever we have
+            ret = VideoEncoderBase::outputAllData(outBuffer);
+            CHECK_ENCODE_STATUS_CLEANUP("outputAllData");
+            break;
+        }
+        case OUTPUT_CODEC_DATA: {
+            // Output the codec config data
+            ret = outputConfigData(outBuffer);
+            CHECK_ENCODE_STATUS_CLEANUP("outputConfigData");
+            break;
+        }
+        default:
+            LOG_E("Invalid buffer mode for MPEG-4:2\n");
+            ret = ENCODE_FAIL;
+            // Jump straight to cleanup: falling through to
+            // cleanupForOutput() would overwrite 'ret' and silently
+            // discard this error.
+            goto CLEAN_UP;
+    }
+
+    LOG_I("out size is = %d\n", outBuffer->dataSize);
+
+    // Unmap the coded buffer if all data has been copied out.
+    ret = VideoEncoderBase::cleanupForOutput();
+
+CLEAN_UP:
+
+    if (ret < ENCODE_SUCCESS) {
+        // On failure, release the locally allocated output buffer ...
+        if (outBuffer->data && (useLocalBuffer == true)) {
+            delete[] outBuffer->data;
+            outBuffer->data = NULL;
+            useLocalBuffer = false;
+        }
+
+        // ... and unmap the coded buffer if it is still mapped.
+        if (mCodedBufferMapped) {
+            vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
+            mCodedBufferMapped = false;
+            mCurSegment = NULL;
+        }
+    }
+    LOG_V("End\n");
+    return ret;
+}
+
+
+// Build and submit the MPEG-4 sequence parameter buffer to libva.
+// Called once, before the first frame (see sendEncodeCommand()).
+// @return ENCODE_SUCCESS, or ENCODE_DRIVER_FAIL on a libva error
+Encode_Status VideoEncoderMP4::renderSequenceParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    // Zero-initialize: the struct has more fields than are set below, and
+    // uninitialized stack bytes must not be handed to the driver.
+    VAEncSequenceParameterBufferMPEG4 mp4SequenceParams = {};
+
+    uint32_t frameRateNum = mComParams.frameRate.frameRateNum;
+    uint32_t frameRateDenom = mComParams.frameRate.frameRateDenom;
+    // Integer frame rate, rounded to nearest. NOTE(review): assumes
+    // frameRateDenom != 0 -- confirm the caller validates this.
+    uint32_t frameRate = (frameRateNum + frameRateDenom / 2) / frameRateDenom;
+
+    LOG_V( "Begin\n\n");
+
+    // set up the sequence params for HW
+    mp4SequenceParams.profile_and_level_indication = mProfileLevelIndication;
+    mp4SequenceParams.video_object_layer_width = mComParams.resolution.width;
+    mp4SequenceParams.video_object_layer_height = mComParams.resolution.height;
+    mp4SequenceParams.vop_time_increment_resolution = frameRate;
+    mp4SequenceParams.fixed_vop_time_increment = mFixedVOPTimeIncrement;
+    mp4SequenceParams.bits_per_second = mComParams.rcParams.bitRate;
+    mp4SequenceParams.frame_rate = frameRate;
+    mp4SequenceParams.initial_qp = mComParams.rcParams.initQP;
+    mp4SequenceParams.min_qp = mComParams.rcParams.minQP;
+    mp4SequenceParams.intra_period = mComParams.intraPeriod;
+    //mpeg4_seq_param.fixed_vop_rate = 30;
+
+    LOG_V("===mpeg4 sequence params===\n");
+    LOG_I("profile_and_level_indication = %d\n", (uint32_t)mp4SequenceParams.profile_and_level_indication);
+    LOG_I("intra_period = %d\n", mp4SequenceParams.intra_period);
+    LOG_I("video_object_layer_width = %d\n", mp4SequenceParams.video_object_layer_width);
+    LOG_I("video_object_layer_height = %d\n", mp4SequenceParams.video_object_layer_height);
+    LOG_I("vop_time_increment_resolution = %d\n", mp4SequenceParams.vop_time_increment_resolution);
+    LOG_I("fixed_vop_rate = %d\n", mp4SequenceParams.fixed_vop_rate);
+    LOG_I("fixed_vop_time_increment = %d\n", mp4SequenceParams.fixed_vop_time_increment);
+    LOG_I("bitrate = %d\n", mp4SequenceParams.bits_per_second);
+    LOG_I("frame_rate = %d\n", mp4SequenceParams.frame_rate);
+    LOG_I("initial_qp = %d\n", mp4SequenceParams.initial_qp);
+    LOG_I("min_qp = %d\n", mp4SequenceParams.min_qp);
+    LOG_I("intra_period = %d\n\n", mp4SequenceParams.intra_period);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSequenceParameterBufferType,
+            sizeof(mp4SequenceParams),
+            1, &mp4SequenceParams,
+            &mSeqParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSeqParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Build and submit the per-frame MPEG-4 picture parameter buffer to libva.
+// @return ENCODE_SUCCESS, or ENCODE_DRIVER_FAIL on a libva error
+Encode_Status VideoEncoderMP4::renderPictureParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    // Zero-initialize so any fields not explicitly set below are not
+    // passed to the driver as stack garbage.
+    VAEncPictureParameterBufferMPEG4 mpeg4_pic_param = {};
+    LOG_V( "Begin\n\n");
+
+    // set picture params for HW
+    mpeg4_pic_param.reference_picture = mRefFrame->surface;
+    mpeg4_pic_param.reconstructed_picture = mRecFrame->surface;
+    mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex];
+    mpeg4_pic_param.picture_width = mComParams.resolution.width;
+    mpeg4_pic_param.picture_height = mComParams.resolution.height;
+    // The frame counter doubles as the VOP time increment; assumes one
+    // VOP per time-increment tick -- TODO confirm against the sequence
+    // parameter vop_time_increment_resolution.
+    mpeg4_pic_param.vop_time_increment = mFrameNum;
+    mpeg4_pic_param.picture_type = mIsIntra ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+    LOG_V("======mpeg4 picture params======\n");
+    LOG_I("reference_picture = 0x%08x\n", mpeg4_pic_param.reference_picture);
+    LOG_I("reconstructed_picture = 0x%08x\n", mpeg4_pic_param.reconstructed_picture);
+    LOG_I("coded_buf = 0x%08x\n", mpeg4_pic_param.coded_buf);
+    LOG_I("coded_buf_index = %d\n", mCodedBufIndex);
+    LOG_I("picture_width = %d\n", mpeg4_pic_param.picture_width);
+    LOG_I("picture_height = %d\n", mpeg4_pic_param.picture_height);
+    LOG_I("vop_time_increment = %d\n", mpeg4_pic_param.vop_time_increment);
+    LOG_I("picture_type = %d\n\n", mpeg4_pic_param.picture_type);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncPictureParameterBufferType,
+            sizeof(mpeg4_pic_param),
+            1,&mpeg4_pic_param,
+            &mPicParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mPicParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    return ENCODE_SUCCESS;
+}
+
+
+// Build and submit the slice parameter buffer to libva: a single slice
+// covering the whole picture.
+// @return ENCODE_SUCCESS, or ENCODE_DRIVER_FAIL on a libva error
+Encode_Status VideoEncoderMP4::renderSliceParams() {
+
+    VAStatus vaStatus = VA_STATUS_SUCCESS;
+    uint32_t sliceHeight;
+    uint32_t sliceHeightInMB;
+
+    // Zero-initialize: the slice_flags bitfield has bits beyond the two
+    // set below, and they must not be left as stack garbage.
+    VAEncSliceParameterBuffer sliceParams = {};
+
+    LOG_V( "Begin\n\n");
+
+    // Round the picture height up to a whole number of 16-pixel
+    // macroblock rows.
+    sliceHeight = mComParams.resolution.height;
+    sliceHeight += 15;
+    sliceHeight &= (~15);
+    sliceHeightInMB = sliceHeight / 16;
+
+    // One slice spanning the full frame, starting at MB row 0.
+    sliceParams.start_row_number = 0;
+    sliceParams.slice_height = sliceHeightInMB;
+    sliceParams.slice_flags.bits.is_intra = mIsIntra;
+    sliceParams.slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+    LOG_V("======mpeg4 slice params======\n");
+    LOG_I( "start_row_number = %d\n", (int) sliceParams.start_row_number);
+    LOG_I( "sliceHeightInMB = %d\n", (int) sliceParams.slice_height);
+    LOG_I( "is_intra = %d\n", (int) sliceParams.slice_flags.bits.is_intra);
+
+    vaStatus = vaCreateBuffer(
+            mVADisplay, mVAContext,
+            VAEncSliceParameterBufferType,
+            sizeof(VAEncSliceParameterBuffer),
+            1, &sliceParams,
+            &mSliceParamBuf);
+    CHECK_VA_STATUS_RETURN("vaCreateBuffer");
+
+    vaStatus = vaRenderPicture(mVADisplay, mVAContext, &mSliceParamBuf, 1);
+    CHECK_VA_STATUS_RETURN("vaRenderPicture");
+
+    LOG_V( "end\n");
+    return ENCODE_SUCCESS;
+}
+
+// Queue the parameter buffers for one frame: sequence parameters before
+// the first frame only, then picture and slice parameters every frame.
+// @return ENCODE_SUCCESS, or the error of the first failing step
+Encode_Status VideoEncoderMP4::sendEncodeCommand(void) {
+    Encode_Status ret = ENCODE_SUCCESS;
+    LOG_V( "Begin\n");
+
+    // The sequence header is rendered only once, before frame 0.
+    if (mFrameNum == 0) {
+        ret = renderSequenceParams();
+        CHECK_ENCODE_STATUS_RETURN("renderSequenceParams");
+    }
+
+    ret = renderPictureParams();
+    CHECK_ENCODE_STATUS_RETURN("renderPictureParams");
+
+    ret = renderSliceParams();
+    // Fixed copy/paste error: this check previously logged
+    // "renderPictureParams" for a renderSliceParams() failure.
+    CHECK_ENCODE_STATUS_RETURN("renderSliceParams");
+
+    LOG_V( "End\n");
+    return ENCODE_SUCCESS;
+}
diff --git a/videoencoder/VideoEncoderMP4.h b/videoencoder/VideoEncoderMP4.h
new file mode 100644
index 0000000..b453023
--- /dev/null
+++ b/videoencoder/VideoEncoderMP4.h
@@ -0,0 +1,51 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2011 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+#ifndef __VIDEO_ENCODER__MPEG4_H__
+#define __VIDEO_ENCODER__MPEG4_H__
+
+#include "VideoEncoderBase.h"
+
+/**
+  * MPEG-4:2 Encoder class, derived from VideoEncoderBase
+  */
+class VideoEncoderMP4: public VideoEncoderBase {
+public:
+    VideoEncoderMP4();
+    virtual ~VideoEncoderMP4() {};
+
+    // Copies encoded output -- or, for OUTPUT_CODEC_DATA, the stream
+    // headers -- out of the coded buffer into outBuffer.
+    // NOTE(review): presumably overrides a virtual in VideoEncoderBase;
+    // consider marking it 'virtual' explicitly -- confirm against the
+    // base class declaration.
+    Encode_Status getOutput(VideoEncOutputBuffer *outBuffer);
+
+protected:
+    // Renders sequence (first frame only), picture and slice parameter
+    // buffers for one frame.
+    virtual Encode_Status sendEncodeCommand(void);
+
+    // MPEG-4 defines no codec-specific parameters or configs here, so
+    // these derived hooks are intentional no-ops.
+    virtual Encode_Status derivedSetParams(VideoParamConfigSet *videoEncParams) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedGetParams(VideoParamConfigSet *videoEncParams) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedGetConfig(VideoParamConfigSet *videoEncConfig) {
+        return ENCODE_SUCCESS;
+    }
+    virtual Encode_Status derivedSetConfig(VideoParamConfigSet *videoEncConfig) {
+        return ENCODE_SUCCESS;
+    }
+    // Local Methods
+private:
+    // Finds the byte offset of the first VOP/GOV start code in inBuffer;
+    // reports 0 through *headerSize when none is found.
+    Encode_Status getHeaderPos(uint8_t *inBuffer, uint32_t bufSize, uint32_t *headerSize);
+    // Copies the stream headers (codec config data) into outBuffer.
+    Encode_Status outputConfigData(VideoEncOutputBuffer *outBuffer);
+    Encode_Status renderSequenceParams();
+    Encode_Status renderPictureParams();
+    Encode_Status renderSliceParams();
+
+    // profile_and_level_indication written into the sequence parameters
+    // (set to 3 in the constructor).
+    unsigned char mProfileLevelIndication;
+    // fixed_vop_time_increment written into the sequence parameters
+    // (set to 0 in the constructor).
+    uint32_t mFixedVOPTimeIncrement;
+};
+
+#endif /* __VIDEO_ENCODER__MPEG4_H__ */