Commit new libMIX changes for video encode enhancement.
BZ: 43450
Commit new libMIX changes for video encode enhancement.
Change-Id: Ia0cf7beb7404a878b44751ff287fa60a1942429e
Signed-off-by: Zhao Liang <leo.zhao@intel.com>
Signed-off-by: hding3 <haitao.ding@intel.com>
Reviewed-on: http://android.intel.com:8080/54087
Reviewed-by: Yuan, Shengquan <shengquan.yuan@intel.com>
Reviewed-by: buildbot <buildbot@intel.com>
Tested-by: buildbot <buildbot@intel.com>
diff --git a/test/Android.mk b/test/Android.mk
new file mode 100644
index 0000000..2f4d6a8
--- /dev/null
+++ b/test/Android.mk
@@ -0,0 +1,53 @@
+LOCAL_PATH := $(call my-dir)
+
+# For intelmetadatabuffer test
+# =====================================================
+
+include $(CLEAR_VARS)
+
+#VIDEO_ENC_LOG_ENABLE := true
+
+LOCAL_SRC_FILES := \
+ btest.cpp
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH) \
+ $(TARGET_OUT_HEADERS)/libmix_videoencoder \
+
+LOCAL_SHARED_LIBRARIES := \
+ libintelmetadatabuffer
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := btest
+
+include $(BUILD_EXECUTABLE)
+
+# For mix_encoder
+# =====================================================
+
+include $(CLEAR_VARS)
+
+#VIDEO_ENC_LOG_ENABLE := true
+
+LOCAL_SRC_FILES := \
+ mix_encoder.cpp
+
+LOCAL_C_INCLUDES := \
+ $(TARGET_OUT_HEADERS)/libva \
+ $(TARGET_OUT_HEADERS)/libmix_videoencoder \
+ $(TOP)/frameworks/base/include/display \
+ $(LOCAL_PATH)
+
+LOCAL_SHARED_LIBRARIES := \
+ libintelmetadatabuffer \
+ libva_videoencoder \
+ libva \
+ libva-android \
+ libva-tpi \
+ libgui \
+ libbinder
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := mix_encoder
+
+include $(BUILD_EXECUTABLE)
diff --git a/test/btest.cpp b/test/btest.cpp
new file mode 100644
index 0000000..26f104f
--- /dev/null
+++ b/test/btest.cpp
@@ -0,0 +1,78 @@
+#include "IntelMetadataBuffer.h"
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+
+#define SUCCESS "PASS IntelMetadataBuffer Unit Test\n"
+#define FAIL "Fail IntelMetadataBuffer Unit Test\n"
+
+int main(int argc, char* argv[])
+{
+ IntelMetadataBuffer *mb1, *mb2;
+ uint8_t* bytes;
+ uint32_t size;
+ IMB_Result ret;
+
+ MetadataBufferType t1 = MetadataBufferTypeCameraSource;
+ MetadataBufferType t2;
+ int32_t v1 = 0x00000010;
+ int32_t v2 = 0;
+ ValueInfo vi1, *vi2 = NULL;
+ int32_t ev1[10];
+ int32_t *ev2 = NULL;
+ unsigned int count;
+
+ if (argc > 1)
+ t1 = (MetadataBufferType) atoi(argv[1]);
+
+ if (argc > 2)
+ v1 = atoi(argv[2]);
+
+ memset(&vi1, 0, sizeof(ValueInfo));
+
+ mb1 = new IntelMetadataBuffer();
+ ret = mb1->SetType(t1);
+ ret = mb1->SetValue(v1);
+ if (t1 != MetadataBufferTypeGrallocSource) {
+ ret = mb1->SetValueInfo(&vi1);
+ ret = mb1->SetExtraValues(ev1, 10);
+ }
+ ret = mb1->GetBytes(bytes, size);
+ printf("assembling IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret );
+
+ printf("size = %d, bytes = ", size);
+ for(int i=0; i<size; i++)
+ {
+ printf("%02x ", bytes[i]);
+ }
+ printf("\n");
+
+ mb2 = new IntelMetadataBuffer();
+ ret = mb2->SetBytes(bytes, size);
+ printf("parsing IntelMetadataBuffer %s, ret = %d\n", (ret == IMB_SUCCESS)?"Success":"Fail", ret );
+
+ ret = mb2->GetType(t2);
+ ret = mb2->GetValue(v2);
+ ret = mb2->GetValueInfo(vi2);
+ ret = mb2->GetExtraValues(ev2, count);
+
+ printf("t2=%d, v2=%d, vi2=%x, ev2=%x\n", t2, v2, vi2, ev2);
+ if (v1 == v2 && t1 == t2 ) {
+ if (vi2) {
+ if (memcmp(&vi1, vi2, sizeof(ValueInfo)) == 0) {
+ if (ev2) {
+ if (memcmp(ev1, ev2, count) == 0)
+ printf(SUCCESS);
+ else
+ printf(FAIL);
+ }else
+ printf(SUCCESS);
+ }else
+ printf(FAIL);
+ }else
+ printf(SUCCESS);
+ }else
+ printf(SUCCESS);
+
+ return 1;
+}
diff --git a/test/mix_encoder.cpp b/test/mix_encoder.cpp
new file mode 100644
index 0000000..a228b71
--- /dev/null
+++ b/test/mix_encoder.cpp
@@ -0,0 +1,781 @@
+#include <va/va_tpi.h>
+#include <va/va_android.h>
+#include <VideoEncoderHost.h>
+#include <stdio.h>
+#include <getopt.h>
+#include <IntelMetadataBuffer.h>
+
+#include <surfaceflinger/ISurfaceComposer.h>
+#include <surfaceflinger/SurfaceComposerClient.h>
+#include <surfaceflinger/IGraphicBufferAlloc.h>
+
+#include <binder/IPCThreadState.h>
+#include <binder/ProcessState.h>
+
+#include <ui/PixelFormat.h>
+#include <hardware/gralloc.h>
+
+#define CHECK_ENCODE_STATUS(FUNC)\
+ if (ret < ENCODE_SUCCESS) { \
+ printf(FUNC" Failed. ret = 0x%08x\n", ret); \
+ return -1; \
+ }
+
+static const char *AVC_MIME_TYPE = "video/h264";
+static const char *MPEG4_MIME_TYPE = "video/mpeg4";
+static const char *H263_MIME_TYPE = "video/h263";
+static const int box_width = 128;
+
+static IVideoEncoder *gVideoEncoder = NULL;
+static VideoParamsCommon gEncoderParams;
+static VideoParamsStoreMetaDataInBuffers gStoreMetaDataInBuffers;
+static VideoRateControl gRC = RATE_CONTROL_CBR;
+
+static int gCodec = 0; //0: H264, 1: MPEG4, 2: H263
+static int gRCMode = 1; //0: NO_RC, 1: CBR, 2: VBR, 3: VCM
+static int gBitrate = 1280000;
+
+static bool gSyncEncMode = false;
+static uint32_t gEncFrames = 15;
+static const int gSrcFrames = 15;
+
+static uint32_t gAllocatedSize;
+static uint32_t gWidth = 1280;
+static uint32_t gHeight = 720;
+static uint32_t gStride = 1280;
+static uint32_t gFrameRate = 30;
+
+static char* gFile = (char*)"out.264";
+
+static uint32_t gMode = 0; //0:Camera malloc , 1: WiDi clone, 2: WiDi ext, 3: WiDi user, 4: Raw, 5: SurfaceMediaSource
+static const char* gModeString[7] = {"Camera malloc", "WiDi clone", "WiDi ext", "WiDi user", "Raw", "GrallocSource(Composer)", "GrallocSource(Gralloc)"};
+static const char* gRCModeString[4] ={"NO_RC", "CBR", "VBR", "VCM"};
+
+//for uploading src pictures, also for Camera malloc, WiDi clone, raw mode usrptr storage
+static uint8_t* gUsrptr[gSrcFrames];
+
+//for metadatabuffer transfer
+static IntelMetadataBuffer* gIMB[gSrcFrames] = {NULL};
+
+//for WiDi user mode
+static VADisplay gVADisplay;
+static VASurfaceID gSurface[gSrcFrames];
+
+//for WiDi ext mode
+static uint32_t gkBufHandle[gSrcFrames];
+
+//for gfxhandle
+static sp<IGraphicBufferAlloc> gGraphicBufferAlloc;
+static sp<GraphicBuffer> gGraphicBuffer[gSrcFrames];
+
+extern "C" {
+VAStatus vaLockSurface(VADisplay dpy,
+ VASurfaceID surface,
+ unsigned int *fourcc,
+ unsigned int *luma_stride,
+ unsigned int *chroma_u_stride,
+ unsigned int *chroma_v_stride,
+ unsigned int *luma_offset,
+ unsigned int *chroma_u_offset,
+ unsigned int *chroma_v_offset,
+ unsigned int *buffer_name,
+ void **buffer
+);
+
+VAStatus vaUnlockSurface(VADisplay dpy,
+ VASurfaceID surface
+);
+}
+
+static hw_module_t const *gModule;
+static gralloc_module_t const *gAllocMod; /* get by force hw_module_t */
+static alloc_device_t *gAllocDev; /* get by gralloc_open */
+
+static void gfx_init()
+{
+ int err = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &gModule);
+ if (err) {
+ printf("FATAL: can't find the %s module", GRALLOC_HARDWARE_MODULE_ID);
+ exit(-1);
+ }
+
+ gAllocMod = (gralloc_module_t const *)gModule;
+
+ err = gralloc_open(gModule, &gAllocDev);
+ if (err) {
+ printf("FATAL: gralloc open failed\n");
+ exit(-1);
+ }
+
+}
+
+static int gfx_alloc(uint32_t w, uint32_t h, int format,
+ int usage, buffer_handle_t* handle, int32_t* stride)
+{
+ int err;
+
+ err = gAllocDev->alloc(gAllocDev, w, h, format, usage, handle, stride);
+ if (err) {
+ printf("alloc(%u, %u, %d, %08x, ...) failed %d (%s)\n",
+ w, h, format, usage, err, strerror(-err));
+ exit(-1);
+ }
+
+ return err;
+}
+
+static int gfx_free(buffer_handle_t handle)
+{
+ int err;
+
+ err = gAllocDev->free(gAllocDev, handle);
+ if (err) {
+ printf("free(...) failed %d (%s)\n", err, strerror(-err));
+ exit(-1);
+ }
+
+ return err;
+}
+
+static int gfx_lock(buffer_handle_t handle,
+ int usage, int left, int top, int width, int height,
+ void** vaddr)
+{
+ int err;
+
+ err = gAllocMod->lock(gAllocMod, handle, usage,
+ left, top, width, height,
+ vaddr);
+
+ if (err){
+ printf("lock(...) failed %d (%s)", err, strerror(-err));
+ exit(-1);
+ }
+
+ return err;
+}
+
+
+static int gfx_unlock(buffer_handle_t handle)
+{
+ int err;
+
+ err = gAllocMod->unlock(gAllocMod, handle);
+ if (err) {
+ printf("unlock(...) failed %d (%s)", err, strerror(-err));
+ exit(-1);
+ }
+
+ return err;
+}
+
+Encode_Status SetVideoEncoderParam() {
+
+ Encode_Status ret = ENCODE_SUCCESS;
+
+ ret = gVideoEncoder->getParameters(&gEncoderParams);
+ CHECK_ENCODE_STATUS("getParameters");
+
+ gEncoderParams.resolution.height = gHeight;
+ gEncoderParams.resolution.width = gWidth;
+ gEncoderParams.frameRate.frameRateDenom = 1;
+ gEncoderParams.frameRate.frameRateNum = gFrameRate;
+ gEncoderParams.rcMode = gRC;
+ gEncoderParams.syncEncMode = gSyncEncMode;
+
+ switch(gCodec)
+ {
+ case 0:
+ break;
+ case 1:
+ gEncoderParams.profile = (VAProfile)VAProfileMPEG4Simple;
+ break;
+ case 2:
+ gEncoderParams.profile = (VAProfile)VAProfileH263Baseline;
+ break;
+ default:
+ break;
+ }
+
+ gEncoderParams.rcParams.bitRate = gBitrate;
+#if 0
+ gEncoderParams->intraPeriod = 15;
+ gEncoderParams->rawFormat = RAW_FORMAT_NV12;
+ gEncoderParams->rcParams.initQP = 0;
+ gEncoderParams->rcParams.minQP = 0;
+ gEncoderParams->rcParams.windowSize = 0;
+ gEncoderParams->rcParams.targetPercentage = 0;
+ gEncoderParams->rcParams.bitRate = 10000;
+ gEncoderParams->rcMode = RATE_CONTROL_CBR;
+ gEncoderParams->refreshType = VIDEO_ENC_NONIR;
+#endif
+
+ ret = gVideoEncoder->setParameters(&gEncoderParams);
+ CHECK_ENCODE_STATUS("setParameters VideoParamsCommon");
+
+ if (gMode != 4)
+ {
+ gStoreMetaDataInBuffers.isEnabled = true;
+
+ ret = gVideoEncoder->setParameters(&gStoreMetaDataInBuffers);
+ CHECK_ENCODE_STATUS("setParameters StoreMetaDataInBuffers");
+ }
+
+ return ret;
+}
+
+static int YUV_generator_planar(int width, int height,
+ unsigned char *Y_start, int Y_pitch,
+ unsigned char *U_start, int U_pitch,
+ unsigned char *V_start, int V_pitch,
+ int UV_interleave)
+{
+ static int row_shift = 0;
+ int row;
+
+ /* copy Y plane */
+ for (row=0;row<height;row++) {
+ unsigned char *Y_row = Y_start + row * Y_pitch;
+ int jj, xpos, ypos;
+
+ ypos = (row / box_width) & 0x1;
+
+ for (jj=0; jj<width; jj++) {
+ xpos = ((row_shift + jj) / box_width) & 0x1;
+
+ if ((xpos == 0) && (ypos == 0))
+ Y_row[jj] = 0xeb;
+ if ((xpos == 1) && (ypos == 1))
+ Y_row[jj] = 0xeb;
+
+ if ((xpos == 1) && (ypos == 0))
+ Y_row[jj] = 0x10;
+ if ((xpos == 0) && (ypos == 1))
+ Y_row[jj] = 0x10;
+ }
+ }
+
+ /* copy UV data */
+ for( row =0; row < height/2; row++) {
+ if (UV_interleave) {
+ unsigned char *UV_row = U_start + row * U_pitch;
+ memset (UV_row,0x80,width);
+ } else {
+ unsigned char *U_row = U_start + row * U_pitch;
+ unsigned char *V_row = V_start + row * V_pitch;
+
+ memset (U_row,0x80,width/2);
+ memset (V_row,0x80,width/2);
+ }
+ }
+
+ row_shift += 2;
+ if (row_shift==box_width) row_shift = 0;
+
+ return 0;
+}
+
+//malloc external memory, and not need to set into encoder before start()
+void MallocExternalMemory()
+{
+ uint32_t size = gWidth * gHeight * 3 /2;
+
+ ValueInfo* vinfo = new ValueInfo;
+ vinfo->mode = MEM_MODE_MALLOC;
+ vinfo->handle = 0;
+ vinfo->size = size;
+ vinfo->width = gWidth;
+ vinfo->height = gHeight;
+ vinfo->lumaStride = gStride;
+ vinfo->chromStride = gStride;
+ vinfo->format = STRING_TO_FOURCC("NV12");
+ vinfo->s3dformat = 0xFFFFFFFF;
+
+ for(int i = 0; i < gSrcFrames; i ++)
+ {
+ gUsrptr[i] = (uint8_t*)malloc(size);
+
+ gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeCameraSource, (int32_t)gUsrptr[i]);
+
+ gIMB[i]->SetValueInfo(vinfo);
+ }
+ delete vinfo;
+}
+
+//apply memory from encoder, and get usrptr to upload pictures
+void GetAllUsrptr()
+{
+ Encode_Status ret = ENCODE_SUCCESS;
+ VideoParamsUsrptrBuffer paramsUsrptrBuffer;
+
+ paramsUsrptrBuffer.type = VideoParamsTypeUsrptrBuffer;
+ paramsUsrptrBuffer.size = sizeof(VideoParamsUsrptrBuffer);
+ paramsUsrptrBuffer.expectedSize = gWidth * gHeight * 3 / 2;
+ paramsUsrptrBuffer.format = STRING_TO_FOURCC("NV12");
+ paramsUsrptrBuffer.width = gWidth;
+ paramsUsrptrBuffer.height = gHeight;
+
+ for(int i = 0; i < gSrcFrames; i ++)
+ {
+        ret = gVideoEncoder->getParameters(&paramsUsrptrBuffer);
+ if(ret != ENCODE_SUCCESS ) {
+ printf("could not allocate input surface from the encoder %d", ret);
+ ret = ENCODE_NO_MEMORY;
+ break;
+ }
+ gAllocatedSize = paramsUsrptrBuffer.actualSize;
+ gUsrptr[i] = paramsUsrptrBuffer.usrPtr;
+ gStride = paramsUsrptrBuffer.stride;
+
+ gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeEncoder, (int32_t)gUsrptr[i]);
+ }
+
+}
+
+void CreateUserSurfaces(int mode)
+{
+ unsigned int display = 0;
+ int majorVersion = -1;
+ int minorVersion = -1;
+ VAStatus vaStatus;
+
+ gVADisplay = vaGetDisplay(&display);
+
+ if (gVADisplay == NULL) {
+ printf("vaGetDisplay failed.");
+ }
+
+ vaStatus = vaInitialize(gVADisplay, &majorVersion, &minorVersion);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ printf( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
+ }
+
+ VASurfaceAttributeTPI attribute_tpi;
+
+ attribute_tpi.size = gWidth * gHeight * 3 /2;
+ attribute_tpi.luma_stride = gWidth;
+ attribute_tpi.chroma_u_stride = gWidth;
+ attribute_tpi.chroma_v_stride = gWidth;
+ attribute_tpi.luma_offset = 0;
+ attribute_tpi.chroma_u_offset = gWidth * gHeight;
+ attribute_tpi.chroma_v_offset = gWidth * gHeight;
+ attribute_tpi.pixel_format = VA_FOURCC_NV12;
+ attribute_tpi.type = VAExternalMemoryNULL;
+
+ vaStatus = vaCreateSurfacesWithAttribute(gVADisplay, gWidth, gHeight, VA_RT_FORMAT_YUV420,
+ gSrcFrames, gSurface, &attribute_tpi);
+
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ printf( "Failed vaCreateSurfaces, vaStatus = %d\n", vaStatus);
+ }
+
+ VideoParamsUpstreamBuffer upstreamParam;
+ if (mode == 0)
+ upstreamParam.bufferMode = BUFFER_SHARING_SURFACE;
+ else
+ upstreamParam.bufferMode = BUFFER_SHARING_KBUFHANDLE;
+
+ ExternalBufferAttrib attrib;
+ attrib.realWidth = gWidth;
+ attrib.realHeight = gHeight;
+ attrib.lumaStride = gStride;
+ attrib.chromStride = gStride;
+ attrib.format = VA_FOURCC_NV12;
+ upstreamParam.bufAttrib = &attrib;
+
+ uint32_t *list = new uint32_t[gSrcFrames];
+ if (mode == 1){
+ uint32_t fourCC = 0;
+ uint32_t lumaStride = 0;
+ uint32_t chromaUStride = 0;
+ uint32_t chromaVStride = 0;
+ uint32_t lumaOffset = 0;
+ uint32_t chromaUOffset = 0;
+ uint32_t chromaVOffset = 0;
+
+ for(int i = 0; i < gSrcFrames; i++) {
+ vaStatus = vaLockSurface(
+ gVADisplay, (VASurfaceID)gSurface[i],
+ &fourCC, &lumaStride, &chromaUStride, &chromaVStride,
+ &lumaOffset, &chromaUOffset, &chromaVOffset, &gkBufHandle[i], NULL);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ printf( "Failed vaLockSurface, vaStatus = %d\n", vaStatus);
+ }
+#if 0
+ printf("lumaStride = %d", lumaStride);
+ printf("chromaUStride = %d", chromaUStride);
+ printf("chromaVStride = %d", chromaVStride);
+ printf("lumaOffset = %d", lumaOffset);
+ printf("chromaUOffset = %d", chromaUOffset);
+ printf("chromaVOffset = %d", chromaVOffset);
+ printf("kBufHandle = 0x%08x", gkBufHandle[i]);
+ printf("fourCC = %d\n", fourCC);
+#endif
+ vaStatus = vaUnlockSurface(gVADisplay, (VASurfaceID)gSurface[i]);
+ list[i] = gkBufHandle[i];
+ }
+
+ }else{
+
+ for (int i = 0; i < gSrcFrames; i++)
+ list[i] = gSurface[i];
+ }
+
+ upstreamParam.bufList = list;
+ upstreamParam.bufCnt = gSrcFrames;
+ upstreamParam.display = gVADisplay;
+ Encode_Status ret;
+ ret = gVideoEncoder->setParameters((VideoParamConfigSet *)&upstreamParam);
+ if (ret != ENCODE_SUCCESS) {
+ printf("Failed setParameters, Status = %d\n", ret);
+ }
+ delete list;
+
+ //get usrptr for uploading src pictures
+ VAImage surface_image;
+ for (int i=0; i<gSrcFrames; i++) {
+ vaStatus = vaDeriveImage(gVADisplay, gSurface[i], &surface_image);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ printf("Failed vaDeriveImage, vaStatus = %d\n", vaStatus);
+ }
+
+ vaMapBuffer(gVADisplay, surface_image.buf, (void**)&gUsrptr[i]);
+ if (vaStatus != VA_STATUS_SUCCESS) {
+ printf("Failed vaMapBuffer, vaStatus = %d\n", vaStatus);
+ }
+
+ vaUnmapBuffer(gVADisplay, surface_image.buf);
+ vaDestroyImage(gVADisplay, surface_image.image_id);
+
+ if (mode == 0)
+ gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, gSurface[i]);
+ else
+ gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeUser, gkBufHandle[i]);
+ }
+}
+
+void CreateGfxhandle()
+{
+ sp<ISurfaceComposer> composer(ComposerService::getComposerService());
+ gGraphicBufferAlloc = composer->createGraphicBufferAlloc();
+
+ uint32_t usage = GraphicBuffer::USAGE_HW_TEXTURE | GraphicBuffer::USAGE_SW_WRITE_OFTEN | GraphicBuffer::USAGE_SW_READ_OFTEN; // | GraphicBuffer::USAGE_HW_COMPOSER;
+ int format = HAL_PIXEL_FORMAT_NV12_VED; //HAL_PIXEL_FORMAT_RGBA_8888
+ int32_t error;
+
+ int adjusted_width, adjusted_height;
+ if (0) {
+ ;
+ } else if (512 >= gWidth) {
+ adjusted_width = 512;
+ } else if (1024 >= gWidth) {
+ adjusted_width = 1024;
+ } else if (1280 >= gWidth) {
+ adjusted_width = 1280;
+ } else if (2048 >= gWidth) {
+ adjusted_width = 2048;
+ } else if (4096 >= gWidth) {
+ adjusted_width = 4096;
+ } else {
+ adjusted_width = (gWidth + 0x1f) & ~0x1f;
+ }
+
+ adjusted_height = (gHeight + 0x1f) & ~0x1f;
+
+printf("adjust width=%d, height=%d\n", adjusted_width, adjusted_height);
+ for(int i = 0; i < gSrcFrames; i ++)
+ {
+ sp<GraphicBuffer> graphicBuffer(
+ gGraphicBufferAlloc->createGraphicBuffer(
+// gWidth, gHeight, format, usage, &error));
+ adjusted_width, adjusted_height, format, usage, &error));
+
+ gGraphicBuffer[i] = graphicBuffer;
+ graphicBuffer->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, (void**)(&gUsrptr[i]));
+
+ gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)gGraphicBuffer[i]->handle);
+ graphicBuffer->unlock();
+ }
+
+}
+
+void CreateGralloc()
+{
+ int usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_OFTEN | GRALLOC_USAGE_HW_TEXTURE;
+ int format = HAL_PIXEL_FORMAT_NV12_VED;
+
+ gfx_init();
+
+ void* vaddr;
+ buffer_handle_t handle;
+
+ for(int i = 0; i < gSrcFrames; i ++)
+ {
+ gfx_alloc(gWidth, gHeight, format, usage, &handle, (int32_t*)&gStride);
+ gfx_lock(handle, usage, 0, 0, gWidth, gHeight, &vaddr);
+ printf("vaddr= %p\n", vaddr);
+ gUsrptr[i] = (uint8_t*)vaddr;
+ gIMB[i] = new IntelMetadataBuffer(MetadataBufferTypeGrallocSource, (int32_t)handle);
+ gfx_unlock(handle);
+ }
+
+}
+
+int CheckArgs(int argc, char* argv[])
+{
+ char c;
+
+ while ((c =getopt(argc, argv,"b:c:r:w:h:m:f:n:s:?") ) != EOF) {
+ switch (c) {
+ case 'w':
+ gWidth = atoi(optarg);
+ gStride = gWidth;
+ break;
+ case 'h':
+ gHeight = atoi(optarg);
+ break;
+ case 'n':
+ gEncFrames = atoi(optarg);
+ break;
+ case 'm':
+ gMode = atoi(optarg);
+ break;
+ case 'f':
+ gFile = optarg;
+ break;
+ case 'c':
+ gCodec = atoi(optarg);
+ break;
+ case 'r':
+ gRCMode = atoi(optarg);
+ break;
+ case 'b':
+ gBitrate = atoi(optarg);
+ break;
+ case 's':
+ gSyncEncMode = atoi(optarg);
+ break;
+ case '?':
+ default:
+ printf("\n./mix_encode -c <Codec> -b <Bit rate> -r <Rate control> -w <Width> -h <Height> -n <Frame_num> -m <Mode> -s <Sync mode> -f <Output file>\n");
+ printf("\nCodec:\n");
+ printf("0: H264 (default)\n1: MPEG4\n2: H263\n");
+ printf("\nRate control:\n");
+ printf("0: NO_RC \n1: CBR (default)\n2: VBR\n3: VCM\n");
+ printf("\nMode:\n");
+ printf("0: Camera malloc (default)\n1: WiDi clone\n2: WiDi ext\n3: WiDi user\n4: Raw\n5: GrallocSource(Composer)\n6: GrallocSource(Gralloc)\n");
+ exit(0);
+ }
+ }
+
+ return 0;
+}
+
+int main(int argc, char* argv[])
+{
+ Encode_Status ret;
+ const char *codec;
+
+ CheckArgs(argc, argv);
+
+ sp<ProcessState> proc(ProcessState::self());
+
+ ProcessState::self()->startThreadPool();
+
+ switch(gCodec)
+ {
+ case 0:
+ codec = AVC_MIME_TYPE;
+ break;
+ case 1:
+ codec = MPEG4_MIME_TYPE;
+ break;
+ case 2:
+ codec = H263_MIME_TYPE;
+ break;
+ default:
+ printf("Not support this type codec\n");
+ return 1;
+ }
+
+ switch(gRCMode)
+ {
+ case 0:
+ gRC = RATE_CONTROL_NONE;
+ break;
+ case 1:
+ gRC = RATE_CONTROL_CBR;
+ break;
+ case 2:
+ gRC = RATE_CONTROL_VBR;
+ break;
+ case 3:
+ gRC = RATE_CONTROL_VCM;
+ break;
+ default:
+ printf("Not support this rate control mode\n");
+ return 1;
+ }
+
+ printf("\nStart %s Encoding ....\n", codec);
+ printf("Mode is %s, RC mode is %s, Width=%d, Height=%d, Bitrate=%dbps, EncodeFrames=%d, SyncMode=%d, out file is %s\n\n", gModeString[gMode], gRCModeString[gRCMode], gWidth, gHeight, gBitrate, gEncFrames, gSyncEncMode, gFile);
+
+//sleep(10);
+
+for(int i=0; i<1; i++)
+{
+ gVideoEncoder = createVideoEncoder(codec);
+
+ //set parameter
+ SetVideoEncoderParam();
+
+ //prepare src pictures, get user ptrs for uploading picture and prepare metadatabuffer in different mode
+
+ switch (gMode)
+ {
+ case 0: //Camera malloc
+ MallocExternalMemory();
+ break;
+ case 1: //WiDi clone
+ GetAllUsrptr();
+ break;
+ case 2: //WiDi ext
+ CreateUserSurfaces(1);
+ break;
+ case 3: //WiDi user
+ CreateUserSurfaces(0);
+ break;
+ case 4: //Raw
+ MallocExternalMemory();
+ break;
+ case 5: //SurfaceMediaSource
+ CreateGfxhandle();
+ break;
+ case 6: //Gralloc
+ CreateGralloc();
+ break;
+ default:
+ break;
+ }
+
+//sleep(10);
+
+ //upload src data
+ for(int i=0; i<gSrcFrames; i++)
+ YUV_generator_planar(gWidth, gHeight, gUsrptr[i], gWidth, gUsrptr[i]+gWidth*gHeight, gWidth, 0, 0, 1);
+
+ //start
+ ret = gVideoEncoder->start();
+ CHECK_ENCODE_STATUS("start");
+
+ //open out file
+ FILE* file = fopen(gFile, "w");
+ if (!file)
+ {
+ printf("create out file failed\n");
+ return 1;
+ }
+
+ //input buffers
+ VideoEncRawBuffer InBuf;
+ uint8_t *data;
+ uint32_t size;
+
+ //output buffers
+ VideoEncOutputBuffer OutBuf;
+ uint32_t maxsize;
+ gVideoEncoder->getMaxOutSize(&maxsize);
+ uint8_t out[maxsize];
+ OutBuf.bufferSize = maxsize;
+ OutBuf.dataSize = 0;
+ OutBuf.data = out;
+ OutBuf.format = OUTPUT_EVERYTHING;
+
+ printf("\n");
+ for(unsigned int i=0; i<gEncFrames; i++)
+ {
+ if (gMode != 4)
+ {
+ gIMB[i % gSrcFrames]->GetBytes(data, size);
+ // printf("srcno =%d, data=%x, size=%d\n", i % gSrcFrames, data, size);
+ }else
+ {
+ data = gUsrptr[i % gSrcFrames];
+ size = gWidth * gHeight * 3 /2;
+ }
+ InBuf.data = data;
+ InBuf.size = size;
+ InBuf.bufAvailable = true;
+
+ ret = gVideoEncoder->encode(&InBuf);
+ CHECK_ENCODE_STATUS("encode");
+
+ ret = gVideoEncoder->getOutput(&OutBuf);
+ CHECK_ENCODE_STATUS("getOutput");
+ // printf("OutBuf.dataSize = %d .........\n", OutBuf.dataSize);
+ fwrite(OutBuf.data, 1, OutBuf.dataSize, file);
+
+ printf("Encoding %d Frames \r", i+1);
+ fflush(stdout);
+ }
+ fclose(file);
+
+ VideoStatistics stat;
+ gVideoEncoder->getStatistics(&stat);
+ printf("\nVideoStatistics\n");
+ printf("Encoded %d frames, Skip %d frames, encode time: average( %d us), max( %d us/Frame %d), min( %d us/Frame %d)\n", stat.total_frames, stat.skipped_frames, stat.average_encode_time, stat.max_encode_time, stat.max_encode_frame, stat.min_encode_time, stat.min_encode_frame );
+ if(gVideoEncoder) {
+ releaseVideoEncoder(gVideoEncoder);
+ gVideoEncoder = NULL;
+ }
+
+
+ switch(gMode)
+ {
+ case 0: //camera malloc
+ case 4: //Raw
+ for(int i=0; i<gSrcFrames; i++)
+ {
+ delete gUsrptr[i];
+ }
+ break;
+ case 1: //WiDi clone
+ //nothing to do
+ break;
+ case 2: //WiDi ext
+ case 3: //WiDi user
+ //release surfaces
+ vaDestroySurfaces(gVADisplay, gSurface, gSrcFrames);
+ break;
+ case 5: //SurfaceMediaSource
+ for(int i=0; i<gSrcFrames; i++)
+ {
+ gGraphicBuffer[i] = 0;
+ }
+ break;
+ case 6: //Gralloc
+ buffer_handle_t handle;
+ for(int i=0; i<gSrcFrames; i++)
+ {
+ if (gIMB[i] != NULL)
+ {
+ gIMB[i]->GetValue((int32_t&)handle);
+ gfx_free(handle);
+ }
+ }
+ break;
+ }
+
+ for(int i=0; i<gSrcFrames; i++)
+ {
+ if (gIMB[i] != NULL)
+ delete gIMB[i];
+ }
+
+ printf("\nComplete Encoding, ByeBye ....\n");
+
+}
+
+ return 1;
+}
+
diff --git a/videoencoder/Android.mk b/videoencoder/Android.mk
index 0333bc9..7c8314a 100644
--- a/videoencoder/Android.mk
+++ b/videoencoder/Android.mk
@@ -1,7 +1,8 @@
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
-VIDEO_ENC_LOG_ENABLE := true
+#VIDEO_ENC_LOG_ENABLE := true
+#VIDEO_ENC_STATISTICS_ENABLE := true
LOCAL_SRC_FILES := \
VideoEncoderBase.cpp \
@@ -22,7 +23,8 @@
libcutils \
libva \
libva-android \
- libva-tpi
+ libva-tpi \
+ libintelmetadatabuffer
#LOCAL_CFLAGS += -DANDROID
@@ -37,7 +39,34 @@
LOCAL_CPPFLAGS += -DVIDEO_ENC_LOG_ENABLE
endif
+ifeq ($(VIDEO_ENC_STATISTICS_ENABLE),true)
+LOCAL_CPPFLAGS += -DVIDEO_ENC_STATISTICS_ENABLE
+endif
+
LOCAL_MODULE_TAGS := optional
LOCAL_MODULE := libva_videoencoder
include $(BUILD_SHARED_LIBRARY)
+
+# For libintelmetadatabuffer
+# =====================================================
+
+include $(CLEAR_VARS)
+
+VIDEO_ENC_LOG_ENABLE := true
+
+LOCAL_SRC_FILES := \
+ IntelMetadataBuffer.cpp
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)
+
+LOCAL_COPY_HEADERS_TO := libmix_videoencoder
+
+LOCAL_COPY_HEADERS := \
+ IntelMetadataBuffer.h
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libintelmetadatabuffer
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/videoencoder/IntelMetadataBuffer.cpp b/videoencoder/IntelMetadataBuffer.cpp
new file mode 100644
index 0000000..531d0ca
--- /dev/null
+++ b/videoencoder/IntelMetadataBuffer.cpp
@@ -0,0 +1,259 @@
+/*
+ * Copyright (c) 2007 Intel Corporation. All Rights Reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sub license, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial portions
+ * of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
+ * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#include "IntelMetadataBuffer.h"
+#include <string.h>
+#include <stdio.h>
+
+IntelMetadataBuffer::IntelMetadataBuffer()
+{
+ mType = MetadataBufferTypeCameraSource;
+ mValue = 0;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = 0;
+ mBytes = NULL;
+ mSize = 0;
+}
+
+IntelMetadataBuffer::IntelMetadataBuffer(MetadataBufferType type, int32_t value)
+{
+ mType = type;
+ mValue = value;
+ mInfo = NULL;
+ mExtraValues = NULL;
+ mExtraValues_Count = 0;
+ mBytes = NULL;
+ mSize = 0;
+}
+
+IntelMetadataBuffer::~IntelMetadataBuffer()
+{
+ if (mInfo)
+ delete mInfo;
+
+ if (mExtraValues)
+ delete[] mExtraValues;
+
+ if (mBytes)
+ delete[] mBytes;
+}
+
+IMB_Result IntelMetadataBuffer::GetType(MetadataBufferType& type)
+{
+ type = mType;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetType(MetadataBufferType type)
+{
+ if (type < MetadataBufferTypeLast)
+ mType = type;
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetValue(int32_t& value)
+{
+ value = mValue;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetValue(int32_t value)
+{
+ mValue = value;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetValueInfo(ValueInfo* &info)
+{
+ info = mInfo;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetValueInfo(ValueInfo* info)
+{
+ if (info)
+ {
+ if (mInfo == NULL)
+ mInfo = new ValueInfo;
+
+ memcpy(mInfo, info, sizeof(ValueInfo));
+ }
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetExtraValues(int32_t* &values, uint32_t& num)
+{
+ values = mExtraValues;
+ num = mExtraValues_Count;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetExtraValues(int32_t* values, uint32_t num)
+{
+ if (values && num > 0)
+ {
+ if (mExtraValues && mExtraValues_Count != num)
+ {
+ delete[] mExtraValues;
+ mExtraValues = NULL;
+ }
+
+ if (mExtraValues == NULL)
+ mExtraValues = new int32_t[num];
+
+ memcpy(mExtraValues, values, sizeof(int32_t) * num);
+ mExtraValues_Count = num;
+ }
+ else
+ return IMB_INVAL_PARAM;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::SetBytes(uint8_t* data, uint32_t size)
+{
+ if (!data || size == 0)
+ return IMB_INVAL_PARAM;
+
+ MetadataBufferType type;
+ int32_t value;
+ uint32_t extrasize = size - 8;
+ ValueInfo* info = NULL;
+ int32_t* ExtraValues = NULL;
+ uint32_t ExtraValues_Count = 0;
+
+ memcpy(&type, data, 4);
+ data += 4;
+ memcpy(&value, data, 4);
+ data += 4;
+
+ switch (type)
+ {
+ case MetadataBufferTypeCameraSource:
+ case MetadataBufferTypeEncoder:
+ case MetadataBufferTypeUser:
+ {
+ if (extrasize >0 && extrasize < sizeof(ValueInfo))
+ return IMB_INVAL_BUFFER;
+
+ if (extrasize > sizeof(ValueInfo)) //has extravalues
+ {
+ if ( (extrasize - sizeof(ValueInfo)) % 4 != 0 )
+ return IMB_INVAL_BUFFER;
+ ExtraValues_Count = (extrasize - sizeof(ValueInfo)) / 4;
+ }
+
+ if (extrasize > 0)
+ {
+ info = new ValueInfo;
+ memcpy(info, data, sizeof(ValueInfo));
+ data += sizeof(ValueInfo);
+ }
+
+ if (ExtraValues_Count > 0)
+ {
+ ExtraValues = new int32_t[ExtraValues_Count];
+ memcpy(ExtraValues, data, ExtraValues_Count * 4);
+ }
+
+ break;
+ }
+ case MetadataBufferTypeGrallocSource:
+ if (extrasize > 0)
+ return IMB_INVAL_BUFFER;
+
+ break;
+ default:
+ return IMB_INVAL_BUFFER;
+ }
+
+ //store data
+ mType = type;
+ mValue = value;
+ if (mInfo)
+ delete mInfo;
+ mInfo = info;
+ if (mExtraValues)
+ delete[] mExtraValues;
+ mExtraValues = ExtraValues;
+ mExtraValues_Count = ExtraValues_Count;
+
+ return IMB_SUCCESS;
+}
+
+IMB_Result IntelMetadataBuffer::GetBytes(uint8_t* &data, uint32_t& size)
+{
+ if (mBytes == NULL)
+ {
+ if (mType == MetadataBufferTypeGrallocSource && mInfo)
+ return IMB_INVAL_PARAM;
+
+ //assemble bytes according members
+ mSize = 8;
+ if (mInfo)
+ {
+ mSize += sizeof(ValueInfo);
+ if (mExtraValues)
+ mSize += 4 * mExtraValues_Count;
+ }
+
+ mBytes = new uint8_t[mSize];
+ uint8_t *ptr = mBytes;
+ memcpy(ptr, &mType, 4);
+ ptr += 4;
+ memcpy(ptr, &mValue, 4);
+ ptr += 4;
+
+ if (mInfo)
+ {
+ memcpy(ptr, mInfo, sizeof(ValueInfo));
+ ptr += sizeof(ValueInfo);
+
+ if (mExtraValues)
+ memcpy(ptr, mExtraValues, mExtraValues_Count * 4);
+ }
+ }
+
+ data = mBytes;
+ size = mSize;
+
+ return IMB_SUCCESS;
+}
+
+uint32_t IntelMetadataBuffer::GetMaxBufferSize()
+{
+ return 256;
+}
diff --git a/videoencoder/IntelMetadataBuffer.h b/videoencoder/IntelMetadataBuffer.h
new file mode 100644
index 0000000..802ef7b
--- /dev/null
+++ b/videoencoder/IntelMetadataBuffer.h
@@ -0,0 +1,106 @@
+/*
+ * Copyright (c) 2007 Intel Corporation. All Rights Reserved.
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sub license, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial portions
+ * of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
+ * IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+#ifndef _INTEL_METADATA_BUFFER_H_
+#define _INTEL_METADATA_BUFFER_H_
+
+#include <stdint.h>
+
+#define STRING_TO_FOURCC(format) ((uint32_t)(((format)[0])|((format)[1]<<8)|((format)[2]<<16)|((format)[3]<<24)))
+
+typedef enum { //result codes returned by all IntelMetadataBuffer methods
+ IMB_SUCCESS = 0,
+ IMB_INVAL_PARAM = 1, //bad argument or inconsistent object state
+ IMB_INVAL_BUFFER = 2, //malformed or unsupported serialized buffer
+}IMB_Result;
+
+typedef enum { //bit flags describing the memory type backing a frame buffer
+ MEM_MODE_MALLOC = 1, //heap memory
+ MEM_MODE_CI = 2, //camera interface buffer
+ MEM_MODE_V4L2 = 4, //V4L2 buffer
+ MEM_MODE_SURFACE = 8, //VA surface
+ MEM_MODE_USRPTR = 16, //user-space pointer
+ MEM_MODE_GFXHANDLE = 32, //graphics (gralloc) handle
+ MEM_MODE_KBUFHANDLE = 64, //kernel buffer handle
+ MEM_MODE_ION = 128, //ION buffer
+}MemMode;
+
+typedef struct { //memory layout description for the buffer referenced by a metadata value/handle
+ MemMode mode; //memory type, vasurface/malloc/gfx/ion/v4l2/ci etc
+ uint32_t handle; //handle (e.g. the VADisplay for surface sharing - see encoder usage)
+ uint32_t size; //memory size in bytes
+ uint32_t width; //picture width
+ uint32_t height; //picture height
+ uint32_t lumaStride; //picture luma stride
+ uint32_t chromStride; //picture chroma stride
+ uint32_t format; //color format fourcc (e.g. VA_FOURCC_NV12)
+ uint32_t s3dformat; //S3D format; encoder paths set 0xFFFFFFFF when unused
+}ValueInfo;
+
+typedef enum { //identifies which component produced the metadata buffer
+ MetadataBufferTypeCameraSource = 0, //for CameraSource
+ MetadataBufferTypeGrallocSource = 1, //for SurfaceMediaSource
+ MetadataBufferTypeEncoder = 2, //for WiDi clone mode
+ MetadataBufferTypeUser = 3, //for WiDi user mode
+ MetadataBufferTypeLast = 4, //type number, not a valid type itself
+}MetadataBufferType;
+
+class IntelMetadataBuffer { //(de)serializes frame-buffer metadata: type + value + optional ValueInfo and extra int32 values
+public:
+ IntelMetadataBuffer(); //for generator
+ IntelMetadataBuffer(MetadataBufferType type, int32_t value); //for quick generator
+ ~IntelMetadataBuffer();
+
+ IMB_Result GetType(MetadataBufferType &type);
+ IMB_Result SetType(MetadataBufferType type);
+ IMB_Result GetValue(int32_t &value);
+ IMB_Result SetValue(int32_t value);
+ IMB_Result GetValueInfo(ValueInfo* &info);
+ IMB_Result SetValueInfo(ValueInfo *info);
+ IMB_Result GetExtraValues(int32_t* &values, uint32_t &num);
+ IMB_Result SetExtraValues(int32_t *values, uint32_t num);
+
+ //for bytes input, also for parser; validates and copies the serialized stream
+ IMB_Result SetBytes(uint8_t* data, uint32_t size);
+
+ //for bytes output, also for generator; returned pointer stays owned by this object
+ IMB_Result GetBytes(uint8_t* &data, uint32_t& size);
+
+ //Static, for get max IntelMetadataBuffer size
+ static uint32_t GetMaxBufferSize();
+
+private:
+ MetadataBufferType mType; //buffer source/kind
+ int32_t mValue; //handle or pointer value; meaning depends on mType
+ ValueInfo* mInfo; //optional layout info; owned (deleted when replaced)
+
+ int32_t* mExtraValues; //optional extra int32 payload; owned (delete[] when replaced)
+ uint32_t mExtraValues_Count;
+
+ uint8_t* mBytes; //cached serialized form built by GetBytes()
+ uint32_t mSize; //size of mBytes in bytes
+};
+
+#endif
+
diff --git a/videoencoder/VideoEncoderAVC.cpp b/videoencoder/VideoEncoderAVC.cpp
index 2ee25f8..9930b99 100644
--- a/videoencoder/VideoEncoderAVC.cpp
+++ b/videoencoder/VideoEncoderAVC.cpp
@@ -875,8 +875,8 @@
LOG_V( "Begin\n\n");
// set picture params for HW
- avcPicParams.ReferenceFrames[0].picture_id= mRefFrame->surface;
- avcPicParams.CurrPic.picture_id= mRecFrame->surface;
+ avcPicParams.ReferenceFrames[0].picture_id= mRefSurface;
+ avcPicParams.CurrPic.picture_id= mRecSurface;
avcPicParams.coded_buf = mVACodedBuffer [mCodedBufIndex];
//avcPicParams.picture_width = mComParams.resolution.width;
//avcPicParams.picture_height = mComParams.resolution.height;
diff --git a/videoencoder/VideoEncoderBase.cpp b/videoencoder/VideoEncoderBase.cpp
index 287b8c2..2dea07e 100644
--- a/videoencoder/VideoEncoderBase.cpp
+++ b/videoencoder/VideoEncoderBase.cpp
@@ -8,6 +8,7 @@
#include <string.h>
#include "VideoEncoderLog.h"
#include "VideoEncoderBase.h"
+#include "IntelMetadataBuffer.h"
#include <va/va_tpi.h>
#include <va/va_android.h>
@@ -34,7 +35,6 @@
VideoEncoderBase::VideoEncoderBase()
:mInitialized(false)
,mVADisplay(NULL)
- ,mVADecoderDisplay(NULL)
,mVAContext(0)
,mVAConfig(0)
,mVAEntrypoint(VAEntrypointEncSlice)
@@ -42,10 +42,6 @@
,mOffsetInSeg(0)
,mTotalSize(0)
,mTotalSizeCopied(0)
- ,mBufferMode(BUFFER_SHARING_NONE)
- ,mUpstreamBufferList(NULL)
- ,mUpstreamBufferCnt(0)
- ,mBufAttrib(NULL)
,mForceKeyFrame(false)
,mNewHeader(false)
,mFirstFrame (true)
@@ -60,17 +56,13 @@
,mSeqParamBuf(0)
,mPicParamBuf(0)
,mSliceParamBuf(0)
- ,mSharedSurfaces(NULL)
,mSurfaces(NULL)
,mSurfaceCnt(0)
- ,mSharedSurfacesCnt(0)
- ,mReqSurfacesCnt(0)
- ,mUsrPtr(NULL)
- ,mVideoSrcBufferList(NULL)
- ,mCurFrame(NULL)
- ,mRefFrame(NULL)
- ,mRecFrame(NULL)
- ,mLastFrame(NULL)
+ ,mSrcSurfaceMapList(NULL)
+ ,mCurSurface(VA_INVALID_SURFACE)
+ ,mRefSurface(VA_INVALID_SURFACE)
+ ,mRecSurface(VA_INVALID_SURFACE)
+ ,mLastSurface(VA_INVALID_SURFACE)
,mLastInputRawBuffer(NULL)
,mEncodedFrames(0)
,mFrameNum(0)
@@ -105,6 +97,12 @@
if (vaStatus != VA_STATUS_SUCCESS) {
LOG_E( "Failed vaInitialize, vaStatus = %d\n", vaStatus);
}
+
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ memset(&mVideoStat, 0, sizeof(VideoStatistics));
+ mVideoStat.min_encode_time = 0xFFFFFFFF;
+#endif
+
}
VideoEncoderBase::~VideoEncoderBase() {
@@ -123,27 +121,18 @@
Encode_Status ret = ENCODE_SUCCESS;
VAStatus vaStatus = VA_STATUS_SUCCESS;
- VASurfaceID *surfaces = NULL;
+ VASurfaceID surfaces[2];
+ int32_t index = -1;
+ SurfaceMap *map = mSrcSurfaceMapList;
VAConfigAttrib vaAttrib[2];
- uint32_t index;
uint32_t maxSize = 0;
- VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
- uint32_t normalSurfacesCnt = 2;
-
if (mInitialized) {
LOG_V("Encoder has been started\n");
return ENCODE_ALREADY_INIT;
}
- // For upstream allocates buffer, it is mandatory to set buffer mode
- // and for other stuff, it is optional
- // Different buffer mode will have different surface handling approach
-
- // mSharedSurfacesCnt is for upstream buffer allocation case
- mSharedSurfacesCnt = 0;
-
vaAttrib[0].type = VAConfigAttribRTFormat;
vaAttrib[1].type = VAConfigAttribRateControl;
vaAttrib[0].value = VA_RT_FORMAT_YUV420;
@@ -173,139 +162,31 @@
mRenderBitRate = true;
}
- LOG_I("mReqSurfacesCnt = %d\n", mReqSurfacesCnt);
- LOG_I("mUpstreamBufferCnt = %d\n", mUpstreamBufferCnt);
-
- if (mReqSurfacesCnt == 0) {
- switch (mBufferMode) {
- case BUFFER_SHARING_CI:
- case BUFFER_SHARING_V4L2:
- case BUFFER_SHARING_SURFACE:
- case BUFFER_SHARING_GFXHANDLE:
- case BUFFER_SHARING_KBUFHANDLE:
- {
- mSharedSurfacesCnt = mUpstreamBufferCnt;
- normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE;
-
- if (mSharedSurfacesCnt != 0) {
- mSharedSurfaces = new VASurfaceID[mSharedSurfacesCnt];
-
- if (mSharedSurfaces == NULL) {
- LOG_E("Failed allocate shared surface\n");
- ret = ENCODE_NO_MEMORY;
- goto CLEAN_UP;
- }
- } else {
- LOG_E("Set to upstream mode, but no upstream info, something is wrong");
- ret = ENCODE_FAIL;
- goto CLEAN_UP;
- }
- break;
- }
-
- default:
- {
- mBufferMode = BUFFER_SHARING_NONE;
- normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE;
- break;
- }
- }
- } else if (mReqSurfacesCnt == 1) {
- // TODO: Un-normal case,
- mBufferMode = BUFFER_SHARING_NONE;
- normalSurfacesCnt = VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE;
- } else {
- mBufferMode = BUFFER_SHARING_USRPTR;
- mUsrPtr = new uint8_t *[mReqSurfacesCnt];
- if (mUsrPtr == NULL) {
- LOG_E("Failed allocate memory\n");
- ret = ENCODE_NO_MEMORY;
- goto CLEAN_UP;
- }
- }
-
- LOG_E("mBufferMode = %d\n", mBufferMode);
-
- mSurfaceCnt = normalSurfacesCnt + mSharedSurfacesCnt + mReqSurfacesCnt;
-
- surfaces = new VASurfaceID[normalSurfacesCnt];
- if (surfaces == NULL) {
- LOG_E("Failed allocate surface\n");
- ret = ENCODE_NO_MEMORY;
- goto CLEAN_UP;
- }
-
- mSurfaces = new VASurfaceID[mSurfaceCnt] ;
- if (mSurfaces == NULL) {
- LOG_E("Failed allocate private surface\n");
- ret = ENCODE_NO_MEMORY;
- goto CLEAN_UP;
- }
-
+ LOG_V( "======VA CreateSurfaces for Rec/Ref frames ======\n");
vaStatus = vaCreateSurfaces(mVADisplay,VA_RT_FORMAT_YUV420, mComParams.resolution.width,
mComParams.resolution.height,
- surfaces, normalSurfacesCnt, NULL , 0);
+ surfaces, 2, NULL , 0);
CHECK_VA_STATUS_GOTO_CLEANUP("vaCreateSurfaces");
+ mRefSurface = surfaces[0];
+ mRecSurface = surfaces[1];
- switch (mBufferMode) {
- case BUFFER_SHARING_CI:
- ret = surfaceMappingForCIFrameList();
- CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForCIFrameList");
- break;
- case BUFFER_SHARING_V4L2:
- // To be develped
- break;
- case BUFFER_SHARING_SURFACE:
- ret = surfaceMappingForSurfaceList();
- CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForSurfaceList");
- break;
- case BUFFER_SHARING_GFXHANDLE:
- ret = surfaceMappingForGfxHandle();
- CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForGfxHandle");
- break;
- case BUFFER_SHARING_KBUFHANDLE:
- ret = surfaceMappingForKbufHandle();
- CHECK_ENCODE_STATUS_CLEANUP("surfaceMappingForKbufHandle");
- break;
- case BUFFER_SHARING_NONE:
- break;
- case BUFFER_SHARING_USRPTR: {
- videoSurfaceBuffer = mVideoSrcBufferList;
- index = 0;
- while (videoSurfaceBuffer != NULL) {
- mSurfaces[index] = videoSurfaceBuffer->surface;
- mUsrPtr [index] = videoSurfaceBuffer->usrptr;
- videoSurfaceBuffer = videoSurfaceBuffer->next;
- index ++;
- }
- }
- break;
- default:
- break;
+ //count total surface id already allocated
+ mSurfaceCnt = 2;
+
+ while(map) {
+ mSurfaceCnt ++;
+ map = map->next;
}
- for (index = 0; index < normalSurfacesCnt; index++) {
- mSurfaces[mReqSurfacesCnt + mSharedSurfacesCnt + index] = surfaces[index];
-
- videoSurfaceBuffer = new VideoEncSurfaceBuffer;
- if (videoSurfaceBuffer == NULL) {
- LOG_E( "new VideoEncSurfaceBuffer failed\n");
- return ENCODE_NO_MEMORY;
- }
-
- videoSurfaceBuffer->surface = surfaces[index];
- videoSurfaceBuffer->usrptr = NULL;
- videoSurfaceBuffer->index = mReqSurfacesCnt + mSharedSurfacesCnt + index;
- videoSurfaceBuffer->bufAvailable = true;
- videoSurfaceBuffer->next = NULL;
-
- mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer);
-
- videoSurfaceBuffer = NULL;
+ mSurfaces = new VASurfaceID[mSurfaceCnt];
+ map = mSrcSurfaceMapList;
+ while(map) {
+ mSurfaces[++index] = map->surface;
+ map->added = true;
+ map = map->next;
}
-
- LOG_V( "assign surface Done\n");
- LOG_I( "Created %d libva surfaces\n", mSurfaceCnt);
+ mSurfaces[++index] = mRefSurface;
+ mSurfaces[++index] = mRecSurface;
//Initialize and save the VA context ID
LOG_V( "vaCreateContext\n");
@@ -349,8 +230,6 @@
mInitialized = true;
}
- if (surfaces) delete []surfaces;
-
LOG_V( "end\n");
return ret;
}
@@ -363,13 +242,42 @@
}
CHECK_NULL_RETURN_IFFAIL(inBuffer);
+
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ struct timespec ts1;
+ clock_gettime(CLOCK_MONOTONIC, &ts1);
+
+#endif
+
+ Encode_Status status;
+
if (mComParams.syncEncMode) {
LOG_I("Sync Enocde Mode, no optimization, no one frame delay\n");
- return syncEncode(inBuffer);
+ status = syncEncode(inBuffer);
} else {
LOG_I("Async Enocde Mode, HW/SW works in parallel, introduce one frame delay\n");
- return asyncEncode(inBuffer);
+ status = asyncEncode(inBuffer);
}
+
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ struct timespec ts2;
+ clock_gettime(CLOCK_MONOTONIC, &ts2);
+
+ uint32_t encode_time = (ts2.tv_sec - ts1.tv_sec) * 1000000 + (ts2.tv_nsec - ts1.tv_nsec) / 1000;
+ if (encode_time > mVideoStat.max_encode_time) {
+ mVideoStat.max_encode_time = encode_time;
+ mVideoStat.max_encode_frame = mFrameNum;
+ }
+
+ if (encode_time < mVideoStat.min_encode_time) {
+ mVideoStat.min_encode_time = encode_time;
+ mVideoStat.min_encode_frame = mFrameNum;
+ }
+
+ mVideoStat.average_encode_time += encode_time;
+#endif
+
+ return status;
}
Encode_Status VideoEncoderBase::asyncEncode(VideoEncRawBuffer *inBuffer) {
@@ -390,9 +298,10 @@
// Start encoding process
LOG_V( "vaBeginPicture\n");
LOG_I( "mVAContext = 0x%08x\n",(uint32_t) mVAContext);
- LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurFrame->surface);
+ LOG_I( "Surface = 0x%08x\n",(uint32_t) mCurSurface);
LOG_I( "mVADisplay = 0x%08x\n",(uint32_t)mVADisplay);
+#if 0
#ifdef DUMP_SRC_DATA
if (mBufferMode == BUFFER_SHARING_SURFACE && mFirstFrame){
@@ -403,7 +312,7 @@
uint32_t stride = 0;
uint32_t frameSize = 0;
- vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &image);
+ vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &image);
CHECK_VA_STATUS_RETURN("vaDeriveImage");
LOG_V( "vaDeriveImage Done\n");
@@ -437,8 +346,9 @@
CHECK_VA_STATUS_RETURN("vaDestroyImage");
}
#endif
+#endif
- vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface);
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
CHECK_VA_STATUS_RETURN("vaBeginPicture");
ret = sendEncodeCommand();
@@ -454,8 +364,8 @@
decideFrameType();
}
- LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastFrame->surface);
- vaStatus = vaSyncSurface(mVADisplay, mLastFrame->surface);
+ LOG_I ("vaSyncSurface ID = 0x%08x\n", mLastSurface);
+ vaStatus = vaSyncSurface(mVADisplay, mLastSurface);
if (vaStatus != VA_STATUS_SUCCESS) {
LOG_W( "Failed vaSyncSurface\n");
}
@@ -469,7 +379,7 @@
vaStatus = vaUnmapBuffer(mVADisplay, mOutCodedBuffer);
if (mFirstFrame) {
- vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface);
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
CHECK_VA_STATUS_RETURN("vaBeginPicture");
ret = sendEncodeCommand();
@@ -483,18 +393,19 @@
// Query the status of current surface
VASurfaceStatus vaSurfaceStatus;
- vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastFrame->surface, &vaSurfaceStatus);
+ vaStatus = vaQuerySurfaceStatus(mVADisplay, mLastSurface, &vaSurfaceStatus);
CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
mPicSkipped = vaSurfaceStatus & VASurfaceSkipped;
- if (!mFirstFrame) {
- VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame);
- }
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ if (mPicSkipped)
+ mVideoStat.skipped_frames ++;
+#endif
- mLastFrame = NULL;
+ mLastSurface = VA_INVALID_SURFACE;
updateProperities();
- mCurFrame = NULL;
+ mCurSurface = VA_INVALID_SURFACE;
if (mLastInputRawBuffer) mLastInputRawBuffer->bufAvailable = true;
@@ -511,7 +422,7 @@
Encode_Status ret = ENCODE_SUCCESS;
VAStatus vaStatus = VA_STATUS_SUCCESS;
uint8_t *buf = NULL;
- VideoEncSurfaceBuffer *tmpFrame = NULL;
+ VASurfaceID tmpSurface = VA_INVALID_SURFACE;
inBuffer->bufAvailable = false;
if (mNewHeader) mFrameNum = 0;
@@ -522,7 +433,7 @@
ret = manageSrcSurface(inBuffer);
CHECK_ENCODE_STATUS_RETURN("manageSrcSurface");
- vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurFrame->surface);
+ vaStatus = vaBeginPicture(mVADisplay, mVAContext, mCurSurface);
CHECK_VA_STATUS_RETURN("vaBeginPicture");
ret = sendEncodeCommand();
@@ -531,8 +442,8 @@
vaStatus = vaEndPicture(mVADisplay, mVAContext);
CHECK_VA_STATUS_RETURN("vaEndPicture");
- LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurFrame->surface);
- vaStatus = vaSyncSurface(mVADisplay, mCurFrame->surface);
+ LOG_I ("vaSyncSurface ID = 0x%08x\n", mCurSurface);
+ vaStatus = vaSyncSurface(mVADisplay, mCurSurface);
if (vaStatus != VA_STATUS_SUCCESS) {
LOG_W( "Failed vaSyncSurface\n");
}
@@ -547,23 +458,27 @@
// Query the status of current surface
VASurfaceStatus vaSurfaceStatus;
- vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurFrame->surface, &vaSurfaceStatus);
+ vaStatus = vaQuerySurfaceStatus(mVADisplay, mCurSurface, &vaSurfaceStatus);
CHECK_VA_STATUS_RETURN("vaQuerySurfaceStatus");
mPicSkipped = vaSurfaceStatus & VASurfaceSkipped;
- VideoEncoderBase::appendVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame);
- mCurFrame = NULL;
+ mCurSurface = 0;
mEncodedFrames ++;
mFrameNum ++;
if (!mPicSkipped) {
- tmpFrame = mRecFrame;
- mRecFrame = mRefFrame;
- mRefFrame = tmpFrame;
+ tmpSurface = mRecSurface;
+ mRecSurface = mRefSurface;
+ mRefSurface = tmpSurface;
}
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ if (mPicSkipped)
+ mVideoStat.skipped_frames ++;
+#endif
+
inBuffer->bufAvailable = true;
return ENCODE_SUCCESS;
}
@@ -651,24 +566,6 @@
LOG_V( "Begin\n");
- // put reconstructed surface back to list
- if (mRecFrame != NULL) {
- appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
- mRecFrame = NULL;
- }
-
- // put reference surface back to list
- if (mRefFrame != NULL) {
- appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
- mRefFrame = NULL;
- }
-
- // Here this raw buffer means the surface being encoding
- if (mLastInputRawBuffer) {
- mLastInputRawBuffer->bufAvailable = true;
- mLastInputRawBuffer = NULL;
- }
-
// reset the properities
mEncodedFrames = 0;
mFrameNum = 0;
@@ -682,37 +579,15 @@
VAStatus vaStatus = VA_STATUS_SUCCESS;
Encode_Status ret = ENCODE_SUCCESS;
- VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
- VideoEncSurfaceBuffer *tmpBuffer = NULL;
-
+ SurfaceMap *map = NULL;
LOG_V( "Begin\n");
- if (mSharedSurfaces) {
- delete [] mSharedSurfaces;
- mSharedSurfaces = NULL;
- }
-
if (mSurfaces) {
delete [] mSurfaces;
mSurfaces = NULL;
}
- if (mUsrPtr) {
- delete [] mUsrPtr;
- mUsrPtr = NULL;
- }
-
- if (mUpstreamBufferList) {
- delete [] mUpstreamBufferList;
- mUpstreamBufferList = NULL;
- }
-
- if (mBufAttrib) {
- delete mBufAttrib;
- mBufAttrib = NULL;
- }
-
// It is possible that above pointers have been allocated
// before we set mInitialized to true
if (!mInitialized) {
@@ -720,27 +595,6 @@
return ENCODE_SUCCESS;
}
- LOG_V( "Release frames\n");
-
- // put reconstructed surface back to list
- if (mRecFrame != NULL) {
- appendVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
- mRecFrame = NULL;
- }
-
- // put reference surface back to list
- if (mRefFrame != NULL) {
- appendVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
- mRefFrame = NULL;
- }
-
- // put Source surface back to list
- if (mLastFrame != NULL) {
- appendVideoSurfaceBuffer(mVideoSrcBufferList, mLastFrame);
- mLastFrame = NULL;
- }
-
- LOG_V( "Release surfaces\n");
LOG_V( "vaDestroyContext\n");
vaStatus = vaDestroyContext(mVADisplay, mVAContext);
CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyContext");
@@ -749,19 +603,28 @@
vaStatus = vaDestroyConfig(mVADisplay, mVAConfig);
CHECK_VA_STATUS_GOTO_CLEANUP("vaDestroyConfig");
- // Release Src Surface Buffer List
- LOG_V( "Rlease Src Surface Buffer \n");
+ // Release Src Surface Buffer Map
+ LOG_V( "Rlease Src Surface Map\n");
- videoSurfaceBuffer = mVideoSrcBufferList;
-
- while (videoSurfaceBuffer != NULL) {
- tmpBuffer = videoSurfaceBuffer;
- videoSurfaceBuffer = videoSurfaceBuffer->next;
- delete tmpBuffer;
+ map = mSrcSurfaceMapList;
+ while(map) {
+ if (! map->added) {
+ //destroy surface by itself
+ LOG_V( "Rlease Src Surface Buffer not added into vaContext\n");
+ vaDestroySurfaces(mVADisplay, &map->surface, 1);
+ }
+ SurfaceMap *tmp = map;
+ map = map->next;
+ delete tmp;
}
CLEAN_UP:
mInitialized = false;
+
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ memset(&mVideoStat, 0, sizeof(VideoStatistics));
+ mVideoStat.min_encode_time = 0xFFFFFFFF;
+#endif
LOG_V( "end\n");
return ret;
}
@@ -948,6 +811,8 @@
mHrdParam.bufferSize = 0;
mHrdParam.initBufferFullness = 0;
+
+ mStoreMetaDataInBuffers.isEnabled = false;
}
Encode_Status VideoEncoderBase::setParameters(
@@ -1010,6 +875,19 @@
break;
}
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ mStoreMetaDataInBuffers.isEnabled = metadata->isEnabled;
+
+ break;
+ }
+
case VideoParamsTypeAVC:
case VideoParamsTypeH263:
case VideoParamsTypeMP4:
@@ -1020,7 +898,7 @@
default: {
LOG_E ("Wrong ParamType here\n");
- break;
+ return ENCODE_INVALID_PARAMS;
}
}
return ret;
@@ -1083,6 +961,19 @@
break;
}
+ case VideoParamsTypeStoreMetaDataInBuffers: {
+ VideoParamsStoreMetaDataInBuffers *metadata =
+ reinterpret_cast <VideoParamsStoreMetaDataInBuffers *> (videoEncParams);
+
+ if (metadata->size != sizeof (VideoParamsStoreMetaDataInBuffers)) {
+ return ENCODE_INVALID_PARAMS;
+ }
+
+ metadata->isEnabled = mStoreMetaDataInBuffers.isEnabled;
+
+ break;
+ }
+
case VideoParamsTypeAVC:
case VideoParamsTypeH263:
case VideoParamsTypeMP4:
@@ -1303,7 +1194,7 @@
void VideoEncoderBase:: updateProperities () {
- VideoEncSurfaceBuffer *tmpFrame = NULL;
+ VASurfaceID tmp = VA_INVALID_SURFACE;
LOG_V( "Begin\n");
mEncodedFrames ++;
@@ -1312,12 +1203,12 @@
mCodedBufIndex ++;
mCodedBufIndex %=2;
- mLastFrame = mCurFrame;
+ mLastSurface = mCurSurface;
if (!mPicSkipped) {
- tmpFrame = mRecFrame;
- mRecFrame = mRefFrame;
- mRefFrame = tmpFrame;
+ tmp = mRecSurface;
+ mRecSurface = mRefSurface;
+ mRefSurface = tmp;
}
LOG_V( "End\n");
@@ -1361,6 +1252,25 @@
return ENCODE_SUCCESS;
}
+Encode_Status VideoEncoderBase::getStatistics (VideoStatistics *videoStat) { //report encode-time stats gathered when VIDEO_ENC_STATISTICS_ENABLE is defined
+
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ if (videoStat != NULL) {
+ videoStat->total_frames = mEncodedFrames;
+ videoStat->skipped_frames = mVideoStat.skipped_frames;
+ videoStat->average_encode_time = (mEncodedFrames == 0) ? 0 : mVideoStat.average_encode_time / mEncodedFrames; //guard divide-by-zero before first encoded frame
+ videoStat->max_encode_time = mVideoStat.max_encode_time;
+ videoStat->max_encode_frame = mVideoStat.max_encode_frame;
+ videoStat->min_encode_time = mVideoStat.min_encode_time;
+ videoStat->min_encode_frame = mVideoStat.min_encode_frame;
+ }
+
+ return ENCODE_SUCCESS;
+#else
+ return ENCODE_NOT_SUPPORTED;
+#endif
+}
+
Encode_Status VideoEncoderBase::getNewUsrptrFromSurface(
uint32_t width, uint32_t height, uint32_t format,
uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr) {
@@ -1372,7 +1282,7 @@
VAImage image;
uint32_t index = 0;
- VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
+ SurfaceMap *map = NULL;
LOG_V( "Begin\n");
@@ -1390,24 +1300,24 @@
// Current only NV12 is supported in VA API
// Through format we can get known the number of planes
if (format != STRING_TO_FOURCC("NV12")) {
-
LOG_W ("Format is not supported\n");
return ENCODE_NOT_SUPPORTED;
}
- VASurfaceAttributeTPI *attribute_tpi = new VASurfaceAttributeTPI;
- attribute_tpi->size = expectedSize;
- attribute_tpi->luma_stride = width;
- attribute_tpi->chroma_u_stride = width;
- attribute_tpi->chroma_v_stride = width;
- attribute_tpi->luma_offset = 0;
- attribute_tpi->chroma_u_offset = width*height;
- attribute_tpi->chroma_v_offset = width*height;
- attribute_tpi->pixel_format = VA_FOURCC_NV12;
- attribute_tpi->type = VAExternalMemoryNULL;
+ VASurfaceAttributeTPI attribute_tpi;
+
+ attribute_tpi.size = expectedSize;
+ attribute_tpi.luma_stride = width;
+ attribute_tpi.chroma_u_stride = width;
+ attribute_tpi.chroma_v_stride = width;
+ attribute_tpi.luma_offset = 0;
+ attribute_tpi.chroma_u_offset = width*height;
+ attribute_tpi.chroma_v_offset = width*height;
+ attribute_tpi.pixel_format = VA_FOURCC_NV12;
+ attribute_tpi.type = VAExternalMemoryNULL;
vaCreateSurfacesWithAttribute(mVADisplay, width, height, VA_RT_FORMAT_YUV420,
- 1, &surface, attribute_tpi);
+ 1, &surface, &attribute_tpi);
CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
vaStatus = vaDeriveImage(mVADisplay, surface, &image);
@@ -1428,19 +1338,28 @@
*outsize = image.data_size;
*stride = image.pitches[0];
- videoSurfaceBuffer = new VideoEncSurfaceBuffer;
- if (videoSurfaceBuffer == NULL) {
- LOG_E( "new VideoEncSurfaceBuffer failed\n");
+ map = new SurfaceMap;
+ if (map == NULL) {
+ LOG_E( "new SurfaceMap failed\n");
return ENCODE_NO_MEMORY;
}
- videoSurfaceBuffer->surface = surface;
- videoSurfaceBuffer->usrptr = *usrptr;
- videoSurfaceBuffer->index = mReqSurfacesCnt;
- videoSurfaceBuffer->bufAvailable = true;
- videoSurfaceBuffer->next = NULL;
+ map->surface = surface;
+ map->type = MetadataBufferTypeEncoder;
+ map->value = (int32_t)*usrptr;
+ map->vinfo.mode = (MemMode)MEM_MODE_USRPTR;
+ map->vinfo.handle = 0;
+ map->vinfo.size = 0;
+ map->vinfo.width = width;
+ map->vinfo.height = height;
+ map->vinfo.lumaStride = width;
+ map->vinfo.chromStride = width;
+ map->vinfo.format = VA_FOURCC_NV12;
+ map->vinfo.s3dformat = 0xffffffff;
+ map->added = false;
+ map->next = NULL;
- mVideoSrcBufferList = appendVideoSurfaceBuffer(mVideoSrcBufferList, videoSurfaceBuffer);
+ mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
LOG_I( "surface = 0x%08x\n",(uint32_t)surface);
LOG_I("image->pitches[0] = %d\n", image.pitches[0]);
@@ -1453,10 +1372,7 @@
LOG_I ("data_size = %d\n", image.data_size);
LOG_I ("usrptr = 0x%p\n", *usrptr);
- LOG_I ("mReqSurfacesCnt = %d\n", mReqSurfacesCnt);
- LOG_I ("videoSurfaceBuffer->usrptr = 0x%p\n ", videoSurfaceBuffer->usrptr);
-
- videoSurfaceBuffer = NULL;
+ LOG_I ("map->value = 0x%p\n ", (void *)map->value);
vaStatus = vaUnmapBuffer(mVADisplay, image.buf);
CHECK_VA_STATUS_RETURN("vaUnmapBuffer");
@@ -1473,12 +1389,7 @@
return ENCODE_FAIL;
}
- mReqSurfacesCnt ++;
ret = ENCODE_SUCCESS;
- if(attribute_tpi) {
- delete attribute_tpi;
- attribute_tpi = NULL;
- }
return ret;
}
@@ -1486,68 +1397,63 @@
Encode_Status VideoEncoderBase::setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer) {
+ Encode_Status status = ENCODE_SUCCESS;
+
CHECK_NULL_RETURN_IFFAIL(upStreamBuffer);
if (upStreamBuffer->bufCnt == 0) {
LOG_E("bufCnt == 0\n");
return ENCODE_FAIL;
}
- if (mUpstreamBufferList) delete [] mUpstreamBufferList;
- if (mBufAttrib) delete mBufAttrib;
-
- mUpstreamBufferCnt = upStreamBuffer->bufCnt;
- mVADecoderDisplay = upStreamBuffer->display;
- mBufferMode = upStreamBuffer->bufferMode;
- mBufAttrib = new ExternalBufferAttrib;
- if (!mBufAttrib) {
- LOG_E ("mBufAttrib NULL\n");
- return ENCODE_NO_MEMORY;
- }
-
- if (upStreamBuffer->bufAttrib) {
- memcpy(mBufAttrib, upStreamBuffer->bufAttrib, sizeof(ExternalBufferAttrib));
- } else {
+ if (upStreamBuffer->bufAttrib == NULL) {
LOG_E ("Buffer Attrib doesn't set by client, return error");
return ENCODE_INVALID_PARAMS;
}
- mUpstreamBufferList = new uint32_t [upStreamBuffer->bufCnt];
- if (!mUpstreamBufferList) {
- LOG_E ("mUpstreamBufferList NULL\n");
- return ENCODE_NO_MEMORY;
+ for(unsigned int i=0; i < upStreamBuffer->bufCnt; i++) {
+ if (findSurfaceMapByValue(mSrcSurfaceMapList, upStreamBuffer->bufList[i]) != NULL) //already mapped
+ continue;
+
+ //wrap upstream buffer into vaSurface
+ SurfaceMap *map = new SurfaceMap;
+
+ map->type = MetadataBufferTypeUser;
+ map->value = upStreamBuffer->bufList[i];
+ map->vinfo.mode = (MemMode)upStreamBuffer->bufferMode;
+ map->vinfo.handle = (uint32_t)upStreamBuffer->display;
+ map->vinfo.size = 0;
+ map->vinfo.width = upStreamBuffer->bufAttrib->realWidth;
+ map->vinfo.height = upStreamBuffer->bufAttrib->realHeight;
+ map->vinfo.lumaStride = upStreamBuffer->bufAttrib->lumaStride;
+ map->vinfo.chromStride = upStreamBuffer->bufAttrib->chromStride;
+ map->vinfo.format = upStreamBuffer->bufAttrib->format;
+ map->vinfo.s3dformat = 0xFFFFFFFF;
+ map->added = false;
+ map->next = NULL;
+ status = surfaceMapping(map);
+
+ if (status == ENCODE_SUCCESS)
+ mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+ else
+ delete map;
+
+ if (mSrcSurfaceMapList == NULL) {
+ LOG_E ("mSrcSurfaceMapList should not be NULL now, maybe meet mapping error\n");
+ return ENCODE_NO_MEMORY;
+ }
}
- memcpy(mUpstreamBufferList, upStreamBuffer->bufList, upStreamBuffer->bufCnt * sizeof (uint32_t));
- return ENCODE_SUCCESS;
+ return status;
}
+Encode_Status VideoEncoderBase::surfaceMappingForSurface(SurfaceMap *map) {
-Encode_Status VideoEncoderBase::generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr) {
+ if (!map)
+ return ENCODE_NULL_PTR;
- VideoEncSurfaceBuffer *videoSurfaceBuffer = NULL;
- videoSurfaceBuffer = new VideoEncSurfaceBuffer;
- if (videoSurfaceBuffer == NULL) {
- LOG_E( "new VideoEncSurfaceBuffer failed\n");
- return ENCODE_NO_MEMORY;
- }
-
- videoSurfaceBuffer->surface = mSharedSurfaces[index];
- videoSurfaceBuffer->usrptr = NULL;
- videoSurfaceBuffer->index = index;
- videoSurfaceBuffer->bufAvailable = true;
- videoSurfaceBuffer->next = NULL;
-
- mVideoSrcBufferList = appendVideoSurfaceBuffer
- (mVideoSrcBufferList, videoSurfaceBuffer);
- videoSurfaceBuffer = NULL;
-
- return ENCODE_SUCCESS;
-}
-
-Encode_Status VideoEncoderBase::surfaceMappingForSurfaceList() {
- uint32_t index;
VAStatus vaStatus = VA_STATUS_SUCCESS;
Encode_Status ret = ENCODE_SUCCESS;
+ VASurfaceID surface;
uint32_t fourCC = 0;
uint32_t lumaStride = 0;
@@ -1558,442 +1464,443 @@
uint32_t chromaVOffset = 0;
uint32_t kBufHandle = 0;
- VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI;
- if (vaSurfaceAttrib == NULL) {
- LOG_E("Failed to allocate VASurfaceAttrib\n");
- return ENCODE_NO_MEMORY;
- }
- vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt];
+ VASurfaceAttributeTPI vaSurfaceAttrib;
+ uint32_t buf;
- for (index = 0; index < mSharedSurfacesCnt; index++) {
+ vaSurfaceAttrib.buffers = &buf;
- vaStatus = vaLockSurface(
- mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index],
- &fourCC, &lumaStride, &chromaUStride, &chromaVStride,
- &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL);
+ vaStatus = vaLockSurface(
+ (VADisplay)map->vinfo.handle, (VASurfaceID)map->value,
+ &fourCC, &lumaStride, &chromaUStride, &chromaVStride,
+ &lumaOffset, &chromaUOffset, &chromaVOffset, &kBufHandle, NULL);
- CHECK_VA_STATUS_RETURN("vaLockSurface");
- LOG_I("Surface incoming = 0x%08x", mUpstreamBufferList[index]);
- LOG_I("lumaStride = %d", lumaStride);
- LOG_I("chromaUStride = %d", chromaUStride);
- LOG_I("chromaVStride = %d", chromaVStride);
- LOG_I("lumaOffset = %d", lumaOffset);
- LOG_I("chromaUOffset = %d", chromaUOffset);
- LOG_I("chromaVOffset = %d", chromaVOffset);
- LOG_I("kBufHandle = 0x%08x", kBufHandle);
- LOG_I("fourCC = %d", fourCC);
+ CHECK_VA_STATUS_RETURN("vaLockSurface");
+ LOG_I("Surface incoming = 0x%08x", map->value);
+ LOG_I("lumaStride = %d", lumaStride);
+ LOG_I("chromaUStride = %d", chromaUStride);
+ LOG_I("chromaVStride = %d", chromaVStride);
+ LOG_I("lumaOffset = %d", lumaOffset);
+ LOG_I("chromaUOffset = %d", chromaUOffset);
+ LOG_I("chromaVOffset = %d", chromaVOffset);
+ LOG_I("kBufHandle = 0x%08x", kBufHandle);
+ LOG_I("fourCC = %d", fourCC);
- vaStatus = vaUnlockSurface(mVADecoderDisplay, (VASurfaceID)mUpstreamBufferList[index]);
- CHECK_VA_STATUS_RETURN("vaUnlockSurface");
+ vaStatus = vaUnlockSurface((VADisplay)map->vinfo.handle, (VASurfaceID)map->value);
+ CHECK_VA_STATUS_RETURN("vaUnlockSurface");
- vaSurfaceAttrib->size = mComParams.resolution.width*mComParams.resolution.height*1.5;
- vaSurfaceAttrib->luma_stride = lumaStride;
- vaSurfaceAttrib->chroma_u_stride = chromaUStride;
- vaSurfaceAttrib->chroma_v_stride = chromaVStride;
- vaSurfaceAttrib->luma_offset = lumaOffset;
- vaSurfaceAttrib->chroma_u_offset = chromaUOffset;
- vaSurfaceAttrib->chroma_v_offset = chromaVOffset;
- vaSurfaceAttrib->buffers[0] = kBufHandle;
- vaSurfaceAttrib->pixel_format = fourCC;
- vaSurfaceAttrib->type = VAExternalMemoryKernelDRMBufffer;
+ vaSurfaceAttrib.count = 1;
+ vaSurfaceAttrib.size = mComParams.resolution.width * mComParams.resolution.height * 3 /2;
+ vaSurfaceAttrib.luma_stride = lumaStride;
+ vaSurfaceAttrib.chroma_u_stride = chromaUStride;
+ vaSurfaceAttrib.chroma_v_stride = chromaVStride;
+ vaSurfaceAttrib.luma_offset = lumaOffset;
+ vaSurfaceAttrib.chroma_u_offset = chromaUOffset;
+ vaSurfaceAttrib.chroma_v_offset = chromaVOffset;
+ vaSurfaceAttrib.buffers[0] = kBufHandle;
+ vaSurfaceAttrib.pixel_format = fourCC;
+ vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer;
- vaStatus = vaCreateSurfacesWithAttribute(
- mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420,
- 1 /*mSharedSurfacesCnt*/, &mSharedSurfaces[index], vaSurfaceAttrib);
+ vaStatus = vaCreateSurfacesWithAttribute(
+ mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420,
+ 1, &surface, &vaSurfaceAttrib);
- CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf");
+ CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf");
- LOG_I("Surface ID created from Kbuf = 0x%08x", mSharedSurfaces[index]);
+ LOG_I("Surface ID created from Kbuf = 0x%08x", surface);
- mSurfaces[index] = mSharedSurfaces[index];
- ret = generateVideoBufferAndAttachToList(index, NULL);
- CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList");
- }
+ map->surface = surface;
- if(vaSurfaceAttrib) {
- if(vaSurfaceAttrib->buffers) {
- delete [] vaSurfaceAttrib->buffers;
- vaSurfaceAttrib->buffers= NULL;
- }
- delete vaSurfaceAttrib;
- vaSurfaceAttrib = NULL;
- }
return ret;
}
-Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle() {
+Encode_Status VideoEncoderBase::surfaceMappingForGfxHandle(SurfaceMap *map) {
- uint32_t index;
+ if (!map)
+ return ENCODE_NULL_PTR;
+
VAStatus vaStatus = VA_STATUS_SUCCESS;
Encode_Status ret = ENCODE_SUCCESS;
+ VASurfaceID surface;
- VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI;
- if (vaSurfaceAttrib == NULL) {
- LOG_E("Failed to allocate VASurfaceAttrib\n");
- return ENCODE_NO_MEMORY;
- }
+ VASurfaceAttributeTPI vaSurfaceAttrib;
+ uint32_t buf;
- vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt];
- if (vaSurfaceAttrib->buffers == NULL) {
- LOG_E("Failed to allocate buffers for vaSurfaceAttrib\n");
- return ENCODE_NO_MEMORY;
- }
+ vaSurfaceAttrib.buffers = &buf;
- LOG_I("mSharedSurfacesCnt = %d\n", mSharedSurfacesCnt);
- LOG_I("lumaStride = %d\n", mBufAttrib->lumaStride);
- LOG_I("format = 0x%08x\n", mBufAttrib->format);
+ LOG_I("surfaceMappingForGfxHandle ......\n");
+ LOG_I("lumaStride = %d\n", map->vinfo.lumaStride);
+ LOG_I("format = 0x%08x\n", map->vinfo.format);
LOG_I("width = %d\n", mComParams.resolution.width);
LOG_I("height = %d\n", mComParams.resolution.height);
+ LOG_I("gfxhandle = %d\n", map->value);
- vaSurfaceAttrib->count = mSharedSurfacesCnt;
- vaSurfaceAttrib->luma_stride = mBufAttrib->lumaStride;
- vaSurfaceAttrib->pixel_format = mBufAttrib->format;
- vaSurfaceAttrib->width = mComParams.resolution.width;
- vaSurfaceAttrib->height = mComParams.resolution.height;
- vaSurfaceAttrib->type = VAExternalMemoryAndroidGrallocBuffer;
- for(index = 0; index < mSharedSurfacesCnt; index++) {
- vaSurfaceAttrib->buffers[index] = (uint32_t) mUpstreamBufferList[index];
- LOG_I("NativeHandleList[%d] = 0x%08x", index, mUpstreamBufferList[index]);
- }
+ vaSurfaceAttrib.count = 1;
+ vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride;
+ vaSurfaceAttrib.pixel_format = map->vinfo.format;
+ vaSurfaceAttrib.width = mComParams.resolution.width;
+ vaSurfaceAttrib.height = mComParams.resolution.height;
+ vaSurfaceAttrib.type = VAExternalMemoryAndroidGrallocBuffer;
+ vaSurfaceAttrib.buffers[0] = (uint32_t) map->value;
vaStatus = vaCreateSurfacesWithAttribute(
mVADisplay,
mComParams.resolution.width,
mComParams.resolution.height,
VA_RT_FORMAT_YUV420,
- mSharedSurfacesCnt,
- mSharedSurfaces,
- vaSurfaceAttrib);
+ 1,
+ &surface,
+ &vaSurfaceAttrib);
CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
LOG_V("Successfully create surfaces from native hanle");
- for(index = 0; index < mSharedSurfacesCnt; index++) {
- mSurfaces[index] = mSharedSurfaces[index];
- ret = generateVideoBufferAndAttachToList(index, NULL);
- LOG_I("mSurfaces[%d] = %08x", index, mSurfaces[index]);
- CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList");
- }
-
- if(vaSurfaceAttrib) {
- if(vaSurfaceAttrib->buffers) {
- delete [] vaSurfaceAttrib->buffers;
- vaSurfaceAttrib->buffers= NULL;
- }
- delete vaSurfaceAttrib;
- vaSurfaceAttrib = NULL;
- }
+ map->surface = surface;
LOG_V("surfaceMappingForGfxHandle: Done");
return ret;
}
-Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle() {
+Encode_Status VideoEncoderBase::surfaceMappingForKbufHandle(SurfaceMap *map) {
- uint32_t index;
+ if (!map)
+ return ENCODE_NULL_PTR;
+
+ LOG_I("surfaceMappingForKbufHandle value=%d\n", map->value);
VAStatus vaStatus = VA_STATUS_SUCCESS;
Encode_Status ret = ENCODE_SUCCESS;
+ VASurfaceID surface;
uint32_t lumaOffset = 0;
- uint32_t chromaUOffset = mBufAttrib->realHeight * mBufAttrib->lumaStride;
+ uint32_t chromaUOffset = map->vinfo.height * map->vinfo.lumaStride;
uint32_t chromaVOffset = chromaUOffset + 1;
- VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI;
- if (vaSurfaceAttrib == NULL) {
- LOG_E("Failed to allocate VASurfaceAttrib\n");
- return ENCODE_NO_MEMORY;
- }
- vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt];
+ VASurfaceAttributeTPI vaSurfaceAttrib;
+ uint32_t buf;
+
+ vaSurfaceAttrib.buffers = &buf;
- for (index = 0; index < mSharedSurfacesCnt; index++) {
- vaSurfaceAttrib->size = mBufAttrib->lumaStride * mComParams.resolution.height * 3 / 2;
- vaSurfaceAttrib->luma_stride = mBufAttrib->lumaStride;
- vaSurfaceAttrib->chroma_u_stride = mBufAttrib->chromStride;
- vaSurfaceAttrib->chroma_v_stride = mBufAttrib->chromStride;
- vaSurfaceAttrib->luma_offset = lumaOffset;
- vaSurfaceAttrib->chroma_u_offset = chromaUOffset;
- vaSurfaceAttrib->chroma_v_offset = chromaVOffset;
- vaSurfaceAttrib->buffers[0] = mUpstreamBufferList[index];
- vaSurfaceAttrib->pixel_format = mBufAttrib->format;
- vaSurfaceAttrib->type = VAExternalMemoryKernelDRMBufffer;
+ vaSurfaceAttrib.count = 1;
+ vaSurfaceAttrib.size = map->vinfo.lumaStride * mComParams.resolution.height * 3 / 2;
+ vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride;
+ vaSurfaceAttrib.chroma_u_stride = map->vinfo.chromStride;
+ vaSurfaceAttrib.chroma_v_stride = map->vinfo.chromStride;
+ vaSurfaceAttrib.luma_offset = lumaOffset;
+ vaSurfaceAttrib.chroma_u_offset = chromaUOffset;
+ vaSurfaceAttrib.chroma_v_offset = chromaVOffset;
+ vaSurfaceAttrib.buffers[0] = map->value;
+ vaSurfaceAttrib.pixel_format = map->vinfo.format;
+ vaSurfaceAttrib.type = VAExternalMemoryKernelDRMBufffer;
- vaStatus = vaCreateSurfacesWithAttribute(
- mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420,
- 1 /*mSharedSurfacesCnt*/, &mSharedSurfaces[index], vaSurfaceAttrib);
+ vaStatus = vaCreateSurfacesWithAttribute(
+ mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420,
+ 1, &surface, &vaSurfaceAttrib);
- CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf");
+ CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromKbuf");
- LOG_I("Surface ID created from Kbuf = 0x%08x", mSharedSurfaces[index]);
+ LOG_I("Surface ID created from Kbuf = 0x%08x", map->value);
- mSurfaces[index] = mSharedSurfaces[index];
- ret = generateVideoBufferAndAttachToList(index, NULL);
- CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList");
- }
+ map->surface = surface;
- if(vaSurfaceAttrib) {
- if(vaSurfaceAttrib->buffers) {
- delete [] vaSurfaceAttrib->buffers;
- vaSurfaceAttrib->buffers= NULL;
- }
- delete vaSurfaceAttrib;
- vaSurfaceAttrib = NULL;
- }
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::surfaceMappingForCI(SurfaceMap *map) {
+
+ if (!map)
+ return ENCODE_NULL_PTR;
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Encode_Status ret = ENCODE_SUCCESS;
+ VASurfaceID surface;
+
+ VASurfaceAttributeTPI vaSurfaceAttrib;
+ uint32_t buf;
+
+ vaSurfaceAttrib.buffers = &buf;
+
+ vaSurfaceAttrib.count = 1;
+ vaSurfaceAttrib.type = VAExternalMemoryCIFrame;
+ vaSurfaceAttrib.buffers[0] = (uint32_t)map->value;
+ vaStatus = vaCreateSurfacesWithAttribute(
+ mVADisplay,
+ mComParams.resolution.width,
+ mComParams.resolution.height,
+ VA_RT_FORMAT_YUV420,
+ 1,
+ &surface,
+ &vaSurfaceAttrib);
+ CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
+
+ map->surface = surface;
+
+ return ret;
+}
+
+Encode_Status VideoEncoderBase::surfaceMappingForMalloc(SurfaceMap *map) {
+
+ if (!map)
+ return ENCODE_NULL_PTR;
+
+ VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Encode_Status ret = ENCODE_SUCCESS;
+ VASurfaceID surface;
+
+ VASurfaceAttributeTPI vaSurfaceAttrib;
+ uint32_t buf;
+
+ vaSurfaceAttrib.buffers = &buf;
+
+ vaSurfaceAttrib.count = 1;
+ vaSurfaceAttrib.width = map->vinfo.width;
+ vaSurfaceAttrib.height = map->vinfo.height;
+ vaSurfaceAttrib.luma_stride = map->vinfo.lumaStride;
+ vaSurfaceAttrib.buffers[0] = map->value;
+ vaSurfaceAttrib.pixel_format = map->vinfo.format;
+ vaSurfaceAttrib.type = VAExternalMemoryUserPointer;
+
+ vaStatus = vaCreateSurfacesWithAttribute(
+ mVADisplay, mComParams.resolution.width, mComParams.resolution.height, VA_RT_FORMAT_YUV420,
+ 1, &surface, &vaSurfaceAttrib);
+
+ CHECK_VA_STATUS_RETURN("vaCreateSurfaceFromMalloc");
+
+ LOG_I("Surface ID created from Malloc = 0x%08x", map->value);
+
+ map->surface = surface;
return ret;
}
-Encode_Status VideoEncoderBase::surfaceMappingForCIFrameList() {
- uint32_t index;
- VAStatus vaStatus = VA_STATUS_SUCCESS;
- Encode_Status ret = ENCODE_SUCCESS;
- VASurfaceAttributeTPI * vaSurfaceAttrib = new VASurfaceAttributeTPI;
- if (vaSurfaceAttrib == NULL) {
- LOG_E("Failed to allocate VASurfaceAttrib\n");
- return ENCODE_NO_MEMORY;
- }
- vaSurfaceAttrib->type = VAExternalMemoryCIFrame;
- vaSurfaceAttrib->buffers = new uint32_t[mSharedSurfacesCnt];
-
- for (index = 0; index < mSharedSurfacesCnt; index++) {
- vaSurfaceAttrib->buffers[0] = (uint32_t)mUpstreamBufferCnt;
- vaStatus = vaCreateSurfacesWithAttribute(
- mVADisplay,
- mComParams.resolution.width,
- mComParams.resolution.height,
- VA_RT_FORMAT_YUV420,
- 1 /*mSharedSurfacesCnt*/,
- &mSharedSurfaces[index],
- vaSurfaceAttrib);
- CHECK_VA_STATUS_RETURN("vaCreateSurfacesWithAttribute");
- mSurfaces[index] = mSharedSurfaces[index];
+Encode_Status VideoEncoderBase::surfaceMapping(SurfaceMap *map) {
- ret = generateVideoBufferAndAttachToList(index, NULL);
- CHECK_ENCODE_STATUS_RETURN("generateVideoBufferAndAttachToList")
+ if (!map)
+ return ENCODE_NULL_PTR;
+
+ Encode_Status status;
+
+LOG_I("surfaceMapping mode=%d, format=%d, lumaStride=%d, width=%d, height=%d, value=%x\n", map->vinfo.mode, map->vinfo.format, map->vinfo.lumaStride, map->vinfo.width, map->vinfo.height, map->value);
+ switch (map->vinfo.mode) {
+ case MEM_MODE_CI:
+ status = surfaceMappingForCI(map);
+ break;
+ case MEM_MODE_SURFACE:
+ status = surfaceMappingForSurface(map);
+ break;
+ case MEM_MODE_GFXHANDLE:
+ status = surfaceMappingForGfxHandle(map);
+ break;
+ case MEM_MODE_KBUFHANDLE:
+ status = surfaceMappingForKbufHandle(map);
+ break;
+ case MEM_MODE_MALLOC:
+ status = surfaceMappingForMalloc(map);
+ break;
+ case MEM_MODE_ION:
+ case MEM_MODE_V4L2:
+ case MEM_MODE_USRPTR:
+ default:
+ status = ENCODE_NOT_SUPPORTED;
+ break;
}
-
- if(vaSurfaceAttrib) {
- if(vaSurfaceAttrib->buffers) {
- delete [] vaSurfaceAttrib->buffers;
- vaSurfaceAttrib->buffers= NULL;
- }
- delete vaSurfaceAttrib;
- vaSurfaceAttrib = NULL;
- }
- return ret;
+
+ return status;
}
Encode_Status VideoEncoderBase::manageSrcSurface(VideoEncRawBuffer *inBuffer) {
- Encode_Status ret = ENCODE_SUCCESS;
- VAStatus vaStatus = VA_STATUS_SUCCESS;
+ Encode_Status ret = ENCODE_SUCCESS;
+ MetadataBufferType type;
+ int32_t value;
+ ValueInfo vinfo;
+ ValueInfo *pvinfo = &vinfo;
+ int32_t *extravalues = NULL;
+ unsigned int extravalues_count = 0;
- uint32_t idx = 0;
- uint32_t bufIndex = 0;
- uint32_t data = 0;
-
- if (mBufferMode == BUFFER_SHARING_CI) {
-
- memcpy(&bufIndex, inBuffer->data, sizeof(unsigned int));
- // bufIndex = *(uint32_t*)inBuffer->data;
-
- LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt);
- LOG_I("bufIndex = %d\n", bufIndex);
-
- if (bufIndex > mSurfaceCnt - 2) {
- LOG_E("the CI frame idx is bigger than total CI frame count\n");
- ret = ENCODE_FAIL;
- return ret;
-
+ IntelMetadataBuffer *imb = new IntelMetadataBuffer;
+ SurfaceMap *map = NULL;
+
+ if (mStoreMetaDataInBuffers.isEnabled) {
+ //metadatabuffer mode
+ LOG_I("in metadata mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
+ if (imb->SetBytes(inBuffer->data, inBuffer->size) != IMB_SUCCESS) {
+ //fail to parse buffer
+ delete imb;
+ return ENCODE_NO_REQUEST_DATA;
}
- } else if (mBufferMode == BUFFER_SHARING_SURFACE ||
- mBufferMode == BUFFER_SHARING_GFXHANDLE ||
- mBufferMode == BUFFER_SHARING_KBUFHANDLE) {
-
- bufIndex = (uint32_t) -1;
- data = *(uint32_t*)inBuffer->data;
-
- LOG_I("data = 0x%08x\n", data);
-
- for (idx = 0; idx < mSharedSurfacesCnt; idx++) {
-
- LOG_I("mUpstreamBufferList[%d] = 0x%08x\n", idx, mUpstreamBufferList[idx]);
- if (data == mUpstreamBufferList[idx])
- bufIndex = idx;
+ imb->GetType(type);
+ imb->GetValue(value);
+ } else {
+ //raw mode
+ LOG_I("in raw mode, data=%p, size=%d\n", inBuffer->data, inBuffer->size);
+ if (! inBuffer->data || inBuffer->size == 0) {
+ delete imb;
+ return ENCODE_NULL_PTR;
}
- LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt);
- LOG_I("bufIndex = %d\n", bufIndex);
+ type = MetadataBufferTypeUser;
+ value = (int32_t)inBuffer->data;
+ }
+
+ //find if mapped
+ map = findSurfaceMapByValue(mSrcSurfaceMapList, value);
- if (bufIndex > mSurfaceCnt - 2) {
- LOG_E("Can't find the surface in our list\n");
- ret = ENCODE_FAIL;
- return ret;
- }
- }else if (mBufferMode == BUFFER_SHARING_USRPTR) {
+ if (map) {
+ //already mapped, get surface ID directly
+ LOG_I("direct find surface %d from value %x\n", map->surface, value);
+ mCurSurface = map->surface;
- bufIndex = (uint32_t) -1; //fixme, temp use a big value
-
- LOG_I("bufin->data = 0x%p\n", inBuffer->data);
-
- for (idx = 0; idx < mReqSurfacesCnt; idx++) {
- LOG_I("mUsrPtr[%d] = 0x%p\n", idx, mUsrPtr[idx]);
-
- if (inBuffer->data == mUsrPtr[idx])
- bufIndex = idx;
- }
-
- LOG_I("mSurfaceCnt = %d\n", mSurfaceCnt);
- LOG_I("bufIndex = %d\n", bufIndex);
-
- if (bufIndex > mSurfaceCnt - 2) {
- LOG_W("the Surface idx is too big, most likely the buffer passed in is not allocated by us\n");
- ret = ENCODE_FAIL;
- goto no_share_mode;
- }
+ delete imb;
+ return ret;
}
+ //if not found in the list, try to map the value using the provided parameters
+ LOG_I("cannot find surface in cache for value %x, start mapping if there is enough information\n", value);
- switch (mBufferMode) {
+ if (mStoreMetaDataInBuffers.isEnabled) {
+
+ //if type is MetadataBufferTypeGrallocSource, use default parameters
+ if (type == MetadataBufferTypeGrallocSource) {
+ vinfo.mode = MEM_MODE_GFXHANDLE;
+ vinfo.handle = 0;
+ vinfo.size = 0;
+ vinfo.width = mComParams.resolution.width;
+ vinfo.height = mComParams.resolution.height;
+ vinfo.lumaStride = mComParams.resolution.width;
+ vinfo.chromStride = mComParams.resolution.width;
+ vinfo.format = VA_FOURCC_NV12;
+ vinfo.s3dformat = 0xFFFFFFFF;
+ } else {
+ //get all info mapping needs
+ imb->GetValueInfo(pvinfo);
+ imb->GetExtraValues(extravalues, extravalues_count);
+ }
+
+ } else {
- case BUFFER_SHARING_CI:
- case BUFFER_SHARING_SURFACE:
- case BUFFER_SHARING_GFXHANDLE:
- case BUFFER_SHARING_KBUFHANDLE:
- case BUFFER_SHARING_USRPTR: {
-
- if (mRefFrame== NULL) {
- mRefFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt -1 );
- if (mRefFrame == NULL) {
- LOG_E ("No Surface buffer available, something should be wrong\n");
- return ENCODE_FAIL;
- }
- mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
-
- }
-
- if (mRecFrame== NULL) {
- mRecFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, mSurfaceCnt - 2);
- if (mRecFrame == NULL) {
- LOG_E ("No Surface buffer available, something should be wrong\n");
- return ENCODE_FAIL;
- }
- mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
-
- }
-
- if (mCurFrame== NULL) {
- mCurFrame = getVideoSurfaceBufferByIndex(mVideoSrcBufferList, bufIndex);
- if (mCurFrame == NULL) {
- LOG_E ("No Surface buffer available, something should be wrong\n");
- return ENCODE_FAIL;
- }
- mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame);
- }
- }
-
- break;
- case BUFFER_SHARING_V4L2:
- LOG_E("Not Implemented\n");
- break;
-
- case BUFFER_SHARING_NONE: {
-no_share_mode:
-
- if (mRefFrame== NULL) {
- mRefFrame = mVideoSrcBufferList;
- if (mRefFrame == NULL) {
- LOG_E("No Surface buffer available, something should be wrong\n");
- return ENCODE_FAIL;
- }
- mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRefFrame);
-
- }
-
- if (mRecFrame== NULL) {
- mRecFrame = mVideoSrcBufferList;
- if (mRecFrame == NULL) {
- LOG_E ("No Surface buffer available, something should be wrong\n");
- return ENCODE_FAIL;
- }
- mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mRecFrame);
-
- }
-
- if (mCurFrame== NULL) {
- mCurFrame = mVideoSrcBufferList;
- if (mCurFrame == NULL) {
- LOG_E ("No Surface buffer available, something should be wrong\n");
- return ENCODE_FAIL;
- }
- mVideoSrcBufferList = removeVideoSurfaceBuffer(mVideoSrcBufferList, mCurFrame);
- }
-
- LOG_V( "Get Surface Done\n");
- ret = uploadDataToSurface (inBuffer);
- CHECK_ENCODE_STATUS_RETURN("uploadDataToSurface");
- }
- break;
- default:
- break;
-
+ //raw mode
+ vinfo.mode = MEM_MODE_MALLOC;
+ vinfo.handle = 0;
+ vinfo.size = inBuffer->size;
+ vinfo.width = mComParams.resolution.width;
+ vinfo.height = mComParams.resolution.height;
+ vinfo.lumaStride = mComParams.resolution.width;
+ vinfo.chromStride = mComParams.resolution.width;
+ vinfo.format = VA_FOURCC_NV12;
+ vinfo.s3dformat = 0xFFFFFFFF;
}
- return ENCODE_SUCCESS;
+ /* Start mapping: if pvinfo is not NULL, there is enough info to map;
+ * if extravalues is not NULL, mapping needs to be performed multiple times
+ */
+ if (pvinfo){
+ //map according info, and add to surfacemap list
+ map = new SurfaceMap;
+ map->type = type;
+ map->value = value;
+ memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo));
+ map->added = false;
+ map->next = NULL;
+
+ ret = surfaceMapping(map);
+ if (ret == ENCODE_SUCCESS) {
+ LOG_I("surface mapping success, map value %x into surface %d\n", value, map->surface);
+ mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+ } else {
+ delete map;
+ delete imb;
+ LOG_E("surface mapping failed, wrong info or meet serious error\n");
+ return ret;
+ }
+
+ mCurSurface = map->surface;
+
+ } else {
+ //can't map due to no info
+ delete imb;
+ LOG_E("surface mapping failed, missing information\n");
+ return ENCODE_NO_REQUEST_DATA;
+ }
+
+ if (extravalues) {
+ //map more using same ValueInfo
+ for(unsigned int i=0; i<extravalues_count; i++) {
+ map = new SurfaceMap;
+ map->type = type;
+ map->value = extravalues[i];
+ memcpy(&(map->vinfo), pvinfo, sizeof(ValueInfo));
+ map->added = false;
+ map->next = NULL;
+
+ ret = surfaceMapping(map);
+ if (ret == ENCODE_SUCCESS) {
+ LOG_I("surface mapping extravalue success, map value %x into surface %d\n", extravalues[i], map->surface);
+ mSrcSurfaceMapList = appendSurfaceMap(mSrcSurfaceMapList, map);
+ } else {
+ delete map;
+ map = NULL;
+ LOG_E( "surface mapping extravalue failed, extravalue is %x\n", extravalues[i]);
+ }
+ }
+ }
+
+ delete imb;
+
+ return ret;
}
-VideoEncSurfaceBuffer *VideoEncoderBase::appendVideoSurfaceBuffer(
- VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) {
+SurfaceMap *VideoEncoderBase::appendSurfaceMap(
+ SurfaceMap *head, SurfaceMap *map) {
if (head == NULL) {
- return buffer;
+ return map;
}
- VideoEncSurfaceBuffer *node = head;
- VideoEncSurfaceBuffer *tail = NULL;
+ SurfaceMap *node = head;
+ SurfaceMap *tail = NULL;
while (node != NULL) {
tail = node;
node = node->next;
}
- tail->next = buffer;
+ tail->next = map;
return head;
}
-VideoEncSurfaceBuffer *VideoEncoderBase::removeVideoSurfaceBuffer(
- VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer) {
+SurfaceMap *VideoEncoderBase::removeSurfaceMap(
+ SurfaceMap *head, SurfaceMap *map) {
- VideoEncSurfaceBuffer *node = head;
- VideoEncSurfaceBuffer *tmpNode = NULL;
+ SurfaceMap *node = head;
+ SurfaceMap *tmpNode = NULL;
- if (head == buffer) {
+ if (head == map) {
tmpNode = head->next;
- buffer->next = NULL;
+ map->next = NULL;
return tmpNode;
}
while (node != NULL) {
- if (node->next == buffer)
+ if (node->next == map)
break;
node = node->next;
}
if (node != NULL) {
- node->next = buffer->next;
+ node->next = map->next;
}
- buffer->next = NULL;
+ map->next = NULL;
return head;
-
}
-VideoEncSurfaceBuffer *VideoEncoderBase::getVideoSurfaceBufferByIndex(
- VideoEncSurfaceBuffer *head, uint32_t index) {
- VideoEncSurfaceBuffer *node = head;
+SurfaceMap *VideoEncoderBase::findSurfaceMapByValue(
+ SurfaceMap *head, int32_t value) {
+
+ SurfaceMap *node = head;
while (node != NULL) {
- if (node->index == index)
+ if (node->value == value)
break;
node = node->next;
}
@@ -2001,6 +1908,7 @@
return node;
}
+#if 0
Encode_Status VideoEncoderBase::uploadDataToSurface(VideoEncRawBuffer *inBuffer) {
VAStatus vaStatus = VA_STATUS_SUCCESS;
@@ -2023,9 +1931,9 @@
uint32_t uvWidth = width;
LOG_V("map source data to surface\n");
- LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurFrame->surface);
+ LOG_I("Surface ID = 0x%08x\n", (uint32_t) mCurSurface);
- vaStatus = vaDeriveImage(mVADisplay, mCurFrame->surface, &srcImage);
+ vaStatus = vaDeriveImage(mVADisplay, mCurSurface, &srcImage);
CHECK_VA_STATUS_RETURN("vaDeriveImage");
LOG_V( "vaDeriveImage Done\n");
@@ -2092,6 +2000,7 @@
return ENCODE_SUCCESS;
}
+#endif
Encode_Status VideoEncoderBase::renderDynamicBitrate() {
VAStatus vaStatus = VA_STATUS_SUCCESS;
diff --git a/videoencoder/VideoEncoderBase.h b/videoencoder/VideoEncoderBase.h
index 7cc8f62..cf65085 100644
--- a/videoencoder/VideoEncoderBase.h
+++ b/videoencoder/VideoEncoderBase.h
@@ -13,6 +13,18 @@
#include <va/va_tpi.h>
#include "VideoEncoderDef.h"
#include "VideoEncoderInterface.h"
+#include "IntelMetadataBuffer.h"
+
+struct SurfaceMap {
+ VASurfaceID surface;
+ MetadataBufferType type;
+ int32_t value;
+ ValueInfo vinfo;
+ uint32_t index;
+ bool added;
+ SurfaceMap *next;
+};
+
class VideoEncoderBase : IVideoEncoder {
public:
@@ -38,6 +50,7 @@
virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig);
virtual Encode_Status getMaxOutSize(uint32_t *maxSize);
+ virtual Encode_Status getStatistics(VideoStatistics *videoStat);
protected:
virtual Encode_Status sendEncodeCommand(void) = 0;
@@ -59,23 +72,24 @@
Encode_Status setUpstreamBuffer(VideoParamsUpstreamBuffer *upStreamBuffer);
Encode_Status getNewUsrptrFromSurface(uint32_t width, uint32_t height, uint32_t format,
uint32_t expectedSize, uint32_t *outsize, uint32_t *stride, uint8_t **usrptr);
- Encode_Status generateVideoBufferAndAttachToList(uint32_t index, uint8_t *usrptr);
- Encode_Status surfaceMappingForSurfaceList();
- Encode_Status surfaceMappingForGfxHandle();
- Encode_Status surfaceMappingForCIFrameList();
- Encode_Status surfaceMappingForKbufHandle();
+ Encode_Status surfaceMappingForSurface(SurfaceMap *map);
+ Encode_Status surfaceMappingForGfxHandle(SurfaceMap *map);
+ Encode_Status surfaceMappingForCI(SurfaceMap *map);
+ Encode_Status surfaceMappingForKbufHandle(SurfaceMap *map);
+ Encode_Status surfaceMappingForMalloc(SurfaceMap *map);
+ Encode_Status surfaceMapping(SurfaceMap *map);
- VideoEncSurfaceBuffer *appendVideoSurfaceBuffer(
- VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer);
- VideoEncSurfaceBuffer *removeVideoSurfaceBuffer(
- VideoEncSurfaceBuffer *head, VideoEncSurfaceBuffer *buffer);
- VideoEncSurfaceBuffer *getVideoSurfaceBufferByIndex(
- VideoEncSurfaceBuffer *head, uint32_t index);
+ SurfaceMap *appendSurfaceMap(
+ SurfaceMap *head, SurfaceMap *map);
+ SurfaceMap *removeSurfaceMap(
+ SurfaceMap *head, SurfaceMap *map);
+ SurfaceMap *findSurfaceMapByValue(
+ SurfaceMap *head, int32_t value);
Encode_Status manageSrcSurface(VideoEncRawBuffer *inBuffer);
void updateProperities(void);
void decideFrameType(void);
- Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer);
+// Encode_Status uploadDataToSurface(VideoEncRawBuffer *inBuffer);
Encode_Status syncEncode(VideoEncRawBuffer *inBuffer);
Encode_Status asyncEncode(VideoEncRawBuffer *inBuffer);
@@ -83,7 +97,6 @@
bool mInitialized;
VADisplay mVADisplay;
- VADisplay mVADecoderDisplay;
VAContextID mVAContext;
VAConfigID mVAConfig;
VAEntrypoint mVAEntrypoint;
@@ -95,11 +108,7 @@
VideoParamsCommon mComParams;
VideoParamsHRD mHrdParam;
-
- VideoBufferSharingMode mBufferMode;
- uint32_t *mUpstreamBufferList;
- uint32_t mUpstreamBufferCnt;
- ExternalBufferAttrib *mBufAttrib;
+ VideoParamsStoreMetaDataInBuffers mStoreMetaDataInBuffers;
bool mForceKeyFrame;
bool mNewHeader;
@@ -121,18 +130,16 @@
VABufferID mPicParamBuf;
VABufferID mSliceParamBuf;
- VASurfaceID *mSharedSurfaces;
VASurfaceID *mSurfaces;
uint32_t mSurfaceCnt;
- uint32_t mSharedSurfacesCnt;
- uint32_t mReqSurfacesCnt;
- uint8_t **mUsrPtr;
- VideoEncSurfaceBuffer *mVideoSrcBufferList;
- VideoEncSurfaceBuffer *mCurFrame; //current input frame to be encoded;
- VideoEncSurfaceBuffer *mRefFrame; //reference frame
- VideoEncSurfaceBuffer *mRecFrame; //reconstructed frame;
- VideoEncSurfaceBuffer *mLastFrame; //last frame;
+ SurfaceMap *mSrcSurfaceMapList;
+
+ //for new design
+ VASurfaceID mCurSurface; //current input surface to be encoded
+ VASurfaceID mRefSurface; //reference surface
+ VASurfaceID mRecSurface; //reconstructed surface
+ VASurfaceID mLastSurface; //last surface
VideoEncRawBuffer *mLastInputRawBuffer;
@@ -148,6 +155,10 @@
bool mDataCopiedOut;
bool mKeyFrame;
+#ifdef VIDEO_ENC_STATISTICS_ENABLE
+ VideoStatistics mVideoStat;
+#endif
+
// Constants
static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_SHARED_MODE = 2;
static const uint32_t VENCODER_NUMBER_EXTRA_SURFACES_NON_SHARED_MODE = 8;
diff --git a/videoencoder/VideoEncoderDef.h b/videoencoder/VideoEncoderDef.h
index d5f10b5..b9feca2 100644
--- a/videoencoder/VideoEncoderDef.h
+++ b/videoencoder/VideoEncoderDef.h
@@ -261,6 +261,7 @@
VideoParamsTypeUpSteamBuffer,
VideoParamsTypeUsrptrBuffer,
VideoParamsTypeHRD,
+ VideoParamsTypeStoreMetaDataInBuffers,
VideoConfigTypeFrameRate,
VideoConfigTypeBitRate,
@@ -410,6 +411,16 @@
uint32_t initBufferFullness;
};
+struct VideoParamsStoreMetaDataInBuffers : VideoParamConfigSet {
+
+ VideoParamsStoreMetaDataInBuffers() {
+ type = VideoParamsTypeStoreMetaDataInBuffers;
+ size = sizeof(VideoParamsStoreMetaDataInBuffers);
+ }
+
+ bool isEnabled;
+};
+
struct VideoConfigFrameRate : VideoParamConfigSet {
VideoConfigFrameRate() {
@@ -500,4 +511,15 @@
SliceNum sliceNum;
};
+
+typedef struct {
+ uint32_t total_frames;
+ uint32_t skipped_frames;
+ uint32_t average_encode_time;
+ uint32_t max_encode_time;
+ uint32_t max_encode_frame;
+ uint32_t min_encode_time;
+ uint32_t min_encode_frame;
+}VideoStatistics;
+
#endif /* __VIDEO_ENCODER_DEF_H__ */
diff --git a/videoencoder/VideoEncoderH263.cpp b/videoencoder/VideoEncoderH263.cpp
index 2aed78f..68f8741 100644
--- a/videoencoder/VideoEncoderH263.cpp
+++ b/videoencoder/VideoEncoderH263.cpp
@@ -86,8 +86,8 @@
LOG_V( "Begin\n\n");
// set picture params for HW
- h263PictureParams.reference_picture = mRefFrame->surface;
- h263PictureParams.reconstructed_picture = mRecFrame->surface;
+ h263PictureParams.reference_picture = mRefSurface;
+ h263PictureParams.reconstructed_picture = mRecSurface;
h263PictureParams.coded_buf = mVACodedBuffer [mCodedBufIndex];
h263PictureParams.picture_width = mComParams.resolution.width;
h263PictureParams.picture_height = mComParams.resolution.height;
diff --git a/videoencoder/VideoEncoderInterface.h b/videoencoder/VideoEncoderInterface.h
index 416c29d..243e4a1 100644
--- a/videoencoder/VideoEncoderInterface.h
+++ b/videoencoder/VideoEncoderInterface.h
@@ -24,6 +24,7 @@
virtual Encode_Status getConfig(VideoParamConfigSet *videoEncConfig) = 0;
virtual Encode_Status setConfig(VideoParamConfigSet *videoEncConfig) = 0;
virtual Encode_Status getMaxOutSize(uint32_t *maxSize) = 0;
+ virtual Encode_Status getStatistics(VideoStatistics *videoStat) = 0;
};
#endif /* VIDEO_ENCODER_INTERFACE_H_ */
diff --git a/videoencoder/VideoEncoderLog.h b/videoencoder/VideoEncoderLog.h
index 4c1e982..49c34df 100644
--- a/videoencoder/VideoEncoderLog.h
+++ b/videoencoder/VideoEncoderLog.h
@@ -23,6 +23,7 @@
__android_log_print(level, comp, "%s():%d: "format, \
__FUNCTION__, __LINE__, ##__VA_ARGS__)
+#if 1
#ifdef VIDEO_ENC_LOG_ENABLE
#define LOG_V(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
#define LOG_I(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
@@ -34,6 +35,12 @@
#endif
#define LOG_E(format, ...) mix_log(VIDEO_ENC_COMP, VIDEO_ENC_LOG_LEVEL_ERROR, format, ##__VA_ARGS__)
+#else
+#define LOG_V printf
+#define LOG_I printf
+#define LOG_W printf
+#define LOG_E printf
+#endif
#define CHECK_VA_STATUS_RETURN(FUNC)\
if (vaStatus != VA_STATUS_SUCCESS) {\
diff --git a/videoencoder/VideoEncoderMP4.cpp b/videoencoder/VideoEncoderMP4.cpp
index a220563..51068f4 100644
--- a/videoencoder/VideoEncoderMP4.cpp
+++ b/videoencoder/VideoEncoderMP4.cpp
@@ -211,8 +211,8 @@
LOG_V( "Begin\n\n");
// set picture params for HW
- mpeg4_pic_param.reference_picture = mRefFrame->surface;
- mpeg4_pic_param.reconstructed_picture = mRecFrame->surface;
+ mpeg4_pic_param.reference_picture = mRefSurface;
+ mpeg4_pic_param.reconstructed_picture = mRecSurface;
mpeg4_pic_param.coded_buf = mVACodedBuffer[mCodedBufIndex];
mpeg4_pic_param.picture_width = mComParams.resolution.width;
mpeg4_pic_param.picture_height = mComParams.resolution.height;