Add a convenient library to export vpp capability
BZ: 128069
Add a convenient library to export video postprocessing capability.
So intended users won't need to touch the low-level and complex VA-API.
Change-Id: Idff180d657eba6f89e25a1c7ade431a3b3bdcbb6
Signed-off-by: Chang Ying <ying.chang@intel.com>
Reviewed-on: http://android.intel.com:8080/124020
Reviewed-by: Shi, PingX <pingx.shi@intel.com>
Tested-by: Shi, PingX <pingx.shi@intel.com>
Reviewed-by: cactus <cactus@intel.com>
Tested-by: cactus <cactus@intel.com>
diff --git a/Android.mk b/Android.mk
index 17d0811..1b7383d 100644
--- a/Android.mk
+++ b/Android.mk
@@ -9,4 +9,7 @@
include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videodecoder/Android.mk
include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videoencoder/Android.mk
include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/imagedecoder/Android.mk
+ifeq ($(ENABLE_IMG_GRAPHICS),)
+include $(VENDORS_INTEL_MRST_LIBMIX_ROOT)/videovpp/Android.mk
+endif
endif
diff --git a/videovpp/Android.mk b/videovpp/Android.mk
new file mode 100755
index 0000000..bc53873
--- /dev/null
+++ b/videovpp/Android.mk
@@ -0,0 +1,56 @@
+LOCAL_PATH := $(call my-dir)
+
+# libmix_videovpp: shared library wrapping VA-API video post-processing.
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+    VideoVPPBase.cpp
+
+
+LOCAL_C_INCLUDES += \
+    $(TARGET_OUT_HEADERS)/libwsbm \
+    $(TARGET_OUT_HEADERS)/libpsb_drm \
+    $(TARGET_OUT_HEADERS)/libva \
+
+LOCAL_SHARED_LIBRARIES := \
+    libcutils \
+    libutils \
+    libui \
+    liblog \
+    libhardware \
+    libdrm \
+    libdrm_intel \
+    libwsbm \
+    libva \
+    libva-android \
+    libva-tpi
+
+# Export VideoVPPBase.h so clients only depend on this one header.
+LOCAL_COPY_HEADERS_TO := libmix_videovpp
+
+LOCAL_COPY_HEADERS := \
+    VideoVPPBase.h
+
+LOCAL_MODULE := libmix_videovpp
+
+LOCAL_MODULE_TAGS := eng
+
+include $(BUILD_SHARED_LIBRARY)
+
+# csc_vpp: small command-line test exercising the library (see test/main.cpp).
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES:= \
+    test/main.cpp
+
+LOCAL_C_INCLUDES += \
+    $(TARGET_OUT_HEADERS)/libva \
+    $(TARGET_OUT_HEADERS)/libmix_videovpp
+
+LOCAL_SHARED_LIBRARIES := \
+    libhardware \
+    libmix_videovpp
+
+LOCAL_MODULE := csc_vpp
+
+LOCAL_MODULE_TAGS := eng
+
+
+include $(BUILD_EXECUTABLE)
diff --git a/videovpp/VideoVPPBase.cpp b/videovpp/VideoVPPBase.cpp
new file mode 100644
index 0000000..1564938
--- /dev/null
+++ b/videovpp/VideoVPPBase.cpp
@@ -0,0 +1,348 @@
+#include "VideoVPPBase.h"
+
+// Intel-specific gralloc pixel formats (duplicated from the gralloc HAL
+// to avoid a direct header dependency).
+enum {
+    HAL_PIXEL_FORMAT_NV12_TILED_INTEL = 0x100,
+    HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL = 0x101,
+
+// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL
+#if HAL_PIXEL_FORMAT_NV12_DEFINED
+    HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12,
+#else
+    HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12_TILED_INTEL,
+#endif
+
+// deprecated use HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL
+    HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_INTEL = 0x7FA00E00,
+
+// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL
+    HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_Tiled_INTEL = 0x7FA00F00,
+
+};
+
+// Private-handle layout of the MFX gralloc module.
+// NOTE(review): must stay in sync with the gralloc implementation — confirm.
+struct mfx_gralloc_drm_handle_t {
+    native_handle_t base;
+    int magic;
+
+    int width;
+    int height;
+    int format;
+    int usage;
+
+    int name;       // drm flink name of the underlying buffer
+    int stride;
+
+    int data_owner;
+    int data;
+};
+
+// Magic token handed to vaGetDisplay() to select the VPP wrapper path.
+#define VPWRAPPER_NATIVE_DISPLAY 0x18c34078
+
+// Early-returns from the enclosing function when the last VA call failed.
+// Relies on a 'vret' variable being in scope.
+#define CHECK_VA_STATUS(FUNC) \
+    if (vret != VA_STATUS_SUCCESS) {\
+        printf("[%d] " FUNC" failed with 0x%x\n", __LINE__, vret);\
+        return vret;\
+    }
+
+// Binds the parameter object to an engine's display/context and queries
+// the driver's filter capabilities.  mInitialized reflects success.
+VPParameters::VPParameters(VideoVPPBase *vvb) {
+    va_display = vvb->va_display;
+    va_context = vvb->va_context;
+    // Fix: start with an empty filter list — buildfilters() reads this
+    // counter and it was never initialized before.
+    num_filter_bufs = 0;
+
+    vret = init();
+
+    if (vret != VA_STATUS_SUCCESS)
+        mInitialized = false;
+    else
+        mInitialized = true;
+}
+
+// Factory: returns a ready-to-use parameter object, or NULL when the
+// driver's filter query failed.
+VPParameters* VPParameters::create(VideoVPPBase *vvb) {
+    VPParameters* v = new VPParameters(vvb);
+
+    if (v->mInitialized)
+        return v;
+
+    // Fix: release the half-constructed object instead of leaking it.
+    delete v;
+    return NULL;
+}
+
+// Queries the supported VPP filters and caches the capability range of
+// the ones exposed to callers (currently only noise reduction).
+// Returns the last VA status.
+VAStatus VPParameters::init() {
+    num_supported_filters = VAProcFilterCount;
+    vret = vaQueryVideoProcFilters(va_display,
+            va_context, supported_filters,
+            &num_supported_filters);
+    CHECK_VA_STATUS("vaQueryVideoProcFilters");
+
+    for (size_t i = 0; i < num_supported_filters; i++) {
+        switch (supported_filters[i]) {
+        case VAProcFilterNoiseReduction:
+        {
+            num_denoise_caps = 1;
+            vret = vaQueryVideoProcFilterCaps(va_display, va_context,
+                    VAProcFilterNoiseReduction, &denoise_caps, &num_denoise_caps);
+            // Fix: error label named the wrong API (was "vaQueryVideoProcFilters").
+            CHECK_VA_STATUS("vaQueryVideoProcFilterCaps");
+
+            // Publish the driver-reported range; cur == 0.0 means "off".
+            nr.valid = true;
+            nr.min = denoise_caps.range.min_value;
+            nr.max = denoise_caps.range.max_value;
+            nr.def = denoise_caps.range.default_value;
+            nr.step = denoise_caps.range.step;
+            nr.cur = 0.0;
+            printf("VAProcFilterNoiseReduction");
+            break;
+        }
+        default:
+            break;
+        }
+    }
+
+    return vret;
+}
+
+// Builds the VA filter-parameter buffers for every filter the caller has
+// enabled and copies their IDs into 'filters' / 'num_filter'.
+VAStatus VPParameters::buildfilters(VABufferID *filters, unsigned int *num_filter) {
+    // Fix: rebuild the list from scratch each call and iterate over the
+    // driver-reported filter types.  The original looped on
+    // num_filter_bufs — the *output* count, incremented inside the loop
+    // and uninitialized on the first call.
+    num_filter_bufs = 0;
+    vret = VA_STATUS_SUCCESS;
+
+    for (size_t i = 0; i < num_supported_filters; i++) {
+        switch (supported_filters[i]) {
+        case VAProcFilterNoiseReduction:
+        {
+            // Emit a denoise buffer only when the app set a non-zero level.
+            if (nr.cur != 0) {
+                denoise_buf.type = VAProcFilterNoiseReduction;
+                denoise_buf.value = nr.cur;
+                vret = vaCreateBuffer(va_display, va_context,
+                        VAProcFilterParameterBufferType,
+                        sizeof(denoise_buf), 1, &denoise_buf, &denoise_buf_id);
+                CHECK_VA_STATUS("vaCreateBuffer");
+                filter_bufs[num_filter_bufs] = denoise_buf_id;
+                num_filter_bufs++;
+            }
+            break;
+        }
+        default:
+            break;
+        }
+    }
+
+    memcpy(filters, filter_bufs, sizeof(VABufferID) * num_filter_bufs);
+    *num_filter = num_filter_bufs;
+
+    return vret;
+}
+
+// All VA handles start invalid; real setup is deferred to start().
+// The 1280x720 context size is a default only — surfaces carry their own
+// dimensions.
+VideoVPPBase::VideoVPPBase()
+    : mInitialized(false),
+      width(1280),
+      height(720),
+      va_display(NULL),
+      va_config(VA_INVALID_ID),
+      va_context(VA_INVALID_ID),
+      vpp_pipeline_buf(VA_INVALID_ID),
+      SrcSurf(VA_INVALID_SURFACE),
+      DstSurf(VA_INVALID_SURFACE),
+      num_filter_bufs(0) {  // fix: was left uninitialized
+
+}
+
+// Brings up the VA pipeline: display, VAProfileNone/VideoProc config,
+// context, and the driver's supported-filter list.  Idempotent — a no-op
+// once initialized.
+VAStatus VideoVPPBase::start() {
+    if (mInitialized)
+        return VA_STATUS_SUCCESS;
+
+    int va_major_version, va_minor_version;
+    // Magic token understood by the Android libva backend (see
+    // VPWRAPPER_NATIVE_DISPLAY above).
+    unsigned int nativeDisplay = VPWRAPPER_NATIVE_DISPLAY;
+    VAConfigAttrib vaAttrib;
+
+    va_display = vaGetDisplay(&nativeDisplay);
+
+    vret = vaInitialize(va_display, &va_major_version, &va_minor_version);
+    CHECK_VA_STATUS("vaInitialize");
+
+    vaAttrib.type = VAConfigAttribRTFormat;
+    vaAttrib.value = VA_RT_FORMAT_YUV420;
+    vret = vaCreateConfig(va_display, VAProfileNone,
+            VAEntrypointVideoProc, &vaAttrib, 1, &va_config);
+    CHECK_VA_STATUS("vaCreateConfig");
+
+    // width/height only size the context; no render targets are attached.
+    vret = vaCreateContext(va_display, va_config, width,
+            height, 0, NULL, 0, &va_context);
+    CHECK_VA_STATUS("vaCreateContext");
+
+    num_supported_filters = VAProcFilterCount;
+    vret = vaQueryVideoProcFilters(va_display,
+            va_context, supported_filters,
+            &num_supported_filters);
+    CHECK_VA_STATUS("vaQueryVideoProcFilters");
+
+    // Placeholder walk over the supported filters; no per-filter setup
+    // is done here yet (the Deblocking case is intentionally empty).
+    for (size_t i = 0; i < num_supported_filters; i++) {
+        switch(supported_filters[i]) {
+        case VAProcFilterDeblocking:
+        {
+            break;
+        }
+        default:
+            break;
+        }
+    }
+
+    mInitialized = true;
+
+    return vret;
+}
+
+// Tears down everything start()/perform() created: cached surfaces, the
+// pipeline buffer, context, config and display.  Idempotent.
+VAStatus VideoVPPBase::stop() {
+    if (!mInitialized)
+        return VA_STATUS_SUCCESS;
+
+    // Fix: destroy every cached surface, then clear() the whole map.  The
+    // original called removeItemsAt(i) while walking indices 0..c-1 —
+    // KeyedVector indices shift after each removal, so that skipped every
+    // other entry and eventually indexed past the end.
+    size_t c = SrcSurfHandleMap.size();
+    for (size_t i = 0; i < c; i++) {
+        SrcSurf = SrcSurfHandleMap.valueAt(i);
+        if (SrcSurf != VA_INVALID_SURFACE) {
+            vret = vaDestroySurfaces(va_display, &SrcSurf, 1);
+            CHECK_VA_STATUS("vaDestroySurfaces");
+        }
+        printf("remove src surf %x\n", SrcSurf);
+    }
+    SrcSurfHandleMap.clear();
+
+    c = DstSurfHandleMap.size();
+    for (size_t i = 0; i < c; i++) {
+        DstSurf = DstSurfHandleMap.valueAt(i);
+        if (DstSurf != VA_INVALID_SURFACE) {
+            vret = vaDestroySurfaces(va_display, &DstSurf, 1);
+            CHECK_VA_STATUS("vaDestroySurfaces");
+        }
+        printf("remove dst surf %x\n", DstSurf);
+    }
+    DstSurfHandleMap.clear();
+
+    if (vpp_pipeline_buf != VA_INVALID_ID) {
+        vret = vaDestroyBuffer(va_display, vpp_pipeline_buf);
+        CHECK_VA_STATUS("vaDestroyBuffer");
+        vpp_pipeline_buf = VA_INVALID_ID;
+    }
+
+    if (va_context != VA_INVALID_ID) {
+        vret = vaDestroyContext(va_display, va_context);
+        CHECK_VA_STATUS("vaDestroyContext");
+        va_context = VA_INVALID_ID;
+    }
+
+    if (va_config != VA_INVALID_ID) {
+        vret = vaDestroyConfig(va_display, va_config);
+        CHECK_VA_STATUS("vaDestroyConfig");
+        va_config = VA_INVALID_ID;
+    }
+
+    if (va_display != NULL) {
+        vret = vaTerminate(va_display);
+        CHECK_VA_STATUS("vaTerminate");
+        va_display = NULL;
+    }
+
+    mInitialized = false;
+
+    return vret;
+}
+
+// Wraps an externally-allocated buffer (drm flink name or gralloc handle)
+// in a VA surface.  The descriptor only needs to live for the duration of
+// the vaCreateSurfaces() call.
+VAStatus VideoVPPBase::_CreateSurfaceFromGrallocHandle(RenderTarget rt, VASurfaceID *surf) {
+    // Fix: 'buffers' points at unsigned long; the original used an
+    // unsigned int local and cast its address, which reads past the
+    // object on LP64 targets.
+    unsigned long buffer;
+    VASurfaceAttrib SurfAttrib;
+    VASurfaceAttribExternalBuffers SurfExtBuf;
+
+    SurfExtBuf.pixel_format = VA_FOURCC_NV12;
+    SurfExtBuf.width = rt.width;
+    SurfExtBuf.height = rt.height;
+    SurfExtBuf.pitches[0] = rt.stride;
+    buffer = rt.handle;
+    SurfExtBuf.buffers = &buffer;
+    SurfExtBuf.num_buffers = 1;
+    if (rt.type == RenderTarget::KERNEL_DRM)
+        SurfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_KERNEL_DRM;
+    else
+        SurfExtBuf.flags = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
+
+    SurfAttrib.type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
+    SurfAttrib.flags = VA_SURFACE_ATTRIB_SETTABLE;
+    SurfAttrib.value.type = VAGenericValueTypePointer;
+    SurfAttrib.value.value.p = &SurfExtBuf;
+
+    vret = vaCreateSurfaces(va_display, VA_RT_FORMAT_YUV420,
+            rt.width, rt.height, surf, 1,
+            &SurfAttrib, 1);
+    CHECK_VA_STATUS("vaCreateSurfaces");
+
+    return vret;
+}
+
+// Runs one VPP pass: SrcSurf/SrcRect -> DstSurf/DstRect, then blocks on
+// the destination surface unless no_wait is set.
+// NOTE(review): vpp_pipeline_buf is recreated on every call without
+// destroying the previous one (only stop() frees the last), and
+// 'filters' is hard-wired to NULL so buildfilters() output is never
+// consumed here — confirm whether filter wiring is intentionally TODO.
+VAStatus VideoVPPBase::_perform(VASurfaceID SrcSurf, VARectangle SrcRect,
+        VASurfaceID DstSurf, VARectangle DstRect, bool no_wait) {
+    vpp_param.surface = SrcSurf;
+    vpp_param.output_region = &DstRect;
+    vpp_param.surface_region = &SrcRect;
+    vpp_param.surface_color_standard = VAProcColorStandardBT601;
+    vpp_param.output_background_color = 0;
+    vpp_param.output_color_standard = VAProcColorStandardBT601;
+    vpp_param.filter_flags = VA_FRAME_PICTURE;
+    vpp_param.filters = NULL;
+    vpp_param.num_filters = 0;
+    vpp_param.forward_references = NULL;
+    vpp_param.num_forward_references = 0;
+    vpp_param.backward_references = NULL;
+    vpp_param.num_backward_references = 0;
+    vpp_param.blend_state = NULL;
+    vpp_param.rotation_state = VA_ROTATION_NONE;
+
+    vret = vaCreateBuffer(va_display, va_context,
+            VAProcPipelineParameterBufferType,
+            sizeof(VAProcPipelineParameterBuffer),
+            1, &vpp_param, &vpp_pipeline_buf);
+    CHECK_VA_STATUS("vaCreateBuffer");
+
+    vret = vaBeginPicture(va_display, va_context, DstSurf);
+    CHECK_VA_STATUS("vaBeginPicture");
+
+    vret = vaRenderPicture(va_display, va_context, &vpp_pipeline_buf, 1);
+    CHECK_VA_STATUS("vaRenderPicture");
+
+    vret = vaEndPicture(va_display, va_context);
+    CHECK_VA_STATUS("vaEndPicture");
+
+    if (!no_wait) {
+        vret = vaSyncSurface(va_display, DstSurf);
+        CHECK_VA_STATUS("vaSyncSurface");
+    }
+
+    return vret;
+}
+
+// Converts/copies Src into Dst, lazily creating and caching one VA
+// surface per gralloc handle.  'vpp' may be NULL (no filter parameters
+// are built in that case).  Set no_wait to skip the final sync.
+VAStatus VideoVPPBase::perform(RenderTarget Src, RenderTarget Dst, VPParameters *vpp, bool no_wait) {
+    if (!mInitialized) {
+        vret = start();
+        CHECK_VA_STATUS("start");
+    }
+
+    // Look up (or create) the VA surface cached for this buffer handle.
+    ssize_t i = SrcSurfHandleMap.indexOfKey(Src.handle);
+    if (i >= 0) {
+        SrcSurf = SrcSurfHandleMap.valueAt(i);
+    } else {
+        vret = _CreateSurfaceFromGrallocHandle(Src, &SrcSurf);
+        CHECK_VA_STATUS("_CreateSurfaceFromGrallocHandle");
+        SrcSurfHandleMap.add(Src.handle, SrcSurf);
+        printf("add src surface %x\n", SrcSurf);
+    }
+
+    i = DstSurfHandleMap.indexOfKey(Dst.handle);
+    if (i >= 0) {
+        DstSurf = DstSurfHandleMap.valueAt(i);
+    } else {
+        vret = _CreateSurfaceFromGrallocHandle(Dst, &DstSurf);
+        CHECK_VA_STATUS("_CreateSurfaceFromGrallocHandle");
+        DstSurfHandleMap.add(Dst.handle, DstSurf);
+        printf("add dst surface %x\n", DstSurf);
+    }
+
+    // Fix: VPParameters::create() can return NULL; the original
+    // dereferenced 'vpp' unconditionally.
+    if (vpp != NULL) {
+        vret = vpp->buildfilters(filter_bufs, &num_filter_bufs);
+        CHECK_VA_STATUS("buildfilters");
+    }
+    vret = _perform(SrcSurf, Src.rect, DstSurf, Dst.rect, no_wait);
+    CHECK_VA_STATUS("_perform");
+
+    return vret;
+}
+
+// Releases all VA resources via stop().
+VideoVPPBase::~VideoVPPBase() {
+    stop();
+}
diff --git a/videovpp/VideoVPPBase.h b/videovpp/VideoVPPBase.h
new file mode 100644
index 0000000..a3bdce2
--- /dev/null
+++ b/videovpp/VideoVPPBase.h
@@ -0,0 +1,133 @@
+#ifndef __VIDEO_VPP_BASE_H__
+#define __VIDEO_VPP_BASE_H__
+#include <system/graphics.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+// Fix: memcpy() is used inline in this header (VPParameters::getNR).
+#include <string.h>
+#include <va/va.h>
+#include <va/va_drmcommon.h>
+#include <va/va_vpp.h>
+#include <va/va_android.h>
+#include <va/va_tpi.h>
+
+#include <hardware/gralloc.h>
+
+#include <utils/KeyedVector.h>
+
+class VideoVPPBase;
+
+// One post-processing filter knob: the range reported by the driver
+// (min/max/step/def) plus the caller's current setting ('cur').
+struct FilterConfig {
+    bool valid;
+    int type;
+    float min, max, step, def;
+    float cur;
+};
+
+// User-visible handle for tuning VPP filter parameters.  Obtain via
+// create(); query ranges with getNR(), set the level with setNR(), then
+// pass the object to VideoVPPBase::perform().
+class VPParameters {
+public:
+    static VPParameters* create(VideoVPPBase *);
+    // Fix: defined inline — no out-of-line definition exists anywhere,
+    // so destroying a VPParameters failed to link.
+    ~VPParameters() {}
+    VAStatus buildfilters(VABufferID *filter_bufs, unsigned int *num_filter_bufs);
+    void getNR(FilterConfig& NR) { memcpy(&NR, &nr, sizeof(FilterConfig)); }
+    void setNR(FilterConfig NR) { nr.cur = NR.cur; }
+
+private:
+    bool mInitialized;
+    VADisplay va_display;
+    VAContextID va_context;
+    VAStatus vret;
+
+    VAProcFilterType supported_filters[VAProcFilterCount];
+    unsigned int num_supported_filters;
+
+    VAProcFilterCap denoise_caps, sharpen_caps, deblock_caps;
+    VAProcFilterCapColorBalance color_balance_caps[VAProcColorBalanceCount];
+    unsigned int num_denoise_caps, num_color_balance_caps, num_sharpen_caps, num_deblock_caps;
+
+    VAProcFilterParameterBuffer denoise_buf, sharpen_buf, deblock_buf;
+    VAProcFilterParameterBufferColorBalance balance_buf[VAProcColorBalanceCount];
+    VABufferID sharpen_buf_id, denoise_buf_id, deblock_buf_id, balance_buf_id;
+
+    VABufferID filter_bufs[VAProcFilterCount];
+    unsigned int num_filter_bufs;
+
+    FilterConfig nr;
+    FilterConfig deblock;
+    FilterConfig sharpen;
+    FilterConfig colorbalance[VAProcColorBalanceCount];
+
+    // Construction only through create(); copying forbidden.
+    VPParameters(VideoVPPBase *);
+    VPParameters(const VPParameters&);
+    VPParameters &operator=(const VPParameters&);
+
+    VAStatus init();
+};
+
+// Description of one input/output buffer handed to perform().
+struct RenderTarget {
+    enum bufType{
+        KERNEL_DRM,       // 'handle' is a drm flink name
+        ANDROID_GRALLOC,  // 'handle' is a gralloc buffer handle
+    };
+
+    int width;
+    int height;
+    int stride;
+    bufType type;
+    int pixel_format;
+    int handle;
+    VARectangle rect;   // active region within the buffer
+};
+
+// Core VPP engine: owns the VA display/config/context and a per-handle
+// cache of VA surfaces, and drives the processing pipeline.
+class VideoVPPBase {
+public:
+    VideoVPPBase();
+    ~VideoVPPBase();
+    VAStatus start();
+    VAStatus stop();
+    VAStatus perform(RenderTarget Src, RenderTarget Dst, VPParameters *vpp, bool no_wait);
+
+private:
+    bool mInitialized;
+    unsigned width, height;
+    VAStatus vret;
+    VADisplay va_display;
+    VAConfigID va_config;
+    VAContextID va_context;
+    VABufferID vpp_pipeline_buf;
+    VAProcPipelineParameterBuffer vpp_param;
+    VASurfaceAttrib SrcSurfAttrib, DstSurfAttrib;
+    VASurfaceAttribExternalBuffers SrcSurfExtBuf, DstSurfExtBuf;
+    VASurfaceID SrcSurf, DstSurf;
+    VASurfaceAttributeTPI attribs;
+
+    VAProcFilterType supported_filters[VAProcFilterCount];
+    unsigned int num_supported_filters;
+
+    VAProcFilterCap denoise_caps, sharpen_caps, deblock_caps;
+    VAProcFilterCapColorBalance color_balance_caps[VAProcColorBalanceCount];
+    unsigned int num_denoise_caps, num_color_balance_caps, num_sharpen_caps, num_deblock_caps;
+
+    VAProcFilterParameterBuffer denoise_buf, sharpen_buf, deblock_buf;
+    VAProcFilterParameterBufferColorBalance balance_buf[VAProcColorBalanceCount];
+
+    VABufferID sharpen_buf_id, denoise_buf_id, deblock_buf_id, balance_buf_id;
+
+    VABufferID filter_bufs[VAProcFilterCount];
+    unsigned int num_filter_bufs;
+
+    // Fix: KeyedVector lives in namespace android; qualify it so the
+    // header compiles in includers without 'using namespace android'.
+    android::KeyedVector<int, VASurfaceID> SrcSurfHandleMap;
+    android::KeyedVector<int, VASurfaceID> DstSurfHandleMap;
+
+    VideoVPPBase(const VideoVPPBase &);
+    VideoVPPBase &operator=(const VideoVPPBase &);
+
+    VAStatus _perform(VASurfaceID SrcSurf, VARectangle SrcRect,
+            VASurfaceID DstSurf, VARectangle DstRect, bool no_wait);
+
+    VAStatus _CreateSurfaceFromGrallocHandle(RenderTarget rt, VASurfaceID *surf);
+
+    friend class VPParameters;
+
+};
+
+#endif
diff --git a/videovpp/test/main.cpp b/videovpp/test/main.cpp
new file mode 100644
index 0000000..9c6e095
--- /dev/null
+++ b/videovpp/test/main.cpp
@@ -0,0 +1,196 @@
+#include <system/graphics.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <va/va.h>
+#include <va/va_drmcommon.h>
+#include <va/va_vpp.h>
+#include <va/va_android.h>
+#include <va/va_tpi.h>
+
+#include <hardware/gralloc.h>
+
+#include "VideoVPPBase.h"
+
+// Intel-specific gralloc pixel formats.
+// NOTE(review): duplicated from VideoVPPBase.cpp — consider sharing.
+enum {
+    HAL_PIXEL_FORMAT_NV12_TILED_INTEL = 0x100,
+    HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL = 0x101,
+
+// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL
+#if HAL_PIXEL_FORMAT_NV12_DEFINED
+    HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12,
+#else
+    HAL_PIXEL_FORMAT_INTEL_NV12 = HAL_PIXEL_FORMAT_NV12_TILED_INTEL,
+#endif
+
+// deprecated use HAL_PIXEL_FORMAT_NV12_LINEAR_INTEL
+    HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_INTEL = 0x7FA00E00,
+
+// deprecated use HAL_PIXEL_FORMAT_NV12_TILED_INTEL
+    HAL_PIXEL_FORMAT_YUV420PackedSemiPlanar_Tiled_INTEL = 0x7FA00F00,
+
+};
+
+// Private-handle layout of the MFX gralloc module; used to pull the drm
+// flink name ('name') out of an allocated buffer_handle_t.
+struct mfx_gralloc_drm_handle_t {
+    native_handle_t base;
+    int magic;
+
+    int width;
+    int height;
+    int format;
+    int usage;
+
+    int name;
+    int stride;
+
+    int data_owner;
+    int data;
+};
+
+// Prints command-line help and terminates the program with status 1.
+static void usage(const char *me) {
+    fprintf(stderr, "color space conversion\n"
+            "\t\tusage: %s -i input -o output\n"
+            "\t\t-w width -h height\n",
+            me);
+
+    exit(1);
+}
+
+#define VPWRAPPER_NATIVE_DISPLAY 0x18c34078
+
+#define CHECK_VA_STATUS(FUNC) \
+ if (vret != VA_STATUS_SUCCESS) {\
+ printf("[%d] " FUNC" failed with 0x%x\n", __LINE__, vret);\
+ return vret;\
+ }
+
+
+// Wall-clock milliseconds since the epoch; returns 0 on failure.
+// NOTE(review): gettimeofday()/struct timeval need <sys/time.h>, which
+// this file does not include directly — confirm it arrives transitively.
+static inline unsigned long GetTickCount()
+{
+    struct timeval tv;
+    if (gettimeofday(&tv, NULL))
+        return 0;
+    return tv.tv_usec / 1000 + tv.tv_sec * 1000;
+}
+
+// Allocates a YUY2 source and an NV12 destination via gralloc, then runs
+// the VPP color-space conversion twice (the second pass exercises the
+// surface cache).  Returns 0 on success.
+int main(int argc, char *argv[])
+{
+    int width = 1280, height = 720;
+    int res;
+    const char *me = argv[0];
+    char input[128], output[128];
+    int has_input = 0;
+    int has_output = 0;
+    int has_width = 0;
+    int has_height = 0;
+
+    // Parse -i/-o/-w/-h; all four are mandatory.
+    while ((res = getopt(argc, argv, "i:o:w:h:")) >= 0) {
+        switch (res) {
+        case 'i':
+        {
+            // Fix: bounded copy — optarg is untrusted command-line input
+            // and the original strcpy() could overflow the 128-byte buffer.
+            strncpy(input, optarg, sizeof(input) - 1);
+            input[sizeof(input) - 1] = '\0';
+            has_input = 1;
+            break;
+        }
+        case 'o':
+        {
+            strncpy(output, optarg, sizeof(output) - 1);
+            output[sizeof(output) - 1] = '\0';
+            has_output = 1;
+            break;
+        }
+        case 'w':
+        {
+            width = atoi(optarg);
+            has_width = 1;
+            break;
+        }
+        case 'h':
+        {
+            height = atoi(optarg);
+            has_height = 1;
+            break;
+        }
+        default:
+        {
+            usage(me);
+        }
+        }
+    }
+
+    if (!has_input || !has_output || !has_width || !has_height)
+        usage(me);
+
+    hw_module_t const* module;
+    alloc_device_t *mAllocDev;
+    int32_t stride_YUY2, stride_NV12;
+    buffer_handle_t handle_YUY2, handle_NV12;
+    struct mfx_gralloc_drm_handle_t *pGrallocHandle;
+    RenderTarget Src, Dst;
+
+    // Fix: the original ignored these return codes and went on to use
+    // uninitialized handles / RenderTargets on failure.
+    res = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, &module);
+    if (res != 0) {
+        fprintf(stderr, "hw_get_module() failed: %d\n", res);
+        return 1;
+    }
+    res = gralloc_open(module, &mAllocDev);
+    if (res != 0) {
+        fprintf(stderr, "gralloc_open() failed: %d\n", res);
+        return 1;
+    }
+
+    // Source buffer: packed YUY2, described to VPP by its drm flink name.
+    res = mAllocDev->alloc(mAllocDev, width, height,
+            HAL_PIXEL_FORMAT_YCbCr_422_I,
+            GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE,
+            &handle_YUY2, &stride_YUY2);
+    if (res != 0) {
+        printf("%d: alloc()\n", __LINE__);
+        gralloc_close(mAllocDev);
+        return 1;
+    }
+    pGrallocHandle = (struct mfx_gralloc_drm_handle_t *)handle_YUY2;
+    printf("YUY2 %d %d %d\n", pGrallocHandle->width,
+            pGrallocHandle->height, stride_YUY2);
+    Src.width = pGrallocHandle->width;
+    Src.height = pGrallocHandle->height;
+    Src.stride = stride_YUY2;
+    Src.type = RenderTarget::KERNEL_DRM;
+    Src.handle = pGrallocHandle->name;
+    Src.rect.x = Src.rect.y = 0;
+    Src.rect.width = Src.width;
+    Src.rect.height = Src.height;
+
+    // Destination buffer: tiled NV12.
+    res = mAllocDev->alloc(mAllocDev, width, height,
+            HAL_PIXEL_FORMAT_NV12_TILED_INTEL,
+            GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE,
+            &handle_NV12, &stride_NV12);
+    if (res != 0) {
+        printf("%d: alloc()\n", __LINE__);
+        mAllocDev->free(mAllocDev, handle_YUY2);
+        gralloc_close(mAllocDev);
+        return 1;
+    }
+    pGrallocHandle = (struct mfx_gralloc_drm_handle_t *)handle_NV12;
+    printf("NV12 %d %d %d\n", pGrallocHandle->width,
+            pGrallocHandle->height, stride_NV12);
+    Dst.width = pGrallocHandle->width;
+    Dst.height = pGrallocHandle->height;
+    Dst.stride = stride_NV12;
+    Dst.type = RenderTarget::KERNEL_DRM;
+    Dst.handle = pGrallocHandle->name;
+    Dst.rect.x = 0;
+    Dst.rect.y = 0;
+    Dst.rect.width = Dst.width;
+    Dst.rect.height = Dst.height;
+
+    int ret = 0;
+    VAStatus vret;
+
+    VideoVPPBase *p = new VideoVPPBase();
+
+    p->start();
+
+    VPParameters *vpp = VPParameters::create(p);
+    if (vpp == NULL) {
+        // create() returns NULL when the driver's filter query fails;
+        // the original passed the NULL pointer straight into perform().
+        fprintf(stderr, "VPParameters::create() failed\n");
+        ret = 1;
+    } else {
+        vret = p->perform(Src, Dst, vpp, false);
+        if (vret != VA_STATUS_SUCCESS) {
+            printf("[%d] doVp failed with 0x%x\n", __LINE__, vret);
+            ret = 1;
+        }
+
+        vret = p->perform(Src, Dst, vpp, false);
+        if (vret != VA_STATUS_SUCCESS) {
+            printf("[%d] doVp failed with 0x%x\n", __LINE__, vret);
+            ret = 1;
+        }
+    }
+
+    p->stop();
+
+    // Fix: 'p' leaked in the original.  'vpp' is still not deleted here:
+    // ~VPParameters() has no definition in this change, so odr-using it
+    // would fail to link — NOTE(review): free it once the dtor is defined.
+    delete p;
+
+    mAllocDev->free(mAllocDev, handle_YUY2);
+    mAllocDev->free(mAllocDev, handle_NV12);
+
+    gralloc_close(mAllocDev);
+
+    return ret;
+}