LibMIX vbp parser code re-structure

BZ: 131068

The LibMIX vbp parser code is restructured and legacy
code is removed.

Change-Id: I8216a21f39f29bce7ac5f6aaa25e164806e8f012
Signed-off-by: wfeng6 <wei.feng@intel.com>
Signed-off-by: Gu, Wangyi <wangyi.gu@intel.com>
Reviewed-on: http://android.intel.com:8080/130377
Reviewed-by: Shi, PingX <pingx.shi@intel.com>
Tested-by: Shi, PingX <pingx.shi@intel.com>
Reviewed-by: cactus <cactus@intel.com>
Tested-by: cactus <cactus@intel.com>
diff --git a/mixvbp/Android.mk b/mixvbp/Android.mk
new file mode 100644
index 0000000..01ddde2
--- /dev/null
+++ b/mixvbp/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+MIXVBP_DIR := $(LOCAL_PATH)
+
+include $(MIXVBP_DIR)/vbp_manager/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/h264/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/mp4/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/vc1/Android.mk
+
+ifeq ($(USE_HW_VP8),true)
+include $(MIXVBP_DIR)/vbp_plugin/vp8/Android.mk
+endif
diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h
new file mode 100755
index 0000000..fde232c
--- /dev/null
+++ b/mixvbp/include/vbp_trace.h
@@ -0,0 +1,66 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#ifndef VBP_TRACE_H_
+#define VBP_TRACE_H_
+
+
+
+#define VBP_TRACE
+
+
+#ifdef VBP_TRACE /* if VBP_TRACE is defined*/
+
+#ifndef ANDROID
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
+#define VBP_TRACE_UTIL(cat, format, ...) \
+vbp_trace_util(cat, __FUNCTION__, __LINE__, format,  ##__VA_ARGS__)
+
+
+#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR:   ",  format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ",  format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO:    ",  format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ",  format, ##__VA_ARGS__)
+
+
+#else
+
+// For Android OS
+
+#define LOG_NDEBUG 0
+
+#define LOG_TAG "MixVBP"
+
+#include <utils/Log.h>
+#define ETRACE(...) LOGE(__VA_ARGS__)
+#define WTRACE(...) LOGW(__VA_ARGS__)
+#define ITRACE(...) LOGI(__VA_ARGS__)
+#define VTRACE(...) LOGV(__VA_ARGS__)
+
+#endif
+
+
+#else /* if VBP_TRACE is not defined */
+
+#define ETRACE(format, ...)
+#define WTRACE(format, ...)
+#define ITRACE(format, ...)
+#define VTRACE(format, ...)
+
+
+#endif /* VBP_TRACE*/
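+
+/*
+   Illustrative usage (a minimal sketch, not part of the original header):
+
+       ETRACE("failed to parse slice header, error %d", err);
+       VTRACE("parsed frame %d", frame_count);
+
+   On Android these expand to LOGE/LOGV with the "MixVBP" tag; in the
+   non-Android build they go through vbp_trace_util(), which prepends the
+   function name and line number.
+*/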
+
+
+#endif /*VBP_TRACE_H_*/
+
+
diff --git a/mixvbp/include/viddec_debug.h b/mixvbp/include/viddec_debug.h
new file mode 100755
index 0000000..fcae102
--- /dev/null
+++ b/mixvbp/include/viddec_debug.h
@@ -0,0 +1,31 @@
+#ifndef VIDDEC_DEBUG_H
+#define VIDDEC_DEBUG_H
+
+#ifndef VBP
+
+#ifdef HOST_ONLY
+#include <stdio.h>
+#include <osal.h>
+#define DEB                        OS_PRINT
+#define FWTRACE                    OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ );
+//  #define DEB(format, args...)
+//  #define FWTRACE
+#define DEB_FNAME(format, args...) OS_PRINT("%s:  %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ ,  ## args )
+#define CDEB(a, format, args...)   if(a != 0) {DEB(format, ##args);}
+#else
+#define DEB(format, args...)
+#define FWTRACE
+#define CDEB(a, format, args...)
+#define DEB_FNAME(format, args...)
+#endif
+
+#else  // VBP is defined
+
+#define DEB(format, args...)
+#define FWTRACE
+#define CDEB(a, format, args...)
+#define DEB_FNAME(format, args...)
+
+#endif // end of VBP
+
+#endif
diff --git a/mixvbp/include/viddec_fw_common_defs.h b/mixvbp/include/viddec_fw_common_defs.h
new file mode 100644
index 0000000..2cc32b7
--- /dev/null
+++ b/mixvbp/include/viddec_fw_common_defs.h
@@ -0,0 +1,223 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_COMMON_DEFS_H
+#define VIDDEC_FW_COMMON_DEFS_H
+
+#define VIDDEC_FW_PARSER_IPC_HOST_INT    0x87654321
+#define EMITTER_WORKLOAD_ENTRIES    2048
+
+/* Maximum supported dependent views for H264 MVC. Based on spec this can be 1023 */
+#define MVC_MAX_SUPPORTED_VIEWS  1
+
+/* This enum defines priority level for opening a stream */
+enum viddec_stream_priority
+{
+    viddec_stream_priority_BACKGROUND, /* Lowest priority stream */
+    viddec_stream_priority_REALTIME,   /* Real time highest priority stream */
+    viddec_stream_priority_INVALID,
+};
+
+/* This enum defines supported flush types */
+enum viddec_stream_flushtype
+{
+    VIDDEC_STREAM_FLUSH_DISCARD, /* Reinitialise to start state */
+    VIDDEC_STREAM_FLUSH_PRESERVE, /* Reinitialise to start state by preserving sequence info */
+};
+
+enum viddec_stream_inband_flags
+{
+    VIDDEC_STREAM_DEFAULT_FLAG=0, /* Default value for flags */
+    VIDDEC_STREAM_EOS,          /* End of stream message */
+    VIDDEC_STREAM_DISCONTINUITY,  /* new segment which forces flush and preserve */
+};
+
+/* Message descriptor for Parser's Input and output queues. Needs to be 8 byte aligned */
+typedef struct viddec_input_buffer
+{
+    unsigned int             flags; /* Flags for Inband messages like EOS, valid range defined in viddec_stream_inband_flags */
+    unsigned int             phys;/* DDR addr of where ES/WKLD is at. */
+    unsigned int             len;/* size of buffer at phys_addr */
+    unsigned int             id;/* An id for the buffer which is not used or modified by the FW. */
+#ifdef HOST_ONLY
+    unsigned char           *buf; /* virt pointer to buffer. This is a don't care for FW */
+#endif
+} ipc_msg_data;
+
+typedef ipc_msg_data viddec_input_buffer_t;
+typedef ipc_msg_data viddec_ipc_msg_data;
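+
+/* Illustrative fill-in of an input message (a sketch only; buf_phys, buf_len and
+   buf_id are assumed to come from the host's buffer management and are not part
+   of this header):
+
+       ipc_msg_data msg = {0};
+       msg.flags = VIDDEC_STREAM_DEFAULT_FLAG;
+       msg.phys  = buf_phys;
+       msg.len   = buf_len;
+       msg.id    = buf_id;
+*/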
+
+/* Return types for interface functions */
+typedef enum
+{
+    VIDDEC_FW_SUCCESS, /* successful with current operation */
+    VIDDEC_FW_NORESOURCES, /* No resources to execute the requested functionality */
+    VIDDEC_FW_FAILURE,    /* Failed for unknown reason */
+    VIDDEC_FW_INVALID_PARAM, /* The parameters that were passed are Invalid */
+    VIDDEC_FW_PORT_FULL,     /* The operation failed since queue is full */
+    VIDDEC_FW_PORT_EMPTY,   /* The operation failed since queue is empty */
+    VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */
+} viddec_fw_return_types_t;
+
+/* Defines for Interrupt mask and status */
+typedef enum
+{
+    VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */
+    VIDDEC_FW_INPUT_WATERMARK_REACHED=2,     /* The input path is below the set watermark for current stream */
+} viddec_fw_parser_int_status_t;
+
+/* Defines for attributes on stream, If not set explicitly will be default values */
+typedef enum
+{
+    VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting Input queue watermarks */
+    VIDDEC_FW_STREAM_PRIORITY,    /* Define for setting stream priority */
+} viddec_fw_stream_attributes_t;
+
+typedef struct
+{
+    unsigned int input_q_space; /* Num of messages that can be written to input queue */
+    unsigned int output_q_data; /* Num of messages in output queue */
+    unsigned int workload_q_status; /* Number of free wklds available to parser */
+} viddec_fw_q_status_t;
+
+typedef struct
+{
+    unsigned int to_fw_q_space;     /* Num of messages that can be written to input queue */
+    unsigned int from_fw_q_data;    /* Num of messages in output queue */
+} viddec_fw_decoder_q_status_t;
+
+enum viddec_fw_decoder_int_status
+{
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_0       = (1<< 0), /* Decoder Stream 0 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_1       = (1<< 1), /* Decoder Stream 1 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_2       = (1<< 2), /* Decoder Stream 2 Requires Service */
+
+
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_HIGH    = (1<<30), /* Any Decoder Stream >= 30 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_AUTO_API       = (1<<31)  /* An Auto-API Function has completed */
+};
+
+/** Hardware Accelerated stream formats */
+typedef enum viddec_stream_format
+{
+    MFD_STREAM_FORMAT_MPEG=1,
+    MFD_STREAM_FORMAT_H264,
+    MFD_STREAM_FORMAT_VC1,
+    MFD_STREAM_FORMAT_MPEG42,
+
+    MFD_STREAM_FORMAT_MAX,   /* must be last  */
+    MFD_STREAM_FORMAT_INVALID
+} viddec_stream_format;
+
+/* Workload specific error codes */
+enum viddec_fw_workload_error_codes
+{
+    VIDDEC_FW_WORKLOAD_SUCCESS               = 0,
+    VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE      = (1 << 0),/* Parser/Decoder detected a non decodable error with this workload */
+    VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW  = (1 << 1),/* Parser Detected more than 64 buffers between two start codes */
+    VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW    = (1 << 2),/* Parser Detected overflow of currently allocated workload memory */
+    VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME     = (1 << 3),/* This is a partial or empty frame which was flushed by Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM      = (1 << 4),/* This is a partial or empty frame from Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED       = (1 << 5),/* Parser Detected unsupported feature in the stream */
+    /* First 8 bits reserved for Non Decodable errors */
+    VIDDEC_FW_WORKLOAD_ERR_CONCEALED         = (1 << 9),/* The decoder concealed some errors in this frame */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10),/* Decoder/parser detected at least one of the required reference frames is missing */
+    VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE      = (1 << 11),/* Decoder/parser detected at least one of the reference frames has errors in it */
+    VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD      = (1 << 12),/* Parser detected at least one of the fields is missing */
+    VIDDEC_FW_WORKLOAD_ERR_PARTIAL_SLICE     = (1 << 13),/* Decoder detected at least one of the fields is missing */
+    VIDDEC_FW_WORKLOAD_ERR_MACROBLOCK        = (1 << 14),/* Decoder detected macroblock errors */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO  = (1 << 16),/* Parser detected sequence information is missing */
+
+    VIDDEC_FW_WORKLOAD_ERR_TOPFIELD          = (1 << 17),/* Decoder/Parser detected  errors in "top field" or "frame"*/
+    VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD       = (1 << 18),/* Decoder/Parser detected  errors in "bottom field" or "frame" */
+    VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR   = (1 << 19),/* Parser detected errors */
+
+};
+
+enum viddec_fw_mpeg2_error_codes
+{
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR       = (1 << 24),/* Parser detected corruption in sequence header. Will use the previous good sequence info, if found. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT       = (1 << 25),/* Parser detected corruption in sequence extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT  = (1 << 26),/* Parser detected corruption in sequence display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR       = (1 << 27),/* Parser detected corruption in GOP header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR       = (1 << 26),/* Parser detected corruption in picture header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT   = (1 << 27),/* Parser detected corruption in picture coding extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT  = (1 << 28),/* Parser detected corruption in picture display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT      = (1 << 29),/* Parser detected corruption in quantization matrix extension. */
+};
+
+#ifdef VBP
+
+#ifndef NULL
+#define NULL (void*)0x0
+#endif
+
+#ifndef true
+#define true 1
+#define false 0
+#endif
+
+#ifndef __cplusplus
+#ifndef bool
+typedef int bool;
+#endif
+#endif
+
+#endif
+/* end of #ifdef VBP */
+
+#endif
diff --git a/mixvbp/include/viddec_fw_decoder_host.h b/mixvbp/include/viddec_fw_decoder_host.h
new file mode 100644
index 0000000..d902520
--- /dev/null
+++ b/mixvbp/include/viddec_fw_decoder_host.h
@@ -0,0 +1,242 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_DECODER_HOST_H
+#define VIDDEC_FW_DECODER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "viddec_fw_common_defs.h"
+
+    /** @weakgroup viddec Fw Decoder interface Functions */
+    /** @ingroup viddec_fw_decoder */
+    /*@{*/
+
+    /**
+       This function returns the size required for loading fw.
+       @retval  size : Required size.
+    */
+    uint32_t viddec_fw_decoder_query_fwsize(void);
+
+    /**
+       This function loads Decoder Firmware and initialises necessary state information.
+       @param[in] phys                : Physical address on where firmware should be loaded.
+       @param[in] len                 : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS      : Successfully loaded firmware.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+       @retval VIDDEC_FW_NORESOURCES  : Failed to allocate resources for Loading firmware.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns required size for global memory for all supported decoders. This is a synchronous message to FW.
+       @param[out] size               : returns the size required.
+       @retval VIDDEC_FW_SUCCESS      : Successfully got required information from FW.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size);
+
+    /**
+       This function sets global memory for the firmware to use. This is a synchronous message to FW.
+       @param[in] phys                : Physical address on where global memory starts.
+       @param[in] len                 : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS      : Successfully setup global memory.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns the size required for opening a stream. This is a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want information about.
+       @param[out] size               : Size of memory required for opening a stream.
+       @retval VIDDEC_FW_SUCCESS      : Successfully talked to FW and got required size.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size);
+
+    /**
+       This function opens the requested codec. This is a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want to open.
+       @param[in] phys                : Physical address of allocated memory for this codec.
+       @param[in] priority            : Priority of stream. 1 for realtime and 0 for background.
+       @param[out] strm_handle        : Handle of the opened stream.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Opened the stream.
+       @retval VIDDEC_FW_FAILURE      : Failed to Open a stream.
+    */
+    uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+
+    /**
+       This function closes a stream. This is a synchronous message to FW.
+       @param[in] strm_handle        : Handle of the stream to close.
+    */
+    void viddec_fw_decoder_closestream(uint32_t strm_handle);
+
+    /**
+       This function gets the current status of the decoder workload queues. If the current stream is active we return
+       the number of input messages that can be written to the input queue and the number of messages in the output queue of the stream.
+
+       Normally this is called when the Host receives an interrupt from the decoder, in which case, before releasing the INT,
+       the Host will try its best to keep the FW busy. Normally when an interrupt is received it means at least one workload has been
+       written into the output queue of a stream.
+       @param[in] strm_handle         : The handle of stream that we want to get status of queues.
+       @param[out] status             : The status of each queue gets updated in here.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Got the status information.
+       @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream.
+    */
+    uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status);
+
+    /**
+       This function flushes the current stream. This is a synchronous message to FW.
+       Before calling this function the host has to make sure the output queue of the firmware
+       is empty. After this function is executed the FW will read all entries in input
+       wkld buffer queue into output queue. After this operation the host has to read all entries
+       in output queue again to finish the flush operation.
+       @param[in] flush_type          : Type of flush we want to perform, e.g. flush and discard.
+       @param[in]  strm_handle        : Handle of the stream we want to flush.
+       @retval VIDDEC_FW_SUCCESS      : Successfully flushed the stream.
+       @retval VIDDEC_FW_FAILURE      : Failed to flush a stream.
+    */
+    uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+    /**
+       This function sends an input workload buffer. The host should provide required frame buffers in this workload before
+       sending it to fw.
+       @param[in] strm_handle         : The handle of stream that we want to send workload buffer to.
+       @param[in] cur_wkld            : The workload buffer we want to send.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_FULL    : Port to fw is full; unsuccessful in sending message.
+    */
+    uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+    /**
+       This function gets the decoded workload from fw.
+       @param[in] strm_handle         : The handle of stream that we want to read workload from.
+       @param[out] cur_wkld           : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_EMPTY   : Workload port is empty; unsuccessful in reading wkld.
+    */
+    uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+    /**
+       This function unloads Decoder Firmware and frees the resources allocated in load fw.
+       If this function is called before load fw it will crash with a segmentation fault.
+    */
+    void viddec_fw_decoder_deinit(void);
+
+    /**
+       This function gets the major and minor revision numbers of the loaded firmware.
+       @param[out] major              : The major revision number.
+       @param[out] minor              : The minor revision number.
+       @param[out] build              : The Internal Build number.
+    */
+    void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+    /**
+       This function returns the interrupt status of all streams which need to be processed. A value of zero
+       means no active streams which generated this interrupt.
+    */
+    uint32_t viddec_fw_decoder_active_pending_interrupts(void);
+
+    /**
+       This function clears the interrupts for all active streams represented by status input parameter.
+       The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts().
+       @param[in] status              : The status value that was returned by viddec_fw_decoder_active_pending_interrupts().
+    */
+    void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status);
+
+    /**
+       This function enables/disables interrupt for the stream specified.
+       @param[in] strm_handle         : The handle of stream that we want enable or disable interrupts for.
+       @param[in] enable              : Boolean value if ==0 means disable Interrupts else enable.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_INVALID_PARAM: Invalid stream handle was passed.
+    */
+    uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable);
+
+    /**
+       This function returns which stream interrupted in the past based on status, which is a snapshot of
+       interrupt status that was cleared in the past. The host has to call clear with status information
+       before calling this function again with status value. The Host should do this operation until this function
+       returns 0, which means all the streams that generated interrupt have been processed.
+       @param[out]strm_handle         : The handle of a stream that generated interrupt.
+       @param[in] status              : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+       @retval 1                      : A valid stream handle was found.
+       @retval 0                      : No more streams from the status which caused interrupt.
+    */
+    uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle);
+
+    /**
+       This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(),
+       This should be called after host performs all necessary actions for the stream.
+       @param[in] strm_handle         : The handle of a stream that we want to clear to indicate we handled it.
+       @param[in] status              : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+       @retval 1                      : Operation was successful.
+       @retval 0                      : Invalid stream handle was passed.
+    */
+    uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle);
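+
+    /*
+       Illustrative call sequence (a sketch for orientation only; fw_phys, strm_phys, strm,
+       wkld_in and wkld_out are assumed host-side variables and are not part of this API):
+
+           uint32_t size = viddec_fw_decoder_query_fwsize();
+           viddec_fw_decoder_loadfw(fw_phys, size);
+           viddec_fw_decoder_query_streamsize(MFD_STREAM_FORMAT_H264, &size);
+           viddec_fw_decoder_openstream(MFD_STREAM_FORMAT_H264, &strm, strm_phys, 1);
+           ...
+           viddec_fw_decoder_send(strm, &wkld_in);
+           viddec_fw_decoder_recv(strm, &wkld_out);
+           ...
+           viddec_fw_decoder_closestream(strm);
+           viddec_fw_decoder_deinit();
+    */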
+
+    /*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_DECODER_HOST_H
diff --git a/mixvbp/include/viddec_fw_frame_attr.h b/mixvbp/include/viddec_fw_frame_attr.h
new file mode 100644
index 0000000..4f4b479
--- /dev/null
+++ b/mixvbp/include/viddec_fw_frame_attr.h
@@ -0,0 +1,294 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_FRAME_ATTR_H
+#define VIDDEC_FW_FRAME_ATTR_H
+
+#include "viddec_fw_item_types.h"
+
+#define VIDDEC_PANSCAN_MAX_OFFSETS 4
+#define VIDDEC_MAX_CPB_CNT 32
+
+/**
+This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications.
+Frame types applicable to a single codec are specified in the comments.
+*/
+typedef enum
+{
+    VIDDEC_FRAME_TYPE_INVALID=0,   /** Unknown type - default value */
+    VIDDEC_FRAME_TYPE_IDR=0x1,       /** IDR frame - h264 only */
+    VIDDEC_FRAME_TYPE_I=0x2,         /** I frame */
+    VIDDEC_FRAME_TYPE_P=0x3,         /** P frame */
+    VIDDEC_FRAME_TYPE_B=0x4,         /** B frame */
+    VIDDEC_FRAME_TYPE_BI=0x5,        /** BI frame - Intracoded B frame - vc1 only */
+    VIDDEC_FRAME_TYPE_SKIP=0x6,      /** Skipped frame - vc1 only */
+    VIDDEC_FRAME_TYPE_D=0x7,         /** D frame - mpeg1 only */
+    VIDDEC_FRAME_TYPE_S=0x8,         /** SVOP frame - mpeg4 only - sprite encoded frame - treat as P */
+    VIDDEC_FRAME_TYPE_MAX,
+} viddec_frame_type_t;
+
+/**
+This structure contains the content size info extracted from the stream.
+*/
+typedef struct viddec_rect_size
+{
+    unsigned int width;
+    unsigned int height;
+} viddec_rect_size_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_mpeg2_frame_center_offset
+{
+    int horz;
+    int vert;
+} viddec_mpeg2_frame_center_offset_t;
+
+/**
+This structure contains the MPEG2 specific frame attributes.
+*/
+typedef struct viddec_mpeg2_frame_attributes
+{
+    /**
+    10 bit unsigned integer corresponding to the display order of each coded picture
+    in the stream (or gop if gop header is present).
+    Refer to "temporal_reference" of the picture header in ITU-T H.262 Specification.
+    */
+    unsigned int temporal_ref;
+
+    /**
+    Pan/Scan rectangle info
+    Refer to the picture display extension in ITU-T H.262 Specification.
+    */
+    viddec_mpeg2_frame_center_offset_t frame_center_offset[VIDDEC_PANSCAN_MAX_OFFSETS];
+    unsigned int number_of_frame_center_offsets;
+
+    /**
+    Top-Field first flag
+    Refer to "top_field_first" of the picture coding extension in ITU-T H.262 Specification.
+    */
+    unsigned int top_field_first;
+
+    /**
+    Progressive frame flag - Indicates if current frame is progressive or not.
+    Refer to "progressive_frame" of the picture coding extension in ITU-T H.262 Specification.
+    */
+    unsigned int progressive_frame;
+
+    /**
+    Frame/field polarity for each coded picture.
+    Refer to Table 6-14 in ITU-T H.262 Specification.
+    */
+    unsigned int picture_struct;
+
+    /**
+    Repeat field/frame flag.
+    Refer to "repeat_first_field" of the picture coding extension in ITU-T H.262 Specification.
+    */
+    unsigned int repeat_first_field;
+
+
+} viddec_mpeg2_frame_attributes_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_vc1_pan_scan_window
+{
+    unsigned int hoffset;
+    unsigned int voffset;
+    unsigned int width;
+    unsigned int height;
+} viddec_vc1_pan_scan_window_t;
+
+/**
+This structure contains the VC1 specific frame attributes.
+*/
+typedef struct viddec_vc1_frame_attributes
+{
+    /**
+    Temporal Reference of frame/field.
+    Refer to "TFCNTR" in the picture layer of the SMPTE VC1 Specification.
+    */
+    unsigned int tfcntr;
+
+    /**
+    Frame/field repeat information in the bitstream.
+    Refer to "RPTFRM", "TFF", "BFF" in the picture layer
+    of the SMPTE VC1 Specification.
+    */
+    unsigned int rptfrm;
+    unsigned int tff;
+    unsigned int rff;
+
+    /**
+    Pan-scan information in the bitstream.
+    Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET",
+    "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification.
+    */
+    unsigned int panscan_flag;
+    unsigned int ps_present;
+    unsigned int num_of_pan_scan_windows;
+    viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+} viddec_vc1_frame_attributes_t;
+
+/**
+This structure contains the H264 specific frame attributes.
+*/
+typedef struct viddec_h264_frame_attributes
+{
+    /**
+       used_for_reference : 1 means this frame is used as a ref frame by others. 0 means no other frame refers to this frame
+    */
+    unsigned int used_for_reference;
+    /**
+       Picture Order Count for the current frame/field.
+       This value is computed using information from the bitstream.
+       Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.
+       These fields will be supported in the future
+    */
+    int top_field_poc;
+    int bottom_field_poc;
+
+    /**
+       Display size, which is cropped from the content size.
+       Currently, the cont_size is cropped, so this parameter is redundant, but in the future, cont_size may be changed
+    */
+    viddec_rect_size_t cropped_size;
+
+    /**
+       top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1
+    */
+    unsigned int top_field_first;
+
+    /**
+       field_frame_flag: 0 means all slices of this frame are frame-based encoded, else 1
+    */
+    unsigned int field_pic_flag;
+
+    /**
+       This data type holds view specific information of current frame.
+       The following information is packed into this data type:
+         view_id(0-9 bits):        Assigned 10 bit value in the encoded stream.
+         priority_id(10-15bits):   Assigned 6 bit priority id.
+         is_base_view(16th bit):   When true, the current frame belongs to the base view; otherwise it belongs to a dependent view.
+     */
+#define viddec_fw_h264_mvc_get_view_id(x)              viddec_fw_bitfields_extract( (x)->view_spcific_info, 0, 0x3FF)
+#define viddec_fw_h264_mvc_set_view_id(x, val)         viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 0, 0x3FF)
+#define viddec_fw_h264_mvc_get_priority_id(x)          viddec_fw_bitfields_extract( (x)->view_spcific_info, 10, 0x3F)
+#define viddec_fw_h264_mvc_set_priority_id(x, val)     viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 10, 0x3F)
+#define viddec_fw_h264_mvc_get_is_base_view(x)         viddec_fw_bitfields_extract( (x)->view_spcific_info, 16, 0x1)
+#define viddec_fw_h264_mvc_set_is_base_view(x, val)    viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 16, 0x1)
+    unsigned int view_spcific_info;
+} viddec_h264_frame_attributes_t;
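+
+/*
+   Illustrative use of the MVC accessors above (a sketch; 'attr' is assumed to be a
+   viddec_h264_frame_attributes_t filled in by the parser):
+
+       if (viddec_fw_h264_mvc_get_is_base_view(&attr))
+           view_id = viddec_fw_h264_mvc_get_view_id(&attr);
+*/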
+
+/**
+This structure contains the MPEG4 specific frame attributes.
+*/
+typedef struct viddec_mpeg4_frame_attributes
+{
+    /**
+    Top-Field first flag
+    Refer to "top_field_first" of the Video Object Plane of the MPEG4 Spec.
+    */
+    unsigned int top_field_first;
+
+} viddec_mpeg4_frame_attributes_t;
+
+/**
+This structure groups all the frame attributes that are exported by the firmware.
+The frame attributes are split into attributes that are common to all codecs and
+that are specific to codec type.
+*/
+typedef struct viddec_frame_attributes
+{
+    /**
+    Content size specified in the stream.
+    For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and
+    "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification.
+    For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the
+    sequence parameter set in ITU-T H.264 Specification.
+    For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer,
+    "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification.
+    */
+    viddec_rect_size_t cont_size;
+
+    /**
+    Type of frame populated in the workload.
+    frame_type contains the frame type for progressive frame and the field type for the top field for interlaced frames.
+    bottom_field_type contains the field type for the bottom field for interlaced frames.
+    For MPEG2, refer to "picture_coding_type" in picture header (Table 6-12) in ITU-T H.262 Specification.
+    For H264, refer to "slice_type" in slice header (Table 7-6) in ITU-T H.264 Specification.
+    For VC1, refer to "PTYPE" and FPTYPE in the picture layer (Tables 33, 34, 35, 105) in SMPTE VC1 Specification.
+    */
+    viddec_frame_type_t frame_type;
+    viddec_frame_type_t bottom_field_type;
+
+    /** Codec specific attributes */
+    union
+    {
+        viddec_mpeg2_frame_attributes_t  mpeg2;
+        viddec_vc1_frame_attributes_t    vc1;
+        viddec_h264_frame_attributes_t   h264;
+        viddec_mpeg4_frame_attributes_t  mpeg4;
+    };
+
+} viddec_frame_attributes_t;
+
+#endif /* VIDDEC_FRAME_ATTR_H */
diff --git a/mixvbp/include/viddec_fw_item_types.h b/mixvbp/include/viddec_fw_item_types.h
new file mode 100644
index 0000000..472dff2
--- /dev/null
+++ b/mixvbp/include/viddec_fw_item_types.h
@@ -0,0 +1,784 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_ITEM_TYPES_H
+#define VIDDEC_FW_ITEM_TYPES_H
+
+
+/* The following macros are defined to pack data into 32 bit words.
+   mask:    A 32 bit value of N 1 bits starting from lsb where N represents the length of data we are packing.
+   start:   Bit start position of data we want.
+          ex:  If we want to pack Height(16bits), width(16bits) where width is from (1:16) and height is from (17:32), these are
+          the start and mask values for width and height.
+          width: start = 0 mask=0xFFFF
+          Height:start=  16 mask=0xFFFF
+
+   extract: extracts data of unsigned integer type from x_32, starting at bit offset 'start' (0 to 31), with the length
+            given by mask, and returns it as an unsigned integer.
+   insert:  inserts N bits from val_32, where N is the length given by mask, into x_32 at the bit offset given by start.
+            val_32 is expected to be an unsigned int of N bits starting with the lsb.
+*/
+
+#define viddec_fw_bitfields_extract(x_32, start, mask)     (((x_32) >> (start)) & (mask) )
+#define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
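+
+/* Worked example (a sketch matching the width/height layout described above):
+
+       unsigned int dim = 0;
+       viddec_fw_bitfields_insert(dim, 1920,  0, 0xFFFF);              // width  in bits  0..15
+       viddec_fw_bitfields_insert(dim, 1080, 16, 0xFFFF);              // height in bits 16..31
+       unsigned int w = viddec_fw_bitfields_extract(dim,  0, 0xFFFF);  // 1920
+       unsigned int h = viddec_fw_bitfields_extract(dim, 16, 0xFFFF);  // 1080
+*/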
+
+
+/* Workload item types. Each item here represents data that the Parser detected, e.g. slice data, which
+ is used either by the host or the decoder.*/
+typedef enum workload_item_type
+{
+    VIDDEC_WORKLOAD_INVALID                               =0x0,/* Unknown type */
+    VIDDEC_WORKLOAD_PIXEL_ES                              =0x100,/* Slice data tag */
+    VIDDEC_WORKLOAD_TAG                                   =0x200,/* Frame association tag */
+    VIDDEC_WORKLOAD_USERDATA                              =0x300,/* user data tag */
+
+    VIDDEC_WORKLOAD_IBUF_DONE                             =0x500,/* Es buffer completely used tag */
+    VIDDEC_WORKLOAD_IBUF_CONTINUED                        =0x600,/* Es buffer partially used tag */
+    VIDDEC_WORKLOAD_IBUF_DISCONTINUITY                    =0x700,/* Discontinuity tag on first workload after discontinuity */
+    VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER               =0x800, /* Reorder frames in DPB tag */
+    VIDDEC_WORKLOAD_IBUF_EOS                              =0x900,/* EOS tag on last workload used for current stream */
+    VIDDEC_WORKLOAD_SEQUENCE_INFO                         =0xa00,/* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */
+    VIDDEC_WORKLOAD_DISPLAY_INFO                          =0xb00,/* MPEG2 Seq Disp Ext, H264 VUI */
+    VIDDEC_WORKLOAD_GOP_INFO                              =0xc00,/* MPEG2 GOP, VC1 Entrypoint */
+    VIDDEC_WORKLOAD_SEQ_USER_DATA                         =0xd00,/* MPEG2, VC1 Sequence Level User data */
+    VIDDEC_WORKLOAD_GOP_USER_DATA                         =0xe00,/* MPEG2, VC1 Gop Level User data */
+    VIDDEC_WORKLOAD_FRM_USER_DATA                         =0xf00,/* MPEG2 Picture User data, VC1 Frame User data */
+
+    VIDDEC_WORKLOAD_FLD_USER_DATA                         =0x1000,/* MPEG2, VC1 Field User data */
+    VIDDEC_WORKLOAD_SLC_USER_DATA                         =0x1100,/* VC1 Slice User data */
+    VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA                  =0x1200,/* MPEG4 Visual Object User data */
+    VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C                =0x1200,/* VC1 Only */
+    VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA                   =0x1300,/* MPEG4 Video Object Layer User data */
+    VIDDEC_WORKLOAD_H264_CROPPING                         =0x1400,/* H264 only */
+    VIDDEC_WORKLOAD_H264_PAN_SCAN                         =0x1500,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_PIC_TIMING                        =0x1600,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT                     =0x1700,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED              =0x1800,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED            =0x1900,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_RECOVERY_POINT                    =0x1a00,/* H264 only */
+    VIDDEC_WORKLOAD_MPEG2_SEQ_EXT                         =0x1b00,/* MPEG2 Only - Sequence Extension */
+    VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS                 =0x1c00,/* H264 only */
+    VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ                  =0x1d00,/* MPEG4 Only - Visual Sequence */
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ                       =0x1e00,/* MPEG4 Only - Video Object Layer */
+    VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ                   =0x1f00,/* MPEG4 Only - Group of Video Object Planes */
+
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT               =0x2000,/* MPEG4 Only - Video Plane with Short Header */
+    VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO                  =0x2100,/* H264 only */
+
+    VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0                    =0x10000,/* required reference frames tag,last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0                   =0x20000,/* release frames tag, last eight bits indicate index in dpb*/
+    VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0                   =0x30000,/* Display order in DPB tag, for H264 */
+    VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0                   =0x40000,/* Release frames but not display, for H264 */
+    VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0                   =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0                   =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0                    =0x70000,/* required for H264 as it needs whole DPB for each frame */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_0                      =0x80000,/* ref list 0 for H264 */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_1                      =0x90000,/* ref list 1 for H264 */
+    VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY                    =0xa0000,/* eos items begin after this */
+
+    VIDDEC_WORKLOAD_DECODER_SPECIFIC                      =0x100000,/* pvt info for decoder tags */
+    VIDDEC_WORKLOAD_MAX,
+} workload_item_type;
+
+struct h264_witem_sps_mvc_id
+{
+    /*
+      0-9:    num_views_minus1
+      10-19:  start index of views in current item.
+      20-23:  Number of valid items.
+    */
+#define viddec_fw_h264_sps_mvc_id_get_num_views_minus1(x)         viddec_fw_bitfields_extract( (x)->num_views, 0, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_set_num_views_minus1(x, val)    viddec_fw_bitfields_insert( (x)->num_views, val, 0, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_get_cur_start_index(x)          viddec_fw_bitfields_extract( (x)->num_views, 10, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_set_cur_start_index(x, val)     viddec_fw_bitfields_insert( (x)->num_views, val, 10, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_get_num_cur_valid_items(x)      viddec_fw_bitfields_extract( (x)->num_views, 20, 0x7)
+#define viddec_fw_h264_sps_mvc_id_set_num_cur_valid_items(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 20, 0x7)
+    unsigned int num_views;
+
+    /* We pack six ids into two integers. Each packed_view (integer) contains three 10-bit ids at bits 0-9, 10-19, 20-29
+       These values can be extracted/set using viddec_fw_h264_sps_mvc_id_get_data_frm_index()
+       and viddec_fw_h264_sps_mvc_id_set_data_frm_index() functions.
+    */
+#define viddec_fw_h264_sps_mvc_id_max_packed_ids        6 /* Max number of packed ids in a workload item */
+    unsigned int packed_view[2];
+};
+
+/* This function extracts a 10 bit view id of index( <6) that was packed into h264_witem_sps_mvc_id structure */
+static inline unsigned int viddec_fw_h264_sps_mvc_id_get_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index)
+{
+    unsigned int start=0, *word;
+
+    start = ((index > 2) ?(index - 3) : index) *10;
+    word = &(data->packed_view[(index > 2) ? 1:0]);
+    return viddec_fw_bitfields_extract(*word, start, 0x3FF);
+}
+
+/* This function packs  a 10 bit view id(val) at index( <6) in h264_witem_sps_mvc_id structure */
+static inline void viddec_fw_h264_sps_mvc_id_set_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index, unsigned int val)
+{
+    unsigned int start=0, *word;
+
+    start = ((index > 2) ?(index - 3) : index) *10;
+    word = &(data->packed_view[(index > 2) ? 1:0]);
+    viddec_fw_bitfields_insert(*word, val, start, 0x3FF);
+}
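+
+/*
+   Illustrative use (a sketch; 'mvc_id' and 'view_id' are assumed to be provided by the
+   caller; mvc_id is a struct h264_witem_sps_mvc_id):
+
+       viddec_fw_h264_sps_mvc_id_set_data_frm_index(&mvc_id, 0, view_id);
+       unsigned int v0 = viddec_fw_h264_sps_mvc_id_get_data_frm_index(&mvc_id, 0);
+*/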
+
+/* 16-byte workload */
+typedef struct viddec_workload_item
+{
+    enum workload_item_type vwi_type;
+    union
+    {
+        struct
+        {
+            unsigned int    es_phys_addr;
+            unsigned int    es_phys_len;
+            unsigned int    es_flags;
+        } es;
+        struct
+        {
+            unsigned int    tag_phys_addr;
+            unsigned int    tag_phys_len;
+            unsigned int    tag_value;
+        } tag;
+        struct
+        {
+            unsigned int    data_offset;
+            unsigned int    data_payload[2];
+        } data;
+        struct
+        {
+            signed int      reference_id;     /* Assigned by parser */
+            unsigned int    luma_phys_addr;   /* assigned by host, for DM */
+            unsigned int    chroma_phys_addr; /* assigned by host, for DM */
+        } ref_frame;
+        struct   /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */
+        {
+            signed int      ref_table_offset;    /* Index of first "reordered" */
+            /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */
+            unsigned int    ref_reorder_00010203;
+            /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */
+            unsigned int    ref_reorder_04050607;
+        } ref_reorder;
+        struct
+        {
+            /* we pack a maximum of 11 bytes of user data and 1 byte for size */
+            /* TODO: we can pack 12 bytes and use bottom 8 bits of type to indicate size */
+#define viddec_fw_get_user_data_size(x)  ((x)->user_data.size)
+#define viddec_fw_get_user_data(x)   (unsigned char *)&((x)->user_data.data_payload[0])
+            unsigned char size;
+            unsigned char data_payload[11];
+            /*
+              ITEM TYPES WHICH use this:
+              VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED,
+              VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA,
+              VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA,
+            */
+        } user_data;
+        struct
+        {
+            // Sequence Header Item I (From LSB):
+            //    - horizontal_size_value    - 12 bits
+            //    - vertical_size_value      - 12 bits
+            //    - aspect_ratio_information - 4 bits
+            //    - frame_rate_code          - 4 bits
+#define viddec_fw_mp2_sh_get_horizontal_size_value(x)    viddec_fw_bitfields_extract( (x)->seq_hdr_item_1,  0, 0xFFF)
+#define viddec_fw_mp2_sh_get_vertical_size_value(x)      viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF)
+#define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF)
+#define viddec_fw_mp2_sh_get_frame_rate_code(x)          viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF)
+#define viddec_fw_mp2_sh_set_horizontal_size_value(x, val)    viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val,  0, 0xFFF)
+#define viddec_fw_mp2_sh_set_vertical_size_value(x, val)      viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF)
+#define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF)
+#define viddec_fw_mp2_sh_set_frame_rate_code(x, val)          viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF)
+            unsigned int seq_hdr_item_1;
+
+            // Sequence Header Item II (From LSB):
+            //    - bit_rate_value        - 18 bits
+            //    - vbv_buffer_size_value - 10 bits
+            //    - remaining pad bits
+#define viddec_fw_mp2_sh_get_bit_rate_value(x)        viddec_fw_bitfields_extract( (x)->seq_hdr_item_2,  0, 0x3FFFF)
+#define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF)
+#define viddec_fw_mp2_sh_set_bit_rate_value(x, val)        viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val,  0, 0x3FFFF)
+#define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF)
+            unsigned int seq_hdr_item_2;
+
+            unsigned int pad;
+        } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+        struct
+        {
+            // Sequence Extension Item I (From LSB):
+            //    - profile_and_level_indication - 8 bits
+            //    - progressive_sequence         - 1 bit
+            //    - chroma_format                - 2 bits
+            //    - horizontal_size_extension    - 2 bits
+            //    - vertical_size_extension      - 2 bits
+            //    - bit_rate_extension           - 12 bits
+            //    - remaining pad bits
+#define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1,  0, 0xFF)
+#define viddec_fw_mp2_se_get_progressive_sequence(x)         viddec_fw_bitfields_extract( (x)->seq_ext_item_1,  8, 0x1)
+#define viddec_fw_mp2_se_get_chroma_format(x)                viddec_fw_bitfields_extract( (x)->seq_ext_item_1,  9, 0x3)
+#define viddec_fw_mp2_se_get_horizontal_size_extension(x)    viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3)
+#define viddec_fw_mp2_se_get_vertical_size_extension(x)      viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3)
+#define viddec_fw_mp2_se_get_bit_rate_extension(x)           viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF)
+#define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val,  0, 0xFF)
+#define viddec_fw_mp2_se_set_progressive_sequence(x, val)         viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val,  8, 0x1)
+#define viddec_fw_mp2_se_set_chroma_format(x, val)                viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val,  9, 0x3)
+#define viddec_fw_mp2_se_set_horizontal_size_extension(x, val)    viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3)
+#define viddec_fw_mp2_se_set_vertical_size_extension(x, val)      viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3)
+#define viddec_fw_mp2_se_set_bit_rate_extension(x, val)           viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF)
+            unsigned int seq_ext_item_1;
+
+            // Sequence Extension Item II (From LSB):
+            //    - vbv_buffer_size_extension - 8 bits
+            //    - frame_rate_extension_n    - 2 bits
+            //    - frame_rate_extension_d    - 5 bits
+            //    - remaining pad bits
+#define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2,  0, 0xFF)
+#define viddec_fw_mp2_se_get_frame_rate_extension_n(x)    viddec_fw_bitfields_extract( (x)->seq_ext_item_2,  8, 0x3)
+#define viddec_fw_mp2_se_get_frame_rate_extension_d(x)    viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F)
+#define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val,  0, 0xFF)
+#define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val)    viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val,  8, 0x3)
+#define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val)    viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F)
+            unsigned int seq_ext_item_2;
+
+            unsigned int pad;
+        } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT
+        struct
+        {
+            // Sequence Display Extension Item I (From LSB):
+            //   - display_horizontal_size - 14 bits
+            //   - display_vertical_size   - 14 bits
+            //   - video_format            - 3 bits
+            //   - color_description       - 1 bit
+#define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1,  0, 0x3FFF)
+#define viddec_fw_mp2_sde_get_display_vertical_size(x)   viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF)
+#define viddec_fw_mp2_sde_get_video_format(x)            viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7)
+#define viddec_fw_mp2_sde_get_color_description(x)       viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1)
+#define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val,  0, 0x3FFF)
+#define viddec_fw_mp2_sde_set_display_vertical_size(x, val)   viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF)
+#define viddec_fw_mp2_sde_set_video_format(x, val)            viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7)
+#define viddec_fw_mp2_sde_set_color_description(x, val)       viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1)
+            unsigned int seq_disp_ext_item_1;
+
+            // Sequence Display Extension II (From LSB):
+            //   - color_primaries - 8 bits
+            //   - transfer_characteristics - 8 bits
+            //   - remaining pad bits
+#define viddec_fw_mp2_sde_get_color_primaries(x)          viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2,  0, 0xFF)
+#define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2,  8, 0xFF)
+#define viddec_fw_mp2_sde_set_color_primaries(x, val)          viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val,  0, 0xFF)
+#define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val,  8, 0xFF)
+            unsigned int seq_disp_ext_item_2;
+
+            unsigned int pad;
+        } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+        struct
+        {
+            // Group of Pictures Header Item I (From LSB):
+            //   - closed_gop - 1 bit
+            //   - broken_link - 1 bit
+            //   - remaining pad bits
+#define viddec_fw_mp2_gop_get_closed_gop(x)  viddec_fw_bitfields_extract( (x)->gop_hdr_item_1,  0, 0x1)
+#define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1,  1, 0x1)
+#define viddec_fw_mp2_gop_set_closed_gop(x, val)  viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val,  0, 0x1)
+#define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val,  1, 0x1)
+            unsigned int gop_hdr_item_1;
+
+            unsigned int pad1;
+            unsigned int pad2;
+        } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO
+        struct
+        {
+#define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3)
+#define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3)
+
+#define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7)
+#define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7)
+
+#define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3)
+#define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3)
+
+#define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+#define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+#define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+#define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+#define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+#define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+#define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F)
+#define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F)
+
+#define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7)
+#define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7)
+
+#define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+#define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+#define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+#define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+#define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1)
+#define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1)
+
+#define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1)
+#define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1)
+
+#define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+#define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+            uint32_t size;    // profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12
+            uint32_t flags;   // bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1
+            uint32_t pad;
+        } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+        struct
+        {
+            // This item is populated when the display_ext flag is set in the sequence layer,
+            // so there is no need to carry that flag here as well
+#define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF)
+#define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF)
+
+#define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF)
+#define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1)
+#define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1)
+
+#define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1)
+#define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1)
+
+#define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1)
+#define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1)
+
+#define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1)
+#define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF)
+#define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF)
+
+#define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF)
+#define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF)
+
+#define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF)
+#define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF)
+
+#define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF)
+#define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF)
+#define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF)
+#define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF)
+
+#define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF)
+#define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF)
+
+#define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF)
+#define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF)
+
+            uint32_t size; // disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1
+            uint32_t framerate;  // aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16
+            uint32_t aspectsize; // aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8
+        } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+        struct
+        {
+#define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF)
+#define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF)
+
+#define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF)
+#define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF)
+
+#define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F)
+#define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F)
+
+#define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7)
+#define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7)
+
+#define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF)
+#define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF)
+
+#define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7)
+#define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7)
+
+#define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1)
+#define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1)
+
+#define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+#define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+#define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7)
+#define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7)
+
+#define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+#define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+            uint32_t size;    // horiz_size:16, vert_size:16
+            uint32_t flags;   // bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1
+            uint32_t pad;
+        } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C
+        struct
+        {
+#define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+#define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+#define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+#define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+#define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+#define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+#define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1)
+#define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1)
+
+#define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1)
+#define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1)
+
+#define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1)
+#define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1)
+
+#define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1)
+#define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1)
+
+#define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7)
+#define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7)
+
+#define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+#define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+#define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7)
+#define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7)
+
+            uint32_t size;    // coded_size_flag:1, coded_width:12, coded_height:12
+            uint32_t flags;   // broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3
+            uint32_t pad;
+        } vc1_ep; // vc1 item of type VIDDEC_WORKLOAD_GOP_INFO
+        struct
+        {
+            /*
+              0-7 bits for profile_idc.
+              8-15 bits for level_idc.
+              16-17 bits for chroma_format_idc.
+              18-22 bits for num_ref_frames.
+              23  for gaps_in_frame_num_value_allowed_flag.
+              24 for frame_mbs_only_flag.
+              25 for frame_cropping_flag.
+              26 for vui_parameters_present_flag.
+             */
+#define viddec_fw_h264_sps_get_profile_idc(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF)
+#define viddec_fw_h264_sps_set_profile_idc(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF)
+#define viddec_fw_h264_sps_get_level_idc(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF)
+#define viddec_fw_h264_sps_set_level_idc(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF)
+#define viddec_fw_h264_sps_get_chroma_format_idc(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3)
+#define viddec_fw_h264_sps_set_chroma_format_idc(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3)
+#define viddec_fw_h264_sps_get_num_ref_frames(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F)
+#define viddec_fw_h264_sps_set_num_ref_frames(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F)
+#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1)
+#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1)
+#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1)
+#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1)
+#define viddec_fw_h264_sps_get_frame_cropping_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1)
+#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1)
+#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1)
+#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1)
+            unsigned int sps_messages;
+            unsigned int pic_width_in_mbs_minus1;
+            unsigned int pic_height_in_map_units_minus1;
+        } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+
+        struct h264_witem_sps_mvc_id h264_sps_mvc_id;
+
+        struct
+        {
+#define viddec_fw_h264_cropping_get_left(x)  viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_left(x, val)  viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+            unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+#define viddec_fw_h264_cropping_get_top(x)  viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_top(x, val)  viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+            unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+            unsigned int pad;
+        } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING
+
+        struct
+        {
+            /* bit 0 for aspect_ratio_info_present_flag
+               bit 1 for video_signal_type_present_flag
+               bit 2 for colour_description_present_flag
+               bit 3 for timing_info_present_flag
+               bit 4 for nal_hrd_parameters_present_flag
+               bit 5 for vcl_hrd_parameters_present_flag
+               bit 6 for fixed_frame_rate_flag
+               bit 7 for pic_struct_present_flag
+               bit 8 for low_delay_hrd_flag
+               bits 9-11 for video_format
+            */
+#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1)
+#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1)
+#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1)
+#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1)
+#define viddec_fw_h264_vui_get_colour_description_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1)
+#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1)
+#define viddec_fw_h264_vui_get_timing_info_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1)
+#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1)
+#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1)
+#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1)
+#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1)
+#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1)
+#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1)
+#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1)
+#define viddec_fw_h264_vui_get_pic_struct_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1)
+#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1)
+#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1)
+#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1)
+#define viddec_fw_h264_vui_get_video_format(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7)
+#define viddec_fw_h264_vui_set_video_format(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7)
+            unsigned int vui_flags_and_format;
+
+#define viddec_fw_h264_vui_get_aspect_ratio_idc(x)  viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF)
+#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val)  viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF)
+#define viddec_fw_h264_vui_get_colour_primaries(x)  viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF)
+#define viddec_fw_h264_vui_set_colour_primaries(x, val)  viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF)
+#define viddec_fw_h264_vui_get_transfer_characteristics(x)  viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF)
+#define viddec_fw_h264_vui_set_transfer_characteristics(x, val)  viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF)
+            /* lower 8 bits for aspect_ratio, next 8 bits for color primaries and next 8 bits for transfer characteristics */
+            unsigned int aspc_color_transfer;
+
+#define viddec_fw_h264_vui_get_sar_width(x)  viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF)
+#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_width(x, val)  viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF)
+            unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */
+        } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+        struct
+        {
+#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x)  viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val)  viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_get_time_scale_flag(x)  viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_time_scale_flag(x, val)  viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF)
+            unsigned int num_units_in_tick;
+            unsigned int time_scale;
+            unsigned int pad1;
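+            /*
+               Illustrative note (added for documentation, not part of the original header):
+               when timing info is present, num_units_in_tick and time_scale define the clock;
+               for typical H.264 streams the frame rate works out to
+                   frame_rate = time_scale / (2 * num_units_in_tick)
+               since one tick corresponds to one field.
+            */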
+        } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO
+        struct
+        {
+            unsigned int pic_struct; /* 4 bit length */
+            unsigned int pad1;
+            unsigned int pad2;
+        } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING
+        struct
+        {
+            unsigned int pan_scan_rect_id;
+
+#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x)  viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3)
+#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val)  viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3)
+            unsigned int pan_scan_cancel_and_cnt; /* 0 bit for cancel flag and 2 bits for cnt_minus1 */
+            unsigned int pan_scan_rect_repetition_period;
+        } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN
+
+        struct
+        {
+
+#define viddec_fw_h264_pan_scan_get_left(x)  viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_left(x, val)  viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+            unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+
+#define viddec_fw_h264_pan_scan_get_top(x)  viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_top(x, val)  viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+            unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+
+            unsigned int pad;
+        } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT
+        struct
+        {
+            unsigned int recovery_frame_cnt;
+#define viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x)  viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val)  viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1)
+            unsigned int broken_and_exctmatch_flags; /* bit 0 for exact match, bit 1 for broken link */
+
+            unsigned int changing_slice_group_idc; /* 2-bit value for slice_group idc */
+
+        } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT
+
+
+        struct
+        {
+            // Visual Sequence (From LSB):
+            //   - profile_and_level_indication - 8 bits
+#define viddec_fw_mp4_vs_get_profile_and_level_indication(x)      viddec_fw_bitfields_extract( (x)->vs_item,  0, 0xFF)
+#define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val,  0, 0xFF)
+            unsigned int vs_item;
+
+            // Visual Object - video_signal_type
+            //   - video_signal_type - 1b
+            //   - video_format - 3b
+            //   - video_range - 1b
+            //   - colour_description - 1b
+#define viddec_fw_mp4_vo_get_colour_description(x)      viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1)
+#define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1)
+#define viddec_fw_mp4_vo_get_video_range(x)      viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1)
+#define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1)
+#define viddec_fw_mp4_vo_get_video_format(x)      viddec_fw_bitfields_extract( (x)->video_signal_type,  1, 0x7)
+#define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val,  1, 0x7)
+#define viddec_fw_mp4_vo_get_video_signal_type(x)      viddec_fw_bitfields_extract( (x)->video_signal_type,  0, 0x1)
+#define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val,  0, 0x1)
+            unsigned int video_signal_type;
+
+            // Visual Object - colour description
+            //   - color_primaries - 8 bits
+            //   - transfer_characteristics - 8 bits
+#define viddec_fw_mp4_vo_get_transfer_char(x)      viddec_fw_bitfields_extract( (x)->color_desc,  8, 0xFF)
+#define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val,  8, 0xFF)
+#define viddec_fw_mp4_vo_get_color_primaries(x)      viddec_fw_bitfields_extract( (x)->color_desc,  0, 0xFF)
+#define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val,  0, 0xFF)
+            unsigned int color_desc;
+        } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ
+
+        struct
+        {
+            // Video Object Layer(From LSB):
+            //   - aspect_ratio_info - 4b
+            //   - par_width - 8b
+            //   - par_height - 8b
+            //   - vol_control_param - 1b
+            //   - chroma_format - 2b
+            //   - interlaced - 1b
+            //   - fixed_vop_rate - 1b
+#define viddec_fw_mp4_vol_get_fixed_vop_rate(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1)
+#define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1)
+#define viddec_fw_mp4_vol_get_interlaced(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1)
+#define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1)
+#define viddec_fw_mp4_vol_get_chroma_format(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3)
+#define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3)
+#define viddec_fw_mp4_vol_get_control_param(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1)
+#define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1)
+#define viddec_fw_mp4_vol_get_par_height(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF)
+#define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF)
+#define viddec_fw_mp4_vol_get_par_width(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF)
+#define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF)
+#define viddec_fw_mp4_vol_get_aspect_ratio_info(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF)
+#define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF)
+            unsigned int vol_aspect_ratio;
+
+            // Video Object Layer(From LSB):
+            //   - vbv_parameters - 1b
+            //   - bit_rate - 30b
+#define viddec_fw_mp4_vol_get_bit_rate(x)      viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF)
+#define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF)
+#define viddec_fw_mp4_vol_get_vbv_param(x)      viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1)
+#define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1)
+            unsigned int vol_bit_rate;
+
+            // Video Object Layer(From LSB):
+            //   - fixed_vop_time_increment - 16b
+            //   - vop_time_increment_resolution - 16b
+#define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF)
+#define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF)
+#define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x)      viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF)
+#define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF)
+            unsigned int vol_frame_rate;
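+            /*
+               Illustrative note (added for documentation, not part of the original header):
+               when fixed_vop_rate is set, the nominal VOP rate follows from the fields above as
+                   frame_rate = vop_time_increment_resolution / fixed_vop_time_increment
+            */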
+        } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ
+
+        struct
+        {
+            // Group of Video Object Planes(From LSB):
+            //   - time_code - 18b
+            //   - closed_gov - 1b
+            //   - broken_link - 1b
+#define viddec_fw_mp4_gvop_get_broken_link(x)      viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1)
+#define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1)
+#define viddec_fw_mp4_gvop_get_closed_gov(x)      viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1)
+#define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1)
+#define viddec_fw_mp4_gvop_get_time_code(x)      viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF)
+#define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF)
+            unsigned int gvop_info;
+
+            unsigned int pad1;
+            unsigned int pad2;
+        } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ
+
+        struct
+        {
+            // Video Plane with Short Header (From LSB):
+            //   - source_format - 3b
+#define viddec_fw_mp4_vpsh_get_source_format(x)      viddec_fw_bitfields_extract((x)->info, 0, 0x7)
+#define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7)
+            unsigned int info;
+
+            unsigned int pad1;
+            unsigned int pad2;
+        } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT
+
+        unsigned int    vwi_payload[3];
+    };
+} viddec_workload_item_t;
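+
+/*
+   Illustrative usage (added for documentation only, not part of the original header).
+   Every field above is packed into 32-bit words and accessed through the per-codec
+   get/set macros, which take a pointer to the containing struct; for example, for an
+   MPEG-2 sequence-header item and an H.264 SPS item:
+
+       viddec_workload_item_t item = {0};
+
+       viddec_fw_mp2_sh_set_horizontal_size_value(&item.mp2_sh, 1920);
+       viddec_fw_mp2_sh_set_vertical_size_value(&item.mp2_sh, 1080);
+       unsigned int width   = viddec_fw_mp2_sh_get_horizontal_size_value(&item.mp2_sh); // 1920
+
+       unsigned int profile = viddec_fw_h264_sps_get_profile_idc(&item.h264_sps);
+       unsigned int level   = viddec_fw_h264_sps_get_level_idc(&item.h264_sps);
+
+   Any of the per-codec members of the anonymous union can be used the same way.
+*/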
+
+
+
+#endif /* VIDDEC_ITEM_TYPES_H */
diff --git a/mixvbp/include/viddec_fw_parser_host.h b/mixvbp/include/viddec_fw_parser_host.h
new file mode 100644
index 0000000..550cf0a
--- /dev/null
+++ b/mixvbp/include/viddec_fw_parser_host.h
@@ -0,0 +1,237 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_PARSER_HOST_H
+#define VIDDEC_FW_PARSER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "viddec_fw_common_defs.h"
+
+    /** @weakgroup viddec Fw Parser interface Functions */
+    /** @ingroup viddec_fw_parser */
+    /*@{*/
+
+    /**
+       This function returns the size required for loading fw.
+       @retval  size : Required size.
+    */
+    uint32_t viddec_fw_parser_query_fwsize(void);
+
+    /**
+       This function loads the Parser Firmware and initialises necessary state information. This is a synchronous message to FW.
+       @param[in] phys                : Physical address on where firmware should be loaded.
+       @param[in] len                 : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS      : Successfully loaded firmware.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+       @retval VIDDEC_FW_NORESOURCES  : Failed to allocate resources for Loading firmware.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns the size required for opening a stream. This is a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want information about.
+       @param[out] num_wklds          : Number of wklds required for initialisation.
+       @param[out] size               : Size of memory required for opening a stream.
+    */
+    void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size);
+
+    /**
+       This function opens the requested codec. This is a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want to open.
+       @param[in] phys                : Physical address of allocated memory for this codec.
+       @param[in] priority            : Priority of the stream: 1 for realtime and 0 for background.
+       @param[out] strm_handle        : Handle of the opened stream.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Opened the stream.
+       @retval VIDDEC_FW_FAILURE      : Failed to Open a stream.
+       @retval VIDDEC_FW_NORESOURCES  : Failed to Open a stream as we are out of resources.
+    */
+    uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+    /**
+       This function closes a stream. This is a synchronous message to FW.
+       For the close to be effective, the host has to do a flush with discard first and then close the stream.
+       @param[in] strm_handle        : Handle of the stream to close.
+    */
+    void viddec_fw_parser_closestream(uint32_t strm_handle);
+
+    /**
+       This function flushes the current stream. This is a synchronous message to FW.
+       Before calling this function the host has to make sure the output queue of the firmware
+       is empty. After this function is executed the FW will read all entries in the input
+       es buffer queue into a free or partial workload and push it into the output queue.
+       After this operation the host has to read all entries in the output queue again to
+       finish the flush operation.
+       @param[in] flush_type          : Type of flush we want to perform, e.g. flush and discard.
+       @param[in]  strm_handle        : Handle of the stream we want to flush.
+       @retval VIDDEC_FW_SUCCESS      : Successfully flushed the stream.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+       @retval VIDDEC_FW_NEED_FREE_WKLD  : Failed to flush since a free wkld was not available.
+    */
+    uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+    /**
+       This function sends an input es buffer.
+       @param[in] strm_handle         : The handle of stream that we want to send es buffer to.
+       @param[in] message             : The es buffer we want to send.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_FULL    : Port to FW is full; unsuccessful in sending the message.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
+
+    /**
+       This function gets the next processed workload. The host is required to add free workloads
+       to keep the parser busy. The FW will stall when it doesn't have enough workloads (2) to continue.
+       @param[in] strm_handle         : The handle of stream that we want to read workload from.
+       @param[out] message            : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS      : Successfully read a workload.
+       @retval VIDDEC_FW_PORT_EMPTY   : Workload port is empty; unsuccessful in reading a wkld.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message);
+
+    /**
+       This function adds a free workload to current stream.
+       @param[in] strm_handle         : The handle of stream that we want to write workload to.
+       @param[out] message            : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS      : Successfully added the workload.
+       @retval VIDDEC_FW_PORT_FULL    : Workload port is full; unsuccessful in writing the wkld.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message);
+
+    /**
+       This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts.
+       The driver can disable/enable interrupts as needed for this particular stream.
+
+       @param[in] strm_handle         : The handle of the stream whose interrupt mask we want to set
+       @param[in] mask                : This is read as boolean variable, true to enable, false to disable.
+       @retval VIDDEC_FW_SUCCESS      : Successfully set mask.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask);
+    /**
+       This function gets the interrupt status for the current stream.
+       Since it is a global interrupt, when the host gets interrupted it is expected to look at all active streams
+       by calling this function. The status is what the FW thinks the current state of the stream is. The status
+       information that the FW provides covers all possible events that are defined. The host should only access
+       this information in its ISR, during which the FW doesn't modify it.
+
+       @param[in] strm_handle         : The handle of the stream that we want to get the status from
+       @param[out] status             : The status of the stream based on viddec_fw_parser_int_status_t enum.
+       @retval VIDDEC_FW_SUCCESS      : Successfully read the status.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status);
+
+    /**
+       This function allows the host to set stream attributes that are supported.
+       @param[in] strm_handle         : The handle of stream that we want to set attribute on.
+       @param[in] type                : The type of attribute we want to set, this should be one of items in viddec_fw_stream_attributes_t.
+       @param[in] value               : The value of the type that we want to set.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Set the attribute.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
+
+    /**
+       This function gets the current status of all the parser queues. If the current stream is active we return
+       the number of input messages that can be written to the input queue, the number of messages in the output
+       queue and the number of free workloads the stream has.
+       Normally this is called when the host receives an interrupt from the parser, in which case, before releasing
+       the INT, the host will try its best to keep the FW busy. We always get an interrupt if we passed the watermark
+       on input or a workload was pushed into output while the INT line is free. If the host holds on to the INT when
+       the firmware tries to send one, the FW will send the interrupt after the host releases the INT. Since we have
+       edge-triggered interrupts we cannot guarantee one interrupt per frame; e.g. if three frames are generated and
+       the FW was able to provide an INT to the host after the first frame, but the host held on to the INT while the
+       FW finished the next two frames, then after the host releases the INT the FW will give only one INT and the
+       host should try to empty the output queue.
+       @param[in] strm_handle         : The handle of stream that we want to get status of queues.
+       @param[out] status             : The status of each queue gets updated in here.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Got the status information.
+       @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter, in this case an inactive stream.
+    */
+    uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status);
+
+    /**
+       This function unloads the Parser Firmware and frees the resources allocated in load fw.
+       If this function is called before load fw it will crash with a segmentation fault.
+    */
+    void viddec_fw_parser_deinit(void);
+
+    /**
+       This function gets the major and minor revision numbers of the loaded firmware.
+       @param[out] major              : The major revision number.
+       @param[out] minor              : The minor revision number.
+       @param[out] build              : The Internal Build number.
+    */
+    void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+    /**
+       This function clears the global interrupt. This is the last thing the host calls before exiting the ISR.
+    */
+    void viddec_fw_parser_clear_global_interrupt(void);
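+
+    /*
+       Illustrative host-side call sequence (added for documentation only; error handling,
+       the physical-address allocations, the ipc_msg_data variables and the ISR wiring are
+       placeholders, not part of the original header):
+
+           uint32_t fw_size = viddec_fw_parser_query_fwsize();
+           // ... allocate fw_size bytes of contiguous memory at physical address fw_phys ...
+           viddec_fw_parser_loadfw(fw_phys, fw_size);
+
+           uint32_t num_wklds, strm_size, strm_handle;
+           viddec_fw_parser_query_streamsize(codec_type, &num_wklds, &strm_size);
+           // ... allocate strm_size bytes at physical address strm_phys ...
+           viddec_fw_parser_openstream(codec_type, &strm_handle, strm_phys, 1);
+
+           viddec_fw_parser_addwkld(strm_handle, &free_wkld_msg);  // keep free workloads queued
+           viddec_fw_parser_send(strm_handle, &es_msg);            // feed es buffers
+           viddec_fw_parser_recv(strm_handle, &out_msg);           // drain processed workloads
+
+           viddec_fw_parser_flushstream(strm_handle, flush_type);  // flush with discard first
+           viddec_fw_parser_closestream(strm_handle);
+           viddec_fw_parser_deinit();
+    */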
+
+    /*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_PARSER_HOST_H
diff --git a/mixvbp/include/viddec_fw_workload.h b/mixvbp/include/viddec_fw_workload.h
new file mode 100644
index 0000000..3b86270
--- /dev/null
+++ b/mixvbp/include/viddec_fw_workload.h
@@ -0,0 +1,152 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_WORKLOAD_H
+#define VIDDEC_FW_WORKLOAD_H
+
+#include <stdint.h>
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_frame_attr.h"
+#include "viddec_fw_common_defs.h"
+
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0)
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE   (1 << 2)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME   (1 << 3)
+
+#define VIDDEC_FRAME_REFERENCE_IS_VALID   (0x1<<1)
+// PIP Output Frame request bits
+#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE  24
+#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE  (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE)
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_NORMAL     0x0
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_HALF     0x1
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_QUARTER  0x2
+
+/** Frame reference information to pass to video decoder  when performing a workload (frame decode)  */
+typedef struct viddec_frame_reference
+{
+    signed int   driver_frame_id;
+    unsigned int luma_phys_addr;
+    unsigned int chroma_phys_addr;
+    int internal_id; /* Used by workload manager only */
+} viddec_frame_reference_t;
+
+#define WORKLOAD_REFERENCE_FRAME (1 << 16)
+#define WORKLOAD_SKIPPED_FRAME   (1 << 17)
+/**
+Bitmask to indicate that this workload has range adjustment and needs a range_adjusted_out buffer for successful decode.
+Will be used for VC1 only.
+*/
+#define WORKLOAD_FLAGS_RA_FRAME   (1 << 21)
+#define WORKLOAD_REFERENCE_FRAME_BMASK 0x000000ff
+
+/** This structure contains all the information required  to fully decode one frame of data  */
+/**
+    num_error_mb: This field is populated at the output of the decoder.
+                  Currently, it's valid only for MPEG2.
+                  For other codecs, it defaults to 0.
+
+    range_adjusted_out:	Frame buffer needed to store range adjusted frames for VC1 only.
+                        Range adjustment in VC1 requires that the luma/chroma values in the decoded frame be modified
+                        before the frame can be displayed. In this case, we need a new frame buffer to store the adjusted values.
+                        The parser will indicate this requirement by setting the WORKLOAD_FLAGS_RA_FRAME bit in the
+                        is_reference_frame of the workload. The decoder expects this field to be valid when range adjustment
+                        is indicated and populates this frame buffer along with frame_out.
+
+    Expectation from user:
+                        Before feeding workload to the decoder, do the following:
+                           If pip is indicated/needed,
+                              provide the pip_out buffer
+                           If range adjustment is indicated (WORKLOAD_FLAGS_RA_FRAME bit in is_reference_frame is set),
+                              provide range_adjusted_out buffer
+                           Provide frame_out buffer.
+
+                        After workload is returned from the decoder, do the following:
+                           If pip is indicated,
+                              display the pip_out buffer
+                           Else If range adjustment is indicated,
+                              display range_adjusted_out buffer
+                           Else
+                              display frame_out buffer.
+*/
+typedef struct viddec_workload
+{
+    enum viddec_stream_format codec;
+    signed int                is_reference_frame;
+    unsigned int              result;
+    unsigned int              time;
+    unsigned int              num_items;/* number of viddec_workload_item_t in current workload */
+    unsigned int              num_error_mb; /* Number of error macroblocks in the current picture. */
+    viddec_frame_attributes_t attrs;
+
+    viddec_frame_reference_t  frame_out;   /* output frame */
+    viddec_frame_reference_t  range_adjusted_out;   /* for VC1 only */
+    viddec_frame_reference_t  pip_out;     /* PIP Buffer */
+
+    /* Alignment is needed because the packing is different between host and vSparc */
+    __attribute__ ((aligned (16))) viddec_workload_item_t   item[1];
+
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+    /* This structure is ALLOC_EXTENDED with workload_items   */
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+} viddec_workload_t;
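+
+/*
+   Illustrative check of the flags documented above (added for documentation only;
+   alloc_frame() is a placeholder for whatever buffer management the host uses):
+
+       viddec_workload_t *wl = ...;                  // workload about to be sent to the decoder
+       wl->frame_out = alloc_frame();                // always required
+       if (wl->is_reference_frame & WORKLOAD_FLAGS_RA_FRAME)
+           wl->range_adjusted_out = alloc_frame();   // VC1 range adjustment requested
+       // after decode: display pip_out if PIP was requested, else range_adjusted_out if range
+       // adjustment was indicated, else frame_out, as described in the comment block above.
+*/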
+
+#endif /* VIDDEC_WORKLOAD_H */
diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk
new file mode 100755
index 0000000..e5f722d
--- /dev/null
+++ b/mixvbp/vbp_manager/Android.mk
@@ -0,0 +1,62 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+#MIXVBP_LOG_ENABLE := true
+
+LOCAL_SRC_FILES :=			\
+	vbp_h264_parser.c		\
+	vbp_vc1_parser.c		\
+	vbp_loader.c			\
+	vbp_mp42_parser.c		\
+	vbp_utils.c			\
+	viddec_parse_sc.c		\
+	viddec_pm_parser_ops.c		\
+	viddec_pm_utils_bstream.c       \
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES +=			\
+	$(LOCAL_PATH)/include		\
+	$(MIXVBP_DIR)/include		      \
+	$(MIXVBP_DIR)/vbp_plugin/h264/include \
+	$(MIXVBP_DIR)/vbp_plugin/mp2/include  \
+	$(MIXVBP_DIR)/vbp_plugin/mp4/include  \
+	$(MIXVBP_DIR)/vbp_plugin/vc1/include  \
+	$(MIXVBP_DIR)/vbp_plugin/vc1/         \
+	$(MIXVBP_DIR)/vbp_plugin/mp4/         \
+	$(TARGET_OUT_HEADERS)/libva
+
+LOCAL_COPY_HEADERS_TO := libmixvbp
+
+LOCAL_COPY_HEADERS :=	\
+	vbp_loader.h
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp
+
+LOCAL_SHARED_LIBRARIES :=		\
+	libdl				\
+	libcutils
+
+ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true)
+LOCAL_CFLAGS += -DVBP_TRACE
+LOCAL_SHARED_LIBRARIES += liblog
+endif
+
+ifeq ($(USE_HW_VP8),true)
+LOCAL_SRC_FILES += vbp_vp8_parser.c
+LOCAL_C_INCLUDES += $(MIXVBP_DIR)/vbp_plugin/vp8/include
+LOCAL_CFLAGS += -DUSE_HW_VP8
+endif
+
+PLATFORM_SUPPORT_AVC_SHORT_FORMAT := \
+    baytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),)
+LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT
+LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/baytrail/
+LOCAL_SRC_FILES += secvideo/baytrail/vbp_h264secure_parser.c
+endif
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/mixvbp/vbp_manager/include/vbp_common.h b/mixvbp/vbp_manager/include/vbp_common.h
new file mode 100755
index 0000000..ee3d796
--- /dev/null
+++ b/mixvbp/vbp_manager/include/vbp_common.h
@@ -0,0 +1,9 @@
+#ifndef VBP_COMMON
+#define VBP_COMMON
+
+#define SWAP_BYTE(x,y,z)  ((((x) >> ((y) << 3)) & 0xFF) << ((z) << 3))
+#define SWAP_WORD(x)      (SWAP_BYTE((x),0,3) | SWAP_BYTE((x),1,2) | SWAP_BYTE((x),2,1) | SWAP_BYTE((x),3,0))
+
+#define DEB
+
+#endif
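
For reference, SWAP_BYTE(x,y,z) extracts byte y of x and places it at byte position z, so SWAP_WORD reverses the byte order of a 32-bit word. A minimal self-check, not part of the patch:

    #include <assert.h>
    #include <stdint.h>
    #include "vbp_common.h"

    /* SWAP_WORD reverses byte order: 0x12345678 -> 0x78563412 */
    static void swap_word_selfcheck(void)
    {
        uint32_t v = 0x12345678u;
        assert(SWAP_WORD(v) == 0x78563412u);
    }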
diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h
new file mode 100755
index 0000000..b7e9984
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h
@@ -0,0 +1,121 @@
+#ifndef VIDDEC_PARSER_OPS_H
+#define VIDDEC_PARSER_OPS_H
+
+#include "viddec_fw_workload.h"
+#include <stdint.h>
+
+#define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF
+
+typedef enum
+{
+    VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */
+    VIDDEC_PARSE_DISCONTINUITY,  /* Dummy start code to force completion and flush */
+} viddec_parser_inband_messages_t;
+
+typedef struct
+{
+    uint32_t context_size;
+    uint32_t persist_size;
+} viddec_parser_memory_sizes_t;
+
+typedef    void  (*fn_init)(void *ctxt, uint32_t *persist, uint32_t preserve);
+typedef    uint32_t (*fn_parse_sc) (void *ctxt, void *pcxt, void *sc_state);
+typedef    uint32_t (*fn_parse_syntax) (void *parent, void *ctxt);
+typedef    void (*fn_get_cxt_size) (viddec_parser_memory_sizes_t *size);
+typedef    uint32_t (*fn_is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors);
+typedef    uint32_t (*fn_is_frame_start)(void *ctxt);
+typedef    uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial);
+typedef    uint32_t (*fn_gen_assoc_tags)(void *parent);
+typedef    void (*fn_flush_parser) (void *parent, void *ctxt);
+#ifdef USE_AVC_SHORT_FORMAT
+typedef    uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size);
+#endif
+
+
+typedef struct
+{
+    fn_init init;
+    fn_parse_sc parse_sc;
+    fn_parse_syntax parse_syntax;
+    fn_get_cxt_size get_cxt_size;
+    fn_is_wkld_done is_wkld_done;
+    fn_is_frame_start is_frame_start;
+    fn_gen_contrib_tags gen_contrib_tags;
+    fn_gen_assoc_tags gen_assoc_tags;
+    fn_flush_parser flush;
+#ifdef USE_AVC_SHORT_FORMAT
+    fn_update_data update_data;
+#endif
+} viddec_parser_ops_t;
+
+
+typedef enum
+{
+    VIDDEC_PARSE_ERROR = 0xF0,
+    VIDDEC_PARSE_SUCESS = 0xF1,
+    VIDDEC_PARSE_FRMDONE = 0xF2,
+} viddec_parser_error_t;
+
+/*
+ *
+ *Functions used by Parsers
+ *
+ */
+
+/* This function returns the requested number of bits (<=32) and increments the au byte position.
+ */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function returns the requested number of bits (<=32) without incrementing the au byte position.
+ */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function skips the requested number of bits (<=32) by incrementing the au byte position.
+ */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits);
+
+/* This function appends a work item to current/next workload.
+ */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next);
+
+/* This function gets current byte and bit positions and information on whether an emulation byte is present after
+current byte.
+ */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+/* This function appends a Pixel tag to the current workload, from the current position to the end of the access unit.
+ */
+int32_t viddec_pm_append_pixeldata(void *parent);
+
+/* This function appends a Pixel tag to the next workload, from the current position to the end of the access unit.
+ */
+int32_t viddec_pm_append_pixeldata_next(void *parent);
+
+/* This function provides the workload header for parsers to fill in attribute values.
+ */
+viddec_workload_t* viddec_pm_get_header(void *parent);
+
+/* This function provides the next workload header for parsers to fill in attribute values.
+ */
+viddec_workload_t* viddec_pm_get_next_header(void *parent);
+
+/* Returns the byte value at the current offset */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte);
+
+/* Tells us whether there is more data left to parse */
+int32_t viddec_pm_is_nomoredata(void *parent);
+
+/* This function appends misc tag to work load starting from start position to end position of au unit */
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next);
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error);
+
+void viddec_pm_set_late_frame_detect(void *parent);
+
+static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi)
+{
+    wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi);
+#endif
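
To illustrate how the ops above are wired together, a codec plugin's fn_parse_syntax callback typically pulls syntax elements through the viddec_pm_* accessors. The sketch below is illustrative only: the bit widths are made up, and it assumes a negative return from the bit-reading helpers signals failure.

    /* Sketch of a plugin syntax parser built on the ops declared above. */
    static uint32_t example_parse_syntax(void *parent, void *ctxt)
    {
        uint32_t val = 0;

        /* peek 4 bits without consuming them, then consume them */
        if (viddec_pm_peek_bits(parent, &val, 4) < 0)
            return VIDDEC_PARSE_ERROR;
        viddec_pm_skip_bits(parent, 4);

        /* read a 16-bit field, advancing the au byte position */
        if (viddec_pm_get_bits(parent, &val, 16) < 0)
            return VIDDEC_PARSE_ERROR;

        (void)ctxt;
        return VIDDEC_PARSE_SUCESS;
    }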
diff --git a/mixvbp/vbp_manager/include/viddec_pm.h b/mixvbp/vbp_manager/include/viddec_pm.h
new file mode 100755
index 0000000..45b884b
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm.h
@@ -0,0 +1,93 @@
+#ifndef VIDDEC_PM_H
+#define VIDDEC_PM_H
+
+#include <stdint.h>
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_pm_parse.h"
+#include "viddec_parser_ops.h"
+
+#define SC_DETECT_BUF_SIZE 1024
+#define MAX_CODEC_CXT_SIZE 4096
+
+typedef enum
+{
+    PM_SUCCESS = 0,
+    /* Messages to indicate more ES data */
+    PM_NO_DATA = 0x100,
+    /* Messages to indicate SC found */
+    PM_SC_FOUND = 0x200,
+    PM_FIRST_SC_FOUND = 0x201,
+    /* Messages to indicate Frame done */
+    PM_WKLD_DONE = 0x300,
+    /* Messages to indicate Error conditions */
+    PM_OVERFLOW = 0x400,
+    /* Messages to indicate inband conditions */
+    PM_INBAND_MESSAGES = 0x500,
+    PM_EOS = 0x501,
+    PM_DISCONTINUITY = 0x502,
+} pm_parse_state_t;
+
+/* This is a temporary structure for first-pass sc parsing. list_index tells us where we are in the list of es buffers;
+   cur_es points to the current es buffer we are parsing. */
+typedef struct
+{
+    int32_t list_index; /* current index of list */
+    uint32_t cur_offset;
+    uint32_t cur_size;
+    viddec_input_buffer_t *cur_es;
+} viddec_pm_sc_cur_buf_t;
+
+typedef struct
+{
+    uint32_t pending_tags[MAX_IBUFS_PER_SC];
+    uint8_t dummy;
+    uint8_t frame_done;
+    uint8_t first_buf_aligned;
+    uint8_t using_next;
+} vidded_pm_pending_tags_t;
+
+/* This structure holds all necessary data required by parser manager for stream parsing.
+ */
+typedef struct
+{
+    /* Actual buffer where data gets DMA'd. 8 padding bytes for alignment */
+    uint8_t scbuf[SC_DETECT_BUF_SIZE + 8];
+    viddec_sc_parse_cubby_cxt_t parse_cubby;
+    viddec_pm_utils_list_t list;
+    /* Place to store tags to be added to next to next workload */
+    viddec_pm_sc_cur_buf_t cur_buf;
+    //viddec_emitter emitter;
+    viddec_pm_utils_bstream_cxt_t getbits;
+    viddec_sc_prefix_state_t sc_prefix_info;
+    vidded_pm_pending_tags_t pending_tags;
+    uint8_t word_align_dummy;
+    uint8_t late_frame_detect;
+    uint8_t frame_start_found;
+    uint8_t found_fm_st_in_current_au;
+    uint32_t next_workload_error_eos;
+    uint32_t pending_inband_tags;
+#ifdef VBP
+    uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3];
+#else
+    uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2];
+#endif
+} viddec_pm_cxt_t;
+
+/*
+ *
+ * Functions used by Parser kernel
+ *
+ */
+
+/* This is for initialising parser manager context to default values */
+void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean);
+
+/* This is the main parse function which returns state information that parser kernel can understand.*/
+uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf);
+
+void viddec_pm_init_ops();
+
+void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time);
+
+uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size);
+#endif
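
A rough sketch of how a host-side caller might drive the manager declared above; the interpretation of the return states follows the pm_parse_state_t values in this header, and everything else (buffer setup, codec_type value) is assumed:

    /* Sketch: one-time init, then a per-ES-buffer parse call. */
    static void example_feed_buffer(viddec_pm_cxt_t *pm, uint32_t codec_type,
                                    viddec_input_buffer_t *es_buf, uint32_t *persist_mem)
    {
        viddec_pm_init_context(pm, codec_type, persist_mem, 1 /* clean */);

        uint32_t state = viddec_pm_parse_es_buffer(pm, codec_type, es_buf);

        if (state == PM_WKLD_DONE) {
            /* a complete frame workload is available for the decoder */
        } else if (state == PM_NO_DATA) {
            /* more ES data is needed before a frame can be completed */
        }
    }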
diff --git a/mixvbp/vbp_manager/include/viddec_pm_parse.h b/mixvbp/vbp_manager/include/viddec_pm_parse.h
new file mode 100755
index 0000000..beca8d7
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_parse.h
@@ -0,0 +1,24 @@
+#ifndef VIDDEC_PM_PARSE_H
+#define VIDDEC_PM_PARSE_H
+
+#include <stdint.h>
+/* This structure is used by first-pass parsing (sc detect). The pm passes the number of bytes that need to be
+   parsed; if a start code is found, sc_end_pos contains the index of the last sc byte in the current buffer. */
+typedef struct
+{
+    uint32_t size; /* size pointed to by buf */
+    uint8_t *buf;  /* ptr to data */
+    int32_t sc_end_pos; /* return value end position of sc */
+    uint32_t phase; /* phase information(state) for sc */
+} viddec_sc_parse_cubby_cxt_t;
+
+typedef struct
+{
+    uint16_t next_sc;
+    uint8_t  second_scprfx_length;
+    uint8_t  first_sc_detect;
+} viddec_sc_prefix_state_t;
+
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h
new file mode 100755
index 0000000..999a067
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h
@@ -0,0 +1,88 @@
+#ifndef VIDDEC_PM_UTILS_BSTREAM_H
+#define VIDDEC_PM_UTILS_BSTREAM_H
+
+#include "viddec_pm_utils_list.h"
+
+#define CUBBY_SIZE 1024
+#define SCRATCH_SIZE 20
+#define MIN_DATA     8
+
+typedef struct
+{
+#ifdef VBP
+    uint8_t *buf;
+#else
+    uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA]; /* extra 8 bytes for alignment, extra 8 bytes for old data */
+#endif
+    uint32_t buf_st; /* start pos in buf */
+    uint32_t buf_end; /* first invalid byte in buf */
+    uint32_t buf_index; /* current index in buf */
+    uint32_t buf_bitoff; /* bit offset in current index position */
+} viddec_pm_utils_bstream_buf_cxt_t;
+
+typedef struct
+{
+    uint8_t  buf_scratch[SCRATCH_SIZE];/* scratch for boundary reads*/
+    uint32_t st; /* start index of valid byte */
+    uint32_t size;/* Total number of bytes in current buffer */
+    uint32_t bitoff; /* bit offset in first valid byte */
+} viddec_pm_utils_bstream_scratch_cxt_t;
+
+typedef struct
+{
+#ifdef VBP
+    /* counter of emulation prevention byte */
+    uint32_t emulation_byte_counter;
+#endif
+    /* After the first pass of the scan we figure out how many bytes are in the current access unit (N bytes). We store
+       the bstream buffer's first valid byte index, relative to the access unit, in this variable */
+    uint32_t au_pos;
+    /* This is for keeping track of which list item was used to load data last */
+    uint32_t list_off;
+    /* This is for tracking emulation prevention bytes */
+    uint32_t phase;
+    /* This flag tells us whether to look for emulation prevention or not */
+    uint32_t is_emul_reqd;
+    /* A pointer to list of es buffers which contribute to current access unit */
+    viddec_pm_utils_list_t *list;
+    /* scratch buffer to stage data on boundaries and reloads */
+    viddec_pm_utils_bstream_scratch_cxt_t scratch;
+    /* Actual context which has valid data for get bits functionality */
+    viddec_pm_utils_bstream_buf_cxt_t bstrm_buf;
+} viddec_pm_utils_bstream_cxt_t;
+
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul);
+
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits);
+
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip);
+
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte);
+
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt);
+
+/*
+  This function gets the bit and byte position of where we are in the current AU. We always return the position of the
+  next byte to be read.
+  is_emul set to true indicates we are on the second zero byte of an emulation prevention sequence.
+ */
+static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+    uint32_t phase=cxt->phase;
+
+    *bit = cxt->bstrm_buf.buf_bitoff;
+    *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st);
+    if (cxt->phase > 0)
+    {
+        phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 );
+    }
+    /* Assumption: we will never be parked on 0x3 byte of emulation prevention sequence */
+    *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
+}
+#endif
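
As a usage sketch for the bitstream context above (return-value conventions are assumed, not verified here): peek a byte without moving the cursor, then report where the next read would land within the access unit.

    /* Sketch: non-destructive probe of the bitstream position. */
    static void example_bstream_probe(viddec_pm_utils_bstream_cxt_t *bs)
    {
        uint32_t next8 = 0, bit = 0, byte = 0;
        uint8_t  is_emul = 0;

        /* skip=0 leaves the read cursor untouched */
        if (viddec_pm_utils_bstream_peekbits(bs, &next8, 8, 0) >= 0) {
            viddec_pm_utils_bstream_get_au_offsets(bs, &bit, &byte, &is_emul);
            /* byte/bit now describe the next byte to be read in the AU;
               is_emul flags the second zero of a 00 00 03 emulation sequence */
        }
    }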
diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_list.h b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h
new file mode 100755
index 0000000..0e650d5
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h
@@ -0,0 +1,50 @@
+#ifndef VIDDEC_PM_COMMON_LIST_H
+#define VIDDEC_PM_COMMON_LIST_H
+
+/* Limitation: this is the maximum number of es buffers between start codes. Needs to change if we encounter
+   a case where this is not sufficient */
+#ifdef VBP
+#define MAX_IBUFS_PER_SC 512
+#else
+#define MAX_IBUFS_PER_SC 64
+#endif
+
+/* This structure is for storing information on byte position in the current access unit.
+   stpos is the au byte index of the first byte in the current es buffer. edpos is the au byte index+1 of the last
+   valid byte in the current es buffer. */
+typedef struct
+{
+    uint32_t stpos;
+    uint32_t edpos;
+} viddec_pm_utils_au_bytepos_t;
+
+/* this structure is for storing all necessary information for list handling */
+typedef struct
+{
+    uint16_t num_items;                  /* Number of buffers in List */
+    uint16_t first_scprfx_length;        /* Length of first sc prefix in this list */
+    int32_t start_offset;                /* starting offset of unused data including sc prefix in first buffer */
+    int32_t end_offset;                  /* Offset of unused data in last buffer including 2nd sc prefix */
+    //viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */
+    viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */
+    int32_t total_bytes;                 /* total bytes for current access unit including first sc prefix*/
+} viddec_pm_utils_list_t;
+
+/* This function initialises the list to default values */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt);
+#ifndef VBP
+/* This function adds a new entry to list and will emit tags if needed */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf);
+
+/* This function updates au byte position of the current list. This should be called after sc codes are detected and before
+   syntax parsing as get bits requires this to be initialized. */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length);
+
+/* This function walks through the list and removes consumed buffers based on total bytes. It then moves
+   unused entries to the top of the list. */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length);
+
+/* This function returns 1 if the requested byte is not found. If found, it returns the list index and offset into the list */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset);
+#endif
+#endif
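
For orientation, stpos/edpos form a half-open byte range of each contributing ES buffer within the access unit, so the bytes contributed by entry i are simply edpos - stpos; a small sketch (the helper name is illustrative):

    /* Sketch: sum of bytes contributed by the listed ES buffers. */
    static uint32_t example_list_bytes(const viddec_pm_utils_list_t *list)
    {
        uint32_t i, total = 0;
        for (i = 0; i < list->num_items; i++)
            total += list->data[i].edpos - list->data[i].stpos;
        return total;
    }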
diff --git a/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c
new file mode 100644
index 0000000..498cbc4
--- /dev/null
+++ b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c
@@ -0,0 +1,1830 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264secure_parser.h"
+
+#define TERMINATE_KEY 0xFFFFFFFF
+
+typedef struct vbp_h264secure_parser_private vbp_h264secure_parser_private;
+
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+struct vbp_h264secure_parser_private
+{
+    /* number of bytes used to encode length of NAL payload.  If parser does not receive configuration data
+    and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB
+    byte stream format. */
+    int NAL_length_size;
+
+    /* indicate if stream is length prefixed */
+    int length_prefix_verified;
+
+    H264_BS_PATTERN bitstream_pattern;
+
+    uint8_t* start;
+    int32_t  offset;
+    int32_t  size;
+};
+
+/* default scaling list table */
+static unsigned char Default_4x4_Intra[16] =
+{
+    6,13,20,28,
+    13,20,28,32,
+    20,28,32,37,
+    28,32,37,42
+};
+
+static unsigned char Default_4x4_Inter[16] =
+{
+    10,14,20,24,
+    14,20,24,27,
+    20,24,27,30,
+    24,27,30,34
+};
+
+static unsigned char Default_8x8_Intra[64] =
+{
+    6,10,13,16,18,23,25,27,
+    10,11,16,18,23,25,27,29,
+    13,16,18,23,25,27,29,31,
+    16,18,23,25,27,29,31,33,
+    18,23,25,27,29,31,33,36,
+    23,25,27,29,31,33,36,38,
+    25,27,29,31,33,36,38,40,
+    27,29,31,33,36,38,40,42
+};
+
+static unsigned char Default_8x8_Inter[64] =
+{
+    9,13,15,17,19,21,22,24,
+    13,13,17,19,21,22,24,25,
+    15,17,19,21,22,24,25,27,
+    17,19,21,22,24,25,27,28,
+    19,21,22,24,25,27,28,30,
+    21,22,24,25,27,28,30,32,
+    22,24,25,27,28,30,32,33,
+    24,25,27,28,30,32,33,35
+};
+
+static unsigned char quant_flat[16] =
+{
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16
+};
+
+static unsigned char quant8_flat[64] =
+{
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16
+};
+
+static unsigned char* UseDefaultList[8] =
+{
+    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
+    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
+    Default_8x8_Intra,
+    Default_8x8_Inter
+};
+
+static uint8 h264_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    {4, 3},
+    {3, 2},
+    {2, 1},
+    // reserved
+    {0, 0}
+};
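
The table above mirrors the H.264 aspect_ratio_idc mapping: values 1..16 index predefined sample aspect ratios and 255 means Extended_SAR carried explicitly in the VUI. The lookup performed later in vbp_set_codec_data_h264secure amounts to the following sketch (function name and parameter types are illustrative):

    /* Sketch: resolve the sample aspect ratio from aspect_ratio_idc (255 = Extended_SAR). */
    static void example_resolve_sar(unsigned int idc, unsigned int vui_w, unsigned int vui_h,
                                    unsigned int *sar_w, unsigned int *sar_h)
    {
        if (idc < 17) {
            *sar_w = h264_aspect_ratio_table[idc][0];
            *sar_h = h264_aspect_ratio_table[idc][1];
        } else if (idc == 255) {
            *sar_w = vui_w;   /* sar_width/sar_height parsed from the VUI */
            *sar_h = vui_h;
        } else {
            *sar_w = *sar_h = 0;   /* reserved idc values */
        }
    }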
+
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264secure_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264secure_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264secure_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->update_data = dlsym(pcontext->fd_parser, "viddec_h264secure_update");
+    if (NULL == pcontext->parser_ops->update_data)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->is_wkld_done = NULL;
+    pcontext->parser_ops->flush = NULL;
+    pcontext->parser_ops->is_frame_start = NULL;
+    return VBP_OK;
+}
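
For context, pcontext->fd_parser is expected to hold a dlopen() handle for the codec plugin before this function runs; a minimal sketch of that setup (the library name is illustrative, the real loader picks it per codec):

    /* Sketch: open the plugin and resolve its entry points. */
    static uint32 example_open_plugin(vbp_context *pcontext)
    {
        pcontext->fd_parser = dlopen("libmixvbp_h264.so", RTLD_LAZY);
        if (pcontext->fd_parser == NULL)
            return VBP_LOAD;
        return vbp_init_parser_entries_h264secure(pcontext);
    }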
+
+
+/**
+ *
+ */
+uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+    vbp_data_h264 *query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_h264, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES);
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    pcontext->parser_private = NULL;
+    vbp_h264secure_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_h264secure_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* init the pointer */
+    parser_private->start = 0;
+    parser_private->offset = 0;
+    parser_private->size = 0;
+    parser_private->NAL_length_size = 0;
+    parser_private->length_prefix_verified = 0;
+    parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_h264secure(pcontext);
+
+    return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_h264secure(vbp_context *pcontext)
+{
+    if (NULL != pcontext->parser_private)
+    {
+        free(pcontext->parser_private);
+        pcontext->parser_private = NULL;
+    }
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    int i;
+    vbp_data_h264 *query_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].slc_data);
+            free(query_data->pic_data[i].pic_parms);
+        }
+        free(query_data->pic_data);
+    }
+
+    free(query_data->IQ_matrix_buf);
+    free(query_data->codec_data);
+    free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+static inline uint16_t vbp_utils_ntohs(uint8_t* p)
+{
+    uint16_t i = ((*p) << 8) + ((*(p+1)));
+    return i;
+}
+
+static inline uint32_t vbp_utils_ntohl(uint8_t* p)
+{
+    uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3)));
+    return i;
+}
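
These helpers read big-endian 16/32-bit values from unaligned byte pointers; in a length-prefixed (AVCC-style) stream they would be used to pull each NAL's size ahead of its payload, roughly as sketched below (the dispatch on NAL_length_size is illustrative):

    /* Sketch: read one NAL length prefix of NAL_length_size bytes. */
    static uint32_t example_read_nal_size(uint8_t *p, int NAL_length_size)
    {
        if (NAL_length_size == 2)
            return vbp_utils_ntohs(p);
        if (NAL_length_size == 4)
            return vbp_utils_ntohl(p);
        return 0;   /* other prefix sizes not handled in this sketch */
    }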
+
+
+static inline void vbp_set_VAPicture_h264secure(
+    int curr_picture_structure,
+    int bottom_field,
+    frame_store* store,
+    VAPictureH264* pic)
+{
+    if (FRAME == curr_picture_structure)
+    {
+        if (FRAME != viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("Reference picture structure is not frame for current frame picture!");
+        }
+        pic->flags = 0;
+        pic->TopFieldOrderCnt = store->top_field.poc;
+        pic->BottomFieldOrderCnt = store->bottom_field.poc;
+    }
+    else
+    {
+        if (FRAME == viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("reference picture structure is frame for current field picture!");
+        }
+        if (bottom_field)
+        {
+            pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+        else
+        {
+            pic->flags = VA_PICTURE_H264_TOP_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+    }
+}
+
+static inline void vbp_set_slice_ref_list_h264secure(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    int i, j;
+    int num_ref_idx_active = 0;
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    uint8_t* p_list = NULL;
+    VAPictureH264* refPicListX = NULL;
+    frame_store* fs = NULL;
+
+    /* initialize ref picture list, set picture id and flags to invalid. */
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+        for (j = 0; j < 32; j++)
+        {
+            refPicListX->picture_id = VA_INVALID_SURFACE;
+            refPicListX->frame_idx = 0;
+            refPicListX->flags = VA_PICTURE_H264_INVALID;
+            refPicListX->TopFieldOrderCnt = 0;
+            refPicListX->BottomFieldOrderCnt = 0;
+            refPicListX++;
+        }
+    }
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+
+        if ((i == 0) &&
+            ((h264_PtypeB == slice_header->slice_type) ||
+             (h264_PtypeP == slice_header->slice_type)))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
+            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list0;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_0;
+            }
+        }
+        else if ((i == 1) && (h264_PtypeB == slice_header->slice_type))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
+            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list1;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_1;
+            }
+        }
+        else
+        {
+            num_ref_idx_active = 0;
+            p_list = NULL;
+        }
+
+
+        for (j = 0; j < num_ref_idx_active; j++)
+        {
+            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);
+
+            /* bit 5 indicates if reference picture is bottom field */
+            vbp_set_VAPicture_h264secure(
+                h264_parser->info.img.structure,
+                (p_list[j] & 0x20) >> 5,
+                fs,
+                refPicListX);
+
+            refPicListX->frame_idx = fs->frame_num;
+            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            refPicListX++;
+        }
+    }
+}
+
+static inline void vbp_set_pre_weight_table_h264secure(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    int i, j;
+
+    if ((((h264_PtypeP == slice_header->slice_type) ||
+          (h264_PtypeB == slice_header->slice_type)) &&
+          h264_parser->info.active_PPS.weighted_pred_flag) ||
+         ((h264_PtypeB == slice_header->slice_type) &&
+         (1 == h264_parser->info.active_PPS.weighted_bipred_idc)))
+    {
+        slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom;
+        slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom;
+        slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag;
+        slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag;
+        slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag;
+        slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag;
+
+        for (i = 0; i < 32; i++)
+        {
+            slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i];
+            slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i];
+            slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i];
+            slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i];
+
+            for (j = 0; j < 2; j++)
+            {
+                slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j];
+                slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j];
+                slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j];
+                slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j];
+            }
+        }
+    }
+    else
+    {
+        /* default weight table */
+        slc_parms->luma_log2_weight_denom = 5;
+        slc_parms->chroma_log2_weight_denom = 5;
+        slc_parms->luma_weight_l0_flag = 0;
+        slc_parms->luma_weight_l1_flag = 0;
+        slc_parms->chroma_weight_l0_flag = 0;
+        slc_parms->chroma_weight_l1_flag = 0;
+        for (i = 0; i < 32; i++)
+        {
+            slc_parms->luma_weight_l0[i] = 0;
+            slc_parms->luma_offset_l0[i] = 0;
+            slc_parms->luma_weight_l1[i] = 0;
+            slc_parms->luma_offset_l1[i] = 0;
+
+            for (j = 0; j < 2; j++)
+            {
+                slc_parms->chroma_weight_l0[i][j] = 0;
+                slc_parms->chroma_offset_l0[i][j] = 0;
+                slc_parms->chroma_weight_l1[i][j] = 0;
+                slc_parms->chroma_offset_l1[i][j] = 0;
+            }
+        }
+    }
+}
+
+
+static inline void vbp_set_reference_frames_h264secure(
+    struct h264_viddec_parser *parser,
+    VAPictureParameterBufferH264* pic_parms)
+{
+    int buffer_idx;
+    int frame_idx;
+    frame_store* store = NULL;
+    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
+    /* initialize reference frames */
+    for (frame_idx = 0; frame_idx < 16; frame_idx++)
+    {
+        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+    }
+    pic_parms->num_ref_frames = 0;
+
+    frame_idx = 0;
+
+    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer);  */
+    /* set short term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+
+        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
+        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0) */
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    /* set long term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
+        if (!viddec_h264_get_is_long_term(store))
+        {
+            WTRACE("long term frame is not marked as long term.");
+        }
+        /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->long_term_frame_idx;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0)*/
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    pic_parms->num_ref_frames = parser->info.active_SPS.num_ref_frames;
+
+    if (frame_idx > parser->info.active_SPS.num_ref_frames)
+    {
+        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
+               frame_idx, parser->info.active_SPS.num_ref_frames);
+    }
+}
+
+
+static inline void vbp_set_scaling_list_h264secure(
+    struct h264_viddec_parser *parser,
+    VAIQMatrixBufferH264* IQ_matrix_buf)
+{
+    int i;
+    int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 1 : 0);
+
+    if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
+    {
+        for (i = 0; i < lists_to_set; i++)
+        {
+            if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
+            {
+                if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                        ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
+                {
+                    /* use default scaling list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                    }
+                }
+                else
+                {
+                    /* use PPS list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
+                    }
+                }
+            }
+            else /* pic_scaling_list not present */
+            {
+                if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+                {
+                    /* SPS matrix present - use fallback rule B */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
+                               16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
+                               64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        //g_warning("invalid scaling list index.");
+                        break;
+                    }
+                }
+                else /* seq_scaling_matrix not present */
+                {
+                    /* SPS matrix not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                } /* end of seq_scaling_matrix not present */
+            } /* end of  pic_scaling_list not present */
+        } /* for loop for each index from 0 to 7 */
+    } /* end of pic_scaling_matrix present */
+    else
+    {
+        /* PPS matrix not present, use SPS information */
+        if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+        {
+            for (i = 0; i < lists_to_set; i++)
+            {
+                if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
+                {
+                    if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                            ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
+                    {
+                        /* use default scaling list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        }
+                    }
+                    else
+                    {
+                        /* use SPS list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
+                        }
+                    }
+                }
+                else
+                {
+                    /* SPS list not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                }
+            }
+        }
+        else
+        {
+            /* SPS matrix not present - use flat lists */
+            for (i = 0; i < 6; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
+            }
+            for (i = 0; i < 2; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+            }
+        }
+    }
+
+    if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
+            (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
+             parser->info.active_SPS.seq_scaling_matrix_present_flag))
+    {
+        for (i = 0; i < 2; i++)
+        {
+            memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+        }
+    }
+}
+
+static void vbp_set_codec_data_h264secure(
+    struct h264_viddec_parser *parser,
+     vbp_data_h264 *query_data)
+{
+    vbp_codec_data_h264* codec_data = query_data->codec_data;
+
+    /* The following variables are used to detect if there is a new SPS or PPS */
+    uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id;
+    uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id;
+    int frame_width = codec_data->frame_width;
+    int frame_height = codec_data->frame_height;
+
+    /* parameter id */
+    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
+    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;
+
+    /* profile and level */
+    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
+    codec_data->level_idc = parser->info.active_SPS.level_idc;
+
+
+    /*constraint flag sets (h.264 Spec v2009)*/
+    codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4;
+    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3;
+    codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;
+    codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1;
+    codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1;
+
+    /* reference frames */
+    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;
+
+    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
+        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
+    {
+        /* no longer necessary: two fields share the same interlaced surface */
+        /* codec_data->num_ref_frames *= 2; */
+    }
+
+    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+
+    /* frame coding */
+    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+
+    /* frame dimension */
+    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16;
+
+    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+                               (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;
+
+    /* cropping information */
+    codec_data->crop_left = 0;
+    codec_data->crop_right = 0;
+    codec_data->crop_top = 0;
+    codec_data->crop_bottom = 0;
+    if(parser->info.active_SPS.sps_disp.frame_cropping_flag) {
+        int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0;
+        int ChromaArrayType = 0;
+        if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) {
+            if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) {
+                SubWidthC = 2;
+                SubHeightC = 2;
+            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) {
+                SubWidthC = 2;
+                SubHeightC = 1;
+            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) {
+                SubWidthC = 1;
+                SubHeightC = 1;
+            }
+            ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        }
+
+        if(ChromaArrayType == 0) {
+            CropUnitX = 1;
+            CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        } else {
+            CropUnitX = SubWidthC;
+            CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag);
+        }
+
+        codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;
+        codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1;
+        codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;
+        codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1;
+    }
+
+    /* aspect ratio */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+    {
+        codec_data->aspect_ratio_idc =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;
+
+        if (codec_data->aspect_ratio_idc < 17)
+        {
+            codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0];
+            codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1];
+        }
+        else if (codec_data->aspect_ratio_idc == 255)
+        {
+            codec_data->sar_width =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;
+
+            codec_data->sar_height =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
+        }
+        else
+        {
+            codec_data->sar_width = 0;
+            codec_data->sar_height = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        codec_data->aspect_ratio_idc = 0;
+        codec_data->sar_width = 0;
+        codec_data->sar_height = 0;
+    }
+
+    /* video format */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+    {
+        codec_data->video_format =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format = 5;
+    }
+
+    codec_data->video_full_range_flag =
+        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag;
+
+
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+    {
+        codec_data->matrix_coefficients =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients;
+    }
+    else
+    {
+        // Unspecified
+        codec_data->matrix_coefficients = 2;
+    }
+
+    codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value;
+
+    /* picture order type and count */
+    codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+    codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+
+
+    /* update sps and pps status */
+    query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0;
+    query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0;
+    query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff;
+    query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff;
+    if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height)
+    {
+        query_data->new_sps = 1;
+        query_data->new_pps = 1;
+    }
+}
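
Worked example for the frame size and cropping math above (values chosen for illustration): a 1920x1080 4:2:0 stream with frame_mbs_only_flag = 1 encodes pic_height_in_map_units_minus1 = 67, so frame_height = (2 - 1) * 68 * 16 = 1088; with chroma_format_idc = 1, CropUnitY = SubHeightC * (2 - 1) = 2, and frame_crop_rect_bottom_offset = 4 gives crop_bottom = 2 * 4 = 8, recovering the 1080 visible lines.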
+
+
+static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext, int list_index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    struct h264_viddec_parser* parser = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+    VAPictureParameterBufferH264* pic_parms = NULL;
+
+    parser = (struct h264_viddec_parser *)cxt->codec_data;
+
+    if (0 == parser->info.SliceHeader.first_mb_in_slice)
+    {
+        /* a new picture is parsed */
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures == 0)
+    {
+        /* partial frame */
+        query_data->num_pictures = 1;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        WTRACE("MB address does not start from 0!");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+    pic_parms = pic_data->pic_parms;
+
+    // relax this condition to support partial frame parsing
+
+    //if (parser->info.SliceHeader.first_mb_in_slice == 0)
+    {
+        /**
+        * picture parameter only needs to be set once,
+        * even when multiple slices are encoded
+        */
+
+        /* VAPictureParameterBufferH264 */
+        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
+        pic_parms->CurrPic.frame_idx = 0;
+        if (parser->info.img.field_pic_flag == 1)
+        {
+            if (parser->info.img.bottom_field_flag)
+            {
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            }
+            else
+            {
+                /* also OK set to 0 (from test suite) */
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
+            }
+        }
+        else
+        {
+            pic_parms->CurrPic.flags = 0; /* frame picture */
+        }
+        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
+        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
+        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;
+        /* don't care if current frame is used as long term reference */
+        if (parser->info.SliceHeader.nal_ref_idc != 0)
+        {
+            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        }
+
+        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;
+
+        /* frame height in MBS */
+        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+                (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;
+
+        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
+        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;
+
+
+        pic_parms->seq_fields.value = 0;
+        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
+        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;
+
+        /* new fields in libva 0.31 */
+        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
+        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag;
+
+
+        /* referenced from UMG_Moorstown_TestSuites */
+        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;
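+        /* per the H.264 level limits (Table A-1), bi-prediction is restricted to partitions of
+           at least 8x8 for levels above 3.0, hence the level_idc > 30 check above */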
+
+        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
+        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
+        pic_parms->slice_group_change_rate_minus1 = 0;
+        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
+        pic_parms->pic_init_qs_minus26 = 0;
+        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
+        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;
+
+        pic_parms->pic_fields.value = 0;
+        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
+        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
+        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
+        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;
+
+        /* new LibVA fields in v0.31*/
+        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
+        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
+        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
+        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;
+
+        /* all slices in the picture have the same field_pic_flag */
+        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
+        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;
+
+        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
+
+        pic_parms->num_ref_idx_l0_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l0_active-1;
+        pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active-1;
+    }
+
+
+    /* set reference frames, and num_ref_frames */
+    vbp_set_reference_frames_h264secure(parser, pic_parms);
+    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        int frame_idx;
+        for (frame_idx = 0; frame_idx < 16; frame_idx++)
+        {
+            pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+            pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+            pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+        }
+    }
+
+    return VBP_OK;
+}
+
+static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_data->buffer_addr = cxt->parse_cubby.buf;
+    slc_parms = &(slc_data->slc_parms);
+
+    /* byte: how many bytes have been parsed */
+    /* bit: bits parsed within the current parsing position */
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    slc_data->nal_unit_type = h264_parser->info.nal_unit_type;
+
+    slc_parms->slice_data_size = slc_data->slice_size =
+                                     pcontext->parser_cxt->list.data[index].edpos -
+                                     pcontext->parser_cxt->list.data[index].stpos;
+
+    slc_parms->slice_data_offset = 0;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = cxt->list.data[index].stpos;
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &&
+            !(h264_parser->info.SliceHeader.field_pic_flag))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    pic_data->num_slices++;
+
+    //vbp_update_reference_frames_h264_methodB(pic_data);
+    if (pic_data->num_slices > MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    return VBP_OK;
+}
+
+
+static uint32_t vbp_update_slice_data_h264secure(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private;
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_parms = &(slc_data->slc_parms);
+
+    slc_parms->slice_data_size = parser_private->size;
+    slc_parms->slice_data_offset = parser_private->offset;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = 0;
+    slc_data->buffer_addr  = parser_private->start;
+    slc_data->slice_size = parser_private->size + parser_private->offset;
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &&
+            !(h264_parser->info.SliceHeader.field_pic_flag))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    pic_data->num_slices++;
+
+    if (pic_data->num_slices > MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    return VBP_OK;
+}
+
+
+
+/**
+* parse decoder configuration data
+*/
+uint32 vbp_parse_init_data_h264secure(vbp_context* pcontext)
+{
+    /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */
+
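+    /* AVCDecoderConfigurationRecord layout (ISO/IEC 14496-15):
+     *   8 bits  configurationVersion
+     *   8 bits  AVCProfileIndication
+     *   8 bits  profile_compatibility
+     *   8 bits  AVCLevelIndication
+     *   6 bits  reserved + 2 bits lengthSizeMinusOne
+     *   3 bits  reserved + 5 bits numOfSequenceParameterSets
+     *   for each SPS: 16-bit length followed by the SPS NAL unit
+     *   8 bits  numOfPictureParameterSets
+     *   for each PPS: 16-bit length followed by the PPS NAL unit
+     */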
+    uint8 configuration_version = 0;
+    uint8 AVC_profile_indication = 0;
+    uint8 profile_compatibility = 0;
+    uint8 AVC_level_indication = 0;
+    uint8 length_size_minus_one = 0;
+    uint8 num_of_sequence_parameter_sets = 0;
+    uint8 num_of_picture_parameter_sets = 0;
+    uint16 sequence_parameter_set_length = 0;
+    uint16 picture_parameter_set_length = 0;
+
+    int i = 0;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private;
+    //Enable emulation prevention
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* check if configuration data is start code prefix */
+    viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    int ret = ops->parse_sc((void *)&cubby,
+                            NULL, /* context, not used */
+                            &(cxt->sc_prefix_info));
+    if (ret == 1)
+    {
+        WTRACE("configuration data is start-code prefixed.\n");
+        parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+        return vbp_parse_start_code_h264secure(pcontext);
+    }
+
+
+    uint8* cur_data = cxt->parse_cubby.buf;
+
+
+    if (cxt->parse_cubby.size < 6)
+    {
+        /* need at least 6 bytes to start parsing the structure, see MPEG-4 Part 15 */
+        return VBP_DATA;
+    }
+
+    configuration_version = *cur_data++;
+    AVC_profile_indication = *cur_data++;
+
+    /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+    profile_compatibility = *cur_data++;
+    AVC_level_indication = *cur_data++;
+
+    /* ITRACE("Level indication: %d", AVC_level_indication);*/
+    /* 6 bits of reserved (111111) and 2 bits of length_size_minus_one */
+    length_size_minus_one = (*cur_data) & 0x3;
+
+    if (length_size_minus_one != 3)
+    {
+        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+    }
+
+    parser_private->NAL_length_size = length_size_minus_one + 1;
+
+    cur_data++;
+
+    /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+    if (num_of_sequence_parameter_sets > 1)
+    {
+        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+    }
+    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+    {
+        /* this would never happen as MAX_NUM_SPS = 32 */
+        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+    }
+    cur_data++;
+
+    cxt->list.num_items = 0;
+    for (i = 0; i < num_of_sequence_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse sequence_parameter_set_length */
+            ETRACE("Not enough data to parse SPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least sequence_parameter_set_length bytes for SPS */
+            ETRACE("Not enough data to parse SPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += sequence_parameter_set_length;
+    }
+
+    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+    {
+        /* need at least one more byte to parse num_of_picture_parameter_sets */
+        ETRACE("Not enough data to parse number of PPS.");
+        return VBP_DATA;
+    }
+
+    num_of_picture_parameter_sets = *cur_data++;
+    if (num_of_picture_parameter_sets > 1)
+    {
+        WTRACE("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets);
+    }
+
+    for (i = 0; i < num_of_picture_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse picture_parameter_set_length */
+            ETRACE("Not enough data to parse PPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least picture_parameter_set_length bytes for PPS */
+            ETRACE("Not enough data to parse PPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += picture_parameter_set_length;
+    }
+
+    if ((cur_data - cxt->parse_cubby.buf) !=  cxt->parse_cubby.size)
+    {
+        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+               cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+    }
+
+    parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED;
+    return VBP_OK;
+}
+
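+/* In length-prefixed (MP4-style) streams each NAL unit is preceded by its size stored
+ * big-endian in NAL_length_size bytes; this helper reads that size and falls back to a
+ * 4-byte length when the configured size is invalid. */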
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
+{
+    switch (*NAL_length_size)
+    {
+    case 4:
+        return vbp_utils_ntohl(p);
+
+    case 3:
+    {
+        uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
+        return i;
+    }
+
+    case 2:
+        return vbp_utils_ntohs(p);
+
+    case 1:
+        return *p;
+
+    default:
+        WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
+        /* default to 4 bytes for length */
+        *NAL_length_size = 4;
+        return vbp_utils_ntohl(p);
+    }
+}
+
+/**
+* An H.264 elementary stream in this mode does not contain start codes.
+* Instead, each NAL unit is prefixed with its size, followed by the NAL
+* unit payload. See MPEG-4 Part 15 (sample format).
+*/
+
+/* Start code prefix is 0x000001, which is 3 bytes. */
+#define H264_SC_SIZE 3
+uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private;
+
+    /* reset query data for the new sample buffer */
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+    int i;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    cxt->list.num_items = 0;
+
+    /* reset start position of first item to 0 in case there is only one item */
+    cxt->list.data[0].stpos = 0;
+
+    /* start code emulation prevention byte is present in NAL */
+    cxt->getbits.is_emul_reqd = 1;
+
+    if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+        int32_t size_left = 0;
+        int32_t size_parsed = 0;
+        int32_t NAL_length = 0;
+
+        cubby = &(cxt->parse_cubby);
+
+        size_left = cubby->size;
+
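+        /* walk the buffer: read each NAL length, record the [stpos, edpos) byte range of the
+         * NAL payload in the item list, and stop on a bad length or when the list is full */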
+        while (size_left >= parser_private->NAL_length_size)
+        {
+            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size);
+            if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size)
+            {
+                ETRACE("Invalid NAL_length parsed.");
+                break;
+            }
+
+            size_parsed += parser_private->NAL_length_size;
+            cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+            size_parsed += NAL_length; /* skip NAL bytes */
+            /* end position is exclusive */
+            cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+            cxt->list.num_items++;
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+                break;
+            }
+
+            size_left = cubby->size - size_parsed;
+        }
+
+        if (size_left != 0 && parser_private->length_prefix_verified == 0)
+        {
+            WTRACE("Elementary stream is not aligned (%d).", size_left);
+
+            /* attempt to re-detect the prefix format only once: if a start code is found, the
+             * bit stream is treated as start-code prefixed from then on; otherwise it remains
+             * length prefixed.
+             */
+            parser_private->length_prefix_verified = 1;
+            viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;
+
+            viddec_parser_ops_t *ops = pcontext->parser_ops;
+            int ret = ops->parse_sc((void *)&temp_cubby,
+                                    NULL, /* context, not used */
+                                    &(cxt->sc_prefix_info));
+
+            /* found start code */
+            if (ret == 1)
+            {
+                WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
+                parser_private->NAL_length_size = 0;
+                parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+                /* reset parsing data */
+                for (i = 0; i < MAX_NUM_PICTURES; i++)
+                {
+                    query_data->pic_data[i].num_slices = 0;
+                }
+                query_data->num_pictures = 0;
+                cxt->list.num_items = 0;
+            }
+        }
+    }
+
+
+    if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t cubby;
+        /*  memory copy without updating cxt->parse_cubby */
+        cubby = cxt->parse_cubby;
+        viddec_parser_ops_t *ops = pcontext->parser_ops;
+        int ret = 0;
+
+        while (1)
+        {
+            ret = ops->parse_sc((void *)&cubby,
+                                NULL, /* context, not used */
+                                &(cxt->sc_prefix_info));
+            if (ret == 1)
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.data[0].stpos = cubby.sc_end_pos;
+                }
+                else
+                {
+                    cxt->list.data[cxt->list.num_items].stpos =
+                        cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+                    cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE;
+                }
+
+                cubby.phase = 0;
+                cubby.buf = cxt->parse_cubby.buf +
+                            cxt->list.data[cxt->list.num_items].stpos;
+
+                cubby.size = cxt->parse_cubby.size -
+                             cxt->list.data[cxt->list.num_items].stpos;
+
+                cxt->list.num_items++;
+                if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+                {
+                    WTRACE("Num items exceeds the limit!");
+                    /* not fatal, just stop parsing */
+                    break;
+                }
+            }
+            else
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.num_items = 1;
+                    parser_private->bitstream_pattern = H264_BS_SINGLE_NAL;
+                    WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
+                }
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+        }
+
+    }
+
+    if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL)
+    {
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+    return VBP_OK;
+}
+
+/**
+*
+* process parsing result after a NAL unit is parsed
+*
+*/
+uint32 vbp_process_parsing_result_h264secure( vbp_context *pcontext, int i)
+{
+    if (i >= MAX_NUM_SLICES)
+    {
+        return VBP_PARM;
+    }
+
+    uint32 error = VBP_OK;
+
+    struct h264_viddec_parser* parser = NULL;
+    parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+    vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data;
+    switch (parser->info.nal_unit_type)
+    {
+    case h264_NAL_UNIT_TYPE_SLICE:
+        VTRACE("slice header is parsed.");
+        error = vbp_add_pic_data_h264secure(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264secure(pcontext, i);
+        }
+        break;
+
+    case  h264_NAL_UNIT_TYPE_IDR:
+        VTRACE("IDR header is parsed.");
+        error = vbp_add_pic_data_h264secure(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264secure(pcontext, i);
+        }
+        break;
+    case h264_NAL_UNIT_TYPE_SEI:
+        //ITRACE("SEI header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_SPS:
+        VTRACE("SPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_PPS:
+        VTRACE("PPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        VTRACE("ACC unit delimiter is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+        ITRACE("EOSeq is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOstream:
+        ITRACE("EOStream is parsed");
+        break;
+
+    default:
+        WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+        break;
+    }
+
+    if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1)
+    {
+        WTRACE("more than one frame in the buffer is found(%d)", query_data->num_pictures);
+        return (error == VBP_OK ? VBP_MULTI : error);
+    }
+    return error;
+}
+
+/*
+*
+* fill query data structure after sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext)
+{
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+    struct vbp_h264_parser_private_t* private = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+    private = (struct vbp_h264_parser_private_t *)pcontext->parser_private;
+
+    vbp_set_codec_data_h264secure(parser, query_data);
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* VQIAMatrixBufferH264 */
+    vbp_set_scaling_list_h264secure(parser, query_data->IQ_matrix_buf);
+
+    if (query_data->num_pictures > 0)
+    {
+        /*
+         * picture parameter buffers and slice parameter buffers have already been populated
+         */
+    }
+    else
+    {
+        /*
+         * add a dummy picture that contains picture parameters parsed
+         * from SPS and PPS.
+         */
+        vbp_add_pic_data_h264secure(pcontext, 0);
+    }
+
+    return VBP_OK;
+}
+
+uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size)
+{
+    uint32 error = VBP_OK;
+    uint32 offset = 0;
+    uint32 key = 0;
+    uint32 i,j;
+
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private;
+
+    int32_t  sliceheadersize;
+    uint32_t slice_num = 0;
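+    /* each record in newdata, as consumed below: 32-bit key (TERMINATE_KEY ends the list),
+     * 32-bit start address, 32-bit offset, 32-bit size, followed by a serialized
+     * slice_header_t plus dec_ref_pic_marking_t */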
+    while (offset < size) {
+        memcpy(&key, (uint8_t *)newdata+offset, sizeof(uint32_t));
+        if (key == TERMINATE_KEY) {
+            break;
+        }
+        slice_num++;
+        offset += sizeof(uint32_t);
+
+        memcpy(&parser_private->start, (uint8_t *)newdata+offset, 4);
+        offset += 4;
+
+        memcpy(&parser_private->offset, (uint8_t *)newdata+offset, sizeof(int32_t));
+        offset += 4;
+
+        memcpy(&parser_private->size, (uint8_t *)newdata+offset, sizeof(int32_t));
+        offset += 4;
+
+        sliceheadersize = sizeof(slice_header_t) + sizeof(dec_ref_pic_marking_t);
+        error = pcontext->parser_ops->update_data(pcontext->parser_cxt,
+                                                  newdata+offset, sliceheadersize);
+        offset += sliceheadersize;
+        if (error != VBP_OK)
+        {
+            ETRACE("update_data error = 0x%x",error);
+            return error;
+        }
+
+        error = vbp_add_pic_data_h264secure(pcontext, slice_num);
+        if (error != VBP_OK)
+        {
+            ETRACE("vbp_add_pic_data_h264secure error = 0x%x",error);
+            return error;
+        }
+
+        error = vbp_update_slice_data_h264secure(pcontext, slice_num);
+        if (error != VBP_OK)
+        {
+            ETRACE("vbp_add_slice_data_h264secure error = 0x%x",error);
+            return error;
+        }
+    }
+    if (key != TERMINATE_KEY)
+    {
+        ETRACE("Did not find the terminate key 0xFFFFFF!");
+        return VBP_DATA;
+    }
+    else if (slice_num < 1)
+    {
+        ETRACE("Did not find a valid slice header!");
+        return VBP_DATA;
+    }
+    error = vbp_populate_query_data_h264secure(pcontext);
+
+    if (error != VBP_OK)
+    {
+        ETRACE("vbp_populate_query_data_h264secure error = 0x%x",error);
+        return error;
+    }
+    return error;
+}
diff --git a/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h
new file mode 100644
index 0000000..a55c07c
--- /dev/null
+++ b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h
@@ -0,0 +1,70 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_H264SECURE_PARSER_H
+#define VBP_H264SECURE_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264secure(vbp_context *pcontext);
+
+/*
+ * parse start code. Length-prefixed, start-code-prefixed, and single-NAL
+ * streams are handled.
+ */
+uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264secure(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * update the parsing result with extra data
+ */
+uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size);
+
+#endif /* VBP_H264SECURE_PARSER_H */
diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c
new file mode 100755
index 0000000..3f6400d
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_h264_parser.c
@@ -0,0 +1,1751 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264_parser.h"
+
+typedef struct vbp_h264_parser_private_t vbp_h264_parser_private;
+
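+/*
+ * Bitstream framing patterns the parser can operate on:
+ *   H264_BS_LENGTH_PREFIXED - each NAL unit preceded by its size (MP4-style)
+ *   H264_BS_SC_PREFIXED     - NAL units separated by 00 00 01 start codes (Annex B)
+ *   H264_BS_SINGLE_NAL      - the buffer contains exactly one NAL unit
+ */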
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+struct vbp_h264_parser_private_t
+{
+    /* number of bytes used to encode the length of a NAL payload. If the parser does not
+    receive configuration data and NAL_length_size is zero when bitstream parsing begins,
+    the bitstream is assumed to be in Annex B byte-stream format. */
+    int NAL_length_size;
+
+    /* set to 1 once the length-prefix assumption has been checked against the stream */
+    int length_prefix_verified;
+
+    H264_BS_PATTERN bitstream_pattern;
+};
+
+/* default scaling list tables (H.264 spec, Tables 7-3 and 7-4) */
+unsigned char Default_4x4_Intra[16] =
+{
+    6,13,20,28,
+    13,20,28,32,
+    20,28,32,37,
+    28,32,37,42
+};
+
+unsigned char Default_4x4_Inter[16] =
+{
+    10,14,20,24,
+    14,20,24,27,
+    20,24,27,30,
+    24,27,30,34
+};
+
+unsigned char Default_8x8_Intra[64] =
+{
+    6,10,13,16,18,23,25,27,
+    10,11,16,18,23,25,27,29,
+    13,16,18,23,25,27,29,31,
+    16,18,23,25,27,29,31,33,
+    18,23,25,27,29,31,33,36,
+    23,25,27,29,31,33,36,38,
+    25,27,29,31,33,36,38,40,
+    27,29,31,33,36,38,40,42
+};
+
+unsigned char Default_8x8_Inter[64] =
+{
+    9,13,15,17,19,21,22,24,
+    13,13,17,19,21,22,24,25,
+    15,17,19,21,22,24,25,27,
+    17,19,21,22,24,25,27,28,
+    19,21,22,24,25,27,28,30,
+    21,22,24,25,27,28,30,32,
+    22,24,25,27,28,30,32,33,
+    24,25,27,28,30,32,33,35
+};
+
+unsigned char quant_flat[16] =
+{
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16
+};
+
+unsigned char quant8_flat[64] =
+{
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16
+};
+
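+/* default list used per scaling-list index: entries 0-2 are 4x4 intra (Y, Cb, Cr),
+   3-5 are 4x4 inter (Y, Cb, Cr), 6 is 8x8 intra luma, 7 is 8x8 inter luma */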
+unsigned char* UseDefaultList[8] =
+{
+    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
+    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
+    Default_8x8_Intra,
+    Default_8x8_Inter
+};
+
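+/* sample aspect ratios indexed by aspect_ratio_idc (H.264 spec, Table E-1); each entry is {sar_width, sar_height} */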
+static uint8 h264_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    {4, 3},
+    {3, 2},
+    {2, 1},
+    // reserved
+    {0, 0}
+};
+
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    pcontext->parser_ops->is_wkld_done = NULL;
+#else
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done");
+    if (NULL == pcontext->parser_ops->is_wkld_done)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+
+    pcontext->parser_ops->flush = dlsym(pcontext->fd_parser, "viddec_h264_flush");
+    if (NULL == pcontext->parser_ops->flush)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->is_frame_start = NULL;
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+    vbp_data_h264 *query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_h264, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES);
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    pcontext->parser_private = NULL;
+    vbp_h264_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_h264_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* init the pointer */
+    parser_private->NAL_length_size = 0;
+
+    parser_private->length_prefix_verified = 0;
+
+    parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_h264(pcontext);
+
+    return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_h264(vbp_context *pcontext)
+{
+    if (NULL != pcontext->parser_private)
+    {
+        free(pcontext->parser_private);
+        pcontext->parser_private = NULL;
+    }
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    int i;
+    vbp_data_h264 *query_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].slc_data);
+            free(query_data->pic_data[i].pic_parms);
+        }
+        free(query_data->pic_data);
+    }
+
+    free(query_data->IQ_matrix_buf);
+    free(query_data->codec_data);
+    free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+static inline uint16_t vbp_utils_ntohs(uint8_t* p)
+{
+    uint16_t i = ((*p) << 8) + ((*(p+1)));
+    return i;
+}
+
+static inline uint32_t vbp_utils_ntohl(uint8_t* p)
+{
+    uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3)));
+    return i;
+}
+
+
+static inline void vbp_set_VAPicture_h264(
+    int curr_picture_structure,
+    int bottom_field,
+    frame_store* store,
+    VAPictureH264* pic)
+{
+    if (FRAME == curr_picture_structure)
+    {
+        if (FRAME != viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("Reference picture structure is not frame for current frame picture!");
+        }
+        pic->flags = 0;
+        pic->TopFieldOrderCnt = store->top_field.poc;
+        pic->BottomFieldOrderCnt = store->bottom_field.poc;
+    }
+    else
+    {
+        if (FRAME == viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("reference picture structure is frame for current field picture!");
+        }
+        if (bottom_field)
+        {
+            pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+        else
+        {
+            pic->flags = VA_PICTURE_H264_TOP_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+    }
+}
+
+static inline void vbp_set_slice_ref_list_h264(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    int i, j;
+    int num_ref_idx_active = 0;
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    uint8_t* p_list = NULL;
+    VAPictureH264* refPicListX = NULL;
+    frame_store* fs = NULL;
+
+    /* initialize ref picture list, set picture id and flags to invalid. */
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+        for (j = 0; j < 32; j++)
+        {
+            refPicListX->picture_id = VA_INVALID_SURFACE;
+            refPicListX->frame_idx = 0;
+            refPicListX->flags = VA_PICTURE_H264_INVALID;
+            refPicListX->TopFieldOrderCnt = 0;
+            refPicListX->BottomFieldOrderCnt = 0;
+            refPicListX++;
+        }
+    }
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+
+        if ((i == 0) &&
+            ((h264_PtypeB == slice_header->slice_type) ||
+             (h264_PtypeP == slice_header->slice_type)))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
+            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list0;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_0;
+            }
+        }
+        else if ((i == 1) && (h264_PtypeB == slice_header->slice_type))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
+            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list1;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_1;
+            }
+        }
+        else
+        {
+            num_ref_idx_active = 0;
+            p_list = NULL;
+        }
+
+
+        for (j = 0; j < num_ref_idx_active; j++)
+        {
+            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);
+
+            /* bit 5 indicates if reference picture is bottom field */
+            vbp_set_VAPicture_h264(
+                h264_parser->info.img.structure,
+                (p_list[j] & 0x20) >> 5,
+                fs,
+                refPicListX);
+
+            refPicListX->frame_idx = fs->frame_num;
+            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            refPicListX++;
+        }
+    }
+}
+
+static inline void vbp_set_pre_weight_table_h264(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    int i, j;
+
+    if ((((h264_PtypeP == slice_header->slice_type) ||
+          (h264_PtypeB == slice_header->slice_type)) &&
+          h264_parser->info.active_PPS.weighted_pred_flag) ||
+         ((h264_PtypeB == slice_header->slice_type) &&
+         (1 == h264_parser->info.active_PPS.weighted_bipred_idc)))
+    {
+        slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom;
+        slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom;
+        slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag;
+        slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag;
+        slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag;
+        slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag;
+
+        for (i = 0; i < 32; i++)
+        {
+            slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i];
+            slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i];
+            slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i];
+            slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i];
+
+            for (j = 0; j < 2; j++)
+            {
+                slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j];
+                slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j];
+                slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j];
+                slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j];
+            }
+        }
+    }
+    else
+    {
+        /* default weight table */
+        slc_parms->luma_log2_weight_denom = 5;
+        slc_parms->chroma_log2_weight_denom = 5;
+        slc_parms->luma_weight_l0_flag = 0;
+        slc_parms->luma_weight_l1_flag = 0;
+        slc_parms->chroma_weight_l0_flag = 0;
+        slc_parms->chroma_weight_l1_flag = 0;
+        for (i = 0; i < 32; i++)
+        {
+            slc_parms->luma_weight_l0[i] = 0;
+            slc_parms->luma_offset_l0[i] = 0;
+            slc_parms->luma_weight_l1[i] = 0;
+            slc_parms->luma_offset_l1[i] = 0;
+
+            for (j = 0; j < 2; j++)
+            {
+                slc_parms->chroma_weight_l0[i][j] = 0;
+                slc_parms->chroma_offset_l0[i][j] = 0;
+                slc_parms->chroma_weight_l1[i][j] = 0;
+                slc_parms->chroma_offset_l1[i][j] = 0;
+            }
+        }
+    }
+}
+
+
+static inline void vbp_set_reference_frames_h264(
+    struct h264_viddec_parser *parser,
+    VAPictureParameterBufferH264* pic_parms)
+{
+    int buffer_idx;
+    int frame_idx;
+    frame_store* store = NULL;
+    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
+    /* initialize reference frames */
+    for (frame_idx = 0; frame_idx < 16; frame_idx++)
+    {
+        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+    }
+    pic_parms->num_ref_frames = 0;
+
+    frame_idx = 0;
+
+    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer);  */
+    /* set short term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+
+        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
+        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0) */
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    /* set long term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
+        if (!viddec_h264_get_is_long_term(store))
+        {
+            WTRACE("long term frame is not marked as long term.");
+        }
+        /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0)*/
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    pic_parms->num_ref_frames = frame_idx;
+
+    if (frame_idx > parser->info.active_SPS.num_ref_frames)
+    {
+        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
+               frame_idx, parser->info.active_SPS.num_ref_frames);
+    }
+}
+
+
+static inline void vbp_set_scaling_list_h264(
+    struct h264_viddec_parser *parser,
+    VAIQMatrixBufferH264* IQ_matrix_buf)
+{
+    int i;
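+    /* 6 4x4 lists are always present; 2 more 8x8 lists apply only when the 8x8 transform is enabled */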
+    int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 1 : 0);
+
+    if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
+    {
+        for (i = 0; i < lists_to_set; i++)
+        {
+            if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
+            {
+                if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                        ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
+                {
+                    /* use default scaling list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                    }
+                }
+                else
+                {
+                    /* use PPS list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
+                    }
+                }
+            }
+            else /* pic_scaling_list not present */
+            {
+                if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+                {
+                    /* SPS matrix present - use fallback rule B */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
+                               16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
+                               64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        //g_warning("invalid scaling list index.");
+                        break;
+                    }
+                }
+                else /* seq_scaling_matrix not present */
+                {
+                    /* SPS matrix not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                } /* end of seq_scaling_matrix not present */
+            } /* end of  pic_scaling_list not present */
+        } /* for loop for each index from 0 to 7 */
+    } /* end of pic_scaling_matrix present */
+    else
+    {
+        /* PPS matrix not present, use SPS information */
+        if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+        {
+            for (i = 0; i < lists_to_set; i++)
+            {
+                if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
+                {
+                    if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                            ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
+                    {
+                        /* use default scaling list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        }
+                    }
+                    else
+                    {
+                        /* use SPS list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
+                        }
+                    }
+                }
+                else
+                {
+                    /* SPS list not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                }
+            }
+        }
+        else
+        {
+            /* SPS matrix not present - use flat lists */
+            for (i = 0; i < 6; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
+            }
+            for (i = 0; i < 2; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+            }
+        }
+    }
+
+    if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
+            (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
+             parser->info.active_SPS.seq_scaling_matrix_present_flag))
+    {
+        for (i = 0; i < 2; i++)
+        {
+            memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+        }
+    }
+}
+
+static void vbp_set_codec_data_h264(
+    struct h264_viddec_parser *parser,
+     vbp_data_h264 *query_data)
+{
+    vbp_codec_data_h264* codec_data = query_data->codec_data;
+
+    /* The following variables are used to detect if there is new SPS or PPS */
+    uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id;
+    uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id;
+    int frame_width = codec_data->frame_width;
+    int frame_height = codec_data->frame_height;
+
+    /* parameter id */
+    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
+    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;
+
+    /* profile and level */
+    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
+    codec_data->level_idc = parser->info.active_SPS.level_idc;
+
+
+    /*constraint flag sets (h.264 Spec v2009)*/
+    codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4;
+    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3;
+    codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;
+    codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1;
+    codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1;
+
+    /* reference frames */
+    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;
+
+    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
+        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
+    {
+        /* no longer necessary: two fields share the same interlaced surface */
+        /* codec_data->num_ref_frames *= 2; */
+    }
+
+    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+
+    /* frame coding */
+    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+
+    /* frame dimension */
+    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16;
+
+    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+                               (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;
+
+    /* cropping information */
+    codec_data->crop_left = 0;
+    codec_data->crop_right = 0;
+    codec_data->crop_top = 0;
+    codec_data->crop_bottom = 0;
+    if(parser->info.active_SPS.sps_disp.frame_cropping_flag) {
+        int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0;
+        int ChromaArrayType = 0;
+        if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) {
+            if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) {
+                SubWidthC = 2;
+                SubHeightC = 2;
+            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) {
+                SubWidthC = 2;
+                SubHeightC = 1;
+            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) {
+                SubWidthC = 1;
+                SubHeightC = 1;
+            }
+            ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        }
+
+        if(ChromaArrayType == 0) {
+            CropUnitX = 1;
+            CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        } else {
+            CropUnitX = SubWidthC;
+            CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag);
+        }
+
+        codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;
+        codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1;
+        codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;
+        codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1;
+    }
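+    /*
+     * Illustrative example (values not taken from any particular stream): for a
+     * 1920x1088 coded 4:2:0 progressive frame (frame_mbs_only_flag = 1) with
+     * frame_crop_rect_bottom_offset = 4, ChromaArrayType is 1, so
+     * CropUnitY = SubHeightC * (2 - 1) = 2 and crop_bottom = 2 * 4 = 8,
+     * yielding the expected 1920x1080 display area.
+     */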
+
+    /* aspect ratio */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+    {
+        codec_data->aspect_ratio_idc =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;
+
+        if (codec_data->aspect_ratio_idc < 17)
+        {
+            codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0];
+            codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1];
+        }
+        else if (codec_data->aspect_ratio_idc == 255)
+        {
+            codec_data->sar_width =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;
+
+            codec_data->sar_height =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
+        }
+        else
+        {
+            codec_data->sar_width = 0;
+            codec_data->sar_height = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        codec_data->aspect_ratio_idc = 0;
+        codec_data->sar_width = 0;
+        codec_data->sar_height = 0;
+    }
+
+    /* video format */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+    {
+        codec_data->video_format =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format = 5;
+    }
+
+    codec_data->video_full_range_flag =
+        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag;
+
+
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+    {
+        codec_data->matrix_coefficients =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients;
+    }
+    else
+    {
+        // Unspecified
+        codec_data->matrix_coefficients = 2;
+    }
+
+    codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value;
+
+    /* picture order type and count */
+    codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+    codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+
+
+    /* update sps and pps status */
+    query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0;
+    query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0;
+    query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff;
+    query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff;
+    if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height)
+    {
+        query_data->new_sps = 1;
+        query_data->new_pps = 1;
+    }
+}
+
+
+static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    struct h264_viddec_parser* parser = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+    VAPictureParameterBufferH264* pic_parms = NULL;
+
+    parser = (struct h264_viddec_parser *)cxt->codec_data;
+
+    if (0 == parser->info.SliceHeader.first_mb_in_slice)
+    {
+        /* a new picture is parsed */
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures == 0)
+    {
+        /* partial frame */
+        query_data->num_pictures = 1;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        WTRACE("MB address does not start from 0!");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+    pic_parms = pic_data->pic_parms;
+
+    // relax this condition to support partial frame parsing
+
+    //if (parser->info.SliceHeader.first_mb_in_slice == 0)
+    {
+        /**
+        * picture parameters only need to be set once,
+        * even if the picture is encoded as multiple slices
+        */
+
+        /* VAPictureParameterBufferH264 */
+        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
+        pic_parms->CurrPic.frame_idx = 0;
+        if (parser->info.img.field_pic_flag == 1)
+        {
+            if (parser->info.img.bottom_field_flag)
+            {
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            }
+            else
+            {
+                /* setting this to 0 is also OK (from test suite) */
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
+            }
+        }
+        else
+        {
+            pic_parms->CurrPic.flags = 0; /* frame picture */
+        }
+        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
+        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
+        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;
+
+        /* don't care if current frame is used as long term reference */
+        if (parser->info.SliceHeader.nal_ref_idc != 0)
+        {
+            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        }
+
+        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;
+
+        /* frame height in MBs */
+        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+                (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;
+
+        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
+        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;
+
+
+        pic_parms->seq_fields.value = 0;
+        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
+        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;
+
+        /* new fields in libva 0.31 */
+        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
+        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag = parser->info.active_SPS.delta_pic_order_always_zero_flag;
+
+
+        /* referenced from UMG_Moorstown_TestSuites */
+        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;
+
+        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
+        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
+        pic_parms->slice_group_change_rate_minus1 = 0;
+        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
+        pic_parms->pic_init_qs_minus26 = 0;
+        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
+        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;
+
+        pic_parms->pic_fields.value = 0;
+        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
+        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
+        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
+        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;
+
+        /* new LibVA fields in v0.31*/
+        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
+        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
+        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
+        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;
+
+        /* all slices in the picture have the same field_pic_flag */
+        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
+        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;
+
+        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
+    }
+
+
+    /* set reference frames, and num_ref_frames */
+    vbp_set_reference_frames_h264(parser, pic_parms);
+    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        int frame_idx;
+        for (frame_idx = 0; frame_idx < 16; frame_idx++)
+        {
+            pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+            pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+            pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+        }
+        /* num of reference frame is 0 if current picture is IDR */
+        pic_parms->num_ref_frames = 0;
+    }
+    else
+    {
+        /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */
+    }
+
+    return VBP_OK;
+}
+
+static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_data->buffer_addr = cxt->parse_cubby.buf;
+    slc_parms = &(slc_data->slc_parms);
+
+    /* byte: how many bytes have been parsed */
+    /* bit: bits parsed within the current parsing position */
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    slc_data->nal_unit_type = h264_parser->info.nal_unit_type;
+
+    slc_parms->slice_data_size = slc_data->slice_size =
+                                     pcontext->parser_cxt->list.data[index].edpos -
+                                     pcontext->parser_cxt->list.data[index].stpos;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = cxt->list.data[index].stpos;
+    slc_parms->slice_data_offset = 0;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* bit offset from NAL start code to the beginning of slice data */
+    slc_parms->slice_data_bit_offset = bit + byte * 8;
+
+    if (is_emul)
+    {
+        WTRACE("next byte is emulation prevention byte.");
+        /*slc_parms->slice_data_bit_offset += 8; */
+    }
+
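+    /*
+     * Example: if emulation_byte_counter is 2, two 0x03 emulation prevention
+     * bytes were consumed up to this point, so the bit offset computed above
+     * is reduced by 16 below.
+     */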
+    if (cxt->getbits.emulation_byte_counter != 0)
+    {
+        slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8;
+    }
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &&
+            (!(h264_parser->info.SliceHeader.field_pic_flag)))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    slc_parms->slice_type = slice_header->slice_type;
+
+    slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag;
+
+    slc_parms->num_ref_idx_l0_active_minus1 = 0;
+    slc_parms->num_ref_idx_l1_active_minus1 = 0;
+    if (slice_header->slice_type == h264_PtypeI)
+    {
+    }
+    else if (slice_header->slice_type == h264_PtypeP)
+    {
+        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+    }
+    else if (slice_header->slice_type == h264_PtypeB)
+    {
+        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+        slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1;
+    }
+    else
+    {
+        WTRACE("slice type %d is not supported.", slice_header->slice_type);
+    }
+
+    slc_parms->cabac_init_idc = slice_header->cabac_init_idc;
+    slc_parms->slice_qp_delta = slice_header->slice_qp_delta;
+    slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc;
+    slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2;
+    slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2;
+
+
+    vbp_set_pre_weight_table_h264(h264_parser, slc_parms);
+    vbp_set_slice_ref_list_h264(h264_parser, slc_parms);
+
+
+    pic_data->num_slices++;
+
+    //vbp_update_reference_frames_h264_methodB(pic_data);
+    if (pic_data->num_slices > MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    /*if (pic_data->num_slices > 1)
+    {
+        ITRACE("number of slices per picture is %d.", pic_data->num_slices);
+    }*/
+    return VBP_OK;
+}
+
+/**
+* parse decoder configuration data
+*/
+uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
+{
+    /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */
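+    /*
+     * Sketch of the record layout parsed below (per MPEG-4 Part 15); sizes in bytes:
+     *   [1] configurationVersion        [1] AVCProfileIndication
+     *   [1] profile_compatibility       [1] AVCLevelIndication
+     *   [1] reserved(6 bits) + lengthSizeMinusOne(2 bits)
+     *   [1] reserved(3 bits) + numOfSequenceParameterSets(5 bits)
+     *   for each SPS: [2] length, then the SPS NAL unit bytes
+     *   [1] numOfPictureParameterSets
+     *   for each PPS: [2] length, then the PPS NAL unit bytes
+     */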
+
+    uint8 configuration_version = 0;
+    uint8 AVC_profile_indication = 0;
+    uint8 profile_compatibility = 0;
+    uint8 AVC_level_indication = 0;
+    uint8 length_size_minus_one = 0;
+    uint8 num_of_sequence_parameter_sets = 0;
+    uint8 num_of_picture_parameter_sets = 0;
+    uint16 sequence_parameter_set_length = 0;
+    uint16 picture_parameter_set_length = 0;
+
+    int i = 0;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;
+    //Enable emulation prevention
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* check if configuration data is start code prefix */
+    viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    int ret = ops->parse_sc((void *)&cubby,
+                            NULL, /* context, not used */
+                            &(cxt->sc_prefix_info));
+    if (ret == 1)
+    {
+        WTRACE("configuration data is start-code prefixed.\n");
+        parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+        return vbp_parse_start_code_h264(pcontext);
+    }
+
+
+    uint8* cur_data = cxt->parse_cubby.buf;
+
+
+    if (cxt->parse_cubby.size < 6)
+    {
+        /* need at least 6 bytes to start parsing the structure, see MPEG-4 Part 15 */
+        return VBP_DATA;
+    }
+
+    configuration_version = *cur_data++;
+    AVC_profile_indication = *cur_data++;
+
+    /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+    profile_compatibility = *cur_data++;
+    AVC_level_indication = *cur_data++;
+
+    /* ITRACE("Level indication: %d", AVC_level_indication);*/
+    /* 2 bits of length_size_minus_one, 6 bits of reserved (111111) */
+    length_size_minus_one = (*cur_data) & 0x3;
+
+    if (length_size_minus_one != 3)
+    {
+        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+    }
+
+    parser_private->NAL_length_size = length_size_minus_one + 1;
+
+    cur_data++;
+
+    /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+    if (num_of_sequence_parameter_sets > 1)
+    {
+        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+    }
+    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+    {
+        /* this would never happen as MAX_NUM_SPS = 32 */
+        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+    }
+    cur_data++;
+
+    cxt->list.num_items = 0;
+    for (i = 0; i < num_of_sequence_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse sequence_parameter_set_length */
+            ETRACE("Not enough data to parse SPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least sequence_parameter_set_length bytes for SPS */
+            ETRACE("Not enough data to parse SPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += sequence_parameter_set_length;
+    }
+
+    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+    {
+        /* need at least one more byte to parse num_of_picture_parameter_sets */
+        ETRACE("Not enough data to parse number of PPS.");
+        return VBP_DATA;
+    }
+
+    num_of_picture_parameter_sets = *cur_data++;
+    if (num_of_picture_parameter_sets > 1)
+    {
+        /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+    }
+
+    for (i = 0; i < num_of_picture_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse picture_parameter_set_length */
+            ETRACE("Not enough data to parse PPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least picture_parameter_set_length bytes for PPS */
+            ETRACE("Not enough data to parse PPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += picture_parameter_set_length;
+    }
+
+    if ((cur_data - cxt->parse_cubby.buf) !=  cxt->parse_cubby.size)
+    {
+        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+               cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+    }
+
+    parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED;
+    return VBP_OK;
+}
+
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
+{
+    switch (*NAL_length_size)
+    {
+    case 4:
+        return vbp_utils_ntohl(p);
+
+    case 3:
+    {
+        uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
+        return i;
+    }
+
+    case 2:
+        return vbp_utils_ntohs(p);
+
+    case 1:
+        return *p;
+
+    default:
+        WTRACE("invalid NAL_length_size: %d.", NAL_length_size);
+        /* default to 4 bytes for length */
+        *NAL_length_size = 4;
+        return vbp_utils_ntohl(p);
+    }
+}
+
+/**
+* An H.264 elementary stream in this format does not carry start codes;
+* instead, it consists of the size of each NAL unit followed by the NAL
+* unit payload. See MPEG-4 Part 15 (sample format).
+*/
+
+/* The start code prefix is 0x000001, which is 3 bytes. */
+#define H264_SC_SIZE 3
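+
+/*
+ * Illustrative buffer layouts handled below (sketch only):
+ *   length-prefixed:      [NAL size (NAL_length_size bytes)][NAL payload] repeated
+ *   start-code prefixed:  [00 00 01][NAL payload] repeated
+ *   single NAL:           [NAL payload]
+ */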
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;
+
+    /* reset query data for the new sample buffer */
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+    int i;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    cxt->list.num_items = 0;
+
+    /* reset start position of first item to 0 in case there is only one item */
+    cxt->list.data[0].stpos = 0;
+
+    /* start code emulation prevention byte is present in NAL */
+    cxt->getbits.is_emul_reqd = 1;
+
+    if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+        int32_t size_left = 0;
+        int32_t size_parsed = 0;
+        int32_t NAL_length = 0;
+
+        cubby = &(cxt->parse_cubby);
+
+        size_left = cubby->size;
+
+        while (size_left >= parser_private->NAL_length_size)
+        {
+            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size);
+            if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size)
+            {
+                ETRACE("Invalid NAL_length parsed.");
+                break;
+            }
+
+            size_parsed += parser_private->NAL_length_size;
+            cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+            size_parsed += NAL_length; /* skip NAL bytes */
+            /* end position is exclusive */
+            cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+            cxt->list.num_items++;
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+                break;
+            }
+
+            size_left = cubby->size - size_parsed;
+        }
+
+        if (size_left != 0 && parser_private->length_prefix_verified == 0)
+        {
+            WTRACE("Elementary stream is not aligned (%d).", size_left);
+
+            /* attempt to correct length prefix to start-code prefix only once; if it succeeds, we will
+             * always treat the bitstream as start-code prefixed; otherwise, treat it as length prefixed
+             */
+            parser_private->length_prefix_verified = 1;
+            viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;
+
+            viddec_parser_ops_t *ops = pcontext->parser_ops;
+            int ret = ops->parse_sc((void *)&temp_cubby,
+                                    NULL, /* context, not used */
+                                    &(cxt->sc_prefix_info));
+
+            /* found start code */
+            if (ret == 1)
+            {
+                WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
+                parser_private->NAL_length_size = 0;
+                parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+                /* reset parsing data */
+                for (i = 0; i < MAX_NUM_PICTURES; i++)
+                {
+                    query_data->pic_data[i].num_slices = 0;
+                }
+                query_data->num_pictures = 0;
+                cxt->list.num_items = 0;
+            }
+        }
+    }
+
+
+    if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t cubby;
+        /*  memory copy without updating cxt->parse_cubby */
+        cubby = cxt->parse_cubby;
+        viddec_parser_ops_t *ops = pcontext->parser_ops;
+        int ret = 0;
+
+        while (1)
+        {
+            ret = ops->parse_sc((void *)&cubby,
+                                NULL, /* context, not used */
+                                &(cxt->sc_prefix_info));
+            if (ret == 1)
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.data[0].stpos = cubby.sc_end_pos;
+                }
+                else
+                {
+                    cxt->list.data[cxt->list.num_items].stpos =
+                        cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+                    cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE;
+                }
+
+                cubby.phase = 0;
+                cubby.buf = cxt->parse_cubby.buf +
+                            cxt->list.data[cxt->list.num_items].stpos;
+
+                cubby.size = cxt->parse_cubby.size -
+                             cxt->list.data[cxt->list.num_items].stpos;
+
+                cxt->list.num_items++;
+                if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+                {
+                    WTRACE("Num items exceeds the limit!");
+                    /* not fatal, just stop parsing */
+                    break;
+                }
+            }
+            else
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.num_items = 1;
+                    parser_private->bitstream_pattern = H264_BS_SINGLE_NAL;
+                    WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
+                }
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+        }
+
+    }
+
+    if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL)
+    {
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+
+    return VBP_OK;
+}
+
+/**
+*
+* process parsing result after a NAL unit is parsed
+*
+*/
+uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
+{
+    if (i >= MAX_NUM_SLICES)
+    {
+        return VBP_PARM;
+    }
+
+    uint32 error = VBP_OK;
+
+    struct h264_viddec_parser* parser = NULL;
+    parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+    vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data;
+    switch (parser->info.nal_unit_type)
+    {
+    case h264_NAL_UNIT_TYPE_SLICE:
+        //ITRACE("slice header is parsed.");
+        error = vbp_add_pic_data_h264(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264(pcontext, i);
+        }
+        break;
+
+    case  h264_NAL_UNIT_TYPE_IDR:
+        //ITRACE("IDR header is parsed.");
+        error = vbp_add_pic_data_h264(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264(pcontext, i);
+        }
+        break;
+    case h264_NAL_UNIT_TYPE_SEI:
+        //ITRACE("SEI header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_SPS:
+        ITRACE("SPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_PPS:
+        ITRACE("PPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        //ITRACE("ACC unit delimiter is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+        ITRACE("EOSeq is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOstream:
+        ITRACE("EOStream is parsed");
+        break;
+
+    default:
+        WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+        break;
+    }
+
+    if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1)
+    {
+        WTRACE("more than one frame in the buffer is found(%d)", query_data->num_pictures);
+        return (error == VBP_OK ? VBP_MULTI : error);
+    }
+    return error;
+}
+
+/*
+*
+* fill query data structure after sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
+{
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+    struct vbp_h264_parser_private_t* private = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+    private = (struct vbp_h264_parser_private_t *)pcontext->parser_private;
+
+    vbp_set_codec_data_h264(parser, query_data);
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* VAIQMatrixBufferH264 */
+    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+    if (query_data->num_pictures > 0)
+    {
+        /*
+        * picture parameter buffer and slice parameter buffer have been populated
+        */
+    }
+    else
+    {
+        /**
+        * add a dummy picture that contains picture parameters parsed
+        * from SPS and PPS.
+        */
+        vbp_add_pic_data_h264(pcontext, 0);
+    }
+
+    return VBP_OK;
+}
+
+
+
diff --git a/mixvbp/vbp_manager/vbp_h264_parser.h b/mixvbp/vbp_manager/vbp_h264_parser.h
new file mode 100755
index 0000000..0094edb
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_h264_parser.h
@@ -0,0 +1,67 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_H264_PARSER_H
+#define VBP_H264_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264(vbp_context *pcontext);
+
+/*
+ * parse start code. Handles length-prefixed, start-code-prefixed,
+ * and single-NAL bitstreams.
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext);
+
+
+
+#endif /*VBP_H264_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_loader.c b/mixvbp/vbp_manager/vbp_loader.c
new file mode 100755
index 0000000..972ab2d
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_loader.c
@@ -0,0 +1,205 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+
+/**
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext)
+{
+    vbp_context **ppcontext;
+    uint32 error;
+
+    if (NULL == hcontext)
+    {
+        return VBP_PARM;
+    }
+
+    *hcontext = NULL;  /* prepare for failure. */
+
+    ppcontext = (vbp_context **)hcontext;
+
+    // TODO: check if vbp context has been created.
+
+
+    error = vbp_utils_create_context(parser_type, ppcontext);
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to create context: %d.", error);
+    }
+
+    return error;
+}
+
+/**
+ *
+ */
+uint32 vbp_close(Handle hcontext)
+{
+    uint32 error;
+
+    if (NULL == hcontext)
+    {
+        return VBP_PARM;
+    }
+
+    vbp_context *pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        /* not a valid vbp context. */
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+    error = vbp_utils_destroy_context(pcontext);
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to destroy context: %d.", error);
+    }
+
+    return error;
+}
+
+
+/**
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+    vbp_context *pcontext;
+    uint32 error = VBP_OK;
+
+    if ((NULL == hcontext) || (NULL == data) || (0 == size))
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    error = vbp_utils_parse_buffer(pcontext, data, size, init_data_flag);
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse buffer: %d.", error);
+    }
+    return error;
+}
+
+/**
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data)
+{
+    vbp_context *pcontext;
+    uint32 error = VBP_OK;
+
+    if ((NULL == hcontext) || (NULL == data))
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    error = vbp_utils_query(pcontext, data);
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to query parsing result: %d.", error);
+    }
+    return error;
+}
+
+/**
+ *
+ */
+uint32 vbp_flush(Handle hcontext)
+{
+    vbp_context *pcontext;
+    uint32 error = VBP_OK;
+
+    if (NULL == hcontext)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    error = vbp_utils_flush(pcontext);
+
+    return error;
+}
+
+#ifdef USE_AVC_SHORT_FORMAT
+uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data)
+{
+    vbp_context *pcontext;
+    uint32 error = VBP_OK;
+
+    if ((NULL == hcontext) || (NULL == newdata) || (0 == size) || (NULL == data))
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    error = vbp_utils_update(pcontext, newdata, size, data);
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to query parsing result: %d.", error);
+    }
+    return error;
+}
+#endif
diff --git a/mixvbp/vbp_manager/vbp_loader.h b/mixvbp/vbp_manager/vbp_loader.h
new file mode 100755
index 0000000..ad4b106
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_loader.h
@@ -0,0 +1,476 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_LOADER_H
+#define VBP_LOADER_H
+
+#include <va/va.h>
+
+#ifdef USE_HW_VP8
+#include <va/va_dec_vp8.h>
+#endif
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+
+#ifndef uint8
+typedef unsigned char uint8;
+#endif
+#ifndef uint16
+typedef unsigned short uint16;
+#endif
+#ifndef uint32
+typedef unsigned int uint32;
+#endif
+
+typedef void *Handle;
+
+/*
+ * MPEG-4 Part 2 data structure
+ */
+
+typedef struct _vbp_codec_data_mp42
+{
+    uint8  profile_and_level_indication;
+    uint32 video_object_layer_width;
+    uint32 video_object_layer_height;
+
+    // 0 for unspecified, PAL/NTSC/SECAM
+    uint8  video_format;
+
+    // 0 short range, 1 full range
+    uint8  video_range;
+
+    // default 2 (unspecified), 1 for BT709.
+    uint8  matrix_coefficients;
+
+    uint8  short_video_header;
+
+    // always exist for mpeg-4,
+    uint8   aspect_ratio_info;
+    uint8   par_width;
+    uint8   par_height;
+
+    // bit rate
+    int bit_rate;
+} vbp_codec_data_mp42;
+
+typedef struct _vbp_slice_data_mp42
+{
+    uint8* buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferMPEG4 slice_param;
+} vbp_slice_data_mp42;
+
+typedef struct _vbp_picture_data_mp42 vbp_picture_data_mp42;
+
+struct _vbp_picture_data_mp42
+{
+    uint8 vop_coded;
+    uint16 vop_time_increment;
+    /* indicates if current buffer contains parameter for the first slice of the picture */
+    uint8 new_picture_flag;
+    VAPictureParameterBufferMPEG4 picture_param;
+    vbp_slice_data_mp42 slice_data;
+
+    vbp_picture_data_mp42* next_picture_data;
+};
+
+typedef struct _vbp_data_mp42
+{
+    vbp_codec_data_mp42 codec_data;
+    VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+
+    uint32 number_picture_data;
+    uint32 number_pictures;
+
+    vbp_picture_data_mp42 *picture_data;
+
+} vbp_data_mp42;
+
+/*
+ * H.264 data structure
+ */
+
+typedef struct _vbp_codec_data_h264
+{
+    uint8 pic_parameter_set_id;
+    uint8 seq_parameter_set_id;
+
+    uint8 profile_idc;
+    uint8 level_idc;
+    /*constraint flag sets (h.264 Spec v2009)*/
+    uint8 constraint_set0_flag;
+    uint8 constraint_set1_flag;
+    uint8 constraint_set2_flag;
+    uint8 constraint_set3_flag;
+    uint8 constraint_set4_flag;
+
+    uint8 num_ref_frames;
+    uint8 gaps_in_frame_num_value_allowed_flag;
+
+    uint8 frame_mbs_only_flag;
+    uint8 mb_adaptive_frame_field_flag;
+
+    int frame_width;
+    int frame_height;
+
+    uint8 vui_parameters_present_flag;
+
+    /* aspect ratio */
+    uint8 aspect_ratio_idc;
+    uint16 sar_width;
+    uint16 sar_height;
+
+    /* cropping information */
+    int crop_top;
+    int crop_bottom;
+    int crop_left;
+    int crop_right;
+
+    /* video format */
+
+    // default 5 unspecified
+    uint8 video_format;
+    uint8 video_full_range_flag;
+
+    // default 2 unspecified
+    uint8 matrix_coefficients;
+
+    uint8 pic_order_cnt_type;
+    int log2_max_pic_order_cnt_lsb_minus4;
+
+    int bit_rate;
+
+} vbp_codec_data_h264;
+
+typedef struct _vbp_slice_data_h264
+{
+    uint8* buffer_addr;
+
+    uint32 slice_offset; /* slice data offset */
+
+    uint32 slice_size; /* slice data size */
+
+    uint8 nal_unit_type;
+
+    VASliceParameterBufferH264 slc_parms;
+
+} vbp_slice_data_h264;
+
+
+typedef struct _vbp_picture_data_h264
+{
+    VAPictureParameterBufferH264* pic_parms;
+
+    uint32 num_slices;
+
+    vbp_slice_data_h264* slc_data;
+
+} vbp_picture_data_h264;
+
+
+typedef struct _vbp_data_h264
+{
+    /* rolling counter of buffers sent by vbp_parse */
+    uint32 buf_number;
+
+    uint32 num_pictures;
+
+    /* if SPS has been received */
+    uint8  has_sps;
+
+    /* if PPS has been received */
+    uint8  has_pps;
+
+    uint8  new_sps;
+
+    uint8  new_pps;
+
+    vbp_picture_data_h264* pic_data;
+
+    /**
+     * do we need to send matrix to VA for each picture? If not, we need
+     * a flag indicating whether it is updated.
+     */
+    VAIQMatrixBufferH264* IQ_matrix_buf;
+
+    vbp_codec_data_h264* codec_data;
+
+} vbp_data_h264;
+
+/*
+ * vc1 data structure
+ */
+typedef struct _vbp_codec_data_vc1
+{
+    /* Sequence layer. */
+    uint8  PROFILE;
+    uint8  LEVEL;
+    uint8  POSTPROCFLAG;
+    uint8  PULLDOWN;
+    uint8  INTERLACE;
+    uint8  TFCNTRFLAG;
+    uint8  FINTERPFLAG;
+    uint8  PSF;
+
+    // default 2: unspecified
+    uint8  MATRIX_COEF;
+
+    /* Entry point layer. */
+    uint8  BROKEN_LINK;
+    uint8  CLOSED_ENTRY;
+    uint8  PANSCAN_FLAG;
+    uint8  REFDIST_FLAG;
+    uint8  LOOPFILTER;
+    uint8  FASTUVMC;
+    uint8  EXTENDED_MV;
+    uint8  DQUANT;
+    uint8  VSTRANSFORM;
+    uint8  OVERLAP;
+    uint8  QUANTIZER;
+    uint16 CODED_WIDTH;
+    uint16 CODED_HEIGHT;
+    uint8  EXTENDED_DMV;
+    uint8  RANGE_MAPY_FLAG;
+    uint8  RANGE_MAPY;
+    uint8  RANGE_MAPUV_FLAG;
+    uint8  RANGE_MAPUV;
+
+    /* Others. */
+    uint8  RANGERED;
+    uint8  MAXBFRAMES;
+    uint8  MULTIRES;
+    uint8  SYNCMARKER;
+    uint8  RNDCTRL;
+    uint8  REFDIST;
+    uint16 widthMB;
+    uint16 heightMB;
+
+    uint8  INTCOMPFIELD;
+    uint8  LUMSCALE2;
+    uint8  LUMSHIFT2;
+
+    // aspect ratio
+
+    // default unspecified
+    uint8 ASPECT_RATIO;
+
+    uint8 ASPECT_HORIZ_SIZE;
+    uint8 ASPECT_VERT_SIZE;
+    // bit rate
+    int bit_rate;
+} vbp_codec_data_vc1;
+
+typedef struct _vbp_slice_data_vc1
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVC1 slc_parms;     /* slice parameters */
+} vbp_slice_data_vc1;
+
+
+typedef struct _vbp_picture_data_vc1
+{
+    uint32 picture_is_skipped;                /* VC1_PTYPE_SKIPPED if PTYPE is skipped. */
+    VAPictureParameterBufferVC1 *pic_parms;   /* current parsed picture header */
+    uint32 size_bitplanes;                    /* based on number of MBs */
+    uint8 *packed_bitplanes;                  /* contains up to three bitplanes packed for libVA */
+    uint32 num_slices;                        /* number of slices.  always at least one */
+    vbp_slice_data_vc1 *slc_data;             /* pointer to array of slice data */
+} vbp_picture_data_vc1;
+
+typedef struct _vbp_data_vc1
+{
+    uint32 buf_number;                        /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vc1 *se_data;              /* parsed SH/EPs */
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vc1* pic_data;
+} vbp_data_vc1;
+
+#ifdef USE_HW_VP8
+typedef struct _vbp_codec_data_vp8
+{
+    uint8 frame_type;
+    uint8 version_num;
+    int show_frame;
+
+    uint32 frame_width;
+    uint32 frame_height;
+
+    int refresh_alt_frame;
+    int refresh_golden_frame;
+    int refresh_last_frame;
+
+    /* cropping information */
+    int crop_top;
+    int crop_bottom;
+    int crop_left;
+    int crop_right;
+
+    int golden_copied;
+    int altref_copied;
+} vbp_codec_data_vp8;
+
+typedef struct _vbp_slice_data_vp8
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVP8 slc_parms;     /* slice parameters */
+} vbp_slice_data_vp8;
+
+typedef struct _vbp_picture_data_vp8
+{
+    VAPictureParameterBufferVP8* pic_parms;   /* current parsed picture header */
+
+    uint32 num_slices;                        /* number of slices.  always one for VP8 */
+    vbp_slice_data_vp8 *slc_data;             /* pointer to array of slice data */
+} vbp_picture_data_vp8;
+
+typedef struct _vbp_data_vp8
+{
+    uint32 buf_number;                        /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vp8 *codec_data;
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vp8* pic_data;
+
+    VAProbabilityDataBufferVP8* prob_data;
+    VAIQMatrixBufferVP8* IQ_matrix_buf;
+} vbp_data_vp8;
+#endif
+
+enum _picture_type
+{
+    VC1_PTYPE_I,
+    VC1_PTYPE_P,
+    VC1_PTYPE_B,
+    VC1_PTYPE_BI,
+    VC1_PTYPE_SKIPPED
+};
+
+enum _vbp_parser_error
+{
+    VBP_OK,
+    VBP_TYPE,
+    VBP_LOAD,
+    VBP_INIT,
+    VBP_DATA,
+    VBP_DONE,
+    VBP_MEM,
+    VBP_PARM,
+    VBP_PARTIAL,
+    VBP_MULTI
+};
+
+enum _vbp_parser_type
+{
+    VBP_VC1,
+    VBP_MPEG2,
+    VBP_MPEG4,
+    VBP_H264,
+#ifdef USE_HW_VP8
+    VBP_VP8,
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+    VBP_H264SECURE,
+#endif
+};
+
+
+/*
+ * open video bitstream parser to parse a specific media type.
+ * @param  parser_type: one of the types defined in #vbp_parser_type
+ * @param  hcontext: pointer to hold returned VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext);
+
+/*
+ * close video bitstream parser.
+ * @param hcontext: VBP context handle.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_close(Handle hcontext);
+
+/*
+ * parse bitstream.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_data_flag: 1 if the buffer contains bitstream configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * Structure of data blob is determined by the media type.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data);
+
+
+/*
+ * flush any un-parsed bitstream.
+ * @param hcontext: handle to VBP context.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_flush(Handle hcontext);
+
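+/*
+ * Typical call sequence (illustrative sketch only; error checking is omitted
+ * and config_buf/frame_buf are assumed caller-provided uint8 buffers):
+ *
+ *     Handle h = NULL;
+ *     vbp_open(VBP_H264, &h);
+ *     vbp_parse(h, config_buf, config_size, 1);    // codec configuration data
+ *     vbp_parse(h, frame_buf, frame_size, 0);      // one coded frame
+ *
+ *     vbp_data_h264 *result = NULL;
+ *     vbp_query(h, (void **)&result);              // blob layout depends on parser type
+ *
+ *     vbp_close(h);
+ */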
+
+#ifdef USE_AVC_SHORT_FORMAT
+/*
+ * update the vbp context with new data.
+ * @param hcontext: handle to VBP context.
+ * @param newdata: pointer to the new data buffer.
+ * @param size: size of the new data buffer.
+ * @param data: pointer to hold a data blob that contains the parsing result.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+*/
+uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data);
+#endif
+
+#endif /* VBP_LOADER_H */
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c
new file mode 100755
index 0000000..6eff5a0
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.c
@@ -0,0 +1,1483 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+
+#include <dlfcn.h>
+
+#include <string.h>
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_mp42_parser.h"
+#include "vbp_common.h"
+#include "viddec_mp4_parse.h"
+
+
+
+typedef struct vbp_mp42_parser_private_t vbp_mp42_parser_private;
+
+struct vbp_mp42_parser_private_t
+{
+    bool short_video_header;
+};
+
+static uint8 mp4_aspect_ratio_table[][2] =
+{
+    // forbidden
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+
+    // reserved
+    {0, 0}
+};
+
+
+/*
+ * Some DivX AVI files contain 2 frames in one gstbuffer.
+ */
+
+
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf,
+    uint32 length,
+    uint32 *sc_end_pos,
+    uint8 *is_normal_sc,
+    uint8* resync_marker,
+    const bool svh_search);
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
+void vbp_fill_codec_data(vbp_context *pcontext);
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data);
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext);
+
+static inline uint32 vbp_sprite_trajectory_mp42(
+    void *parent,
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane);
+
+
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length);
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        // absolutely impossible, just sanity check
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
+    if (pcontext->parser_ops->init == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    pcontext->parser_ops->parse_sc = NULL;
+#else
+    pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
+    if (pcontext->parser_ops->parse_sc == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse");
+    if (pcontext->parser_ops->parse_syntax == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size =dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size");
+    if (pcontext->parser_ops->get_cxt_size == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    pcontext->parser_ops->is_wkld_done = NULL;
+#else
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done");
+    if (pcontext->parser_ops->is_wkld_done == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+
+/*
+ * For the codec_data passed by gstreamer
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
+{
+    uint32 ret = VBP_OK;
+    ret = vbp_parse_start_code_mp42(pcontext);
+    return ret;
+}
+
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+    uint8 is_svh = 0;
+    uint32 current_sc = parser->current_sc;
+    is_svh = parser->cur_sc_prefix ? false : true;
+
+    if (!is_svh)
+    {
+        // remove prefix from current_sc
+        current_sc &= 0x0FF;
+        switch (current_sc)
+        {
+        case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+            VTRACE ("Visual Object Sequence is parsed.\n");
+            query_data->codec_data.profile_and_level_indication
+                    = parser->info.profile_and_level_indication;
+            VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication);
+            break;
+
+        case MP4_SC_VIDEO_OBJECT_PLANE:
+            //VTRACE ("Video Object Plane is parsed.\n");
+            vbp_on_vop_mp42(pcontext, list_index);
+            break;
+
+        default:
+            if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) &&
+                (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX))
+            {
+                VTRACE ("Video Object Layer is parsed\n");
+                parser_private->short_video_header = FALSE;
+                vbp_fill_codec_data(pcontext);
+            }
+            else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX &&
+                     current_sc >= MP4_SC_VIDEO_OBJECT_MIN)
+            {
+                if (parser->sc_seen == MP4_SC_SEEN_SVH)
+                {
+                    // this should never happen!!!!
+                    WTRACE ("Short video header is parsed.\n");
+                    // vbp_on_vop_svh_mp42(pcontext, list_index);
+                    return VBP_TYPE;
+                }
+            }
+            break;
+        }
+    }
+    else
+    {
+        if (parser->sc_seen == MP4_SC_SEEN_SVH)
+        {
+            //VTRACE ("Short video header is parsed.\n");
+            vbp_on_vop_svh_mp42(pcontext, list_index);
+        }
+    }
+
+    return VBP_OK;
+}
+
+
+
+/*
+* partial frame handling:
+*
+* h.263: the picture header is lost if the first GOB is discarded; a redundant picture header must be
+* conveyed in the packet (RFC 4629) for each following GOB, otherwise the
+* picture can't be decoded.
+*
+* MPEG4:  VideoObjectPlane header is lost if the first slice is discarded. However, picture
+* is still decodable as long as the header_extension_code is 1 in video_packet_header.
+*
+* MPEG-4 with short header: video_plane_with_short_header is lost if the first GOB
+* is discarded. As this header is not duplicated (RFC 3016), the picture is not decodable.
+*
+* In sum:
+* If buffer contains the 32-bit start code (0x000001xx), proceed as normal.
+*
+* If buffer contains 22-bits of "0000 0000 0000 0000 1000 00", which indicates h.263
+* picture start code or short_video_start_marker, proceed as normal.
+*
+* If buffer contains 22 bits of "0000 0000 0000 0000 1XXX XX" (where XXX XX is 000 01 or greater), which
+* indicates an h.263 Group start code or the gob_resync_marker of a gob_layer in MPEG-4 with
+* short header, we should report the packet as a partial frame - no more parsing is needed.
+*
+* If buffer contains a string of zeros between 16 and 22 bits long, followed by a single '1' bit, which indicates a resync marker,
+* the buffer will be immediately parsed and num_items is set to 0.
+*/
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8 *buf = NULL;
+    uint32 size = 0;
+    uint32 sc_end_pos = -1;
+    uint32 bytes_parsed = 0;
+    viddec_mp4_parser_t *pinfo = NULL;
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+
+    // reset query data for the new sample buffer
+    query_data->number_picture_data= 0;
+    query_data->number_pictures = 0;
+
+    // emulation prevention byte is not needed
+    cxt->getbits.is_emul_reqd = 0;
+
+    cxt->list.num_items = 0;
+    cxt->list.data[0].stpos = 0;
+    cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+    buf = cxt->parse_cubby.buf;
+    size = cxt->parse_cubby.size;
+
+    pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
+
+    uint8 is_normal_sc = 0;
+    uint8 resync_marker = 0;
+    uint32 found_sc = 0;
+    uint32 ret = VBP_OK;
+
+    while (1)
+    {
+        found_sc = vbp_get_sc_pos_mp42(
+                        buf + bytes_parsed,
+                        size - bytes_parsed,
+                        &sc_end_pos,
+                        &is_normal_sc,
+                        &resync_marker,
+                        parser_private->short_video_header);
+
+        if (found_sc)
+        {
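+            // sc_end_pos points one byte past the three start-code prefix bytes,
+            // so "- 3" places stpos at the first byte of the prefix; the previous
+            // list item (if any) is closed at the same position.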
+            cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + sc_end_pos - 3;
+            if (cxt->list.num_items != 0)
+            {
+                cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + sc_end_pos - 3;
+            }
+            bytes_parsed += sc_end_pos;
+
+            cxt->list.num_items++;
+            pinfo->cur_sc_prefix = is_normal_sc;
+        }
+        else
+        {
+            if (cxt->list.num_items != 0)
+            {
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+            else
+            {
+                WTRACE ("No start-code is found in cubby buffer! The size of cubby is %d\n", size);
+                cxt->list.num_items = 1;
+                cxt->list.data[0].stpos = 0;
+                cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+                if (resync_marker)
+                {
+                    // either the first slice (GOB) is lost or parser receives a single slice (GOB)
+                    if (parser_private->short_video_header)
+                    {
+                        // TODO: revisit if HW supports GOB layer decoding for h.263
+                        WTRACE("Partial frame: GOB buffer.\n");
+                        ret = VBP_PARTIAL;
+                    }
+                    else
+                    {
+                        WTRACE("Partial frame: video packet header buffer.\n");
+                        ret =  vbp_process_video_packet_mp42(pcontext);
+                    }
+
+                    // set num_items to 0 so buffer will not be parsed again
+                    cxt->list.num_items = 0;
+                }
+                else
+                {
+                    ETRACE("Invalid data received.\n");
+                    cxt->list.num_items = 0;
+                    return VBP_DATA;
+                }
+
+                break;
+            }
+        }
+    }
+
+    return ret;
+}
+
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext)
+{
+#if 0
+    vbp_dump_query_data(pcontext);
+#endif
+    return VBP_OK;
+}
+
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data)
+{
+    vbp_picture_data_mp42 *picture_data = query_data->picture_data;
+    int num_pictures = query_data->number_picture_data;
+    while (num_pictures > 1)
+    {
+        picture_data = picture_data->next_picture_data;
+        num_pictures--;
+    }
+
+    return picture_data;
+}
+
+void vbp_fill_codec_data(vbp_context *pcontext)
+{
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_codec_data_mp42* codec_data = &(query_data->codec_data);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+    codec_data->bit_rate = parser->info.VisualObject.VideoObject.VOLControlParameters.bit_rate;
+
+    codec_data->profile_and_level_indication
+            = parser->info.profile_and_level_indication;
+
+    codec_data->video_object_layer_width =
+            parser->info.VisualObject.VideoObject.video_object_layer_width;
+
+    codec_data->video_object_layer_height =
+            parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+    if (parser->info.VisualObject.VideoSignalType.is_video_signal_type)
+    {
+        codec_data->video_format =
+                parser->info.VisualObject.VideoSignalType.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format =  5;
+    }
+
+    codec_data->video_range =
+            parser->info.VisualObject.VideoSignalType.video_range;
+
+    if (parser->info.VisualObject.VideoSignalType.is_colour_description)
+    {
+        codec_data->matrix_coefficients =
+                parser->info.VisualObject.VideoSignalType.matrix_coefficients;
+    }
+    else if (parser_private->short_video_header)
+    {
+        // SMPTE 170M
+        codec_data->matrix_coefficients = 6;
+    }
+    else
+    {
+        // ITU-R Recommendation BT.709
+        codec_data->matrix_coefficients = 1;
+    }
+
+    codec_data->short_video_header = parser_private->short_video_header;
+
+    // aspect ratio
+    codec_data->aspect_ratio_info = parser->info.VisualObject.VideoObject.aspect_ratio_info;
+    if (codec_data->aspect_ratio_info < 6)
+    {
+        codec_data->par_width = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][0];
+        codec_data->par_height = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][1];
+    }
+    else if (codec_data->aspect_ratio_info == 15)
+    {
+        codec_data->par_width = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_width;
+        codec_data->par_height = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_height;
+    }
+    else
+    {
+        codec_data->par_width = 0;
+        codec_data->par_height = 0;
+    }
+}
+
+void vbp_fill_slice_data(vbp_context *pcontext, int list_index)
+{
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+    if (!parser->info.VisualObject.VideoObject.short_video_header)
+    {
+        vbp_process_slices_mp42(pcontext, list_index);
+    }
+    else
+    {
+        vbp_process_slices_svh_mp42(pcontext, list_index);
+    }
+}
+
+void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag)
+{
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    VAPictureParameterBufferMPEG4 *picture_param = NULL;
+
+    if (new_picture_flag)
+    {
+        query_data->number_pictures++;
+    }
+
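+    // picture_data nodes form a singly linked list that is reused across sample
+    // buffers: number_picture_data counts the nodes active for the current buffer,
+    // and a new node is appended only when the list runs out.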
+    picture_data = query_data->picture_data;
+    if (picture_data == NULL || query_data->number_picture_data == 0)
+    {
+        // first entry
+        if (picture_data == NULL)
+        {
+            picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1);
+            query_data->picture_data = picture_data;
+            if (picture_data == NULL) {
+                query_data->number_picture_data = 0;
+                return;
+            }
+        }
+        query_data->number_picture_data = 1;
+    }
+    else
+    {
+        // find the last active one
+        int i = query_data->number_picture_data;
+        while (i > 1)
+        {
+            picture_data = picture_data->next_picture_data;
+            i--;
+        }
+        if (picture_data->next_picture_data == NULL)
+        {
+            picture_data->next_picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1);
+            if (picture_data->next_picture_data == NULL) {
+                return;
+            }
+        }
+
+        query_data->number_picture_data++;
+
+        picture_data = picture_data->next_picture_data;
+    }
+
+    picture_param = &(picture_data->picture_param);
+
+    uint8 idx = 0;
+
+    picture_data->new_picture_flag = new_picture_flag;
+
+    picture_data->vop_coded
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded;
+
+
+
+    picture_data->vop_time_increment =
+            parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment;
+
+    // fill picture_param
+
+
+    /*
+     * NOTE: for short video header, the parser saves vop_width and vop_height
+     * to VOL->video_object_layer_width and VOL->video_object_layer_height
+     */
+    picture_param->vop_width
+            = parser->info.VisualObject.VideoObject.video_object_layer_width;
+    picture_param->vop_height
+            = parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+    picture_param->forward_reference_picture = VA_INVALID_SURFACE;
+    picture_param->backward_reference_picture = VA_INVALID_SURFACE;
+
+    // Fill VAPictureParameterBufferMPEG4::vol_fields
+
+    picture_param->vol_fields.bits.short_video_header
+            = parser->info.VisualObject.VideoObject.short_video_header;
+    picture_param->vol_fields.bits.chroma_format
+            = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format;
+
+    // TODO: find out why testsuite always set this value to be 0
+    picture_param->vol_fields.bits.chroma_format = 0;
+
+    picture_param->vol_fields.bits.interlaced
+            = parser->info.VisualObject.VideoObject.interlaced;
+    picture_param->vol_fields.bits.obmc_disable
+            = parser->info.VisualObject.VideoObject.obmc_disable;
+    picture_param->vol_fields.bits.sprite_enable
+            = parser->info.VisualObject.VideoObject.sprite_enable;
+    picture_param->vol_fields.bits.sprite_warping_accuracy
+            = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy;
+    picture_param->vol_fields.bits.quant_type
+            = parser->info.VisualObject.VideoObject.quant_type;
+    picture_param->vol_fields.bits.quarter_sample
+            = parser->info.VisualObject.VideoObject.quarter_sample;
+    picture_param->vol_fields.bits.data_partitioned
+            = parser->info.VisualObject.VideoObject.data_partitioned;
+    picture_param->vol_fields.bits.reversible_vlc
+            = parser->info.VisualObject.VideoObject.reversible_vlc;
+    picture_param->vol_fields.bits.resync_marker_disable
+            = parser->info.VisualObject.VideoObject.resync_marker_disable;
+    picture_param->no_of_sprite_warping_points
+            = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points;
+
+    for (idx = 0; idx < 3; idx++)
+    {
+        picture_param->sprite_trajectory_du[idx]
+                = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx];
+        picture_param->sprite_trajectory_dv[idx]
+                = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx];
+    }
+
+    picture_param->quant_precision
+            = parser->info.VisualObject.VideoObject.quant_precision;
+
+    // fill VAPictureParameterBufferMPEG4::vop_fields
+
+
+    if (!parser->info.VisualObject.VideoObject.short_video_header)
+    {
+        picture_param->vop_fields.bits.vop_coding_type
+                = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type;
+    }
+    else
+    {
+        picture_param->vop_fields.bits.vop_coding_type
+                = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type;
+    }
+
+    // TODO: fill picture_param->vop_fields.bits.backward_reference_vop_coding_type
+    // This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7
+
+    if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        picture_param->vop_fields.bits.backward_reference_vop_coding_type
+                = picture_param->vop_fields.bits.vop_coding_type;
+    }
+
+    picture_param->vop_fields.bits.vop_rounding_type
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type;
+    picture_param->vop_fields.bits.intra_dc_vlc_thr
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr;
+    picture_param->vop_fields.bits.top_field_first
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first;
+    picture_param->vop_fields.bits.alternate_vertical_scan_flag
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag;
+
+    picture_param->vop_fcode_forward
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward;
+    picture_param->vop_fcode_backward
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward;
+    picture_param->vop_time_increment_resolution
+            = parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
+
+    // short header related
+    picture_param->num_gobs_in_vop
+            = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
+    picture_param->num_macroblocks_in_gob
+            = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
+
+    // for direct mode prediction
+    picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
+    picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
+}
+
+void vbp_fill_iq_matrix_buffer(vbp_context *pcontext)
+{
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    mp4_VOLQuant_mat_t *quant_mat_info =
+            &(parser->info.VisualObject.VideoObject.quant_mat_info);
+
+    VAIQMatrixBufferMPEG4 *iq_matrix = NULL;
+
+    iq_matrix = &(query_data->iq_matrix_buffer);
+
+    iq_matrix->load_intra_quant_mat = 1; //quant_mat_info->load_intra_quant_mat;
+    iq_matrix->load_non_intra_quant_mat = 1; // = quant_mat_info->load_nonintra_quant_mat;
+    memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
+    memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64);
+}
+
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_fill_codec_data(pcontext);
+    vbp_fill_picture_param(pcontext, 1);
+    vbp_fill_iq_matrix_buffer(pcontext);
+    vbp_fill_slice_data(pcontext, list_index);
+}
+
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_fill_codec_data(pcontext);
+    vbp_fill_picture_param(pcontext, 1);
+    vbp_fill_iq_matrix_buffer(pcontext);
+    vbp_fill_slice_data(pcontext, list_index);
+}
+
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf,
+    uint32 length,
+    uint32 *sc_end_pos,
+    uint8 *is_normal_sc,
+    uint8 *resync_marker,
+    const bool svh_search)
+{
+    uint8 *ptr = buf;
+    uint32 size;
+    uint32 data_left = 0, phase = 0, ret = 0;
+    size = 0;
+
+    data_left = length;
+    *sc_end_pos = -1;
+
+    /* parse until there is more data and start code not found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are byte aligned & phase=0; if that's the case we can check
+         a word at a time instead of a byte */
+        if (((((uint32) ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32 data;
+                char mask1 = 0, mask2 = 0;
+
+                data = *((uint32 *) ptr);
+#ifndef MFDBIGENDIAN
+                data = SWAP_WORD(data);
+#endif
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If the second byte and fourth byte are not zeros then we cannot have a start code here, as we need
+                 two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {
+                    /* Success so skip 4 bytes and start over */
+                    ptr += 4;
+                    size += 4;
+                    data_left -= 4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either the data is not on a word boundary, or phase > 0, or we are on a word boundary
+         but detected two zero bytes in the word, so we look one byte at a time */
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {
+                /* Phase can be 3 only if third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    phase = 2;
+
+                    if ((((uint32) ptr) & 0x3) == 0)
+                    {
+                        while (data_left > 3)
+                        {
+                            if (*((uint32 *) ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr += 4;
+                            size += 4;
+                            data_left -= 4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                uint8 normal_sc = 0, short_sc = 0;
+                if (phase == 2)
+                {
+                    normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+                    if (svh_search)
+                    {
+                       short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
+                    }
+                    *is_normal_sc = normal_sc;
+
+                    // at least 16-bit 0, may be GOB start code or
+                    // resync marker.
+                    *resync_marker = 1;
+                }
+
+                if (!(normal_sc | short_sc))
+                {
+                    phase = 0;
+                }
+                else
+                {
+                    /* Match for start code so update context with byte position */
+                    *sc_end_pos = size;
+                    phase = 3;
+                }
+                ptr++;
+                size++;
+                data_left--;
+            }
+        }
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        (*sc_end_pos)++;
+        phase++;
+        ret = 1;
+    }
+
+    // Return 1 only if phase is 4, else always return 0
+    return ret;
+}
+
+
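+// Number of bits used to code macroblock_number, i.e. the bit length of
+// (numOfMbs - 1). For example (hypothetical 720x576 VOP): 45x36 = 1620
+// macroblocks, and 1620 - 1 = 1619 needs 11 bits, so 11 bits are read.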
+uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
+{
+    uint32 length = 0;
+    numOfMbs--;
+    do
+    {
+        numOfMbs >>= 1;
+        length++;
+    }
+    while (numOfMbs);
+    return length;
+}
+
+uint32 vbp_parse_video_packet_header_mp42(
+    void *parent,
+    viddec_mp4_parser_t *parser_cxt,
+    uint16_t *quant_scale,
+    uint32 *macroblock_number)
+{
+    uint32 ret = VBP_DATA;
+    mp4_Info_t *pInfo = &(parser_cxt->info);
+    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vidObjPlane =
+            &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    uint32 code = 0;
+    int32_t getbits = 0;
+
+    uint16_t _quant_scale = 0;
+    uint32 _macroblock_number = 0;
+    uint32 header_extension_codes = 0;
+    uint8 vop_coding_type = vidObjPlane->vop_coding_type;
+
+    if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+    {
+        return VBP_DATA;
+    }
+
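+    // video_packet_header layout: macroblock_number, quant_scale (unless the
+    // shape is binary-only), header_extension_code (rectangular shape only),
+    // and, when the extension flag is set, a repeat of the key VOP header
+    // fields parsed below.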
+    do
+    {
+        // get macroblock_number
+        uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
+        uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
+        uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y);
+
+        getbits = viddec_pm_get_bits(parent, &code, length);
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        _macroblock_number = code;
+
+        // quant_scale
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            _quant_scale = code;
+        }
+
+        // header_extension_codes
+        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            header_extension_codes = code;
+        }
+
+        if (header_extension_codes)
+        {
+            // modulo time base
+            do
+            {
+                getbits = viddec_pm_get_bits(parent, &code, 1);
+                BREAK_GETBITS_FAIL(getbits, ret);
+            } while (code);
+
+            // marker_bit
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            // vop_time_increment
+            uint32 numbits = 0;
+            numbits = vidObjLay->vop_time_increment_resolution_bits;
+            if (numbits == 0)
+            {
+                // ??
+                numbits = 1;
+            }
+            getbits = viddec_pm_get_bits(parent, &code, numbits);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            vidObjPlane->vop_time_increment = code;
+
+
+            // marker_bit
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            // vop_coding_type
+            getbits = viddec_pm_get_bits(parent, &code, 2);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            vop_coding_type = code & 0x3;
+            vidObjPlane->vop_coding_type = vop_coding_type;
+
+
+            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+            {
+                // intra_dc_vlc_thr
+                getbits = viddec_pm_get_bits(parent, &code, 3);
+                BREAK_GETBITS_FAIL(getbits, ret);
+
+                vidObjPlane->intra_dc_vlc_thr = code;
+                if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) &&
+                    (vop_coding_type == MP4_VOP_TYPE_S) &&
+                    (vidObjLay->sprite_info.no_of_sprite_warping_points> 0))
+                {
+                    if (vbp_sprite_trajectory_mp42(parent, vidObjLay, vidObjPlane) != VBP_OK)
+                    {
+                        break;
+                    }
+                }
+
+                if (vidObjLay->reduced_resolution_vop_enable &&
+                   (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+                   ((vop_coding_type == MP4_VOP_TYPE_I) ||
+                    (vop_coding_type == MP4_VOP_TYPE_P)))
+                {
+                    // vop_reduced_resolution
+                    getbits = viddec_pm_get_bits(parent, &code, 1);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                }
+
+                if (vop_coding_type != MP4_VOP_TYPE_I)
+                {
+                    // vop_fcode_forward
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                    vidObjPlane->vop_fcode_forward = code;
+                }
+
+                if (vop_coding_type == MP4_VOP_TYPE_B)
+                {
+                    // vop_fcode_backward
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                    vidObjPlane->vop_fcode_backward = code;
+                }
+            }
+        }
+
+        if (vidObjLay->newpred_enable)
+        {
+            // newpred mode is not supported in HW; does libva support it?
+            ret = VBP_DATA;
+            break;
+        }
+
+        *quant_scale = _quant_scale;
+        *macroblock_number = _macroblock_number;
+
+        ret = VBP_OK;
+    }
+    while (0);
+    return ret;
+}
+
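+// Length of the resync marker in bits: a run of zeros followed by a '1'.
+// 17 bits for I-VOPs, 16 + max(fcode_forward, fcode_backward) bits (at least 18)
+// for B-VOPs, and 16 + vop_fcode_forward bits for other VOP types. For example,
+// a P-VOP with vop_fcode_forward = 2 (hypothetical) uses an 18-bit marker.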
+uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt)
+{
+    mp4_Info_t *pInfo = &(parser_cxt->info);
+    mp4_VideoObjectPlane_t *vidObjPlane =
+            &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    uint32 resync_marker_length = 0;
+    if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I)
+    {
+        resync_marker_length = 17;
+    }
+    else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B)
+    {
+        uint8 fcode_max = vidObjPlane->vop_fcode_forward;
+        if (fcode_max < vidObjPlane->vop_fcode_backward)
+        {
+            fcode_max = vidObjPlane->vop_fcode_backward;
+        }
+        resync_marker_length = 16 + fcode_max;
+
+        // resync_marker is max(15+fcode,17) zeros followed by a one
+        if (resync_marker_length < 18)
+            resync_marker_length = 18;
+    }
+    else
+    {
+        resync_marker_length = 16 + vidObjPlane->vop_fcode_forward;
+    }
+    return resync_marker_length;
+}
+
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    uint32 ret = VBP_OK;
+
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt =
+            (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+    vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data);
+    vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data);
+    VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param);
+
+    uint8 is_emul = 0;
+    uint32 bit_offset = 0;
+    uint32 byte_offset = 0;
+
+    // The offsets are relative to parent->parse_cubby.buf
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+    slice_data->buffer_addr = parent->parse_cubby.buf;
+
+    slice_data->slice_offset =
+            byte_offset + parent->list.data[list_index].stpos;
+    slice_data->slice_size =
+            parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+    slice_param->slice_data_size = slice_data->slice_size;
+    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+    slice_param->slice_data_offset = 0;
+    slice_param->macroblock_offset = bit_offset;
+    slice_param->macroblock_number = 0;
+    slice_param->quant_scale
+            = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant;
+
+    return ret;
+}
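+// When SEARCH_SYNC_OPT is defined, the resync-marker scan below reads three
+// bytes directly from the bitstream buffer and skips ahead byte-wise instead
+// of peeking resync_marker_length bits at every position.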
+#define SEARCH_SYNC_OPT
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    vbp_slice_data_mp42 *slice_data = NULL;
+    VASliceParameterBufferMPEG4* slice_param = NULL;
+
+    uint32 ret = VBP_OK;
+
+    uint8 is_emul = 0;
+    uint32 bit_offset = 0;
+    uint32 byte_offset = 0;
+
+    uint32 code = 0;
+    int32_t getbits = 0;
+    uint32 resync_marker_length = 0;
+
+    /* The offsets are relative to parent->parse_cubby.buf */
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+    picture_data = vbp_get_mp42_picture_data(query_data);
+    slice_data = &(picture_data->slice_data);
+    slice_param = &(slice_data->slice_param);
+
+    slice_data->buffer_addr = parent->parse_cubby.buf;
+
+    slice_data->slice_offset = byte_offset + parent->list.data[list_index].stpos;
+    slice_data->slice_size =
+            parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+    slice_param->slice_data_size = slice_data->slice_size;
+    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+    slice_param->slice_data_offset = 0;
+    slice_param->macroblock_offset = bit_offset;
+    slice_param->macroblock_number = 0;
+    slice_param->quant_scale
+            = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
+
+    if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable)
+    {
+        // no resync_marker
+        return VBP_OK;
+    }
+
+    // scan for resync_marker
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+    if (bit_offset)
+    {
+        // not byte-aligned: skip the remaining bits to reach the next byte boundary
+        getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+        if (getbits == -1)
+        {
+            return VBP_DATA;
+        }
+    }
+
+    // get resync_marker_length
+    resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    while (1)
+    {
+#ifndef SEARCH_SYNC_OPT
+        getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+
+        // return VBP_OK as resync_marker may not be present
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        if (code != 1)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 8);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            continue;
+        }
+#else
+
+        // read 3 bytes since resync_marker_length is between 17 bits and 23 bits
+        if (parent->getbits.bstrm_buf.buf_index + 3 > parent->getbits.bstrm_buf.buf_end)
+        {
+            break;
+        }
+
+        code = parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index] << 16 |
+                parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+1] << 8 |
+                parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+2];
+
+        if (code >> (24-resync_marker_length) != 1)
+        {
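+            // a byte-aligned resync marker needs at least two leading zero bytes,
+            // so the earliest possible match starts just past the last non-zero
+            // byte among the three bytes just read.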
+            int byte0 = code & 0xff;
+            int byte1 = (code >> 8) & 0xff;
+            if (byte0 != 0)
+            {
+                parent->getbits.bstrm_buf.buf_index += 3;
+            }
+            else if (byte1 != 0)
+            {
+                parent->getbits.bstrm_buf.buf_index += 2;
+            }
+            else
+            {
+                parent->getbits.bstrm_buf.buf_index += 1;
+            }
+            continue;
+        }
+#endif
+        // We found resync_marker
+        viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        // update slice data as we found resync_marker
+        slice_data->slice_size -=
+                (parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset);
+        slice_param->slice_data_size = slice_data->slice_size;
+
+        // skip resync marker
+        getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+
+        // return VBP_DATA, this should never happen!
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        // parse video_packet_header
+        ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt,
+                &quant_scale, &macroblock_number);
+
+        if (ret != VBP_OK)
+        {
+            ETRACE("Failed to parse video packet header.\n");
+            return ret;
+        }
+
+        // new_picture_flag = 0, this is not the first slice of a picture
+        vbp_fill_picture_param(pcontext, 0);
+
+        picture_data = vbp_get_mp42_picture_data(query_data);
+        slice_data = &(picture_data->slice_data);
+        slice_param = &(slice_data->slice_param);
+
+
+        viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        slice_data->buffer_addr = parent->parse_cubby.buf;
+
+        slice_data->slice_offset =
+                    byte_offset + parent->list.data[list_index].stpos;
+        slice_data->slice_size =
+                    parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+        slice_param->slice_data_size = slice_data->slice_size;
+        slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+        slice_param->slice_data_offset = 0;
+        slice_param->macroblock_offset = bit_offset;
+        slice_param->macroblock_number = macroblock_number;
+        slice_param->quant_scale = quant_scale;
+
+        if (bit_offset)
+        {
+            // byte-align parsing position
+            getbits = viddec_pm_skip_bits(parent,  8 - bit_offset);
+            if (getbits == -1)
+            {
+                ETRACE("Failed to align parser to byte position.\n");
+                return VBP_DATA;
+            }
+        }
+
+    }
+
+    return VBP_OK;
+}
+
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+    uint32 code = 0;
+    int32_t getbits = 0;
+
+    uint32 ret = VBP_DATA;
+
+
+    // setup bitstream parser
+    parent->getbits.list = &(parent->list);
+
+    parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf;
+    parent->getbits.bstrm_buf.buf_index = 0;
+    parent->getbits.bstrm_buf.buf_st = 0;
+    parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size;
+    parent->getbits.bstrm_buf.buf_bitoff = 0;
+
+    parent->getbits.au_pos = 0;
+    parent->getbits.list_off = 0;
+    parent->getbits.phase = 0;
+    parent->getbits.emulation_byte_counter = 0;
+
+    parent->list.start_offset = 0;
+    parent->list.end_offset = parent->parse_cubby.size;
+    parent->list.total_bytes = parent->parse_cubby.size;
+
+
+    // skip leading zero-byte
+    while (code == 0)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 8);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        getbits = viddec_pm_peek_bits(parent, &code, 8);
+        BREAK_GETBITS_FAIL(getbits, ret);
+    }
+
+    if (getbits != 0)
+    {
+        return VBP_DATA;
+    }
+
+    // a resync marker is represented as 17-23 bits (16-22 zero bits followed by a '1').
+    // as 16 zero bits have already been skipped, we parse the buffer bit by bit
+    // until a '1' bit is encountered or up to 7 bits have been parsed.
+    code = 0;
+    uint8 count = 0;
+    while (code == 0  && count < 7)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        count++;
+    }
+
+    if (code == 0 || getbits != 0)
+    {
+        ETRACE("no resync-marker in the buffer.\n");
+        return ret;
+    }
+
+    // resync marker is skipped
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    // parse video_packet_header
+    vbp_parse_video_packet_header_mp42(parent, parser_cxt, &quant_scale, &macroblock_number);
+
+    // new_picture_flag = 0, this is not the first slice of a picture
+    vbp_fill_picture_param(pcontext, 0);
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    vbp_slice_data_mp42 *slice_data = NULL;
+    VASliceParameterBufferMPEG4* slice_param = NULL;
+
+    picture_data = vbp_get_mp42_picture_data(query_data);
+    slice_data = &(picture_data->slice_data);
+    slice_param = &(slice_data->slice_param);
+
+    ret = vbp_process_slices_mp42(pcontext, 0);
+
+    // update the slice's QP and macroblock number as they are set to 0 by default.
+    slice_param->macroblock_number = macroblock_number;
+    slice_param->quant_scale = quant_scale;
+
+    // VOP must be coded!
+    picture_data->vop_coded = 1;
+    return ret;
+
+}
+
+
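+// Decode the variable-length dmv_length code of a sprite trajectory point, as
+// implemented below: a 2- or 3-bit prefix encodes lengths 0-5 directly
+// (dmv_length = code - 1), while the '111' escape is followed by a run of '1'
+// bits terminated by a '0', giving dmv_length = 6 + number of those '1' bits.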
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length)
+{
+    uint32 code, skip;
+    int32_t getbits = 0;
+    uint32 ret = VBP_DATA;
+    *dmv_length = 0;
+    skip = 3;
+    do
+    {
+        getbits = viddec_pm_peek_bits(parent, &code, skip);
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        if (code == 7)
+        {
+            viddec_pm_skip_bits(parent, skip);
+            getbits = viddec_pm_peek_bits(parent, &code, 9);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            skip = 1;
+            while ((code & 256) != 0)
+            {
+                // count number of 1 bits
+                code <<= 1;
+                skip++;
+            }
+            *dmv_length = 5 + skip;
+        }
+        else
+        {
+            skip = (code <= 1) ? 2 : 3;
+            *dmv_length = code - 1;
+        }
+        viddec_pm_skip_bits(parent, skip);
+        ret = VBP_OK;
+
+    }
+    while (0);
+    return ret;
+}
+
+
+static inline uint32 vbp_sprite_trajectory_mp42(
+    void *parent,
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane)
+{
+    uint32 code, i;
+    int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
+    uint32 ret = VBP_OK;
+    for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++)
+    {
+        ret = VBP_DATA;
+        ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+        if (ret != VBP_OK)
+        {
+            break;
+        }
+        if (dmv_length <= 0)
+        {
+            dmv_code = 0;
+        }
+        else
+        {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+            {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1)
+        {
+            ret = VBP_DATA;
+            break;
+        }
+        vidObjPlane->warping_mv_code_du[i] = dmv_code;
+        // TODO: create another inline function to avoid code duplication
+        ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+        if (ret != VBP_OK)
+        {
+            break;
+        }
+        // reset return value in case early break
+        ret = VBP_DATA;
+        if (dmv_length <= 0)
+        {
+            dmv_code = 0;
+        }
+        else
+        {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+            {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1)
+        {
+            break;
+        }
+        vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+        // set to VBP_OK
+        ret = VBP_OK;
+
+    }
+    return ret;
+}
+
+
+/*
+ * free memory of vbp_data_mp42 structure and its members
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_picture_data_mp42* current = NULL;
+    vbp_picture_data_mp42* next = NULL;
+
+    if (pcontext->parser_private)
+    {
+        free(pcontext->parser_private);
+        pcontext->parser_private = NULL;
+    }
+    if (query_data)
+    {
+        current = query_data->picture_data;
+        while (current != NULL)
+        {
+            next = current->next_picture_data;
+            free(current);
+            current = next;
+        }
+
+        free(query_data);
+    }
+
+    pcontext->query_data = NULL;
+    return VBP_OK;
+}
+
+/*
+ * Allocate memory for vbp_data_mp42 structure and all its members.
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data;
+    pcontext->query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_mp42, 1);
+    if (query_data == NULL)
+    {
+        goto cleanup;
+    }
+
+    pcontext->query_data = (void *) query_data;
+    query_data->picture_data = NULL;
+    query_data->number_picture_data = 0;
+    query_data->number_pictures = 0;
+
+    pcontext->parser_private = NULL;
+    vbp_mp42_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_mp42_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* init the pointer */
+    parser_private->short_video_header = TRUE;
+    return VBP_OK;
+
+cleanup:
+
+    vbp_free_query_data_mp42(pcontext);
+
+    return VBP_MEM;
+}
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.h b/mixvbp/vbp_manager/vbp_mp42_parser.h
new file mode 100755
index 0000000..93416b7
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.h
@@ -0,0 +1,66 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_MP42_PARSER_H
+#define VBP_MP42_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+
+uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext);
+
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse start code.
+ */
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext);
+
+#endif /*VBP_MP42_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c
new file mode 100755
index 0000000..72548f0
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_utils.c
@@ -0,0 +1,618 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+#include "vbp_h264_parser.h"
+#include "vbp_mp42_parser.h"
+#ifdef USE_HW_VP8
+#include "vbp_vp8_parser.h"
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+#include "vbp_h264secure_parser.h"
+#endif
+
+
+/* buffer counter */
+uint32 buffer_counter = 0;
+
+
+void* vbp_try_malloc0(uint32 size) {
+    void* pMem = malloc(size);
+    if (pMem)
+        memset(pMem, 0, size);
+    return pMem;
+}
+
+/**
+ *
+ * uninitialize parser context
+ *
+ */
+static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    if (NULL == pcontext)
+    {
+        return error;
+    }
+
+    /* no need to reset parser entry points. */
+
+    free(pcontext->parser_ops);
+    pcontext->parser_ops = NULL;
+
+
+    if (pcontext->fd_parser)
+    {
+        dlclose(pcontext->fd_parser);
+        pcontext->fd_parser = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * initialize parser context
+ *
+ */
+static uint32 vbp_utils_initialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+    char *parser_name;
+
+    switch (pcontext->parser_type)
+    {
+    case VBP_VC1:
+#ifndef ANDROID
+        parser_name = "libmixvbp_vc1.so.0";
+#else
+        parser_name = "libmixvbp_vc1.so";
+#endif
+        break;
+
+        /* MPEG-2 parser is not supported. */
+
+        /*  case VBP_MPEG2:
+        parser_name = "libmixvbp_mpeg2.so.0";
+        break;*/
+
+    case VBP_MPEG4:
+#ifndef ANDROID
+        parser_name = "libmixvbp_mpeg4.so.0";
+#else
+        parser_name = "libmixvbp_mpeg4.so";
+#endif
+        break;
+
+    case VBP_H264:
+#ifndef ANDROID
+        parser_name = "libmixvbp_h264.so.0";
+#else
+        parser_name = "libmixvbp_h264.so";
+#endif
+        break;
+#ifdef USE_HW_VP8
+    case VBP_VP8:
+#ifndef ANDROID
+        parser_name = "libmixvbp_vp8.so.0";
+#else
+        parser_name = "libmixvbp_vp8.so";
+#endif
+        break;
+#endif
+
+#ifdef USE_AVC_SHORT_FORMAT
+    case VBP_H264SECURE:
+        parser_name = "libmixvbp_h264secure.so";
+        break;
+#endif
+
+    default:
+        WTRACE("Unsupported parser type!");
+        return VBP_TYPE;
+    }
+
+    pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY);
+    if (NULL == pcontext->fd_parser)
+    {
+        ETRACE("Failed to load parser %s.", parser_name);
+        error =  VBP_LOAD;
+        goto cleanup;
+    }
+
+    pcontext->parser_ops = vbp_malloc(viddec_parser_ops_t, 1);
+    if (NULL == pcontext->parser_ops)
+    {
+        ETRACE("Failed to allocate memory");
+        error =  VBP_MEM;
+        goto cleanup;
+    }
+
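+// Expands to a case label that wires every format-specific entry point
+// (query-data allocation/free, init-data, start-code, parsing-result and
+// query-population handlers) for the codec identified by suffix Y.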
+#define SET_FUNC_POINTER(X, Y)\
+    case X:\
+    pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\
+    pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\
+    pcontext->func_free_query_data = vbp_free_query_data_##Y;\
+    pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\
+    pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\
+    pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\
+    pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\
+    break;
+
+    switch (pcontext->parser_type)
+    {
+        SET_FUNC_POINTER(VBP_VC1, vc1);
+        SET_FUNC_POINTER(VBP_MPEG4, mp42);
+        SET_FUNC_POINTER(VBP_H264, h264);
+#ifdef USE_HW_VP8
+        SET_FUNC_POINTER(VBP_VP8, vp8);
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+        SET_FUNC_POINTER(VBP_H264SECURE, h264secure);
+#endif
+    }
+#ifdef USE_AVC_SHORT_FORMAT
+    if (pcontext->parser_type == VBP_H264SECURE) {
+        pcontext->func_update_data = vbp_update_data_h264secure;
+    }
+#endif
+
+    /* set entry points for parser operations:
+    	init
+    	parse_sc
+    	parse_syntax
+    	get_cxt_size
+    	is_wkld_done
+    	is_frame_start
+    */
+    error = pcontext->func_init_parser_entries(pcontext);
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        /* no need to log error.  the loader would have done so already. */
+        vbp_utils_uninitialize_context(pcontext);
+    }
+
+    return error;
+}
+
+/**
+*
+* free allocated memory.
+*
+*/
+static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext)
+{
+    if (NULL == pcontext)
+    {
+        return VBP_OK;
+    }
+
+    if (pcontext->func_free_query_data)
+    {
+        pcontext->func_free_query_data(pcontext);
+    }
+
+    free(pcontext->workload2);
+    pcontext->workload2 = NULL;
+
+    free(pcontext->workload1);
+    pcontext->workload1 = NULL;
+
+    free(pcontext->persist_mem);
+    pcontext->persist_mem = NULL;
+
+    free(pcontext->parser_cxt);
+    pcontext->parser_cxt = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * allocate memory
+ *
+ */
+static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext)
+{
+    /* pcontext is guaranteed to be valid input. */
+    uint32 error = VBP_OK;
+    viddec_parser_memory_sizes_t sizes;
+
+    pcontext->parser_cxt = vbp_malloc(viddec_pm_cxt_t, 1);
+    if (NULL == pcontext->parser_cxt)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* invoke parser entry to get context size */
+    /* no return value, should always succeed. */
+    pcontext->parser_ops->get_cxt_size(&sizes);
+
+    /* allocate persistent memory for parser */
+    if (sizes.persist_size)
+    {
+        pcontext->persist_mem = malloc(sizes.persist_size);
+        if (NULL == pcontext->persist_mem)
+        {
+            ETRACE("Failed to allocate memory");
+            error = VBP_MEM;
+            goto cleanup;
+        }
+    }
+    else
+    {
+        /* OK for VC-1, MPEG2 and MPEG4. */
+        if ((VBP_VC1 == pcontext->parser_type) ||
+            (VBP_MPEG2 == pcontext->parser_type) ||
+            (VBP_MPEG4 == pcontext->parser_type)
+#ifdef USE_HW_VP8
+            || (VBP_VP8 == pcontext->parser_type)
+#endif
+)
+        {
+            pcontext->persist_mem = NULL;
+        }
+        else
+        {
+            /* mandatory for H.264 */
+            ETRACE("Failed to allocate memory");
+            error =  VBP_TYPE;
+            goto cleanup;
+        }
+    }
+
+    /* allocate a new workload with 1000 items. */
+    pcontext->workload1 = malloc(sizeof(viddec_workload_t) +
+                                       (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload1)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate a second workload with 1000 items. */
+    pcontext->workload2 = malloc(sizeof(viddec_workload_t) +
+                                       (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload2)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate format-specific query data */
+    error = pcontext->func_allocate_query_data(pcontext);
+
+cleanup:
+    if (error != VBP_OK)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+    }
+    return error;
+}
+
+
+
+/**
+ *
+ * parse the elementary sample buffer or codec configuration data
+ *
+ */
+static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    uint32 error = VBP_OK;
+    int i;
+
+    /* reset list number. func_parse_init_data or func_parse_start_code will
+    * set it equal to the number of sequence headers, picture headers or slice headers
+    * found in the sample buffer
+    */
+    cxt->list.num_items = 0;
+
+    /**
+    * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1
+    * for H.264 and MPEG-4, VC1 advanced profile and set to 0
+    * for VC1 simple or main profile when parsing the frame
+    * buffer. When parsing the sequence header, it must be set to 1
+    * always.
+    *
+    * PARSER IMPLEMENTOR: set this flag in the parser.
+    */
+
+    /*
+    if ((codec_type == VBP_H264)  || (codec_type == VBP_MPEG4))
+    {
+    	cxt->getbits.is_emul_reqd = 1;
+    }
+    */
+
+
+    /* populate the list.*/
+    if (init_data_flag)
+    {
+        error = pcontext->func_parse_init_data(pcontext);
+    }
+    else
+    {
+        error = pcontext->func_parse_start_code(pcontext);
+    }
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse the start code!");
+        return error;
+    }
+
+    /* set up bitstream buffer */
+    cxt->getbits.list = &(cxt->list);
+
+    /* setup buffer pointer */
+    cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+    // TODO: check if cxt->getbits.is_emul_reqd is set properly
+
+    for (i = 0; i < cxt->list.num_items; i++)
+    {
+        /* setup bitstream parser */
+        cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+        /* It is possible to end up with a bit offset not equal to zero. */
+        cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+        cxt->getbits.au_pos = 0;
+        cxt->getbits.list_off = 0;
+        cxt->getbits.phase = 0;
+        cxt->getbits.emulation_byte_counter = 0;
+
+        cxt->list.start_offset = cxt->list.data[i].stpos;
+        cxt->list.end_offset = cxt->list.data[i].edpos;
+        cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+        /* invoke parse entry point to parse the buffer */
+        error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+        /* can't return error for now. Needs further investigation */
+#if 0
+        if (0 != error)
+        {
+            ETRACE("failed to parse the syntax: %d!", error);
+            return error;
+        }
+#endif
+
+        /* process parsing result */
+        error = pcontext->func_process_parsing_result(pcontext, i);
+
+        if (VBP_MULTI == error) {
+            return VBP_OK;
+        }
+        else if (0 != error)
+        {
+            ETRACE("Failed to process parsing result.");
+            return error;
+        }
+    }
+
+    return VBP_OK;
+}
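+
+/*
+ * Worked example (hypothetical numbers): if func_parse_start_code fills the
+ * list with two items over a 100-byte cubby,
+ *     list.data[0] = { stpos = 0,  edpos = 40  }
+ *     list.data[1] = { stpos = 40, edpos = 100 }
+ * the loop above configures the bit reader per item roughly as
+ *     item 0: buf_index = buf_st = 0,  buf_end = 40,  total_bytes = 40
+ *     item 1: buf_index = buf_st = 40, buf_end = 100, total_bytes = 60
+ * and hands each window to ops->parse_syntax() in turn.
+ */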
+
+
+/**
+ *
+ * create the parser context
+ *
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
+{
+    uint32 error = VBP_OK;
+    vbp_context *pcontext = NULL;
+
+    /* initialize the output in case of failure */
+    *ppcontext =  NULL;
+
+    pcontext = vbp_malloc_set0(vbp_context, 1);
+    if (NULL == pcontext)
+    {
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    pcontext->parser_type = parser_type;
+
+    /* load parser, initialize parser operators and entry points */
+    error = vbp_utils_initialize_context(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    /* allocate parser context, persistent memory, query data and workload */
+    error = vbp_utils_allocate_parser_memory(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
+    pcontext->parser_cxt->cur_buf.list_index = -1;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    /* invoke the entry point to initialize the parser. */
+    pcontext->parser_ops->init(
+        (uint32_t *)pcontext->parser_cxt->codec_data,
+        (uint32_t *)pcontext->persist_mem,
+        FALSE);
+
+    /* set up to find the first start code. */
+    pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+
+    /* indicates initialized OK. */
+    pcontext->identifier = MAGIC_NUMBER;
+    *ppcontext = pcontext;
+    error = VBP_OK;
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+        vbp_utils_uninitialize_context(pcontext);
+        free(pcontext);
+        pcontext = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * destroy the context.
+ *
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext)
+{
+    /* entry point, no need to validate input parameters. */
+    vbp_utils_free_parser_memory(pcontext);
+    vbp_utils_uninitialize_context(pcontext);
+    free(pcontext);
+    pcontext = NULL;
+
+    return VBP_OK;
+}
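+
+/*
+ * Typical caller sequence (a minimal sketch with hypothetical buffers and no
+ * error handling), using only the entry points declared in vbp_utils.h:
+ *
+ *     vbp_context *ctx = NULL;
+ *     vbp_utils_create_context(VBP_VC1, &ctx);
+ *     vbp_utils_parse_buffer(ctx, es_buf, es_size, 0);  // init_data_flag: 1 for codec config data, 0 for ES data
+ *     void *out = NULL;
+ *     vbp_utils_query(ctx, &out);                       // out points to format-specific query data
+ *     vbp_utils_destroy_context(ctx);
+ */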
+
+
+/**
+ *
+ * parse the sample buffer or parser configuration data.
+ *
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size,  uint8 init_data_flag)
+{
+    /* entry point, no need to validate input parameters. */
+
+    uint32 error = VBP_OK;
+
+    //ITRACE("buffer counter: %d",buffer_counter);
+
+    /* reset bit offset */
+    pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+
+    /* set up cubby. */
+    pcontext->parser_cxt->parse_cubby.buf = data;
+    pcontext->parser_cxt->parse_cubby.size = size;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+    /* rolling count of buffers. */
+    if (0 == init_data_flag)
+    {
+        buffer_counter++;
+    }
+    return error;
+}
+
+/**
+ *
+ * provide query data back to the consumer
+ *
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data)
+{
+    /* entry point, no need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_populate_query_data(pcontext);
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+
+/**
+ *
+ * flush the parsing buffer. Currently always succeeds.
+ *
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    if (ops->flush != NULL) {
+        ops->flush((void *)cxt, (void *)&(cxt->codec_data[0]));
+    }
+    return VBP_OK;
+}
+
+
+#ifdef USE_AVC_SHORT_FORMAT
+/**
+ *
+ * update the parser with new data and provide query data back to the consumer
+ *
+ */
+uint32 vbp_utils_update(vbp_context *pcontext, void *newdata, uint32 size, void **data)
+{
+    /* entry point, no need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_update_data(pcontext, newdata, size);
+
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+#endif
diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h
new file mode 100755
index 0000000..7761c26
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_utils.h
@@ -0,0 +1,140 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_UTILS_H
+#define VBP_UTILS_H
+
+#include "viddec_parser_ops.h"
+#include "viddec_pm_parse.h"
+#include "viddec_pm.h"
+#include "vbp_trace.h"
+#include <stdlib.h>
+
+#define MAGIC_NUMBER 0x0DEADBEEF
+#define MAX_WORKLOAD_ITEMS 1000
+
+/* maximum 256 slices per sample buffer */
+#define MAX_NUM_SLICES 256
+
+/* maximum two pictures per sample buffer */
+#define MAX_NUM_PICTURES 2
+
+
+#define vbp_malloc(struct_type, n_structs) \
+    ((struct_type *) malloc(sizeof(struct_type) * n_structs))
+
+#define vbp_malloc_set0(struct_type, n_structs) \
+    ((struct_type *) vbp_try_malloc0(sizeof(struct_type) * n_structs))
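+
+/* Usage sketch: the line below would expand to a zero-filled allocation of one
+ * vbp_context (the cast comes from the macro itself):
+ *     vbp_context *ctx = vbp_malloc_set0(vbp_context, 1);
+ *     // => (vbp_context *) vbp_try_malloc0(sizeof(vbp_context) * 1)
+ */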
+
+
+
+extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+
+/* rolling counter of sample buffer */
+extern uint32 buffer_counter;
+
+typedef struct vbp_context_t vbp_context;
+
+typedef uint32 (*function_init_parser_entries)(vbp_context* cxt);
+typedef uint32 (*function_allocate_query_data)(vbp_context* cxt);
+typedef uint32 (*function_free_query_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_init_data)(vbp_context* cxt);
+typedef uint32 (*function_parse_start_code)(vbp_context* cxt);
+typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i);
+typedef uint32 (*function_populate_query_data)(vbp_context* cxt);
+#ifdef USE_AVC_SHORT_FORMAT
+typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size);
+#endif
+
+struct vbp_context_t
+{
+    /* magic number */
+    uint32 identifier;
+
+    /* parser type, e.g. MPEG-2, MPEG-4, H.264, VC1 */
+    uint32 parser_type;
+
+    /* handle to parser (shared object) */
+    void *fd_parser;
+
+    /* parser (shared object) entry points */
+    viddec_parser_ops_t *parser_ops;
+
+    /* parser context */
+    viddec_pm_cxt_t *parser_cxt;
+
+    /* workloads */
+    viddec_workload_t *workload1, *workload2;
+
+    /* persistent memory for parser */
+    uint32 *persist_mem;
+
+    /* format specific query data */
+    void *query_data;
+
+    /* parser-type-specific data */
+    void *parser_private;
+
+    function_init_parser_entries func_init_parser_entries;
+    function_allocate_query_data func_allocate_query_data;
+    function_free_query_data func_free_query_data;
+    function_parse_init_data func_parse_init_data;
+    function_parse_start_code func_parse_start_code;
+    function_process_parsing_result func_process_parsing_result;
+    function_populate_query_data func_populate_query_data;
+#ifdef USE_AVC_SHORT_FORMAT
+    function_update_data func_update_data;
+#endif
+};
+
+
+void* vbp_try_malloc0(uint32 size);
+
+/**
+ * create VBP context
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext);
+
+/*
+ * destroy VBP context
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext);
+
+/*
+ * parse bitstream
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data);
+
+/*
+ * flush un-parsed bitstream
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext);
+
+#endif /* VBP_UTILS_H */
diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c
new file mode 100755
index 0000000..4a8d6d3
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vc1_parser.c
@@ -0,0 +1,1126 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <dlfcn.h>
+#include <string.h>
+
+#include "vc1.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+
+/* maximum number of macroblocks divided by 2, see va.h */
+#define MAX_BITPLANE_SIZE 16384
+
+/* Start code prefix is 0x00 0x00 0x01, which is 3 bytes. */
+#define PREFIX_SIZE 3
+
+static uint32 b_fraction_table[][9] = {
+    /* num       0  1  2  3  4  5   6   7   8   den */
+    /* 0 */    { 0, 0, 0, 0, 0, 0,  0,  0,  0 },
+    /* 1 */    { 0, 0, 0, 1, 3, 5,  9, 11, 17 },
+    /* 2 */    { 0, 0, 0, 2, 0, 6,  0, 12,  0 },
+    /* 3 */    { 0, 0, 0, 0, 4, 7,  0, 13, 18 },
+    /* 4 */    { 0, 0, 0, 0, 0, 8,  0, 14,  0 },
+    /* 5 */    { 0, 0, 0, 0, 0, 0, 10, 15, 19 },
+    /* 6 */    { 0, 0, 0, 0, 0, 0,  0, 16,  0 },
+    /* 7 */    { 0, 0, 0, 0, 0, 0,  0,  0, 20 }
+};
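+
+/* Worked example: BFRACTION_NUM = 2 and BFRACTION_DEN = 3 select
+ * b_fraction_table[2][3] = 2, while 3/5 maps to b_fraction_table[3][5] = 7;
+ * vbp_map_bfraction() below simply performs this bounds-checked lookup.
+ */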
+
+
+static uint8 vc1_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+
+    // reserved
+    {0, 0}
+};
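+
+/* Worked example: a parsed ASPECT_RATIO of 2 selects {12, 11} above, i.e. a
+ * 12:11 pixel aspect ratio; the value 15 signals explicitly coded sizes and is
+ * handled separately in vbp_populate_query_data_vc1().
+ */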
+
+
+
+/**
+ * set parser entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done");
+    if (NULL == pcontext->parser_ops->is_wkld_done)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame");
+    if (NULL == pcontext->parser_ops->is_frame_start)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+/**
+ * allocate query data structure
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+
+    vbp_data_vc1 *query_data = NULL;
+    query_data = vbp_malloc_set0(vbp_data_vc1, 1);
+    if (NULL == query_data)
+    {
+        return VBP_MEM;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->se_data = vbp_malloc_set0(vbp_codec_data_vc1, 1);
+    if (NULL == query_data->se_data)
+    {
+        goto cleanup;
+    }
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vc1, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVC1, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].packed_bitplanes = vbp_try_malloc0(MAX_BITPLANE_SIZE);
+        if (NULL == query_data->pic_data[i].packed_bitplanes)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].slc_data = vbp_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1));
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vc1(pcontext);
+
+    return VBP_MEM;
+}
+
+
+/**
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext)
+{
+    vbp_data_vc1 *query_data = NULL;
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].slc_data);
+            free(query_data->pic_data[i].packed_bitplanes);
+            free(query_data->pic_data[i].pic_parms);
+        }
+    }
+
+    free(query_data->pic_data);
+
+    free(query_data->se_data);
+
+    free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ * We want to create a list of buffer segments where each segment is a start
+ * code followed by all the data up to the next start code or to the end of
+ * the buffer.  In VC-1, it is common to get buffers with no start codes.  The
+ * parser proper doesn't really handle the situation where there are no SCs.
+ * In this case, we bypass the stripping of the SC prefix and assume a frame.
+ */
+static uint32 vbp_parse_start_code_helper_vc1(
+    viddec_pm_cxt_t *cxt,
+    viddec_parser_ops_t *ops,
+    int init_data_flag)
+{
+    uint32_t ret = VBP_OK;
+    viddec_sc_parse_cubby_cxt_t cubby;
+
+    /* make copy of cubby */
+    /* this doesn't copy the buffer, merely the structure that holds the buffer */
+    /* pointer.  Below, where we call parse_sc() the code starts the search for */
+    /* SCs at the beginning of the buffer pointed to by the cubby, so in our */
+    /* cubby copy we increment the pointer as we move through the buffer.  If */
+    /* you think of each start code followed either by another start code or the */
+    /* end of the buffer, then parse_sc() is returning information relative to */
+    /* current segment. */
+
+    cubby = cxt->parse_cubby;
+
+    cxt->list.num_items = 0;
+    cxt->list.data[0].stpos = 0;
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* codec initialization data is always start code prefixed. (may not start at position 0)
+     * sample buffer for AP has three start code patterns here:
+     * pattern 0: no start code at all, the whole buffer is a single segment item
+     * pattern 1: start codes for all segment items
+     * pattern 2: no start code for the first segment item, start codes for the remaining segment items
+     */
+
+    bool is_pattern_two = FALSE;
+
+    unsigned char start_code = 0;
+
+    while (1)
+    {
+        /* parse the created buffer for sc */
+        ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
+        if (ret == 1)
+        {
+            cubby.phase = 0;
+            start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos);
+#if 1
+            if (0 == init_data_flag &&
+                    PREFIX_SIZE != cubby.sc_end_pos &&
+                    0 == cxt->list.num_items)
+            {
+                /* buffer does not have start code at the beginning */
+                vc1_viddec_parser_t *parser = NULL;
+                vc1_metadata_t *seqLayerHeader = NULL;
+
+                parser = (vc1_viddec_parser_t *)cxt->codec_data;
+                seqLayerHeader = &(parser->info.metadata);
+                if (1 == seqLayerHeader->INTERLACE)
+                {
+                    /* this is a hack for interlaced field coding */
+                    /* handle field interlace coding. One sample contains two fields, where:
+                     * the first field does not have start code prefix,
+                     * the second field has start code prefix.
+                     */
+                    cxt->list.num_items = 1;
+                    cxt->list.data[0].stpos = 0;
+                    is_pattern_two = TRUE;
+                }
+            }
+#endif
+            if (cxt->list.num_items == 0)  /* found first SC. */
+            {
+                /* sc_end_pos gets us to the SC type.  We need to back up to the first zero */
+                cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE;
+            }
+            else
+            {
+                /* First we set the end position of the last segment. */
+                /* Since the SC parser searches from SC type to SC type and the */
+                /* sc_end_pos is relative to this segment only, we merely add */
+                /* sc_end_pos to the start to find the end. */
+                cxt->list.data[cxt->list.num_items - 1].edpos =
+                    cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+
+                /* Then we set the start position of the current segment. */
+                /* So I need to subtract 1 ??? */
+                cxt->list.data[cxt->list.num_items].stpos =
+                    cxt->list.data[cxt->list.num_items - 1].edpos;
+
+                if (is_pattern_two)
+                {
+                    cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE;
+                    /* restore to normal pattern */
+                    is_pattern_two = FALSE;
+                }
+            }
+            /* We need to set up the cubby buffer for the next time through parse_sc(). */
+            /* But even though we want the list to contain a segment as described */
+            /* above, we want the cubby buffer to start just past the prefix, or it will */
+            /* find the same SC again.  So I bump the cubby buffer past the prefix. */
+            cubby.buf = /*cubby.buf +*/
+                cxt->parse_cubby.buf +
+                cxt->list.data[cxt->list.num_items].stpos +
+                PREFIX_SIZE;
+
+            cubby.size = cxt->parse_cubby.size -
+                         cxt->list.data[cxt->list.num_items].stpos -
+                         PREFIX_SIZE;
+
+            if (start_code >= 0x0A && start_code <= 0x0F)
+            {
+                /* only put known start codes on the list
+                 * 0x0A: end of sequence
+                 * 0x0B: slice header
+                 * 0x0C: field header
+                 * 0x0D: frame header
+                 * 0x0E: entry point header
+                 * 0x0F: sequence header
+                 */
+                cxt->list.num_items++;
+            }
+            else
+            {
+                ITRACE("skipping unknown start code: %d", start_code);
+            }
+
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                WTRACE("Num items exceeds the limit!");
+                /* not fatal, just stop parsing */
+                break;
+            }
+        }
+        else
+        {
+            /* we get here if we reach the end of the buffer while looking for a SC. */
+            /* If we never found a SC, then num_items will never get incremented. */
+            if (cxt->list.num_items == 0)
+            {
+                /* If we don't find a SC we probably still have a frame of data. */
+                /* So let's bump the num_items or else later we will not parse the */
+                /* frame.   */
+                cxt->list.num_items = 1;
+            }
+            /* now we can set the end position of the last segment. */
+            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+            break;
+        }
+    }
+    return VBP_OK;
+}
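+
+/*
+ * Worked example (hypothetical offsets): for an advanced-profile buffer of 100
+ * bytes with start code prefixes at bytes 0 and 40, the helper above produces
+ * two list items, roughly
+ *     list.data[0] = { stpos = 0,  edpos = 40  }   // e.g. sequence header
+ *     list.data[1] = { stpos = 40, edpos = 100 }   // e.g. frame header
+ * If no start code is found at all, the whole buffer becomes a single item
+ * { stpos = 0, edpos = 100 }.
+ */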
+
+/*
+* parse initialization data (decoder configuration data).
+* for VC1 advanced profile, the data is the sequence header and
+* entry point header.
+* for VC1 main/simple profile, the data format is defined in the
+* VC1 spec, Annex J (decoder initialization metadata
+* structure 1 and structure 3).
+*/
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext)
+{
+    /**
+    * init data (aka decoder configuration data) must
+    * be start-code prefixed
+    */
+
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    return vbp_parse_start_code_helper_vc1(cxt, ops, 1);
+}
+
+
+
+/**
+* Parse start codes, VC1 main/simple profile does not have start code;
+* VC1 advanced may not have start code either.
+*/
+uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+
+    vc1_viddec_parser_t *parser = NULL;
+    vc1_metadata_t *seqLayerHeader = NULL;
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data;
+
+    /* Reset query data for the new sample buffer */
+    int i = 0;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->num_pictures = 0;
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].picture_is_skipped = 0;
+    }
+
+    parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    seqLayerHeader = &(parser->info.metadata);
+
+
+    /* WMV codec data will have a start code, but the WMV picture data won't. */
+    if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE)
+    {
+        return vbp_parse_start_code_helper_vc1(cxt, ops, 0);
+    }
+    else
+    {
+        /* WMV: vc1 simple or main profile. No start code present. */
+
+        /* must set is_emul_reqd to 0! */
+        cxt->getbits.is_emul_reqd = 0;
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit)
+{
+    uint8 value;
+
+    value = (data[*current_word] >> *current_bit) & 1;
+
+    /* Fix up bit/byte offsets.  Endianness?? */
+    if (*current_bit < 31)
+    {
+        ++(*current_bit);
+    }
+    else
+    {
+        ++(*current_word);
+        *current_bit = 0;
+    }
+
+    return value;
+}
+
+
+/**
+ *
+ */
+static uint32 vbp_pack_bitplane_vc1(
+    uint32 *from_plane,
+    uint8 *to_plane,
+    uint32 width,
+    uint32 height,
+    uint32 nibble_shift)
+{
+    uint32 error = VBP_OK;
+    uint32 current_word = 0;
+    uint32 current_bit = 0;  /* must agree with number in vbp_get_bit_vc1 */
+    uint32 i, j, n;
+    uint8 value;
+    uint32 stride = 0;
+
+    stride = 32 * ((width + 31) / 32);
+
+    for (i = 0, n = 0; i < height; i++)
+    {
+        for (j = 0; j < stride; j++)
+        {
+            if (j < width)
+            {
+                value = vbp_get_bit_vc1(
+                            from_plane,
+                            &current_word,
+                            &current_bit);
+
+                to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ? 0 : 4));
+                n++;
+            }
+            else
+            {
+                break;
+            }
+        }
+        if (stride > width)
+        {
+            current_word++;
+            current_bit = 0;
+        }
+    }
+
+    return error;
+}
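+
+/*
+ * Worked example for the packing above: macroblock n lands in byte n/2 of
+ * to_plane, in the high nibble when n is even and the low nibble when n is odd,
+ * at bit position nibble_shift within that nibble. With nibble_shift = 2 (e.g.
+ * the OVERFLAGS plane), n = 0 sets bit 6 of byte 0 and n = 1 sets bit 2 of
+ * byte 0. The stride rounds the width up to a multiple of 32 source bits, so a
+ * 45-MB-wide picture uses stride = 32 * ((45 + 31) / 32) = 64.
+ */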
+
+
+/**
+ *
+ */
+static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator)
+{
+    uint32 b_fraction = 0;
+
+    if ((numerator < 8) && (denominator < 9))
+    {
+        b_fraction = b_fraction_table[numerator][denominator];
+    }
+
+    return b_fraction;
+}
+
+/**
+ *
+ */
+static uint32 vbp_pack_bitplanes_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    uint32 error = VBP_OK;
+    if (0 == pic_data->pic_parms->bitplane_present.value)
+    {
+        /* return if bitplane is not present */
+        pic_data->size_bitplanes = 0;
+        memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE);
+        return error;
+    }
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+    /* set bit plane size */
+    pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2;
+
+
+    memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes);
+
+    /* see libva library va.h for nibble bit */
+    switch (picLayerHeader->PTYPE)
+    {
+    case VC1_I_FRAME:
+    case VC1_BI_FRAME:
+        if (picLayerHeader->OVERFLAGS.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->OVERFLAGS.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                2);
+        }
+        if (picLayerHeader->ACPRED.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->ACPRED.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                1);
+        }
+        if (picLayerHeader->FIELDTX.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->FIELDTX.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                0);
+        }
+        /* sanity check */
+        if (picLayerHeader->MVTYPEMB.imode ||
+                picLayerHeader->DIRECTMB.imode ||
+                picLayerHeader->SKIPMB.imode ||
+                picLayerHeader->FORWARDMB.imode)
+        {
+            ETRACE("Unexpected bit-plane type.");
+            error = VBP_TYPE;
+        }
+        break;
+
+    case VC1_P_FRAME:
+        if (picLayerHeader->MVTYPEMB.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->MVTYPEMB.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                2);
+        }
+        if (picLayerHeader->SKIPMB.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->SKIPMB.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                1);
+        }
+        if (picLayerHeader->DIRECTMB.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->DIRECTMB.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                0);
+        }
+        /* sanity check */
+        if (picLayerHeader->FIELDTX.imode ||
+                picLayerHeader->FORWARDMB.imode ||
+                picLayerHeader->ACPRED.imode ||
+                picLayerHeader->OVERFLAGS.imode )
+        {
+            ETRACE("Unexpected bit-plane type.");
+            error = VBP_TYPE;
+        }
+        break;
+
+    case VC1_B_FRAME:
+        if (picLayerHeader->FORWARDMB.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->FORWARDMB.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                2);
+        }
+        if (picLayerHeader->SKIPMB.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->SKIPMB.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                1);
+        }
+        if (picLayerHeader->DIRECTMB.imode)
+        {
+            vbp_pack_bitplane_vc1(
+                picLayerHeader->DIRECTMB.databits,
+                pic_data->packed_bitplanes,
+                seqLayerHeader->widthMB,
+                seqLayerHeader->heightMB,
+                0);
+        }
+        /* sanity check */
+        if (picLayerHeader->MVTYPEMB.imode ||
+                picLayerHeader->FIELDTX.imode ||
+                picLayerHeader->ACPRED.imode ||
+                picLayerHeader->OVERFLAGS.imode)
+        {
+            ETRACE("Unexpected bit-plane type.");
+            error = VBP_TYPE;
+        }
+        break;
+    }
+    return error;
+}
+
+
+/**
+ * fill the query data structure after sequence header, entry point header
+ * or a complete frame is parsed.
+ * NOTE: currently partial frame is not handled properly
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    /* first we get the SH/EP data.  Can we cut down on this? */
+    vbp_codec_data_vc1 *se_data = query_data->se_data;
+
+
+    uint32_t curHrdNum = seqLayerHeader->HRD_NUM_LEAKY_BUCKETS;
+
+    se_data->bit_rate = curHrdNum ?
+                        seqLayerHeader->hrd_initial_state.sLeakyBucket[curHrdNum -1].HRD_RATE :
+                        seqLayerHeader->hrd_initial_state.sLeakyBucket[0].HRD_RATE;
+
+    se_data->PROFILE = seqLayerHeader->PROFILE;
+    se_data->LEVEL = seqLayerHeader->LEVEL;
+    se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG;
+    se_data->PULLDOWN = seqLayerHeader->PULLDOWN;
+    se_data->INTERLACE = seqLayerHeader->INTERLACE;
+    se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG;
+    se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG;
+    se_data->PSF = seqLayerHeader->PSF;
+
+    // color matrix
+    if (seqLayerHeader->COLOR_FORMAT_FLAG)
+    {
+        se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF;
+    }
+    else
+    {
+        //ITU-R BT. 601-5.
+        se_data->MATRIX_COEF = 6;
+    }
+
+    // aspect ratio
+    if (seqLayerHeader->ASPECT_RATIO_FLAG == 1)
+    {
+        se_data->ASPECT_RATIO = seqLayerHeader->ASPECT_RATIO;
+        if (se_data->ASPECT_RATIO < 14)
+        {
+            se_data->ASPECT_HORIZ_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][0];
+            se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1];
+        }
+        else if (se_data->ASPECT_RATIO == 15)
+        {
+            se_data->ASPECT_HORIZ_SIZE = seqLayerHeader->ASPECT_HORIZ_SIZE;
+            se_data->ASPECT_VERT_SIZE = seqLayerHeader->ASPECT_VERT_SIZE;
+        }
+        else  // se_data->ASPECT_RATIO == 14
+        {
+            se_data->ASPECT_HORIZ_SIZE = 0;
+            se_data->ASPECT_VERT_SIZE = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        se_data->ASPECT_RATIO = 0;
+        se_data->ASPECT_HORIZ_SIZE = 0;
+        se_data->ASPECT_VERT_SIZE = 0;
+    }
+
+    se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK;
+    se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY;
+    se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG;
+    se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG;
+    se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER;
+    se_data->FASTUVMC = seqLayerHeader->FASTUVMC;
+    se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV;
+    se_data->DQUANT = seqLayerHeader->DQUANT;
+    se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM;
+    se_data->OVERLAP = seqLayerHeader->OVERLAP;
+    se_data->QUANTIZER = seqLayerHeader->QUANTIZER;
+    se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1;
+    se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1;
+    se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV;
+    se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG;
+    se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY;
+    se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG;
+    se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV;
+    se_data->RANGERED = seqLayerHeader->RANGERED;
+    se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+    se_data->MULTIRES = seqLayerHeader->MULTIRES;
+    se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+    se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+    se_data->REFDIST = seqLayerHeader->REFDIST;
+    se_data->widthMB = seqLayerHeader->widthMB;
+    se_data->heightMB = seqLayerHeader->heightMB;
+    se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+    se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+    se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+    /* update buffer number */
+    query_data->buf_number = buffer_counter;
+
+    if (query_data->num_pictures > 2)
+    {
+        WTRACE("sample buffer contains %d pictures", query_data->num_pictures);
+    }
+    return error;
+}
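+
+/*
+ * Worked example for the dimension fields above (hypothetical values):
+ * seqLayerHeader->width/height hold the coded size in units of two samples
+ * minus one, so a stored width of 359 gives CODED_WIDTH = (359 + 1) << 1 = 720,
+ * and a stored height of 287 gives CODED_HEIGHT = 576.
+ */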
+
+
+
+static void vbp_pack_picture_params_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
+
+    /* Then we get the picture header data.  Picture type need translation. */
+    pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
+
+    pic_parms->sequence_fields.value = 0;
+    pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN;
+    pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
+    pic_parms->sequence_fields.bits.tfcntrflag =  seqLayerHeader->TFCNTRFLAG;
+    pic_parms->sequence_fields.bits.finterpflag = seqLayerHeader->FINTERPFLAG;
+    pic_parms->sequence_fields.bits.psf = seqLayerHeader->PSF;
+    pic_parms->sequence_fields.bits.multires = seqLayerHeader->MULTIRES;
+    pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
+    pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
+    pic_parms->sequence_fields.bits.rangered = seqLayerHeader->RANGERED;
+    pic_parms->sequence_fields.bits.max_b_frames = seqLayerHeader->MAXBFRAMES;
+
+    pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
+    pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;
+
+    pic_parms->entrypoint_fields.value = 0;
+    pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
+    pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
+    pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;
+    pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG;
+
+    pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
+    pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;
+
+    pic_parms->range_mapping_fields.value = 0;
+    pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG;
+    pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY;
+    pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG;
+    pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV;
+
+    pic_parms->b_picture_fraction =
+        vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN);
+
+    pic_parms->cbp_table = picLayerHeader->CBPTAB;
+    pic_parms->mb_mode_table = picLayerHeader->MBMODETAB;
+    pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
+    pic_parms->rounding_control = picLayerHeader->RNDCTRL;
+    pic_parms->post_processing = picLayerHeader->POSTPROC;
+    /* fix this.  Add RESPIC to parser.  */
+    pic_parms->picture_resolution_index = 0;
+    pic_parms->luma_scale = picLayerHeader->LUMSCALE;
+    pic_parms->luma_shift = picLayerHeader->LUMSHIFT;
+
+    pic_parms->picture_fields.value = 0;
+    switch (picLayerHeader->PTYPE)
+    {
+    case VC1_I_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I;
+        break;
+
+    case VC1_P_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P;
+        break;
+
+    case VC1_B_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B;
+        break;
+
+    case VC1_BI_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI;
+        break;
+
+    case VC1_SKIPPED_FRAME:
+        pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED;
+        break;
+
+    default:
+        // TODO: handle this case
+        break;
+    }
+    pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM;
+    if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE)
+    {
+        /* simple or main profile, top field flag is not present, default to 1.*/
+        pic_parms->picture_fields.bits.top_field_first = 1;
+    }
+    else
+    {
+        pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF;
+    }
+
+    pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField);
+    /* This seems to be set based on the MVMODE and MVMODE2 syntax. */
+    /* This is a hack.  Probably will need refining. */
+    if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) ||
+            (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2))
+    {
+        pic_parms->picture_fields.bits.intensity_compensation = 1;
+    }
+    else
+    {
+        pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP;
+    }
+
+    /* Lets store the raw-mode BP bits. */
+    pic_parms->raw_coding.value = 0;
+    pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB;
+    pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB;
+    pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB;
+    pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX;
+    pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB;
+    pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED;
+    pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS;
+
+    /* imode 1/0 indicates bitmap presence in Pic Hdr. */
+    pic_parms->bitplane_present.value = 0;
+
+    pic_parms->bitplane_present.flags.bp_mv_type_mb =
+        pic_parms->raw_coding.flags.mv_type_mb ? 1 :
+        (picLayerHeader->MVTYPEMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_direct_mb =
+        pic_parms->raw_coding.flags.direct_mb ? 1 :
+        (picLayerHeader->DIRECTMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_skip_mb =
+        pic_parms->raw_coding.flags.skip_mb ? 1 :
+        (picLayerHeader->SKIPMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_field_tx =
+        pic_parms->raw_coding.flags.field_tx ? 1 :
+        (picLayerHeader->FIELDTX.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_forward_mb =
+        pic_parms->raw_coding.flags.forward_mb ? 1 :
+        (picLayerHeader->FORWARDMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_ac_pred =
+        pic_parms->raw_coding.flags.ac_pred ? 1 :
+        (picLayerHeader->ACPRED.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_overflags =
+        pic_parms->raw_coding.flags.overflags ? 1 :
+        (picLayerHeader->OVERFLAGS.imode ? 1: 0);
+
+    pic_parms->reference_fields.value = 0;
+    pic_parms->reference_fields.bits.reference_distance_flag =
+        seqLayerHeader->REFDIST_FLAG;
+
+    pic_parms->reference_fields.bits.reference_distance =
+        seqLayerHeader->REFDIST;
+
+    pic_parms->reference_fields.bits.num_reference_pictures =
+        picLayerHeader->NUMREF;
+
+    pic_parms->reference_fields.bits.reference_field_pic_indicator =
+        picLayerHeader->REFFIELD;
+
+    pic_parms->mv_fields.value = 0;
+    pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE;
+    pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2;
+
+    pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB;
+    pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB;
+    pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH;
+    pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB;
+    pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV;
+    pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE;
+    pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV;
+    pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE;
+
+    pic_parms->pic_quantizer_fields.value = 0;
+    pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT;
+    pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER;
+    pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP;
+    pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT;
+    pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant;
+    pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM;
+    pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE;
+    pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE;
+    pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE;
+    pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL;
+    pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT;
+
+    pic_parms->transform_fields.value = 0;
+    pic_parms->transform_fields.bits.variable_sized_transform_flag =
+        seqLayerHeader->VSTRANSFORM;
+
+    pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF;
+    pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM;
+
+    pic_parms->transform_fields.bits.transform_ac_codingset_idx1 =
+        (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0;
+
+    pic_parms->transform_fields.bits.transform_ac_codingset_idx2 =
+        (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0;
+
+    pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB;
+    pic_parms->sequence_fields.bits.profile = seqLayerHeader->PROFILE;
+}
+
+
+static void vbp_pack_slice_data_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 slice_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos;
+    uint32 bit;
+    uint32 byte;
+    uint8 is_emul;
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms);
+
+    /*uint32 data_offset = byte - cxt->list.data[index].stpos;*/
+
+    slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos;
+    slc_data->slice_size = slice_size;
+    slc_data->slice_offset = 0;
+
+    slc_parms->slice_data_size = slc_data->slice_size;
+    slc_parms->slice_data_offset = 0;
+
+    /* fix this.  we need to be able to handle partial slices. */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    slc_parms->macroblock_offset = bit + byte * 8;
+
+    /* fix this.  we need to get the slice_vertical_position from the code */
+    slc_parms->slice_vertical_position = pic_data->num_slices;
+
+    pic_data->num_slices++;
+}
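+
+/*
+ * Worked example for macroblock_offset above: viddec_pm_get_au_pos() reports
+ * the current parse position as a byte offset plus a bit offset, so byte = 5
+ * and bit = 3 (hypothetical values) give macroblock_offset = 3 + 5 * 8 = 43.
+ */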
+
+/**
+ * process parsing result
+ */
+uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 error = VBP_OK;
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    if (parser->start_code != VC1_SC_FRM && 
+        parser->start_code != VC1_SC_FLD &&
+        parser->start_code != VC1_SC_SLC)
+    {
+        /* only handle frame data, field data and slice data here */
+        return VBP_OK;
+    }
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+    {
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    if (query_data->num_pictures == 0)
+    {
+        ETRACE("Unexpected num of pictures.");
+        return VBP_DATA;
+    }
+
+    /* start packing data */
+    int picture_index = query_data->num_pictures - 1;
+    vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);
+
+    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+    {
+        /* setup picture parameter first*/
+        vbp_pack_picture_params_vc1(pcontext, index, pic_data);
+
+        /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */
+        error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
+        if (VBP_OK != error)
+        {
+            ETRACE("Failed to pack bitplane.");
+            return error;
+        }
+
+    }
+
+    /* Always pack the slice parameters. The first macroblock in the picture CANNOT
+     * be preceded by a slice header, so the first slice is always parsed.
+     */
+
+    if (pic_data->num_slices >= MAX_NUM_SLICES)
+    {
+        ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    /* set up slice parameter */
+    vbp_pack_slice_data_vc1(pcontext, index, pic_data);
+
+
+    return VBP_OK;
+}
diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.h b/mixvbp/vbp_manager/vbp_vc1_parser.h
new file mode 100755
index 0000000..aec7a56
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vc1_parser.h
@@ -0,0 +1,70 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef VBP_VC1_PARSER_H
+#define VBP_VC1_PARSER_H
+
+
+/*
+ * set up the parser's entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext);
+
+/*
+ * allocate query data structure - vbp_vc1_data
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream configuration data
+ */
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream start code and fill the viddec_input_buffer_t list.
+ * WMV has no start code so the whole buffer will be treated as a single frame.
+ * For VC1 progressive, if start code is not found, the whole buffer will be treated as a
+ * single frame as well.
+ * For VC1 interlace, the first field is not start code prefixed, but the second field
+ * is always start code prefixed.
+ */
+uint32 vbp_parse_start_code_vc1(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_vc1(vbp_context *pcontext, int list_index);
+
+/*
+ * populate query data structure
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext);
+
+
+#endif /*VBP_VC1_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c
new file mode 100755
index 0000000..72dcfa9
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.c
@@ -0,0 +1,532 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <dlfcn.h>
+
+#include "vp8.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vp8_parser.h"
+
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vp8_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = NULL;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vp8_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vp8_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = NULL;
+
+    /* entry point not needed */
+    pcontext->parser_ops->is_frame_start = NULL;
+
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    vbp_data_vp8 *query_data = vbp_malloc_set0(vbp_data_vp8, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vp8, VP8_MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i = 0;
+    for (i = 0; i < VP8_MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVP8, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_vp8, VP8_MAX_NUM_SLICES);
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_vp8, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    query_data->prob_data = vbp_malloc_set0(VAProbabilityDataBufferVP8, 1);
+    if (NULL == query_data->prob_data)
+    {
+        goto cleanup;
+    }
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferVP8, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    pcontext->parser_private = NULL;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vp8(pcontext);
+
+    return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    vbp_data_vp8 *query_data = (vbp_data_vp8 *)pcontext->query_data;
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < VP8_MAX_NUM_PICTURES; i++)
+        {
+            if (query_data->pic_data[i].pic_parms)
+            {
+                free(query_data->pic_data[i].pic_parms);
+                query_data->pic_data[i].pic_parms = NULL;
+            }
+            if (query_data->pic_data[i].slc_data)
+            {
+                free(query_data->pic_data[i].slc_data);
+                query_data->pic_data[i].slc_data = NULL;
+            }
+        }
+        free(query_data->pic_data);
+        query_data->pic_data = NULL;
+    }
+
+    if (query_data->codec_data)
+    {
+        free(query_data->codec_data);
+        query_data->codec_data = NULL;
+    }
+
+    if (query_data->prob_data)
+    {
+        free(query_data->prob_data);
+        query_data->prob_data = NULL;
+    }
+
+    if (query_data->IQ_matrix_buf)
+    {
+        free(query_data->IQ_matrix_buf);
+        query_data->IQ_matrix_buf = NULL;
+    }
+
+    free(query_data);
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+* parse decoder configuration data
+*/
+uint32 vbp_parse_init_data_vp8(vbp_context* pcontext)
+{
+    // VP8 has no separate decoder configuration data, so there is nothing to parse here.
+    return VBP_OK;
+}
+
+uint32 vbp_parse_start_code_vp8(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8 *buf = cxt->parse_cubby.buf;
+    uint32 length = cxt->parse_cubby.size;
+    if (length < 3)
+    {
+        return VBP_DATA;
+    }
+
+    // check whether it is a key frame
+    if ((length >= 10) && !(buf[0] & 0x01))
+    {
+        uint8 *c = buf + 3;
+
+        // check start code
+        if ((c[0] != 0x9d) || (c[1] != 0x01) || (c[2] != 0x2a))
+        {
+            return VBP_PARM;
+        }
+    }
+
+    // Workaround: the whole VP8 frame is treated as a single item in the parser list.
+    cxt->list.num_items = 1;
+
+    vbp_data_vp8 *query_data = (vbp_data_vp8*)pcontext->query_data;
+    query_data->num_pictures = 0;
+
+    return VBP_OK;
+}
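+
+/* Reference (RFC 6386): every VP8 frame begins with a 3-byte frame tag whose lowest bit
+   of the first byte gives the frame type (0 = key frame, 1 = interframe). A key frame is
+   followed by the 3-byte start code 0x9d 0x01 0x2a and 4 bytes of width/height, which is
+   why the parser above requires at least 3 bytes overall and 10 bytes for a key frame. */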
+
+/**
+*
+* process parsing result after a frame is parsed
+*
+*/
+uint32 vbp_process_parsing_result_vp8( vbp_context *pcontext, int i)
+{
+    vp8_viddec_parser *parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+    switch (parser->info.frame_tag.frame_type)
+    {
+    case KEY_FRAME:
+        //ITRACE("This is a key frame.");
+        parser->info.decoded_frame_number++;
+        break;
+    case INTER_FRAME:
+        //ITRACE("This is an inter frame.");
+        parser->info.decoded_frame_number++;
+        break;
+    case SKIPPED_FRAME:
+        WTRACE("This is skipped frame. We have done nothing.");
+        break;
+    default:
+        ETRACE("Unknown frame type %d", parser->info.frame_tag.frame_type);
+        break;
+    }
+
+    //ITRACE("Decoded frame ID = %d", parser->info.decoded_frame_number);
+
+    return VBP_OK;
+}
+
+static void vbp_add_quantization_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    VAIQMatrixBufferVP8 *IQ_buf = query_data->IQ_matrix_buf;
+
+    int i = 0;
+    if (pi->Segmentation.Enabled)
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta)
+            {
+                IQ_buf->quantization_index[i][0] = pi->Segmentation.FeatureData[MB_LVL_ALT_Q][i];
+            }
+            else
+            {
+                int temp = pi->Quantization.Y1_AC + pi->Segmentation.FeatureData[MB_LVL_ALT_Q][i];
+                IQ_buf->quantization_index[i][0] = (temp >= 0) ? ((temp <= MAX_QINDEX) ? temp : MAX_QINDEX) : 0;
+            }
+        }
+    }
+    else
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            IQ_buf->quantization_index[i][0] = pi->Quantization.Y1_AC;
+        }
+    }
+
+    for (i = 0; i < MAX_MB_SEGMENTS; i++)
+    {
+        IQ_buf->quantization_index[i][1] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y1_DC_Delta;
+        IQ_buf->quantization_index[i][2] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y2_DC_Delta;
+        IQ_buf->quantization_index[i][3] = IQ_buf->quantization_index[i][0] + pi->Quantization.Y2_AC_Delta;
+        IQ_buf->quantization_index[i][4] = IQ_buf->quantization_index[i][0] + pi->Quantization.UV_DC_Delta;
+        IQ_buf->quantization_index[i][5] = IQ_buf->quantization_index[i][0] + pi->Quantization.UV_AC_Delta;
+    }
+}
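+
+/* For each of the MAX_MB_SEGMENTS segments the table filled above holds, in order:
+   [0] Y1 AC (the per-segment base index, clamped to [0, MAX_QINDEX] in delta mode),
+   [1] Y1 DC, [2] Y2 DC, [3] Y2 AC, [4] UV DC and [5] UV AC, each formed from the base
+   index plus the corresponding delta signalled in the frame header. */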
+
+static void vbp_add_probs_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    FrameContextData *fc = &(parser->info.FrameContext);
+    VAProbabilityDataBufferVP8 *prob_data = query_data->prob_data;
+
+    /* DCT coefficients probability */
+    memcpy(prob_data->dct_coeff_probs, fc->DCT_Coefficients, 4*8*3*11*sizeof(uint8_t));
+}
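+
+/* The DCT coefficient probability table is laid out as
+   [4 block types][8 coefficient bands][3 contexts][11 probabilities],
+   matching the 4*8*3*11 bytes copied above. */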
+
+static void vbp_set_codec_data_vp8(vp8_viddec_parser *parser, vbp_codec_data_vp8* codec_data)
+{
+    vp8_Info *pi = &(parser->info);
+
+    codec_data->frame_type = pi->frame_tag.frame_type;
+    codec_data->version_num = pi->frame_tag.version;
+    codec_data->show_frame = pi->frame_tag.show_frame;
+
+    codec_data->frame_width = ((pi->width + 15) / 16) * 16;
+    codec_data->frame_height = ((pi->height + 15) / 16) * 16;
+
+    codec_data->crop_top = 0;
+    codec_data->crop_bottom = codec_data->frame_height - pi->height;
+    codec_data->crop_left = 0;
+    codec_data->crop_right = codec_data->frame_width - pi->width;
+
+    codec_data->refresh_alt_frame = pi->refresh_af;
+    codec_data->refresh_golden_frame = pi->refresh_gf;
+    codec_data->refresh_last_frame = pi->refresh_lf;
+
+    codec_data->golden_copied = pi->golden_copied;
+    codec_data->altref_copied = pi->altref_copied;
+}
+
+static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    query_data->num_pictures++;
+
+    if (query_data->num_pictures > 1)
+    {
+        ETRACE("Num of pictures (%d) per sample buffer exceeds the limit %d.", query_data->num_pictures, VP8_MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    int i = 0;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("MB address does not start from 0!");
+        return VBP_DATA;
+    }
+
+    vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_data_index]);
+    VAPictureParameterBufferVP8 *pic_parms = pic_data->pic_parms;
+
+    pic_parms->frame_width = pi->width;
+    pic_parms->frame_height = pi->height;
+
+    pic_parms->pic_fields.value = 0;
+    pic_parms->pic_fields.bits.key_frame = pi->frame_tag.frame_type;
+    pic_parms->pic_fields.bits.version = pi->frame_tag.version;
+
+    /* Segmentation */
+    pic_parms->pic_fields.bits.segmentation_enabled = pi->Segmentation.Enabled;
+    pic_parms->pic_fields.bits.update_mb_segmentation_map = pi->Segmentation.UpdateMap;
+    pic_parms->pic_fields.bits.update_segment_feature_data = pi->Segmentation.UpdateData;
+    memcpy(pic_parms->mb_segment_tree_probs, pi->Segmentation.TreeProbs, sizeof(unsigned char) * MB_FEATURE_TREE_PROBS);
+
+    /* Loop filter data */
+    pic_parms->pic_fields.bits.filter_type = pi->LoopFilter.Type;
+    pic_parms->pic_fields.bits.sharpness_level = pi->LoopFilter.Sharpness;
+    pic_parms->pic_fields.bits.loop_filter_adj_enable = pi->LoopFilter.DeltaEnabled;
+    pic_parms->pic_fields.bits.mode_ref_lf_delta_update = pi->LoopFilter.DeltaUpdate;
+
+    int baseline_filter_level[MAX_MB_SEGMENTS];
+    if (pi->Segmentation.Enabled)
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta)
+            {
+                baseline_filter_level[i] = pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i];
+            }
+            else
+            {
+                baseline_filter_level[i] = pi->LoopFilter.Level + pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i];
+                baseline_filter_level[i] = (baseline_filter_level[i] >= 0) ? ((baseline_filter_level[i] <= MAX_LOOP_FILTER) ? baseline_filter_level[i] : MAX_LOOP_FILTER) : 0;  /* Clamp to valid range */
+            }
+        }
+    }
+    else
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            baseline_filter_level[i] = pi->LoopFilter.Level;
+        }
+    }
+    for (i = 0; i < MAX_MB_SEGMENTS; i++)
+    {
+        pic_parms->loop_filter_level[i] = baseline_filter_level[i];
+    }
+    if ((pic_parms->pic_fields.bits.version == 0) || (pic_parms->pic_fields.bits.version == 1))
+    {
+        pic_parms->pic_fields.bits.loop_filter_disable = pic_parms->loop_filter_level[0] > 0 ? true : false;
+    }
+    memcpy(pic_parms->loop_filter_deltas_ref_frame, pi->LoopFilter.DeltasRef, sizeof(char) * MAX_REF_LF_DELTAS);
+    memcpy(pic_parms->loop_filter_deltas_mode, pi->LoopFilter.DeltasMode, sizeof(char) * MAX_MODE_LF_DELTAS);
+
+    pic_parms->pic_fields.bits.sign_bias_golden = pi->sign_bias_golden;
+    pic_parms->pic_fields.bits.sign_bias_alternate = pi->sign_bias_alternate;
+
+    pic_parms->pic_fields.bits.mb_no_coeff_skip = pi->mb_no_coeff_skip;
+    pic_parms->pic_fields.bits.mb_skip_coeff = pi->mb_skip_coeff;
+
+    pic_parms->prob_skip_false = pi->prob_skip_false;
+    pic_parms->prob_intra = pi->prob_intra;
+    pic_parms->prob_last = pi->prob_lf;
+    pic_parms->prob_gf = pi->prob_gf;
+
+    FrameContextData *fc = &(parser->info.FrameContext);
+    memcpy(pic_parms->y_mode_probs, fc->Y_Mode_Prob, sizeof(unsigned char) * 4);
+    memcpy(pic_parms->uv_mode_probs, fc->UV_Mode_Prob, sizeof(unsigned char) * 3);
+    /* Motion vector context */
+    for (i = 0; i < 2; i++)
+    {
+        memcpy(pic_parms->mv_probs[i], fc->MVContext[i], sizeof(unsigned char) * 19);
+    }
+
+    /* Bool coder */
+    pic_parms->bool_coder_ctx.range = pi->bool_coder.range;
+    pic_parms->bool_coder_ctx.value = (pi->bool_coder.value >> 24) & 0xFF;
+    pic_parms->bool_coder_ctx.count = pi->bool_coder.count;
+
+    //pic_parms->current_picture = VA_INVALID_SURFACE;
+    pic_parms->last_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->golden_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->alt_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->out_of_loop_frame = VA_INVALID_SURFACE; //Reserved for future use
+
+    /* specify the slice number */
+    pic_data->num_slices = 0;
+
+    return VBP_OK;
+}
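+
+/* The reference surface IDs are deliberately left as VA_INVALID_SURFACE: the parser has
+   no knowledge of VA surfaces, so the decoder that consumes this picture parameter
+   buffer is expected to fill in the actual reference surfaces. */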
+
+static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    int32_t pic_index = query_data->num_pictures - 1;
+    uint32_t part_index = 0;
+    if (pic_index < 0)
+    {
+        ETRACE("Invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_index]);
+    vbp_slice_data_vp8 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+
+    slc_data->buffer_addr = pi->source;
+    slc_data->slice_offset = 0;
+    slc_data->slice_size = pi->source_sz;
+
+    VASliceParameterBufferVP8 *slc_parms = &(slc_data->slc_parms);
+    /* number of bytes in the slice data buffer for this slice */
+    slc_parms->slice_data_size = slc_data->slice_size;
+
+    /* the offset to the first byte of slice data */
+    slc_parms->slice_data_offset = 0;
+
+    /* see VA_SLICE_DATA_FLAG_XXX definitions */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the first bit of MB from the first byte of slice data */
+    slc_parms->macroblock_offset = pi->header_bits;
+
+    /* Token Partitions */
+    slc_parms->num_of_partitions = pi->partition_count;
+    slc_parms->partition_size[0] = pi->frame_tag.first_part_size;
+    for (part_index = 1; part_index < 9; part_index++)
+    {
+        slc_parms->partition_size[part_index] = pi->partition_size[part_index - 1];
+    }
+
+    pic_data->num_slices++;
+    if (pic_data->num_slices > VP8_MAX_NUM_SLICES)
+    {
+        ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+    return VBP_OK;
+}
+
+/*
+*
+* fill query data structure after sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_vp8(vbp_context *pcontext)
+{
+    int32_t error = VBP_OK;
+
+    vbp_data_vp8 *query_data = NULL;
+    vp8_viddec_parser *parser = NULL;
+
+    parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_vp8 *)pcontext->query_data;
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* Populate picture data */
+    error = vbp_add_pic_data_vp8(parser, query_data);
+
+    /* Populate slice data */
+    if (error == VBP_OK)
+    {
+        error = vbp_add_slice_data_vp8(parser, query_data);
+        if (error != VBP_OK)
+            return error;
+    }
+
+    /* Populate codec data */
+    vbp_set_codec_data_vp8(parser, query_data->codec_data);
+
+    /* Populate probability table */
+    vbp_add_probs_data_vp8(parser, query_data);
+
+    /* Populate quantization */
+    vbp_add_quantization_data_vp8(parser, query_data);
+
+    return VBP_OK;
+}
diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.h b/mixvbp/vbp_manager/vbp_vp8_parser.h
new file mode 100755
index 0000000..3b6407e
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.h
@@ -0,0 +1,67 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_VP8_PARSER_H
+#define VBP_VP8_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_vp8(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_vp8(vbp_context *pcontext);
+
+/*
+ * parse start code. Only length-prefixed mode is supported; start-code
+ * prefixed mode is not supported.
+ */
+uint32 vbp_parse_start_code_vp8(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_vp8(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_vp8(vbp_context *pcontext);
+
+
+
+#endif /*VBP_VP8_PARSER_H*/
diff --git a/mixvbp/vbp_manager/viddec_parse_sc.c b/mixvbp/vbp_manager/viddec_parse_sc.c
new file mode 100755
index 0000000..b3f8d0b
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_parse_sc.c
@@ -0,0 +1,218 @@
+#include "viddec_pm_parse.h"
+
+#ifndef MFDBIGENDIAN
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
+{
+    uint8_t *ptr;
+    uint32_t data_left=0, phase = 0, ret = 0;
+    uint32_t single_byte_table[3][2] = {{1, 0}, {2, 0}, {2, 3}};
+    viddec_sc_parse_cubby_cxt_t *cxt;
+    /* What is phase?: phase is a value in [0-4]; it tracks consecutive '0' bytes.
+       Any time a '0' is found it is incremented by 1 (up to 2) and reset to 0 when a byte that does not fit the pattern is found.
+       If the third start-code byte is found while the current phase is 2, the phase is changed to 3, which means the pattern
+       we are looking for has been found. It is incremented to 4 once we see a byte after this pattern. */
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    pcxt=pcxt;
+
+    /* parse while there is more data and the start code has not been found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are 16-byte aligned & phase=0 & more than 16 bytes left;
+           if that's the case we can check 16 bytes at a time instead of byte by byte */
+
+        if (((((uint32_t)ptr) & 0xF) == 0) && (phase == 0) && (data_left > 0xF))
+        {
+            // 15 14 13 12 11 10 09 08 07 06 05 04 03 02 01 00      -- check 16 bytes at one time
+            // 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ??      -- if no 00 at byte position: 15,13,11,09,07,05,03,01
+            // it is impossible to have 0x010000 at these 16 bytes.
+            // so we can drop 16 bytes at a time (increase ptr, decrease data_left and keep phase = 0)
+            __asm__(
+            //Data input
+            "movl %1, %%ecx\n\t"                   //ptr-->ecx
+            "movl %0, %%eax\n\t"                   //data_left-->eax
+
+            //Main compare loop
+            "MATCH_8_ZERO:\n\t"
+            "pxor %%xmm0,%%xmm0\n\t"               //0 --> xmm0
+            "pcmpeqb (%%ecx),%%xmm0\n\t"           //uint128_data[ptr] eq xmm0 --> xmm0 , For each byte do calculation,  (byte == 0x00)?0xFF:0x00
+            "pmovmskb %%xmm0, %%edx\n\t"           //xmm0(128)-->edx(32), edx[0]=xmm0[7], edx[1]=xmm0[15], ... , edx[15]=xmm0[127], edx[31-16]=0x0000
+            "test $0xAAAA, %%edx\n\t"              //edx& 1010 1010 1010 1010b
+            "jnz DATA_RET\n\t"                     //Not equal to zero means that at least one byte is 0x00.
+
+            "PREPARE_NEXT_MATCH:\n\t"
+            "add $0x10, %%ecx\n\t"                 //16 + ecx --> ecx
+            "sub $0x10, %%eax\n\t"                 //eax-16 --> eax
+            "cmp $0x10, %%eax\n\t"                 //eax >= 16?
+            "jge MATCH_8_ZERO\n\t"                 //search next 16 bytes
+
+            "DATA_RET:\n\t"
+            "movl %%ecx, %1\n\t"                   //ecx --> ptr
+            "movl %%eax, %0\n\t"                   //eax --> data_left
+            : "+m"(data_left), "+m"(ptr)           //data_left --> eax, ptr -> ecx
+            :
+            :"eax", "ecx", "edx", "xmm0"
+            );
+
+            if (data_left <= 0)
+            {
+                 break;
+            }
+        }
+
+        //check byte one by one
+        //  (*ptr)    0       1      >=2
+        // phase=0    1       0      0
+        // phase=1    2       0      0
+        // phase=2    2       3      0
+        if (*ptr >= 2)
+        {
+            phase = 0;
+        }
+        else
+        {
+            phase = single_byte_table[phase][*ptr];
+        }
+        ptr ++;
+        data_left --;
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+        cxt->sc_end_pos = cxt->size - data_left;
+        state->next_sc = cxt->buf[cxt->sc_end_pos];
+        state->second_scprfx_length = 3;
+        phase++;
+        ret = 1;
+    }
+    cxt->phase = phase;
+    /* Return SC found only if phase is 4, else always success */
+    return ret;
+}
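+
+/* Example: starting from phase 0 with input bytes 00 00 01 65, the phase advances
+   0 -> 1 -> 2 -> 3, the loop exits with data_left == 1, sc_end_pos is set to 3 (the first
+   byte after the 00 00 01 prefix) and next_sc latches that byte (0x65), so the caller
+   sees the byte that follows the start-code prefix. */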
+
+#else
+#define FIRST_STARTCODE_BYTE        0x00
+#define SECOND_STARTCODE_BYTE       0x00
+#define THIRD_STARTCODE_BYTE        0x01
+
+/* Must be the second and fourth byte of the bytestream for this to work; these are little-endian defines */
+#define SC_BYTE_MASK0               0x00ff0000  /* little-endian */
+#define SC_BYTE_MASK1               0x000000ff  /* little-endian */
+
+/* Parse for a start code of pattern 0x00 0x00 0x01 in the current buffer. Returns either SC found or success.
+   The context is updated with the current phase and the SC position in the buffer.
+*/
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
+{
+    uint8_t *ptr;
+    uint32_t size;
+    uint32_t data_left=0, phase = 0, ret = 0;
+    viddec_sc_parse_cubby_cxt_t *cxt;
+    /* What is phase?: phase is a value in [0-4]; it tracks consecutive '0' bytes.
+       Any time a '0' is found it is incremented by 1 (up to 2) and reset to 0 when a byte that does not fit the pattern is found.
+       If the third start-code byte is found while the current phase is 2, the phase is changed to 3, which means the pattern
+       we are looking for has been found. It is incremented to 4 once we see a byte after this pattern. */
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    size = 0;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    pcxt=pcxt;
+
+    /* parse while there is more data and the start code has not been found */
+    while ((data_left > 0) &&(phase < 3))
+    {
+        /* Check if we are word aligned & phase=0; if that's the case we can check
+           a word at a time instead of a byte */
+        if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32_t data;
+                char mask1 = 0, mask2=0;
+
+                data = *((uint32_t *)ptr);
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+                   two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {/* Success so skip 4 bytes and start over */
+                    ptr+=4;
+                    size+=4;
+                    data_left-=4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either the data is not on a word boundary, or phase > 0, or we are on a word boundary but
+           detected a zero byte in the word, so we look one byte at a time */
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {/* Phase can be 3 only if third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    phase = 2;
+
+                    if ( (((uint32_t)ptr) & 0x3) == 0 )
+                    {
+                        while ( data_left > 3 )
+                        {
+                            if (*((uint32_t *)ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr+=4;
+                            size+=4;
+                            data_left-=4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                if ((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2))
+                {/* Match for start code so update context with byte position */
+                    phase = 3;
+                    cxt->sc_end_pos = size;
+                }
+                else
+                {
+                    phase = 0;
+                }
+                ptr++;
+                size++;
+                data_left--;
+            }
+        }
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+        cxt->sc_end_pos++;
+        state->next_sc = cxt->buf[cxt->sc_end_pos];
+        state->second_scprfx_length = 3;
+        phase++;
+        ret = 1;
+    }
+    cxt->phase = phase;
+    /* Return SC found only if phase is 4, else always success */
+    return ret;
+}
+#endif
diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c
new file mode 100755
index 0000000..6879a6a
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c
@@ -0,0 +1,97 @@
+#include <stdint.h>
+#include <vbp_common.h>
+#include "viddec_pm.h"
+#include "viddec_parser_ops.h"
+#include "viddec_pm_utils_bstream.h"
+
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 1);
+    if (ret == -1)
+    {
+        DEB("FAILURE!!!! getbits returned %d\n", ret);
+    }
+
+    return ret;
+}
+
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_peekbits(&(cxt->getbits), data, num_bits, 0);
+    return ret;
+}
+
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_skipbits(&(cxt->getbits), num_bits);
+    return ret;
+}
+
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+    int32_t ret = 1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    viddec_pm_utils_skip_if_current_is_emulation(&(cxt->getbits));
+    viddec_pm_utils_bstream_get_au_offsets(&(cxt->getbits), bit, byte, is_emul);
+
+    return ret;
+
+}
+
+int32_t viddec_pm_is_nomoredata(void *parent)
+{
+    int32_t ret=0;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_nomorerbspdata(&(cxt->getbits));
+    return ret;
+}
+
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte)
+{
+    int32_t ret=-1;
+    viddec_pm_cxt_t *cxt;
+
+    cxt = (viddec_pm_cxt_t *)parent;
+    ret = viddec_pm_utils_bstream_get_current_byte(&(cxt->getbits), byte);
+    return ret;
+}
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error)
+{
+    viddec_pm_cxt_t *cxt;
+    cxt = (viddec_pm_cxt_t *)parent;
+    cxt->next_workload_error_eos = error;
+}
+
+void viddec_pm_set_late_frame_detect(void *parent)
+{
+    viddec_pm_cxt_t *cxt;
+    cxt = (viddec_pm_cxt_t *)parent;
+    cxt->late_frame_detect = true;
+}
+
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next)
+{
+    return 1;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi)
+{
+    wi=wi;
+}
diff --git a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
new file mode 100755
index 0000000..6939fef
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
@@ -0,0 +1,500 @@
+#include <stdint.h>
+#include <vbp_common.h>
+#include "viddec_pm_utils_bstream.h"
+
+/* Internal data structure for calculating required bits. */
+typedef union
+{
+    uint8_t byte[8];
+    uint32_t word[2];
+} viddec_pm_utils_getbits_t;
+
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+/* Bytes left in cubby buffer which were not consumed yet */
+static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt)
+{
+    return (cxt->buf_end - cxt->buf_index);
+}
+
+/*
+  This function checks to see if we are at the last valid byte for current access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t data_remaining = 0;
+    uint8_t ret = 0;
+
+    /* How much data is remaining including current byte to be processed.*/
+    data_remaining = cxt->list->total_bytes - (cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st));
+
+    /* Start code prefix can be 000001 or 0000001. We always check only for 000001.
+       data_remaining should be 1 for 000001, as we don't count the SC prefix and 1 represents the current byte.
+       data_remaining should be 2 for 00000001, as we don't count the SC prefix; the current byte and the extra 00 remain because we only check for 000001.
+       NOTE: This is used for H264 only.
+    */
+    switch (data_remaining)
+    {
+    case 2:
+        /* If next byte is 0 and its the last byte in access unit */
+        ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0);
+        break;
+    case 1:
+        /* if the current byte is last byte */
+        ret = 1;
+        break;
+    default:
+        break;
+    }
+    return ret;
+}
+
+#ifndef VBP
+/*
+  This function returns true if cubby buffer has the last byte of access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t last_byte_offset_plus_one=0;
+    uint8_t ret = 0;
+    /* Check to see if the last byte offset in the access unit is the last byte for the current access unit.
+     End represents the first invalid byte, so (end - st) gives the number of bytes.*/
+    last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st);
+    if ((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes)
+    {
+        ret = 1;
+    }
+    return ret;
+}
+#endif
+
+/* This function initializes scratch buffer, which is used for staging already read data, due to DMA limitations */
+static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt)
+{
+    cxt->st = cxt->size = cxt->bitoff=0;
+}
+#ifndef VBP
+/* This function tells us how much more data is in the current ES buffer from the current position. It's used to figure out if
+   we need to go to the next ES buffer
+*/
+static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset)
+{
+    uint32_t ret=0;
+    int32_t val=0;
+    val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes;
+    val = val - (int32_t)offset;
+    if (val > 0) ret = (uint32_t)val;
+    return ret;
+}
+
+/* This function seeks to the byte offset position starting from lst_index. If more data is present in the current ES buffer pointed to by
+ lst_index, it returns the remaining data in the current buffer along with the physical address of the byte offset. On return, the lst_index
+ parameter holds the index of the ES buffer in the list which contains byte_offset
+*/
+static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt,
+        uint32_t *lst_index,
+        uint32_t byte_offset,
+        uint32_t *physaddr)
+{
+    viddec_pm_utils_list_t *list;
+    uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */
+
+    list = cxt->list;
+    while (*lst_index < list->num_items)
+    {
+        /* Check to see if we reached the buffer with last valid byte of current access unit, List can have data beyond current access unit */
+        last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[*lst_index].edpos: (uint32_t)list->total_bytes;
+        if (byte_offset < last_byte_offst)
+        {/* Found a match so return with data remaining */
+            bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset);
+            *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index);
+            break;
+        }
+        *lst_index+=1;
+    }
+    return bytes_left;
+}
+
+/* This function is for copying trailing bytes of cubby bitstream buffer to scratch buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes)
+{
+    uint32_t i=0;
+    for (i=0; i<num_bytes; i++)
+    {
+        cxt->buf_scratch[i] = *data;
+        data++;
+        cxt->size++;
+    }
+}
+
+/* This function is for copying trailing bytes from scratch buffer to  bitstream buffer */
+static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data)
+{
+    uint32_t i=0;
+    for (i=0; i<cxt->size; i++)
+    {
+        *data = cxt->buf_scratch[i];
+        data++;
+    }
+}
+#endif
+
+/* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed */
+static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream,
+        viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/
+        uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/
+        uint32_t *phase,    /* Phase for emulation */
+        uint32_t num_bytes,/* requested number of bytes*/
+        uint32_t emul_reqd, /* On true we look for emulation prevention */
+        uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/
+                                              )
+{
+    int32_t ret = 1;
+    uint8_t cur_byte = 0, valid_bytes_read = 0;
+    *act_bytes = 0;
+
+    while (valid_bytes_read < num_bytes)
+    {
+        cur_byte = bstream->buf[bstream->buf_index + *act_bytes];
+        if (emul_reqd && (cur_byte == 0x3) &&(*phase == 2))
+        {/* skip emulation byte. we update the phase only if emulation prevention is enabled */
+            *phase = 0;
+        }
+        else
+        {
+            data->byte[valid_bytes_read] = cur_byte;
+            /*
+              We only update the phase for the first byte if the bit offset is 0. If it's not 0 then it was already accounted for in the past.
+              From second byte onwards we always look to update phase.
+             */
+            if ((*act_bytes != 0) || (is_offset_zero))
+            {
+                if (cur_byte == 0)
+                {
+                    /* Update phase only if emulation prevention is required */
+                    *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 );
+                }
+                else
+                {
+                    *phase=0;
+                }
+            }
+            valid_bytes_read++;
+        }
+        *act_bytes +=1;
+    }
+    /* Check to see if we reached the end during the above operation. We might be out of range but it's safe since our array
+       has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */
+    if ((bstream->buf_index + *act_bytes -1) >= bstream->buf_end)
+    {
+        ret = -1;
+    }
+    return ret;
+}
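+
+/* Example of emulation prevention: starting from phase 0 with emul_reqd set, a request
+   for three bytes from the input 00 00 03 01 drops the 0x03 once the phase reaches 2,
+   so data receives 00 00 01 while *act_bytes comes back as 4. */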
+
+/*
+  This function checks to see if we have the minimum amount of data; otherwise it tries to reload as much as it can.
+  It always returns the data left in the current buffer via the data_left parameter.
+*/
+static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left)
+{
+#ifdef VBP
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+#else
+    uint8_t isReload=0;
+
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+    /* If we have minimum data we should continue, else try to read more data */
+    if (*data_left <MIN_DATA)
+    {
+        /* Check to see if we already read last byte of current access unit */
+        isReload = !(viddec_pm_utils_bstream_nomoredata(cxt) == 1);
+        while (isReload)
+        {
+            /* We have more data in access unit so keep reading until we get at least minimum data */
+            viddec_pm_utils_bstream_reload(cxt);
+            *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+            /* Break out of loop if we reached last byte or we have enough data */
+            isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1));
+        }
+    }
+#endif
+}
+
+/*
+  This function moves the stream position by N bits(parameter bits). The bytes parameter tells us how many bytes were
+  read for this N bits(can be different due to emulation bytes).
+*/
+static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_buf_cxt_t *bstream, uint32_t bits, uint32_t bytes)
+{
+    if ((bits & 0x7) == 0)
+    {
+        bstream->buf_bitoff = 0;
+        bstream->buf_index +=bytes;
+    }
+    else
+    {
+        bstream->buf_bitoff = bits & 0x7;
+        bstream->buf_index +=(bytes - 1);
+    }
+}
+
+/*
+  This function skips emulation byte if necessary.
+  During normal flow we skip the emulation byte only if we read at least one bit after the two zero bytes.
+  However in some cases we might send data to HW without reading the next bit, in which case we are on
+  emulation byte. To avoid sending invalid data, this function has to be called first to skip.
+*/
+
+void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf);
+
+    if (cxt->is_emul_reqd &&
+            (cxt->phase >= 2) &&
+            (bstream->buf_bitoff == 0) &&
+            (bstream->buf[bstream->buf_index] == 0x3) )
+    {
+        bstream->buf_index += 1;
+        cxt->phase = 0;
+    }
+}
+
+#ifndef VBP
+/*
+  This function gets physical address of the requested au offset(pos).
+*/
+
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index)
+{
+    uint32_t ret = 0, last_byte_offst=0;
+    viddec_pm_utils_list_t *list;
+
+    list = cxt->list;
+    while (lst_index < list->num_items)
+    {
+        last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos: (uint32_t)list->total_bytes;
+        if (pos < last_byte_offst)
+        {
+#ifndef MFDBIGENDIAN
+            ret = (uint32_t)list->sc_ibuf[lst_index].buf;
+#else
+            ret = list->sc_ibuf[lst_index].phys;
+#endif
+            ret +=(pos - list->data[lst_index].stpos);
+            if (lst_index == 0) ret+=list->start_offset;
+            break;
+        }
+        lst_index++;
+    }
+    return ret;
+}
+
+/*
+  Actual reload function which uses dma to refill bitstream buffer.
+*/
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+
+    /* Update current offset positions */
+    cxt->au_pos +=  (bstream->buf_index - bstream->buf_st);
+    bstream->buf_st = bstream->buf_index;
+    /* copy leftover bytes into scratch */
+    {
+        int32_t cur_bytes=0;
+        viddec_pm_utils_bstream_scratch_init(&(cxt->scratch));
+        cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+        if (cur_bytes > 0)
+        {
+            viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes);
+            cxt->scratch.bitoff = bstream->buf_bitoff;
+        }
+    }
+    /* Initiate DMA and copyback scratch data */
+    {
+        uint32_t data_left = 0, ddr_mask=0;
+        /* calculate necessary alignments and copy data */
+        {
+            uint32_t ddr_addr=0, data_wrote=0;
+            uint32_t byte_pos;
+            /* byte pos points to the position from where we want to read data.*/
+            byte_pos = cxt->au_pos + cxt->scratch.size;
+            data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr);
+            if (data_left > CUBBY_SIZE)
+            {
+                data_left = CUBBY_SIZE;
+            }
+            if (data_left != 0)
+            {
+                ddr_mask = ddr_addr & 0x3;
+                ddr_addr = ddr_addr & ~0x3;
+                data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1);
+            }
+        }
+        /* copy scratch data back to buffer and update offsets */
+        {
+            uint32_t index=0;
+            index = MIN_DATA + ddr_mask;
+            index -= cxt->scratch.size;
+            viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index]));
+            bstream->buf_st = bstream->buf_index = index;
+            bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st;
+            bstream->buf_bitoff = cxt->scratch.bitoff;
+        }
+    }
+}
+#endif
+
+/*
+  Init function called by parser manager after sc code detected.
+*/
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul)
+{
+#ifdef VBP
+    cxt->emulation_byte_counter = 0;
+#endif
+
+    cxt->au_pos = 0;
+    cxt->list = list;
+    cxt->list_off = 0;
+    cxt->phase = 0;
+    cxt->is_emul_reqd = is_emul;
+    cxt->bstrm_buf.buf_st = cxt->bstrm_buf.buf_end = cxt->bstrm_buf.buf_index = cxt->bstrm_buf.buf_bitoff = 0;
+}
+
+/* Get the requested byte position. If the byte is already present in the cubby it is returned;
+   else we seek forward and get the requested byte.
+   Limitation: Once we seek forward we can't go back.
+*/
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte)
+{
+    int32_t ret = -1;
+    uint32_t data_left=0;
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+    if (data_left != 0)
+    {
+        *byte = bstream->buf[bstream->buf_index];
+        ret = 1;
+    }
+    return ret;
+}
+
+/*
+  Function to skip N bits ( N<= 32).
+*/
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits)
+{
+    int32_t ret = -1;
+    uint32_t data_left=0;
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+    if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+    {
+        uint8_t bytes_required=0;
+
+        bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+        if (bytes_required <= data_left)
+        {
+            viddec_pm_utils_getbits_t data;
+            uint32_t act_bytes =0;
+            if (viddec_pm_utils_getbytes(bstream, &data,  &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+            {
+                uint32_t total_bits=0;
+                total_bits=num_bits+bstream->buf_bitoff;
+                viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+                ret=1;
+
+#ifdef VBP
+                if (act_bytes > bytes_required)
+                {
+                    cxt->emulation_byte_counter = act_bytes - bytes_required;
+                }
+#endif
+            }
+        }
+    }
+    return ret;
+}
+
+/*
+  Function to get N bits ( N<= 32).
+*/
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip)
+{
+    uint32_t data_left=0;
+    int32_t ret = -1;
+    /* STEP 1: Make sure that we have at least minimum data before we calculate bits */
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+
+    if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+    {
+        uint32_t bytes_required=0;
+        viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+        bstream = &(cxt->bstrm_buf);
+        bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+
+        /* Step 2: Make sure we have bytes for requested bits */
+        if (bytes_required <= data_left)
+        {
+            uint32_t act_bytes, phase;
+            viddec_pm_utils_getbits_t data;
+            phase = cxt->phase;
+            /* Step 3: Due to emulation prevention bytes, the actual bytes read (act_bytes) can sometimes exceed bytes_required */
+            if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+            {
+                uint32_t total_bits=0;
+                uint32_t shift_by=0;
+                /* zero out upper bits */
+                /* LIMITATION: For some reason the compiler optimizes this to a NOP if both shifts are done
+                   in a single statement */
+                data.byte[0] <<= bstream->buf_bitoff;
+                data.byte[0] >>= bstream->buf_bitoff;
+
+#ifndef MFDBIGENDIAN
+                data.word[0] = SWAP_WORD(data.word[0]);
+                data.word[1] = SWAP_WORD(data.word[1]);
+#endif
+                total_bits = num_bits+bstream->buf_bitoff;
+                if (total_bits > 32)
+                {
+                    /* We have to use both the words to get required data */
+                    shift_by = total_bits - 32;
+                    data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by));
+                }
+                else
+                {
+                    shift_by = 32 - total_bits;
+                    data.word[0] = data.word[0] >> shift_by;
+                }
+                *out = data.word[0];
+                if (skip)
+                {
+                    /* update au byte position if needed */
+                    viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+                    cxt->phase = phase;
+
+#ifdef VBP
+                    if (act_bytes > bytes_required)
+                    {
+                        cxt->emulation_byte_counter += act_bytes - bytes_required;
+                    }
+#endif
+                }
+
+                ret =1;
+            }
+        }
+    }
+    return ret;
+}
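+
+/* Example (little-endian build): with buf_bitoff == 3 and num_bits == 12, two bytes are
+   fetched, the three already-consumed bits of the first byte are cleared, the bytes are
+   swapped into big-endian word order and the word is shifted right by (32 - 15), leaving
+   the 12 requested bits right-aligned in *out. */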
diff --git a/mixvbp/vbp_plugin/common/README b/mixvbp/vbp_plugin/common/README
new file mode 100644
index 0000000..938f24a
--- /dev/null
+++ b/mixvbp/vbp_plugin/common/README
@@ -0,0 +1 @@
+Some common code will be added here.
diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk
new file mode 100755
index 0000000..f015988
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/Android.mk
@@ -0,0 +1,66 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES :=			\
+	h264parse.c			\
+	h264parse_bsd.c			\
+	h264parse_math.c		\
+	h264parse_mem.c			\
+	h264parse_sei.c			\
+	h264parse_sh.c			\
+	h264parse_pps.c			\
+	h264parse_sps.c			\
+	h264parse_dpb.c			\
+	viddec_h264_parse.c		\
+	mix_vbp_h264_stubs.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES :=							   \
+	$(LOCAL_PATH)/include		   \
+	$(MIXVBP_DIR)/include		   \
+	$(MIXVBP_DIR)/vbp_manager/include	   \
+	$(MIXVBP_DIR)/vbp_manager/h264/include
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_h264
+
+LOCAL_SHARED_LIBRARIES :=		\
+	libmixvbp
+
+include $(BUILD_SHARED_LIBRARY)
+
+include $(CLEAR_VARS)
+PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),)
+LOCAL_SRC_FILES := \
+        h264parse.c \
+        h264parse_bsd.c \
+        h264parse_math.c \
+        h264parse_mem.c \
+        h264parse_sei.c \
+        h264parse_pps.c \
+        h264parse_sps.c \
+        h264parse_dpb.c \
+        h264parse_sh.c \
+        secvideo/baytrail/viddec_h264secure_parse.c \
+        mix_vbp_h264_stubs.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY -DUSE_AVC_SHORT_FORMAT
+
+LOCAL_C_INCLUDES :=   \
+	$(LOCAL_PATH)/include   \
+	$(MIXVBP_DIR)/include    \
+	$(MIXVBP_DIR)/vbp_manager/include   \
+	$(MIXVBP_DIR)/vbp_manager/h264/include
+
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_h264secure
+LOCAL_SHARED_LIBRARIES := libmixvbp
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c
new file mode 100755
index 0000000..9e36b7c
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse.c
@@ -0,0 +1,795 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    h264 parser
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+
+h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo)
+{
+    int32_t j, scanj;
+    int32_t delta_scale, lastScale, nextScale;
+
+    const uint8_t ZZ_SCAN[16]  =
+        {  0,  1,  4,  8,  5,  2,  3,  6,  9, 12, 13, 10,  7, 11, 14, 15
+        };
+
+    const uint8_t ZZ_SCAN8[64] =
+        {  0,  1,  8, 16,  9,  2,  3, 10, 17, 24, 32, 25, 18, 11,  4,  5,
+           12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13,  6,  7, 14, 21, 28,
+           35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+           58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+        };
+
+    lastScale      = 8;
+    nextScale      = 8;
+    scanj = 0;
+
+    for (j=0; j<sizeOfScalingList; j++)
+    {
+        scanj = (sizeOfScalingList==16)?ZZ_SCAN[j]:ZZ_SCAN8[j];
+
+        if (nextScale!=0)
+        {
+            delta_scale = h264_GetVLCElement(parent, pInfo, true);
+            nextScale = (lastScale + delta_scale + 256) % 256;
+            *UseDefaultScalingMatrix = (uint8_t) (scanj==0 && nextScale==0);
+        }
+
+        scalingList[scanj] = (nextScale==0) ? lastScale:nextScale;
+        lastScale = scalingList[scanj];
+    }
+
+    return H264_STATUS_OK;
+}
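+
+/* Example: lastScale starts at 8; if the first two signed Exp-Golomb deltas are 4 and -2,
+   the list begins 12, 10, ... (nextScale = (lastScale + delta_scale + 256) % 256). A
+   nextScale of 0 means the remaining entries repeat the last value, and when that happens
+   at scanj == 0 the default scaling matrix is selected instead. */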
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader)
+{
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+
+    ///////////////////////////////////////////////////
+    // Reload SPS/PPS when
+    // 1) Start of Frame (in case of context switch)
+    // 2) PPS id changed
+    ///////////////////////////////////////////////////
+    if ((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id))
+    {
+#ifndef WIN32
+        h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id);
+
+        if (pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS)
+        {
+            return H264_PPS_INVALID_PIC_ID;			/// Invalid PPS detected
+        }
+
+        if (pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id)
+        {
+            pInfo->Is_SPS_updated =1;
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+            h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+        }
+        else
+        {
+            if (h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id))
+            {
+                pInfo->Is_SPS_updated =1;
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+                h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+            }
+        }
+
+#else
+        pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id];
+        pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id];
+#endif
+
+        if (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)
+        {
+            return H264_PPS_INVALID_PIC_ID;			//// Invalid SPS detected
+        }
+    }
+    else {
+        if ((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS)  || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS))
+        {
+            return H264_PPS_INVALID_PIC_ID;			/// Invalid PPS detected
+        }
+    }
+
+
+    pInfo->img.PicWidthInMbs    = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1);
+    //pInfo->img.PicHeightInMapUnits = (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1);
+    pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag?				\
+                                  (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1):	\
+                                  ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1)<<1);
+
+
+    return H264_STATUS_OK;
+};   //// End of h264_active_par_set
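+
+/* FrameHeightInMbs follows the H.264 derivation (2 - frame_mbs_only_flag) * PicHeightInMapUnits:
+   when frame_mbs_only_flag is 0 the map units are field macroblock rows, so the frame
+   height in macroblocks is twice the map-unit height. */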
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////
+// Parse slice header info
+//////////////////////////////////////////////////
+h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status retStatus = H264_STATUS_ERROR;
+
+    ////////////////////////////////////////////////////
+    //// Parse slice header info
+    //// Part1: does not depend on the active PPS/SPS
+    //// Part2/3: depend on the active parset
+    //////////////////////////////////////////////////
+
+    //retStatus = h264_Parse_Slice_Header_1(pInfo);
+
+    SliceHeader->sh_error = 0;
+
+    if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK)
+    {
+        //////////////////////////////////////////
+        //// Active parameter set for this slice
+        //////////////////////////////////////////
+        retStatus = h264_active_par_set(pInfo, SliceHeader);
+    }
+
+    if (retStatus == H264_STATUS_OK) {
+        switch (pInfo->active_SPS.profile_idc)
+        {
+        case h264_ProfileBaseline:
+        case h264_ProfileMain:
+        case h264_ProfileExtended:
+            pInfo->active_PPS.transform_8x8_mode_flag=0;
+            pInfo->active_PPS.pic_scaling_matrix_present_flag =0;
+            pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset;
+
+        default:
+            break;
+        }
+
+        if ( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+        {
+            SliceHeader->sh_error |= 2;
+        }
+        else	if ( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+        {
+            SliceHeader->sh_error |= 4;
+        }
+
+    } else 	{
+        SliceHeader->sh_error |= 1;
+    }
+
+
+    //if(SliceHeader->sh_error) {
+    //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    //}
+
+
+
+    //////////////////////////////////
+    //// Parse slice data (MB loop)
+    //////////////////////////////////
+    //retStatus = h264_Parse_Slice_Data(pInfo);
+    {
+        //uint32_t data = 0;
+        //if( viddec_pm_peek_bits(parent, &data, 32) == -1)
+        //retStatus = H264_STATUS_ERROR;
+    }
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+
+    return retStatus;
+}
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc)
+{
+    h264_Status ret = H264_STATUS_ERROR;
+
+    //h264_NAL_Unit_t* NAL = &pInfo->NAL;
+    uint32_t code;
+#if 0
+    viddec_pm_get_bits(void * parent,uint32_t * data,uint32_t num_bits)(parent, &code, 24);
+    viddec_pm_get_bits(parent, &code, 1);   //forbidden_zero_bit
+
+    viddec_pm_get_bits(parent, &code, 2);
+    SliceHeader->nal_ref_idc = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code, 5);
+    pInfo->nal_unit_type = (uint8_t)code;
+#else
+#ifdef VBP
+    if ( viddec_pm_get_bits(parent, &code, 8) != -1)
+#else
+    //// 24-bit SC, 1 bit: forbidden_zero_bit, 2 bits: nal_ref_idc, 5 bits: nal_unit_type
+    if ( viddec_pm_get_bits(parent, &code, 32) != -1)
+#endif
+    {
+        *nal_ref_idc = (uint8_t)((code>>5)&0x3);
+        pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f);
+        ret = H264_STATUS_OK;
+    }
+#endif
+
+    return ret;
+}
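+
+/* The low byte of 'code' is the NAL unit header: 1 bit forbidden_zero_bit, 2 bits
+   nal_ref_idc and 5 bits nal_unit_type, which is why nal_ref_idc is (code >> 5) & 0x3
+   and nal_unit_type is code & 0x1f. */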
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/*!
+ ************************************************************************
+ * \brief
+ *    set defaults for old_slice, which is used to detect the first
+ *    NAL unit of a new picture
+ ************************************************************************
+ */
+#ifndef INT_MAX
+#define INT_MAX 0x7FFFFFFF
+#endif
+
+#ifndef UINT_MAX
+#define UINT_MAX 0xFFFFFFFF
+#endif
+
+void h264_init_old_slice(h264_Info* pInfo)
+{
+    pInfo->SliceHeader.field_pic_flag = 0;
+
+    pInfo->SliceHeader.pic_parameter_id = 0xFF;
+
+    pInfo->SliceHeader.frame_num = INT_MAX;
+
+    pInfo->SliceHeader.nal_ref_idc = 0xFF;
+
+    pInfo->SliceHeader.idr_flag = 0;
+
+    pInfo->SliceHeader.pic_order_cnt_lsb          = UINT_MAX;
+    pInfo->SliceHeader.delta_pic_order_cnt_bottom = INT_MAX;
+
+    pInfo->SliceHeader.delta_pic_order_cnt[0] = INT_MAX;
+    pInfo->SliceHeader.delta_pic_order_cnt[1] = INT_MAX;
+
+    return;
+}
+
+
+void h264_init_img(h264_Info* pInfo)
+{
+    h264_memset(&(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+
+    return;
+}
+
+
+void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem)
+{
+    int32_t i;
+
+    h264_Info * pInfo = &(parser->info);
+
+    parser->sps_pps_ddr_paddr = (uint32_t)persist_mem;
+
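+    // persist_mem is carved into consecutive regions whose start addresses are cached
+    // in the *_PADDR_GL fields: MAX_NUM_SPS SPS entries, then MAX_NUM_PPS PPS entries,
+    // then the per-SPS offset_for_ref_frame tables, and finally a temporary offset table.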
+    pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr;
+    pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all);
+    pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set);
+    pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL +
+                                        MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+
+    h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) );
+    h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) );
+
+    /* Global for SPS   & PPS */
+    for (i=0; i<MAX_NUM_SPS; i++)
+    {
+        pInfo->active_SPS.seq_parameter_set_id = 0xff;
+        h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i);
+    }
+    for (i=0; i<MAX_NUM_PPS; i++)
+    {
+        pInfo->active_PPS.seq_parameter_set_id = 0xff;
+        h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i);
+    }
+
+    pInfo->active_SPS.seq_parameter_set_id = 0xff;
+    pInfo->sps_valid = 0;
+    pInfo->got_start = 0;
+
+    return;
+}
+
+
+void h264_init_Info_under_sps_pps_level(h264_Info* pInfo)
+{
+    int32_t i=0;
+
+    h264_memset( &(pInfo->dpb), 0x0, sizeof(h264_DecodedPictureBuffer) );
+    h264_memset( &(pInfo->SliceHeader), 0x0, sizeof(h264_Slice_Header_t) );
+    h264_memset( &(pInfo->old_slice), 0x0, sizeof(OldSliceParams) );
+    h264_memset( &(pInfo->sei_information), 0x0, sizeof(sei_info) );
+    h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+    pInfo->h264_list_replacement = 0;
+
+    pInfo->h264_pwt_start_byte_offset = 0;
+    pInfo->h264_pwt_start_bit_offset = 0;
+    pInfo->h264_pwt_end_byte_offset = 0;
+    pInfo->h264_pwt_end_bit_offset = 0;
+    pInfo->h264_pwt_enabled = 0;
+
+    for (i=0; i<32; i++)
+    {
+        pInfo->slice_ref_list0[i] = 0;
+        pInfo->slice_ref_list1[i] = 0;
+    }
+
+    pInfo->qm_present_list = 0;
+
+    pInfo->nal_unit_type = 0;
+    pInfo->old_nal_unit_type = 0xff;
+
+    pInfo->push_to_cur = 0;
+    pInfo->Is_first_frame_in_stream = 1;
+    pInfo->Is_SPS_updated = 0;
+    pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+
+    pInfo->is_frame_boundary_detected_by_non_slice_nal = 0;
+    pInfo->is_frame_boundary_detected_by_slice_nal = 0;
+    pInfo->is_current_workload_done = 0;
+
+    pInfo->sei_rp_received = 0;
+    pInfo->last_I_frame_idc = 255;
+    pInfo->wl_err_curr = 0;
+    pInfo->wl_err_next = 0;
+
+    pInfo->primary_pic_type_plus_one = 0;
+    pInfo->sei_b_state_ready = 0;
+
+    /* Init old slice structure  */
+    h264_init_old_slice(pInfo);
+
+    /* init_dpb */
+    h264_init_dpb(&(pInfo->dpb));
+
+    /* init_sei	*/
+    h264_sei_stream_initialise(pInfo);
+
+}
+
+void h264_init_Info(h264_Info* pInfo)
+{
+    h264_memset(pInfo, 0x0, sizeof(h264_Info));
+
+    pInfo->old_nal_unit_type = 0xff;
+
+    pInfo->Is_first_frame_in_stream =1;
+    pInfo->img.frame_count = 0;
+    pInfo->last_I_frame_idc = 255;
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/////////////////////////////////////////////////////
+//
+// Judge whether the current picture is the second field of a complementary field pair
+//
+/////////////////////////////////////////////////////
+int32_t h264_is_second_field(h264_Info * pInfo)
+{
+    h264_Slice_Header_t cur_slice = pInfo->SliceHeader;
+    OldSliceParams old_slice = pInfo->old_slice;
+
+    int result = 0;
+
+    //pInfo->img.second_field = 0;
+
+    /// is it second field?
+
+    //OS_INFO( "xxx is_used = %d\n", pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used);
+
+    if (cur_slice.structure != FRAME)
+    {
+        if ( ( MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)&&(3 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) )
+                &&(0 != viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) ))
+        {
+            if ((cur_slice.frame_num == old_slice.frame_num)||(cur_slice.idr_flag))
+            {
+
+                if (old_slice.structure != cur_slice.structure)
+                {
+
+                    if (((cur_slice.structure == TOP_FIELD && old_slice.structure == BOTTOM_FIELD) ||  // Condition 1: the two fields have opposite parity
+                            (old_slice.structure == TOP_FIELD && cur_slice.structure == BOTTOM_FIELD)) &&
+                            ((old_slice.nal_ref_idc == 0 && cur_slice.nal_ref_idc == 0) ||             // Condition 2: both are non-reference or both are reference fields
+                             (old_slice.nal_ref_idc != 0 && cur_slice.nal_ref_idc != 0)))
+                    {
+                        //pInfo->img.second_field = 1;
+                        result = 1;
+                    }
+                }
+            }
+
+
+        }
+
+
+    }
+
+
+
+    return result;
+
+} //// End of h264_is_second_field
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice)
+{
+    int result = 0;
+
+    if (pInfo->number_of_first_au_info_nal_before_first_slice)
+    {
+        pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+        return 1;
+    }
+
+
+
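+    // The comparisons below mirror the "first VCL NAL unit of a new primary coded
+    // picture" detection rules: any change in PPS id, frame_num, field/bottom flags,
+    // reference status, IDR status / idr_pic_id, or the POC syntax elements marks a
+    // new picture.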
+    result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id);
+    result |= (old_slice.frame_num != cur_slice.frame_num);
+    result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag);
+    if (cur_slice.field_pic_flag && old_slice.field_pic_flag)
+    {
+        result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag);
+    }
+
+    result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \
+              ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0));
+    result |= ( old_slice.idr_flag != cur_slice.idr_flag);
+
+    if (cur_slice.idr_flag && old_slice.idr_flag)
+    {
+        result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id);
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 0)
+    {
+        result |=  (old_slice.pic_order_cnt_lsb          != cur_slice.pic_order_cnt_lsb);
+        result |=  (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom);
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 1)
+    {
+        result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]);
+        result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]);
+    }
+
+    return result;
+}
+
+
+int32_t h264_check_previous_frame_end(h264_Info * pInfo)
+{
+    int result = 0;
+
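+    // A non-VCL NAL unit (AUD/SPS/PPS/SEI/EOSeq/EOStream/reserved) arriving right after
+    // slice data marks the end of the previous access unit, provided the previous picture
+    // was a frame or its second field has already been received.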
+    if ( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) )
+    {
+
+        switch ( pInfo->nal_unit_type )
+        {
+        case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        case h264_NAL_UNIT_TYPE_SPS:
+        case h264_NAL_UNIT_TYPE_PPS:
+        case h264_NAL_UNIT_TYPE_SEI:
+        case h264_NAL_UNIT_TYPE_EOSeq:
+        case h264_NAL_UNIT_TYPE_EOstream:
+        case h264_NAL_UNIT_TYPE_Reserved1:
+        case h264_NAL_UNIT_TYPE_Reserved2:
+        case h264_NAL_UNIT_TYPE_Reserved3:
+        case h264_NAL_UNIT_TYPE_Reserved4:
+        case h264_NAL_UNIT_TYPE_Reserved5:
+        {
+            pInfo->img.current_slice_num = 0;
+
+            if ((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) {
+                pInfo->is_frame_boundary_detected_by_non_slice_nal =1;
+                pInfo->is_current_workload_done=1;
+                result=1;
+            }
+            break;
+        }
+        default:
+            break;
+        }
+
+    }
+
+    return result;
+
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////
+// 1) Update old slice structure for frame boundary detection
+//////////////////////////////////////////////////////////////
+void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader)
+{
+    pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id;
+
+    pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num;
+
+    pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
+
+    if (pInfo->SliceHeader.field_pic_flag)
+    {
+        pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
+    }
+
+    pInfo->old_slice.nal_ref_idc   = pInfo->SliceHeader.nal_ref_idc;
+
+    pInfo->old_slice.structure = pInfo->SliceHeader.structure;
+
+    pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag;
+    if (pInfo->SliceHeader.idr_flag)
+    {
+        pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id;
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 0)
+    {
+        pInfo->old_slice.pic_order_cnt_lsb          = pInfo->SliceHeader.pic_order_cnt_lsb;
+        pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 1)
+    {
+        pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
+        pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
+    }
+
+    ////////////////////////////// Next to current
+    memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t));
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// Initialization for new picture
+//////////////////////////////////////////////////////////////////////////////
+void h264_update_img_info(h264_Info * pInfo )
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    pInfo->img.frame_num = pInfo->SliceHeader.frame_num;
+    pInfo->img.structure = pInfo->SliceHeader.structure;
+
+    pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
+    pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
+
+    pInfo->img.MbaffFrameFlag  = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag));
+    pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type;
+
+    if (pInfo->img.pic_order_cnt_type == 1) {
+        pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle;
+        pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag;
+        pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic;
+        pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field;
+    }
+
+    pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb;
+    //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb;
+    pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
+    pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
+    pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
+
+
+    pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num;
+
+    pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag;
+
+    ////////////////////////////////////////////////// Check SEI recovery point
+    if (pInfo->sei_information.recovery_point) {
+        int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+        pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum;
+    }
+
+    if (pInfo->SliceHeader.idr_flag)
+        pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num;
+
+
+
+    /////////////////////////////////////////////////Resolution Change
+    pInfo->img.curr_has_mmco_5 = 0;
+
+    if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)||
+            (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) )
+    {
+        int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? pInfo->img.no_output_of_prior_pics_flag : 0;
+
+        // If resolution changed, reset the soft DPB here
+        h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics);
+    }
+
+    return;
+
+} ///// End of init new frame
+
+
+void h264_update_frame_type(h264_Info * pInfo )
+{
+
+//update frame type
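+    // fs[].pic_type packs three 3-bit type codes (I/P/B/IDR/invalid) at
+    // FRAME_TYPE_FRAME_OFFSET, FRAME_TYPE_TOP_OFFSET and FRAME_TYPE_BOTTOM_OFFSET,
+    // plus a structure flag bit at FRAME_TYPE_STRUCTRUE_OFFSET (set for frame pictures);
+    // only the slot that matches the current picture structure is updated below.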
+    if (pInfo->img.structure == FRAME)
+    {
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            if (pInfo->dpb.fs_dec_idc < NUM_DPB_FRAME_STORES)
+            {
+                pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET);
+            //pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = 0xff;
+            //pInfo->dpb.fs[0].pic_type = pInfo->dpb.fs_dec_idc;
+            }
+
+        }
+        else
+        {
+#if 1
+            switch (pInfo->SliceHeader.slice_type)
+            {
+            case h264_PtypeB:
+                pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET);
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET != FRAME_TYPE_B)
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET);
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_FRAME_OFFSET))>>FRAME_TYPE_FRAME_OFFSET == FRAME_TYPE_INVALID)
+                {
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET)|(FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET);
+                }
+                pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc;
+
+                break;
+            default:
+                break;
+
+            }
+#endif
+
+        }
+
+    }
+    else if (pInfo->img.structure == TOP_FIELD)
+    {
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+        }
+        else
+        {
+            switch (pInfo->SliceHeader.slice_type)
+            {
+            case h264_PtypeB:
+                pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET != FRAME_TYPE_B)
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_TOP_OFFSET))>>FRAME_TYPE_TOP_OFFSET == FRAME_TYPE_INVALID)
+                {
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+                }
+                if (pInfo->sei_rp_received)
+                    pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc;
+                else
+                    pInfo->last_I_frame_idc = 255;
+                break;
+            default:
+                break;
+
+            }
+
+        }
+
+
+    } else if (pInfo->img.structure == BOTTOM_FIELD)
+    {
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+        }
+        else
+        {
+            switch (pInfo->SliceHeader.slice_type)
+            {
+            case h264_PtypeB:
+                pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET != FRAME_TYPE_B)
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if ( ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type) & (0x7 << FRAME_TYPE_BOTTOM_OFFSET))>>FRAME_TYPE_BOTTOM_OFFSET == FRAME_TYPE_INVALID)
+                {
+                    pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET)|(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                }
+                if (pInfo->sei_rp_received)
+                    pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1);
+                else
+                    pInfo->last_I_frame_idc = 255;
+
+                break;
+            default:
+                break;
+
+            }
+
+        }
+
+    }
+    return;
+
+}
+
+
+//////#endif ///////////// IFDEF H264_PARSE_C///////////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_bsd.c b/mixvbp/vbp_plugin/h264/h264parse_bsd.c
new file mode 100755
index 0000000..40c7559
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_bsd.c
@@ -0,0 +1,228 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    h264 bitstream decoding
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_parser_ops.h"
+
+
+
+
+
+/**
+   get_codeNum     : Get codeNum based on sec 9.1 of the H.264 spec (Exp-Golomb codes).
+   @param   parent : Parser context; the buffer address & size come from it and the
+                     read position is advanced past the decoded code on success.
+                     Assumption: codeNum is a max of 32 bits.
+
+   @retval  codeNum          : Successfully decoded codeNum.
+   @retval  MAX_INT32_VALUE  : Couldn't read a complete code from the current buffer.
+*/
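+
+/*
+   Worked example (illustrative bit string only): for the Exp-Golomb code "0 0 1 0 1"
+   there are two leading zero bits, so after the marker '1' two more bits ("01" = 1)
+   are read and codeNum = 2^2 - 1 + 1 = 4.  The loop below performs the same
+   computation, but consumes the stream a byte at a time.
+*/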
+
+uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo)
+{
+    int32_t    leadingZeroBits= 0;
+    uint32_t    temp = 0, match = 0, noOfBits = 0, count = 0;
+    uint32_t   codeNum =0;
+    uint32_t   bits_offset =0, byte_offset =0;
+    uint8_t    is_emul =0;
+    uint8_t    is_first_byte = 1;
+    uint32_t   length =0;
+    uint32_t   bits_need_add_in_first_byte =0;
+    int32_t    bits_operation_result=0;
+
+    //remove warning
+    pInfo = pInfo;
+
+    ////// Step 1: parse through zero bits until we find a bit with value 1.
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+
+    while (!match)
+    {
+        if ((bits_offset != 0) && ( is_first_byte == 1))
+        {
+            //we handle byte at a time, if we have offset then for first
+            //   byte handle only 8 - offset bits
+            noOfBits = (uint8_t)(8 - bits_offset);
+            bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits);
+
+
+            temp = (temp << bits_offset);
+            if (temp!=0)
+            {
+                bits_need_add_in_first_byte = bits_offset;
+            }
+            is_first_byte =0;
+        }
+        else
+        {
+            noOfBits = 8;/* always 8 bits as we read a byte at a time */
+            bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8);
+
+        }
+
+        if (-1==bits_operation_result)
+        {
+            return MAX_INT32_VALUE;
+        }
+
+        if (temp != 0)
+        {
+            // if byte!=0 we have at least one bit with value 1.
+            count=1;
+            while (((temp & 0x80) != 0x80) && (count <= noOfBits))
+            {
+                count++;
+                temp = temp <<1;
+            }
+            //At this point we get the bit position of 1 in current byte(count).
+
+            match = 1;
+            leadingZeroBits += count;
+        }
+        else
+        {
+            // we don't have a 1 in current byte
+            leadingZeroBits += noOfBits;
+        }
+
+        if (!match)
+        {
+            //actually move the bitoff by viddec_pm_get_bits
+            viddec_pm_get_bits(parent, &temp, noOfBits);
+        }
+        else
+        {
+            //actually move the bitoff by viddec_pm_get_bits
+            viddec_pm_get_bits(parent, &temp, count);
+        }
+
+    }
+    ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value.
+
+
+    if (match)
+    {
+
+        viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+        /* bit position in current byte */
+        //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7);
+        count = ((count + bits_need_add_in_first_byte)& 0x7);
+
+        leadingZeroBits --;
+        length =  leadingZeroBits;
+        codeNum = 0;
+        noOfBits = 8 - count;
+
+
+        while (leadingZeroBits > 0)
+        {
+            if (noOfBits < (uint32_t)leadingZeroBits)
+            {
+                viddec_pm_get_bits(parent, &temp, noOfBits);
+
+
+                codeNum = (codeNum << noOfBits) | temp;
+                leadingZeroBits -= noOfBits;
+            }
+            else
+            {
+                viddec_pm_get_bits(parent, &temp, leadingZeroBits);
+
+                codeNum = (codeNum << leadingZeroBits) | temp;
+                leadingZeroBits = 0;
+            }
+
+
+            noOfBits = 8;
+        }
+        // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits).
+        codeNum = codeNum + (1 << length) -1;
+
+    }
+
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+    if (bits_offset!=0)
+    {
+        viddec_pm_peek_bits(parent, &temp, 8-bits_offset);
+    }
+
+    return codeNum;
+}
+
+
+/*---------------------------------------*/
+/*---------------------------------------*/
+int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned)
+{
+    int32_t sval = 0;
+    signed char sign;
+
+    sval = h264_get_codeNum(parent , pInfo);
+
+    if (bIsSigned) // decode as signed Exp-Golomb se(v); otherwise return the unsigned codeNum
+    {
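+        // Map the unsigned codeNum k to the signed Exp-Golomb value se(v):
+        // odd k -> +(k+1)/2, even k -> -(k/2), e.g. k = 1,2,3,4 -> +1,-1,+2,-2.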
+        sign = (sval & 0x1)?1:-1;
+        sval = (sval +1) >> 1;
+        sval = sval * sign;
+    }
+
+    return sval;
+} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned)
+
+///
+/// Check whether more RBSP data left in current NAL
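+/// More data is present unless what remains of the current byte is exactly the
+/// rbsp_trailing_bits pattern: a single '1' stop bit followed only by zero bits
+/// up to the byte boundary.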
+///
+uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo)
+{
+    uint8_t cnt = 0;
+
+    uint8_t  is_emul =0;
+    uint8_t 	cur_byte = 0;
+    int32_t  shift_bits =0;
+    uint32_t ctr_bit = 0;
+    uint32_t bits_offset =0, byte_offset =0;
+
+    //remove warning
+    pInfo = pInfo;
+
+    if (!viddec_pm_is_nomoredata(parent))
+        return 1;
+
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+    shift_bits = 7-bits_offset;
+
+    // read one byte
+    viddec_pm_get_cur_byte(parent, &cur_byte);
+
+    ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01;
+
+    // a stop bit has to be one
+    if (ctr_bit==0)
+        return 1;
+
+    while (shift_bits>=0 && !cnt)
+    {
+        cnt |= (((cur_byte)>> (shift_bits--)) & 0x01);   // set up control bit
+    }
+
+    return (cnt);
+}
+
+
+
+///////////// EOF/////////////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c
new file mode 100755
index 0000000..13adb1b
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c
@@ -0,0 +1,4222 @@
+/*!
+ ***********************************************************************
+ *  \file: h264parse_dpb.c
+ *
+ ***********************************************************************
+ */
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+
+//#include <limits.h>
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+//#include "h264_debug.h"
+
+#ifndef NULL
+#define NULL 0
+#endif
+//#ifndef USER_MODE
+//#define NULL 0
+//#endif
+
+///////////////////////// DPB init //////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+// Init DPB
+// Description: init dpb, which should be called while open
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb)
+{
+    int32_t i;
+
+    //// Init DPB to zero
+    //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) );
+
+
+    for (i=0; i<NUM_DPB_FRAME_STORES; i++)
+    {
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+
+///////////////////////// Reference list management //////////////////////////
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ref_list ()
+//
+// Adds an idc to the (short term) reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer] = ref_idc;
+    p_dpb->ref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ltref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer] = ref_idc;
+    p_dpb->ltref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_ref_lists (h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+//
+// Decide whether the current picture needs to be added to the reference lists.
+// active_fs is pointed at either the non-existing or the just-decoded frame store
+// before the check; field pictures are only appended if their frame store is not
+// already in the list.
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+{
+    if (NonExisting)
+        h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc);
+    else
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+    //if(active_fs->is_reference)
+    if (p_dpb->active_fs->frame.used_for_reference)
+    {
+        if (viddec_h264_get_is_long_term(p_dpb->active_fs))
+        {
+            if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME)
+                h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            else
+            {
+                uint32_t found_in_list = 0, i = 0;
+                for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) {
+                    if (p_dpb->fs_ltref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1;
+                }
+
+                if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            }
+        }
+        else
+        {
+            if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) {
+                h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            } else
+            {
+                uint32_t found_in_list = 0, i = 0;
+
+                for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++)
+                {
+                    if (p_dpb->fs_ref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1;
+                }
+
+                if (found_in_list == 0) h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            }
+        }
+    }
+
+    return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Set active fs
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index)
+{
+    p_dpb->active_fs = &p_dpb->fs[index];
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Sort reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc)
+{
+    int32_t j, k, temp, idc;
+
+    // Dodgy looking for embedded code here...
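+    // Simple exchange sort over two parallel arrays: sort_indices drives the order and
+    // list is permuted in lock-step; desc == 1 sorts descending, desc == 0 ascending
+    // (desc must be exactly 0 or 1 because of the bitwise tests below).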
+    if (size > 1)
+    {
+        for (j = 0; j < size-1; j = j + 1) {
+            for (k = j + 1; k < size; k = k + 1) {
+                if ((desc & (sort_indices[j] < sort_indices[k]))|
+                        (~desc & (sort_indices[j] > sort_indices[k])) )
+                {
+                    temp = sort_indices[k];
+                    sort_indices[k] = sort_indices[j];
+                    sort_indices[j] = temp;
+                    idc = list[k];
+                    list[k] = list[j];
+                    list[j] = idc;
+                }
+            }
+        }
+    }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_bottom_field_ref ()
+//
+// Returns 1 if the bottom field of the active frame store is used as the requested
+// (long term or short term) reference
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_bottom_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term)
+{
+    int32_t temp;
+    if (long_term) temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && (p_dpb->active_fs->bottom_field.is_long_term))  ? 1 : 0;
+    else          temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && !(p_dpb->active_fs->bottom_field.is_long_term)) ? 1 : 0;
+
+    return temp;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_top_field_ref ()
+//
+// Returns 1 if the top field of the active frame store is used as the requested
+// (long term or short term) reference
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_top_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term)
+{
+    int32_t temp;
+    if (long_term)
+        temp = ((p_dpb->active_fs->top_field.used_for_reference) && (p_dpb->active_fs->top_field.is_long_term))  ? 1 : 0;
+    else
+        temp = ((p_dpb->active_fs->top_field.used_for_reference) && !(p_dpb->active_fs->top_field.is_long_term)) ? 1 : 0;
+
+    return temp;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gen_pic_list_from_frame_list ()
+//
+// Generates a field (picture) reference list from an already ordered frame store list
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term)
+{
+    int32_t top_idx, bot_idx, got_pic, list_idx;
+    int32_t lterm;
+
+    list_idx = 0;
+    lterm = (long_term)? 1:0;
+
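+    // Build a field (picture) list from the ordered frame store list: starting with
+    // the parity of the current picture, alternately take one reference field of each
+    // parity from frame_list, tagging every entry with its field bit and, for long
+    // term references, with the long term marker bit.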
+    if (list_size) {
+
+
+        top_idx = 0;
+        bot_idx = 0;
+
+        if (currPicStructure == TOP_FIELD) {
+            while ((top_idx < list_size)||(bot_idx < list_size))
+            {
+                /////////////////////////////////////////// ref Top Field
+                got_pic = 0;
+                while ((top_idx < list_size) && (!got_pic))
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1)
+                    {
+                        if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term))
+                        {
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0);  // top_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    top_idx++;
+                }
+
+                /////////////////////////////////////////// ref Bottom Field
+                got_pic = 0;
+                while ((bot_idx < list_size) && (!got_pic))
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2)
+                    {
+                        if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term))
+                        {
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1);  // bottom_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    bot_idx++;
+                }
+            }
+        }
+
+        /////////////////////////////////////////////// current Bottom Field
+        if (currPicStructure == BOTTOM_FIELD)	{
+            while ((top_idx < list_size)||(bot_idx < list_size))
+            {
+                /////////////////////////////////////////// ref Bottom Field
+                got_pic = 0;
+                while ((bot_idx < list_size) && (!(got_pic)))
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) {
+                        if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) {
+                            // short term ref pic
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1);  // bottom_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    bot_idx++;
+                }
+
+                /////////////////////////////////////////// ref Top Field
+                got_pic = 0;
+                while ((top_idx < list_size) && (!(got_pic)))
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) {
+                        if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) {
+                            // short term ref pic
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0);  // top_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    top_idx++;
+                }
+            }
+        }
+    }
+
+    return list_idx;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ref_list ()
+//
+// Removes an idc from the reference list and updates the list afterwards
+//
+
+void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    uint8_t idx   = 0;
+    int32_t Found = 0;
+
+    while ((idx < p_dpb->ref_frames_in_buffer) && (!(Found)))
+    {
+        if (p_dpb->fs_ref_idc[idx] == ref_idc)
+            Found = 1;
+        else
+            idx++;
+    }
+
+    if (Found)
+    {
+        // Move the remainder of the list up one
+        while (idx < p_dpb->ref_frames_in_buffer - 1) {
+            p_dpb->fs_ref_idc[idx] = p_dpb->fs_ref_idc[idx + 1];
+            idx ++;
+        }
+
+        p_dpb->fs_ref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+        p_dpb->ref_frames_in_buffer--;
+    }
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ltref_list ()
+//
+// Removes an idc from the long term reference list and updates the list afterwards
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc)
+{
+    uint8_t idx   = 0;
+    int32_t Found = 0;
+
+    while ((idx < p_dpb->ltref_frames_in_buffer) && (!(Found)))
+    {
+        if (p_dpb->fs_ltref_idc[idx] == ref_idc) Found = 1;
+        else idx++;
+    }
+
+    if (Found)
+    {
+        // Move the remainder of the list up one
+        while (idx <(uint8_t)(p_dpb->ltref_frames_in_buffer - 1))
+        {
+            p_dpb->fs_ltref_idc[idx] = p_dpb->fs_ltref_idc[idx + 1];
+            idx ++;
+        }
+        p_dpb->fs_ltref_idc[idx] = MPD_DPB_FS_NULL_IDC;		// Clear the last one
+
+        p_dpb->ltref_frames_in_buffer--;
+    }
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_ref_lists ()
+//
+// Used to initialise the reference lists
+// Also assigns picture numbers and long term picture numbers if P OR B slice
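+//
+// Initial list order (before any reordering commands): for P slices list0 holds
+// short term references sorted by descending PicNum followed by long term references
+// sorted by ascending LongTermPicNum; for B slices list0/list1 are ordered by POC
+// relative to the current picture, with long term references appended at the end.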
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_update_ref_lists(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb;
+
+    int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+    uint8_t list0idx, list0idx_1, listltidx;
+    uint8_t idx;
+
+    uint8_t add_top, add_bottom, diff;
+    uint8_t list_idc;
+    uint8_t check_non_existing, skip_picture;
+
+
+    uint8_t gen_pic_fs_list0[16];
+    uint8_t gen_pic_fs_list1[16];
+    uint8_t gen_pic_fs_listlt[16];
+    uint8_t gen_pic_pic_list[32];  // check out these sizes...
+
+    uint8_t sort_fs_idc[16];
+    int32_t list_sort_number[16];
+
+#ifdef DUMP_HEADER_INFO
+    static int cc1 = 0;
+    //OS_INFO("-------------cc1= %d\n",cc1);    /////// DEBUG info
+    if (cc1 == 255)
+        idx = 0;
+#endif
+
+    list0idx = list0idx_1 = listltidx = 0;
+
+    if (pInfo->SliceHeader.structure == FRAME)
+    {
+        ////////////////////////////////////////////////// short term handling
+        for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+            if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3)&&(p_dpb->active_fs->frame.used_for_reference == 3))
+            {
+                if (p_dpb->active_fs->frame_num > pInfo->img.frame_num)
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum;
+                else
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num;
+
+                p_dpb->active_fs->frame.pic_num     = p_dpb->active_fs->frame_num_wrap;
+
+                // Use this opportunity to sort list for a p-frame
+                if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+                {
+                    sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                    list_sort_number[list0idx] = p_dpb->active_fs->frame.pic_num;
+                    list0idx++;
+                }
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx++)
+                p_dpb->listX_0[idx] = (sort_fs_idc[idx]);  // frame
+
+            p_dpb->listXsize[0] = list0idx;
+        }
+
+        ////////////////////////////////////////////////// long term handling
+        for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+            if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) && (p_dpb->active_fs->frame.used_for_reference == 3))
+            {
+                p_dpb->active_fs->frame.long_term_pic_num = p_dpb->active_fs->frame.long_term_frame_idx;
+
+                if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+                {
+                    sort_fs_idc[list0idx-p_dpb->listXsize[0]]       = p_dpb->fs_ltref_idc[idx];
+                    list_sort_number[list0idx-p_dpb->listXsize[0]]  = p_dpb->active_fs->frame.long_term_pic_num;
+                    list0idx++;
+                }
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0);
+            for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) {
+                p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+            }
+            p_dpb->listXsize[0] = list0idx;
+        }
+    }
+    else   /// Field base
+    {
+        if (pInfo->SliceHeader.structure == TOP_FIELD)
+        {
+            add_top    = 1;
+            add_bottom = 0;
+        }
+        else
+        {
+            add_top    = 0;
+            add_bottom = 1;
+        }
+
+        ////////////////////////////////////////////P0: Short term handling
+        for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+            if (p_dpb->active_fs->frame.used_for_reference)
+            {
+                if (p_dpb->active_fs->frame_num > pInfo->SliceHeader.frame_num) {
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum;
+                } else {
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num;
+                }
+
+                if ((p_dpb->active_fs->frame.used_for_reference)&0x1) {
+                    p_dpb->active_fs->top_field.pic_num    = (p_dpb->active_fs->frame_num_wrap << 1) + add_top;
+                }
+
+                if ((p_dpb->active_fs->frame.used_for_reference)&0x2) {
+                    p_dpb->active_fs->bottom_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_bottom;
+                }
+
+                if (pInfo->SliceHeader.slice_type == h264_PtypeP) {
+                    sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                    list_sort_number[list0idx] = p_dpb->active_fs->frame_num_wrap;
+                    list0idx++;
+                }
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx++) {
+                gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+            }
+
+            p_dpb->listXsize[0] = 0;
+            p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+            for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+            {
+                p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+            }
+        }
+
+        ////////////////////////////////////////////P0: long term handling
+        for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+            if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) {
+                p_dpb->active_fs->top_field.long_term_pic_num    = (p_dpb->active_fs->top_field.long_term_frame_idx << 1) + add_top;
+            }
+
+            if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) {
+                p_dpb->active_fs->bottom_field.long_term_pic_num = (p_dpb->active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom;
+            }
+
+            if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+            {
+                sort_fs_idc[listltidx]      = p_dpb->fs_ltref_idc[idx];
+                list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx;
+                listltidx++;
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+            for (idx = 0; idx < listltidx; idx++) {
+                gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+            }
+            list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+            for (idx = 0; idx < list0idx_1; idx++) {
+                p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+            }
+            p_dpb->listXsize[0] += list0idx_1;
+        }
+    }
+
+
+    if (pInfo->SliceHeader.slice_type == h264_PtypeI)
+    {
+        p_dpb->listXsize[0] = 0;
+        p_dpb->listXsize[1] = 0;
+        return;
+    }
+
+    if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+    {
+        //// Forward done above
+        p_dpb->listXsize[1] = 0;
+    }
+
+
+    // B-Slice
+    // Do not include non-existing frames for B-pictures when cnt_type is zero
+
+    if (pInfo->SliceHeader.slice_type == h264_PtypeB)
+    {
+        list0idx = list0idx_1 = listltidx = 0;
+        skip_picture = 0;
+
+        if (pInfo->active_SPS.pic_order_cnt_type == 0)
+            check_non_existing = 1;
+        else
+            check_non_existing = 0;
+
+        if (pInfo->SliceHeader.structure == FRAME)
+        {
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+                if (viddec_h264_get_is_used(p_dpb->active_fs) == 3)
+                {
+                    if (check_non_existing)
+                    {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1;
+                        else                           skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0)
+                    {
+                        if ((p_dpb->active_fs->frame.used_for_reference==3) && (!(p_dpb->active_fs->frame.is_long_term)))
+                        {
+                            if (pInfo->img.framepoc >= p_dpb->active_fs->frame.poc)
+                            {
+                                sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                                list_sort_number[list0idx] = p_dpb->active_fs->frame.poc;
+                                list0idx++;
+                            }
+                        }
+                    }
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx++) {
+                p_dpb->listX_0[idx] = sort_fs_idc[idx];
+            }
+
+            list0idx_1 = list0idx;
+
+            /////////////////////////////////////////B0:  Short term handling
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+                if (viddec_h264_get_is_used(p_dpb->active_fs) == 3)
+                {
+                    if (check_non_existing)
+                    {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs))	skip_picture = 1;
+                        else							skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0)
+                    {
+                        if ((p_dpb->active_fs->frame.used_for_reference) && (!(p_dpb->active_fs->frame.is_long_term)))
+                        {
+                            if (pInfo->img.framepoc < p_dpb->active_fs->frame.poc)
+                            {
+                                sort_fs_idc[list0idx-list0idx_1]      = p_dpb->fs_ref_idc[idx];
+                                list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc;
+                                list0idx++;
+                            }
+                        }
+                    }
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+            for (idx = list0idx_1; idx < list0idx; idx++) {
+                p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1];
+            }
+
+            for (idx = 0; idx < list0idx_1; idx++) {
+                p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx];
+            }
+
+            for (idx = list0idx_1; idx < list0idx; idx++) {
+                p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx];
+            }
+
+            p_dpb->listXsize[0] = list0idx;
+            p_dpb->listXsize[1] = list0idx;
+
+            /////////////////////////////////////////B0:  long term handling
+            list0idx = 0;
+
+            // Can non-existent pics be set as long term??
+            for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+                if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3))
+                {
+                    // if we have two fields, both must be long-term
+                    sort_fs_idc[list0idx]      = p_dpb->fs_ltref_idc[idx];
+                    list_sort_number[list0idx] = p_dpb->active_fs->frame.long_term_pic_num;
+                    list0idx++;
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0);
+            for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1)
+            {
+                p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+                p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+            }
+
+            p_dpb->listXsize[0] += list0idx;
+            p_dpb->listXsize[1] += list0idx;
+        }
+        else  // Field
+        {
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+                if (viddec_h264_get_is_used(p_dpb->active_fs))	{
+                    if (check_non_existing) {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs))
+                            skip_picture = 1;
+                        else
+                            skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0)  {
+                        if (pInfo->img.ThisPOC >= p_dpb->active_fs->frame.poc) {
+                            sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                            list_sort_number[list0idx] = p_dpb->active_fs->frame.poc;
+                            list0idx++;
+                        }
+                    }
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx = idx + 1) {
+                gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+            }
+
+            list0idx_1 = list0idx;
+
+            ///////////////////////////////////////////// B1: Short term handling
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+                if (viddec_h264_get_is_used(p_dpb->active_fs))
+                {
+                    if (check_non_existing) {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs))
+                            skip_picture = 1;
+                        else
+                            skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0) {
+                        if (pInfo->img.ThisPOC < p_dpb->active_fs->frame.poc) {
+                            sort_fs_idc[list0idx-list0idx_1]      = p_dpb->fs_ref_idc[idx];
+                            list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc;
+                            list0idx++;
+                        }
+                    }
+                }
+            }
+
+            ///// Generate frame list from sorted fs
+            /////
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+            for (idx = list0idx_1; idx < list0idx; idx++)
+                gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1];
+
+            for (idx = 0; idx < list0idx_1; idx++)
+                gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx];
+
+            for (idx = list0idx_1; idx < list0idx; idx++)
+                gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx];
+
+            ///// Generate List_X0
+            /////
+            p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+            for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+                p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+
+            //// Generate List X1
+            ////
+            p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0);
+
+            for (idx = 0; idx < p_dpb->listXsize[1]; idx++)
+                p_dpb->listX_1[idx] = gen_pic_pic_list[idx];
+
+            ///////////////////////////////////////////// B1: long term handling
+            for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+                sort_fs_idc[listltidx]      = p_dpb->fs_ltref_idc[idx];
+                list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx;
+                listltidx++;
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+            for (idx = 0; idx < listltidx; idx++)
+                gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+
+            list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+            for (idx = 0; idx < list0idx_1; idx++)
+            {
+                p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+                p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx];
+            }
+
+            p_dpb->listXsize[0] += list0idx_1;
+            p_dpb->listXsize[1] += list0idx_1;
+        }
+    }
+
+    // Setup initial list sizes at this point
+    p_dpb->nInitListSize[0] = p_dpb->listXsize[0];
+    p_dpb->nInitListSize[1] = p_dpb->listXsize[1];
+    if (pInfo->SliceHeader.slice_type != h264_PtypeI)
+    {
+        if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1))
+        {
+            // check if lists are identical, if yes swap first two elements of listX[1]
+            diff = 0;
+            for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1)
+            {
+                if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1;
+            }
+
+
+            if (!(diff))
+            {
+                list_idc       = p_dpb->listX_1[0];
+                p_dpb->listX_1[0] = p_dpb->listX_1[1];
+                p_dpb->listX_1[1] = list_idc;
+            }
+        }
+
+        // set max size
+        if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active)
+        {
+            p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+        }
+
+
+        if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active)
+        {
+            p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active;
+        }
+
+
+
+    }
+
+
+
+    /// DPB reorder list
+    h264_dpb_reorder_lists(pInfo);
+
+    return;
+}   //// End of init_dpb_list
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_short_term_pic ()
+//
+// Returns the frame store holding the short-term reference picture with the
+// given pic_num, or NULL if no such picture is available.  When the match is
+// a bottom field, *bottom_field_bit is set accordingly.
+//
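+// Note: frame.used_for_reference is treated as a two-bit mask here
+// (bit 0 = top field referenced, bit 1 = bottom field referenced, 3 = both
+// fields / whole frame), which is why the frame case tests "== 3" while the
+// field cases test "& 0x1" and "& 0x2".
+//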
+static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit)
+{
+    register uint32_t idx;
+    register frame_param_ptr temp_fs;
+
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    *bottom_field_bit = 0;
+    for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+    {
+        temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]];
+        if (pInfo->SliceHeader.structure == FRAME)
+        {
+            if (temp_fs->frame.used_for_reference == 3)
+                if (!(temp_fs->frame.is_long_term))
+                    if (temp_fs->frame.pic_num == pic_num) return temp_fs;
+        }
+        else // current picture is a field
+        {
+            if (temp_fs->frame.used_for_reference&0x1)
+                if (!(temp_fs->top_field.is_long_term))
+                    if (temp_fs->top_field.pic_num == pic_num)
+                    {
+                        return temp_fs;
+                    }
+
+            if (temp_fs->frame.used_for_reference&0x2)
+                if (!(temp_fs->bottom_field.is_long_term))
+                    if (temp_fs->bottom_field.pic_num == pic_num)
+                    {
+                        *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+                        return temp_fs;
+                    }
+        }
+    }
+    return NULL;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_long_term_pic ()
+//
+// Returns the frame store holding the long-term reference picture with the
+// given long_term_pic_num, or NULL if none; sets *bottom_field_bit when the
+// match is a bottom field.
+//
+
+static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit)
+{
+    register uint32_t idx;
+    register frame_param_ptr temp_fs;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    *bottom_field_bit = 0;
+    for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+    {
+        temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]];
+        if (pInfo->SliceHeader.structure == FRAME)
+        {
+            if (temp_fs->frame.used_for_reference == 3)
+                if (temp_fs->frame.is_long_term)
+                    if (temp_fs->frame.long_term_pic_num == long_term_pic_num)
+                        return temp_fs;
+        }
+        else
+        {
+            if (temp_fs->frame.used_for_reference&0x1)
+                if (temp_fs->top_field.is_long_term)
+                    if (temp_fs->top_field.long_term_pic_num == long_term_pic_num)
+                        return temp_fs;
+
+            if (temp_fs->frame.used_for_reference&0x2)
+                if (temp_fs->bottom_field.is_long_term)
+                    if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num)
+                    {
+                        *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+                        return temp_fs;
+                    }
+        }
+    }
+    return NULL;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_ref_pic_list ()
+//
+// Applies the slice-header reference picture list modification (reordering)
+// commands to the initial reference list selected by list_num (0 or 1).
+//
+
+struct list_value_t
+{
+    int32_t value;
+    struct list_value_t *next;
+};
+
+struct linked_list_t
+{
+    struct list_value_t *begin;
+    struct list_value_t *end;
+    struct list_value_t *entry;
+    struct list_value_t *prev_entry;
+    struct list_value_t list[32];
+};
+
+static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size)
+{
+    struct list_value_t *lvp;
+
+    lvp            = lp->list;
+    lp->begin      = lvp;
+    lp->entry      = lvp;
+    lp->end        = lvp + (size-1);
+    lp->prev_entry = NULL;
+
+    while (lvp <= lp->end)
+    {
+        lvp->value = *(vp++);
+        lvp->next  = lvp + 1;
+        lvp++;
+    }
+    lp->end->next = NULL;
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value)
+{
+    register struct list_value_t *lvp = lp->entry;
+    register struct list_value_t *lvp_prev;
+
+    if (lvp == NULL) {
+        lp->end->value = list_value;  // replace the end entry
+    } else if ((lp->begin==lp->end)||(lvp==lp->end))  // replace the begin/end entry and set the entry to NULL
+    {
+        lp->entry->value = list_value;
+        lp->prev_entry   = lp->entry;
+        lp->entry        = NULL;
+    }
+    else if (lvp->value==list_value)  // the entry point matches
+    {
+        lp->prev_entry = lvp;
+        lp->entry      = lvp->next;
+    }
+    else if (lvp->next == lp->end) // the entry is just before the end
+    {
+        // replace the end and swap the end and entry points
+        //                  lvp
+        //  prev_entry  => entry                    => old_end
+        //                 old_end & new_prev_entry => new_end & entry
+        lp->end->value = list_value;
+
+        if (lp->prev_entry)
+            lp->prev_entry->next = lp->end;
+        else
+            lp->begin            = lp->end;
+
+        lp->prev_entry = lp->end;
+        lp->end->next  = lvp;
+        lp->end        = lvp;
+        lvp->next      = NULL;
+    }
+    else
+    {
+        lvp_prev = NULL;
+        while (lvp->next) // do not check the end but we'll be in the loop at least once
+        {
+            if (lvp->value == list_value) break;
+            lvp_prev = lvp;
+            lvp = lvp->next;
+        }
+        lvp->value = list_value;   // ensure a match: if the loop found none, this overwrites the end node
+        if (lvp_prev != NULL)
+        {
+            // remove lvp from the list
+            lvp_prev->next = lvp->next;
+        }
+        if (lvp==lp->end) lp->end = lvp_prev;
+
+        // insert lvp in front of lp->entry
+        if (lp->entry==lp->begin)
+        {
+            lvp->next = lp->begin;
+            lp->begin = lvp;
+        }
+        else
+        {
+            lvp->next = lp->entry;
+            lp->prev_entry->next = lvp;
+        }
+        lp->prev_entry = lvp;
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+static void linked_list_output (struct linked_list_t *lp, int32_t *vp)
+{
+    register int32_t *ip1;
+    register struct list_value_t *lvp;
+
+    lvp  = lp->begin;
+    ip1  = vp;
+    while (lvp)
+    {
+        *(ip1++) = lvp->value;
+        lvp = lvp->next;
+    }
+    return;
+}
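+
+/*
+ * Minimal usage sketch of the three linked_list_* helpers above.  Kept out of
+ * the build with #if 0 and purely illustrative: the element values are
+ * hypothetical stand-ins for the packed list entries that
+ * h264_dpb_reorder_ref_pic_list() below feeds in.  Reordering by the value 12
+ * moves that entry to the front of the list and leaves the rest in order.
+ */
+#if 0
+static void linked_list_usage_sketch(void)
+{
+    uint8_t  init_list[4] = { 10, 11, 12, 13 };   // initialised reference list
+    int32_t  out_list[4]  = { 0 };
+    struct linked_list_t ll;
+
+    linked_list_initialize(&ll, init_list, 4);
+    linked_list_reorder(&ll, 12);          // one reordering command selecting 12
+    linked_list_output(&ll, out_list);     // out_list is now { 12, 10, 11, 13 }
+}
+#endif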
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t                   *remapping_of_pic_nums_idc;
+    list_reordering_num_t		*list_reordering_num;
+    int32_t                    bottom_field_bit;
+
+    int32_t  maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num;
+    int32_t  refIdxLX;
+    int32_t  i;
+
+    int32_t    PicList[32] = {0};
+    struct linked_list_t ll;
+    struct linked_list_t *lp = &ll;     // should consider using the scratch space
+
+    // Declaring these as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu
+    register frame_param_ptr temp_fs;
+    register int32_t temp;
+    register uint8_t  *ip1;
+
+    maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+
+    if (list_num == 0) // i.e. list 0
+    {
+        ip1 = p_dpb->listX_0;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc;
+        list_reordering_num       = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num;
+    }
+    else
+    {
+        ip1 = p_dpb->listX_1;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc;
+        list_reordering_num       = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num;
+    }
+
+
+    linked_list_initialize (lp, ip1, num_ref_idx_active);
+
+    currPicNum = pInfo->SliceHeader.frame_num;
+    if (pInfo->SliceHeader.structure != FRAME)
+    {
+
+        /* For a field picture the picture numbering doubles: MaxPicNum becomes
+           2*MaxFrameNum and CurrPicNum becomes 2*frame_num + 1, the extra +1
+           accounting for the current field's own polarity.
+        */
+        maxPicNum  <<= 1;
+        currPicNum <<= 1;
+        currPicNum++;
+    }
+
+    picNumLXPred = currPicNum;
+    refIdxLX = 0;
+
+    for (i = 0; remapping_of_pic_nums_idc[i] != 3; i++)
+    {
+        if (i > MAX_NUM_REF_FRAMES)
+        {
+            break;
+        }
+
+        if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering
+        {
+            temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1);
+            if (remapping_of_pic_nums_idc[i] == 0)
+            {
+                temp = picNumLXPred - temp;
+                if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum;
+                else           picNumLXNoWrap = temp;
+            }
+            else // (remapping_of_pic_nums_idc[i] == 1)
+            {
+                temp += picNumLXPred;
+                if (temp  >=  maxPicNum) picNumLXNoWrap = temp - maxPicNum;
+                else                     picNumLXNoWrap = temp;
+            }
+
+            // Updates for next iteration of the loop
+            picNumLXPred = picNumLXNoWrap;
+
+            if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum;
+            else                              pic_num = picNumLXNoWrap;
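+
+            // Worked example with hypothetical values: maxPicNum = 32,
+            // currPicNum = picNumLXPred = 5, abs_diff_pic_num_minus1 = 6 and
+            // idc == 0 give temp = 5 - 7 = -2, so picNumLXNoWrap = -2 + 32 = 30;
+            // since 30 > currPicNum, pic_num = 30 - 32 = -2, i.e. a PicNum that
+            // wrapped below zero.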
+
+            temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+        else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering
+        {
+            pic_num = list_reordering_num[i].long_term_pic_num;
+
+            temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+    }
+
+    linked_list_output (lp, PicList);
+
+    if (0 == list_num )
+    {
+        for (i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list0[i]=(uint8_t)PicList[i];
+        }
+    }
+    else
+    {
+        for (i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list1[i]=(uint8_t)PicList[i];
+        }
+    }
+
+
+    // Instead of updating the now reordered list here, just write it down...
+    // This way, we can continue to hold the initialised list in p_dpb->listX_0
+    // and therefore not need to update it every slice
+
+    //h264_dpb_write_list(list_num, PicList, num_ref_idx_active);
+
+    return num_ref_idx_active;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+void h264_dpb_RP_check_list (h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t    *p_list = pInfo->slice_ref_list0;
+
+    //
+    // If decoding starts from a recovery point (RP) without an exact entry point, all B frames belonging to the previous GOP should be thrown away!
+    //
+
+    if ((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) {
+        pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+        pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+    }
+
+
+    //
+    // Repair the reference list if it is damaged (RP recovery only)
+    //
+    if ((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received)
+    {
+
+        int32_t idx, rp_found = 0;
+
+        if ( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) ||
+                ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) )
+        {
+            if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list0;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_0;
+                //pInfo->sei_rp_received = 0;
+                //return;
+            }
+
+
+            for (idx = 0; idx < p_dpb->used_size; idx++) {
+                if (p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) {
+                    rp_found = 1;
+                    break;
+                }
+            }
+            if (rp_found) {
+#if 0
+                int32_t poc;
+
+                ///// Clear long-term ref list
+                for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+                {
+                    h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+                    h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+                }
+
+                ///// Clear short-term ref list
+                //while(p_dpb->used_size>1)
+                for (idx = 0; idx < p_dpb->used_size; idx++)
+                {
+                    int32_t idx_pos;
+                    //// find smallest non-output POC
+                    h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos);
+
+                    //// Remove all frames in previous GOP
+                    if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc))
+                    {
+                        // Remove from ref-list
+                        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+                        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+
+                        // Output from DPB
+                        //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                        //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0))
+                        {
+                            //int32_t existing;
+                            //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing);
+                            //p_dpb->last_output_poc = poc;
+                        }
+                        //h264_dpb_remove_frame_from_dpb(p_dpb, idx);		// Remove dpb.fs_dpb_idc[pos]
+
+                    }
+                }
+#endif
+
+                ///// Set the reference to last I frame
+                if ( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0]))
+                {
+                    /// Repair the reference list now
+                    h264_dpb_unmark_for_reference(p_dpb, p_list[0]);
+                    h264_dpb_remove_ref_list(p_dpb, p_list[0]);
+                    p_list[0] = pInfo->last_I_frame_idc;
+                    if (pInfo->SliceHeader.structure != FRAME)
+                        p_list[1] = (pInfo->last_I_frame_idc ^ 0x20);
+                }
+            }
+        }
+
+        pInfo->sei_rp_received = 0;
+        pInfo->sei_b_state_ready = 1;
+
+    }
+
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_lists ()
+//
+// Applies the slice-header reordering commands to list 0 (P and B slices) and
+// to list 1 (B slices), then runs the recovery-point list check.
+//
+
+void h264_dpb_reorder_lists(h264_Info * pInfo)
+{
+    int32_t currSliceType = pInfo->SliceHeader.slice_type;
+
+    if (currSliceType == h264_PtypeP )
+    {
+        /////////////////////////////////////////////// Reordering reference list for P slice
+        /// Forward reordering
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+
+        pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active;
+    } else if (currSliceType == h264_PtypeB)
+    {
+        /////////////////////////////////////////////// Reordering reference list for B slice
+        /// Forward reordering
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+
+        pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active;
+
+        /// Backward reordering
+        if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active);
+
+        pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active;
+    }
+
+    //// Check if need recover reference list with previous recovery point
+    if (!pInfo->img.second_field)
+    {
+        h264_dpb_RP_check_list(pInfo);
+    }
+
+
+    return;
+}
+
+////////////////////////////////////////// DPB management //////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_dpb_get_non_output_frame_number ()
+//
+// Returns the number of frames in the DPB that have not yet been output.
+//
+static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo)
+{
+    int32_t idx;
+    int32_t number=0;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    for (idx = 0; idx < p_dpb->used_size; idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+        if (viddec_h264_get_is_output(p_dpb->active_fs) == 0)
+        {
+            number++;
+        }
+    }
+
+    return number;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//// Store the previous picture in the DPB, update the DPB queue, and remove unused frames from the DPB
+
+void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    int32_t used_for_reference;
+    int32_t is_direct_output;
+    int32_t second_field_stored = 0;
+    int32_t poc;
+    int32_t pos;
+    int32_t flag;
+    int32_t first_field_non_ref = 0;
+    int32_t idr_flag;
+
+    if (NonExisting) {
+        if (p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC)
+            return;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+    } else {
+        if (p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC)
+            return;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+    }
+
+    if (NonExisting == 0)
+    {
+        //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1;
+        pInfo->img.last_has_mmco_5       = 0;
+        pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag;
+
+        //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag);
+        used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0);
+
+        switch (viddec_h264_get_dec_structure(p_dpb->active_fs))
+        {
+        case(TOP_FIELD)   : {
+            p_dpb->active_fs->top_field.used_for_reference = used_for_reference;
+            viddec_h264_set_is_top_used(p_dpb->active_fs, 1);
+            //p_dpb->active_fs->crc_field_coded     = 1;
+        }
+        break;
+        case(BOTTOM_FIELD): {
+            p_dpb->active_fs->bottom_field.used_for_reference = used_for_reference << 1;
+            viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1);
+            //p_dpb->active_fs->crc_field_coded     = 1;
+        }
+        break;
+        default: {
+            p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0;
+            viddec_h264_set_is_frame_used(p_dpb->active_fs, 3);
+            //if(pInfo->img.MbaffFrameFlag) p_dpb->active_fs->crc_field_coded  = 1;
+
+        }
+        break;
+        }
+
+        //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image;
+        //if (freeze_assert)  sei_information.disp_frozen = 1;
+
+        idr_flag = use_old ? pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag;
+        if (idr_flag) {
+            h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag);
+        } else {
+            // adaptive memory management
+            if (used_for_reference && pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) {
+                h264_dpb_adaptive_memory_management(pInfo);
+            }
+        }
+        // Reset the active frame store - could have changed in mem management ftns