LibMIX vbp parser code re-structure

BZ: 131068

The LibMIX vbp parser code is re-structured and legacy
code is removed.

Change-Id: I8216a21f39f29bce7ac5f6aaa25e164806e8f012
Signed-off-by: wfeng6 <wei.feng@intel.com>
Signed-off-by: Gu, Wangyi <wangyi.gu@intel.com>
Reviewed-on: http://android.intel.com:8080/130377
Reviewed-by: Shi, PingX <pingx.shi@intel.com>
Tested-by: Shi, PingX <pingx.shi@intel.com>
Reviewed-by: cactus <cactus@intel.com>
Tested-by: cactus <cactus@intel.com>
diff --git a/mixvbp/Android.mk b/mixvbp/Android.mk
new file mode 100644
index 0000000..01ddde2
--- /dev/null
+++ b/mixvbp/Android.mk
@@ -0,0 +1,13 @@
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+MIXVBP_DIR := $(LOCAL_PATH)
+
+include $(MIXVBP_DIR)/vbp_manager/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/h264/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/mp4/Android.mk
+include $(MIXVBP_DIR)/vbp_plugin/vc1/Android.mk
+
+ifeq ($(USE_HW_VP8),true)
+include $(MIXVBP_DIR)/vbp_plugin/vp8/Android.mk
+endif
diff --git a/mixvbp/include/vbp_trace.h b/mixvbp/include/vbp_trace.h
new file mode 100755
index 0000000..fde232c
--- /dev/null
+++ b/mixvbp/include/vbp_trace.h
@@ -0,0 +1,66 @@
+/*
+ INTEL CONFIDENTIAL
+ Copyright 2009 Intel Corporation All Rights Reserved.
+ The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
+
+ No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
+ */
+
+
+#ifndef VBP_TRACE_H_
+#define VBP_TRACE_H_
+
+
+
+#define VBP_TRACE
+
+
+#ifdef VBP_TRACE /* if VBP_TRACE is defined*/
+
+#ifndef ANDROID
+
+#include <stdio.h>
+#include <stdarg.h>
+
+extern void vbp_trace_util(const char* cat, const char* fun, int line, const char* format, ...);
+#define VBP_TRACE_UTIL(cat, format, ...) \
+vbp_trace_util(cat, __FUNCTION__, __LINE__, format,  ##__VA_ARGS__)
+
+
+#define ETRACE(format, ...) VBP_TRACE_UTIL("ERROR:   ",  format, ##__VA_ARGS__)
+#define WTRACE(format, ...) VBP_TRACE_UTIL("WARNING: ",  format, ##__VA_ARGS__)
+#define ITRACE(format, ...) VBP_TRACE_UTIL("INFO:    ",  format, ##__VA_ARGS__)
+#define VTRACE(format, ...) VBP_TRACE_UTIL("VERBOSE: ",  format, ##__VA_ARGS__)
+
+
+#else
+
+// For Android OS
+
+#define LOG_NDEBUG 0
+
+#define LOG_TAG "MixVBP"
+
+#include <utils/Log.h>
+#define ETRACE(...) LOGE(__VA_ARGS__)
+#define WTRACE(...) LOGW(__VA_ARGS__)
+#define ITRACE(...) LOGI(__VA_ARGS__)
+#define VTRACE(...) LOGV(__VA_ARGS__)
+
+#endif
+
+
+#else /* if VBP_TRACE is not defined */
+
+#define ETRACE(format, ...)
+#define WTRACE(format, ...)
+#define ITRACE(format, ...)
+#define VTRACE(format, ...)
+
+
+#endif /* VBP_TRACE*/
+
+
+#endif /*VBP_TRACE_H_*/
+
+
diff --git a/mixvbp/include/viddec_debug.h b/mixvbp/include/viddec_debug.h
new file mode 100755
index 0000000..fcae102
--- /dev/null
+++ b/mixvbp/include/viddec_debug.h
@@ -0,0 +1,31 @@
+#ifndef VIDDEC_DEBUG_H
+#define VIDDEC_DEBUG_H
+
+#ifndef VBP
+
+#ifdef HOST_ONLY
+#include <stdio.h>
+#include <osal.h>
+#define DEB                        OS_PRINT
+#define FWTRACE                    OS_PRINT("trace:%s %d\n", __FUNCTION__, __LINE__ );
+//  #define DEB(format, args...)
+//  #define FWTRACE
+#define DEB_FNAME(format, args...) OS_PRINT("%s:  %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ ,  ## args )
+#define CDEB(a, format, args...)   if(a != 0) {DEB(format, ##args);}
+#else
+#define DEB(format, args...)
+#define FWTRACE
+#define CDEB(a, format, args...)
+#define DEB_FNAME(format, args...)
+#endif
+
+#else  // VBP is defined
+
+#define DEB(format, args...)
+#define FWTRACE
+#define CDEB(a, format, args...)
+#define DEB_FNAME(format, args...)
+
+#endif // end of VBP
+
+#endif
diff --git a/mixvbp/include/viddec_fw_common_defs.h b/mixvbp/include/viddec_fw_common_defs.h
new file mode 100644
index 0000000..2cc32b7
--- /dev/null
+++ b/mixvbp/include/viddec_fw_common_defs.h
@@ -0,0 +1,223 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_COMMON_DEFS_H
+#define VIDDEC_FW_COMMON_DEFS_H
+
+#define VIDDEC_FW_PARSER_IPC_HOST_INT    0x87654321
+#define EMITTER_WORKLOAD_ENTRIES    2048
+
+/* Maximum supported dependent views for H264 MVC. Based on spec this can be 1023 */
+#define MVC_MAX_SUPPORTED_VIEWS  1
+
+/* This enum defines priority level for opening a stream */
+enum viddec_stream_priority
+{
+    viddec_stream_priority_BACKGROUND, /* Lowest priority stream */
+    viddec_stream_priority_REALTIME,   /* Real time highest priority stream */
+    viddec_stream_priority_INVALID,
+};
+
+/* This enum defines supported flush types */
+enum viddec_stream_flushtype
+{
+    VIDDEC_STREAM_FLUSH_DISCARD, /* Reinitialise to start state */
+    VIDDEC_STREAM_FLUSH_PRESERVE, /* Reinitialise to start state  by preserving sequence info*/
+};
+
+enum viddec_stream_inband_flags
+{
+    VIDDEC_STREAM_DEFAULT_FLAG=0, /* Default value for flags */
+    VIDDEC_STREAM_EOS,          /* End of stream message */
+    VIDDEC_STREAM_DISCONTINUITY,  /* new segment which forces flush and preserve */
+};
+
+/* Message descriptor for Parser's Input and output queues. needs to be 8 byte aligned */
+typedef struct viddec_input_buffer
+{
+    unsigned int             flags; /* Flags for Inband messages like EOS, valid range defined in viddec_stream_inband_flags */
+    unsigned int             phys;/* DDR addr of where ES/WKLD is at. */
+    unsigned int             len;/* size of buffer at phys_addr */
+    unsigned int             id;/* A id for the buffer which is not used or modified by the FW. */
+#ifdef HOST_ONLY
+    unsigned char           *buf; /* virt pointer to buffer. This is a don't care for FW */
+#endif
+} ipc_msg_data;
+
+typedef ipc_msg_data viddec_input_buffer_t;
+typedef ipc_msg_data viddec_ipc_msg_data;
+
+/* Return types for interface functions */
+typedef enum
+{
+    VIDDEC_FW_SUCCESS, /* succesful with current operation */
+    VIDDEC_FW_NORESOURCES, /* No resources to execute the requested functionality */
+    VIDDEC_FW_FAILURE,    /* Failed for Uknown reason */
+    VIDDEC_FW_INVALID_PARAM, /* The parameters that were passed are Invalid */
+    VIDDEC_FW_PORT_FULL,     /* The operation failed since queue is full */
+    VIDDEC_FW_PORT_EMPTY,   /* The operation failed since queue is empty */
+    VIDDEC_FW_NEED_FREE_WKLD, /* The operation failed since a free wkld is not available */
+} viddec_fw_return_types_t;
+
+/* Defines for Interrupt mask and status */
+typedef enum
+{
+    VIDDEC_FW_WKLD_DATA_AVAIL=1, /* A processed workload is available */
+    VIDDEC_FW_INPUT_WATERMARK_REACHED=2,     /* The input path is below the set watermark for current stream */
+} viddec_fw_parser_int_status_t;
+
+/* Defines for attributes on stream, If not set explicitly will be default values */
+typedef enum
+{
+    VIDDEC_FW_INPUT_Q_WATERMARK, /* Define for setting Input queue watermarks */
+    VIDDEC_FW_STREAM_PRIORITY,    /* Define for setting stream priority */
+} viddec_fw_stream_attributes_t;
+
+typedef struct
+{
+    unsigned int input_q_space; /* Num of messages that can be written to input queue */
+    unsigned int output_q_data; /* Num of messages in output queue */
+    unsigned int workload_q_status; /* Number of free wklds available to parser */
+} viddec_fw_q_status_t;
+
+typedef struct
+{
+    unsigned int to_fw_q_space;     /* Num of messages that can be written to input queue */
+    unsigned int from_fw_q_data;    /* Num of messages in output queue */
+} viddec_fw_decoder_q_status_t;
+
+enum viddec_fw_decoder_int_status
+{
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_0       = (1<< 0), /* Decoder Stream 0 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_1       = (1<< 1), /* Decoder Stream 1 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_2       = (1<< 2), /* Decoder Stream 2 Requires Service */
+
+
+    VIDDEC_FW_DECODER_INT_STATUS_STREAM_HIGH    = (1<<30), /* Any Decoder Stream >= 30 Requires Service */
+    VIDDEC_FW_DECODER_INT_STATUS_AUTO_API       = (1<<31)  /* An Auto-API Function has completed */
+};
+
+/** Hardware Accelerated stream formats */
+typedef enum viddec_stream_format
+{
+    MFD_STREAM_FORMAT_MPEG=1,
+    MFD_STREAM_FORMAT_H264,
+    MFD_STREAM_FORMAT_VC1,
+    MFD_STREAM_FORMAT_MPEG42,
+
+    MFD_STREAM_FORMAT_MAX,   /* must be last  */
+    MFD_STREAM_FORMAT_INVALID
+} viddec_stream_format;
+
+/* Workload specific error codes */
+enum viddec_fw_workload_error_codes
+{
+    VIDDEC_FW_WORKLOAD_SUCCESS               = 0,
+    VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE      = (1 << 0),/* Parser/Decoder detected a non decodable error with this workload */
+    VIDDEC_FW_WORKLOAD_ERR_BUFFERS_OVERFLOW  = (1 << 1),/* Parser Detected more than 64 buffers between two start codes */
+    VIDDEC_FW_WORKLOAD_ERR_ITEMS_OVERFLOW    = (1 << 2),/* Parser Detected overflow of currently allocated workload memory */
+    VIDDEC_FW_WORKLOAD_ERR_FLUSHED_FRAME     = (1 << 3),/* This is impartial or empty frame which was flushed by Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM      = (1 << 4),/* This is impartial or empty frame from Parser/Decoder */
+    VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED       = (1 << 5),/* Parser Detected unsupported feature in the stream */
+    /* First 8 bits reserved for Non Decodable errors */
+    VIDDEC_FW_WORKLOAD_ERR_CONCEALED         = (1 << 9),/* The decoder concealed some errors in this frame */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE = (1 << 10),/* Deocder/parser detected at least one of the required reference frames is missing */
+    VIDDEC_FW_WORKLOAD_ERR_IN_REFERENCE      = (1 << 11),/* Deocder/parser detected at least one of the reference frames has errors in it */
+    VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD      = (1 << 12),/* Parser detected at least one of the fields are missing */
+    VIDDEC_FW_WORKLOAD_ERR_PARTIAL_SLICE     = (1 << 13),/* Deocder detected at least one of the fields are missing */
+    VIDDEC_FW_WORKLOAD_ERR_MACROBLOCK        = (1 << 14),/* Deocder detected macroblock errors */
+    VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO  = (1 << 16),/* Parser detected sequence information is missing */
+
+    VIDDEC_FW_WORKLOAD_ERR_TOPFIELD          = (1 << 17),/* Decoder/Parser detected  errors in "top field" or "frame"*/
+    VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD       = (1 << 18),/* Decoder/Parser detected  errors in "bottom field" or "frame" */
+    VIDDEC_FW_WORKLOAD_ERR_BITSTREAM_ERROR   = (1 << 19),/* Parser detected errors */
+
+};
+
+enum viddec_fw_mpeg2_error_codes
+{
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR       = (1 << 24),/* Parser detected corruption in sequence header. Will use the previous good sequence info, if found. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT       = (1 << 25),/* Parser detected corruption in seqeunce extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT  = (1 << 26),/* Parser detected corruption in sequence display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR       = (1 << 27),/* Parser detected corruption in GOP header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR       = (1 << 26),/* Parser detected corruption in picture header. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT   = (1 << 27),/* Parser detected corruption in picture coding extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT  = (1 << 28),/* Parser detected corruption in picture display extension. */
+    VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT      = (1 << 29),/* Parser detected corruption in quantization matrix extension. */
+};
+
+#ifdef VBP
+
+#ifndef NULL
+#define NULL (void*)0x0
+#endif
+
+#ifndef true
+#define true 1
+#define false 0
+#endif
+
+#ifndef __cplusplus
+#ifndef bool
+typedef int bool;
+#endif
+#endif
+
+#endif
+/* end of #ifdef VBP */
+
+#endif
diff --git a/mixvbp/include/viddec_fw_decoder_host.h b/mixvbp/include/viddec_fw_decoder_host.h
new file mode 100644
index 0000000..d902520
--- /dev/null
+++ b/mixvbp/include/viddec_fw_decoder_host.h
@@ -0,0 +1,242 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_DECODER_HOST_H
+#define VIDDEC_FW_DECODER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "viddec_fw_common_defs.h"
+
+    /** @weakgroup viddec Fw Decoder interface Functions */
+    /** @ingroup viddec_fw_decoder */
+    /*@{*/
+
+    /**
+       This function returns the size required for loading fw.
+       @retval  size : Required size.
+    */
+    uint32_t viddec_fw_decoder_query_fwsize(void);
+
+    /**
+       This function loads Decoder Firmware and initialises necessary state information.
+       @param[in] phys                : Physical address on where firmware should be loaded.
+       @param[in] len                 : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS      : Successfully loaded firmware.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+       @retval VIDDEC_FW_NORESOURCES  : Failed to allocate resources for Loading firmware.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_decoder_loadfw(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns required size for global memory for all supported decoders. This is a synchronous message to FW.
+       @param[out] size               : returns the size required.
+       @retval VIDDEC_FW_SUCCESS      : Successfuly got required information from FW.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_query_fwsize_scratchmem(uint32_t *size);
+
+    /**
+       This function sets global memory for the firmware to use.This is a synchronous message to FW.
+       @param[in] phys                : Physical address on where global memory starts.
+       @param[in] len                 : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS      : Successfully setup global memory.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_set_fw_scratchmem(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns the size required opening a stream. This a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want information about.
+       @param[out] size               : Size of memory required for opening a stream.
+       @retval VIDDEC_FW_SUCCESS      : Successfuly talked to FW and got required size.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+    */
+    uint32_t viddec_fw_decoder_query_streamsize(uint32_t codec_type, uint32_t *size);
+
+    /**
+       This function opens requested codec.This a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want to open.
+       @param[in] phys                : Physical address of allocated memory for this codec.
+       @param[in] prority             : Priority of stream. 1 for realtime and 0 for background.
+       @param[out] strm_handle        : Handle of the opened stream.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Opened the stream.
+       @retval VIDDEC_FW_FAILURE      : Failed to Open a stream.
+    */
+    uint32_t viddec_fw_decoder_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+
+    /**
+       This function closes stream.This a synchronous message to FW.
+       @param[in] strm_handle        : Handle of the stream to close.
+    */
+    void viddec_fw_decoder_closestream(uint32_t strm_handle);
+
+    /**
+       This function allows to get current status of the decoder workload queues. If the current stream is active we return
+       number of input messages that can be written to input queue and the number of messages in output queue of the stream.
+
+       Normally this is called when Host receives an interrupt from decoder, In which case before releasing the INT
+       Host will try its best to keep the FW busy. Normally when a interrupt is received it means at least one workload is
+       written into output queue of a stream.
+       @param[in] strm_handle         : The handle of stream that we want to get status of queues.
+       @param[out] status             : The status of each queue gets updated in here.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Got the status information.
+       @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
+    */
+    uint32_t viddec_fw_decoder_get_queue_status(uint32_t strm_handle, viddec_fw_decoder_q_status_t *status);
+
+    /**
+       This function flushes the current stream. This is a synchronous message to FW.
+       Before calling this function the host has to make sure the output queue of the firmware
+       is empty. After this function is executed the FW will read all entries in input
+       wkld buffer queue into output queue. After this operation the host has to read all entries
+       in output queue again to finish the flush operation.
+       @param[in] flush_type          : Type of flush we want to perform.ex:flush and discard.
+       @param[in]  strm_handle        : Handle of the stream we want to flush.
+       @retval VIDDEC_FW_SUCCESS      : Successfully flushed the stream.
+       @retval VIDDEC_FW_FAILURE      : Failed to flush a stream.
+    */
+    uint32_t viddec_fw_decoder_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+    /**
+       This function sends an input workload buffer. The host should provide required frame buffers in this workload before
+       sending it to fw.
+       @param[in] strm_handle         : The handle of stream that we want to send workload buffer to.
+       @param[in] cur_wkld            : The workload buffer we want to send.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_FULL    : Port to fw full unsuccesful in sending message.
+    */
+    uint32_t viddec_fw_decoder_send(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+    /**
+       This function gets the decoded workload from fw.
+       @param[in] strm_handle         : The handle of stream that we want to read workload from.
+       @param[out] cur_wkld           : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_EMPTY   : Workload port is empty,unsuccesful in reading wkld.
+    */
+    uint32_t viddec_fw_decoder_recv(uint32_t strm_handle, ipc_msg_data *cur_wkld);
+
+    /**
+       This function unloads Decoder Firmware and free's the resources allocated in Load fw.
+       If this function is called before load fw it will crash with a segmentation fault.
+    */
+    void viddec_fw_decoder_deinit(void);
+
+    /**
+       This function gets the major and minor revison numbers of the loaded firmware.
+       @param[out] major              : The major revision number.
+       @param[out] minor              : The minor revision number.
+       @param[out] build              : The Internal Build number.
+    */
+    void viddec_fw_decoder_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+    /**
+       This function returns the interrupt status of all streams which need to be processed. A value of zero
+       means no active streams which generated this interrupt.
+    */
+    uint32_t viddec_fw_decoder_active_pending_interrupts(void);
+
+    /**
+       This function clears the interrupts for all active streams represented by status input parameter.
+       The status should always be a value that was returned by viddec_fw_decoder_active_pending_interrupts().
+       @param[in] status              : The status value that was returned by viddec_fw_decoder_active_pending_interrupts().
+    */
+    void viddec_fw_decoder_clear_all_pending_interrupts(uint32_t status);
+
+    /**
+       This function enables/disables interrupt for the stream specified.
+       @param[in] strm_handle         : The handle of stream that we want enable or disable interrupts for.
+       @param[in] enable              : Boolean value if ==0 means disable Interrupts else enable.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_INVALID_PARAM: Invalid stream handle was passed.
+    */
+    uint32_t viddec_fw_decoder_set_stream_interrupt_mask(uint32_t stream_handle, uint32_t enable);
+
+    /**
+       This function returns which stream interrupted in the past based on status, which is a snapshot of
+       interrupt status that was cleared in the past. The host has to call clear with status information
+       before calling this function again with status value. The Host should do this operation until this function
+       returns 0, which means all the streams that generated interrupt have been processed.
+       @param[out]strm_handle         : The handle of a stream that generated interrupt.
+       @param[in] status              : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+       @retval 1                      : A valid stream handle was found.
+       @retval 0                      : No more streams from the status which caused interrupt.
+    */
+    uint32_t viddec_fw_decoder_get_next_stream_from_interrupt_status(uint32_t status, uint32_t *stream_handle);
+
+    /**
+       This function clears the stream_handle from the status snapshot that we got from viddec_fw_decoder_active_pending_interrupts(),
+       This should be called after host performs all necessary actions for the stream.
+       @param[in] strm_handle         : The handle of a stream that we want to clear to indicate we handled it.
+       @param[in] status              : Snapshot of Interrupt status which was returned by viddec_fw_decoder_active_pending_interrupts().
+       @retval 1                      : Operation was sucessful.
+       @retval 0                      : Invalid stream handle was passed.
+    */
+    uint32_t viddec_fw_decoder_clear_stream_from_interrupt_status(uint32_t *status, uint32_t stream_handle);
+
+    /*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_DECODER_HOST_H
diff --git a/mixvbp/include/viddec_fw_frame_attr.h b/mixvbp/include/viddec_fw_frame_attr.h
new file mode 100644
index 0000000..4f4b479
--- /dev/null
+++ b/mixvbp/include/viddec_fw_frame_attr.h
@@ -0,0 +1,294 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_FRAME_ATTR_H
+#define VIDDEC_FW_FRAME_ATTR_H
+
+#include "viddec_fw_item_types.h"
+
+#define VIDDEC_PANSCAN_MAX_OFFSETS 4
+#define VIDDEC_MAX_CPB_CNT 32
+
+/**
+This enumeration lists all the frame types defined by the MPEG, VC1 and H264 specifications.
+Frame types applicable to a single codec are specified in the comments.
+*/
+typedef enum
+{
+    VIDDEC_FRAME_TYPE_INVALID=0,   /** Unknown type - default value */
+    VIDDEC_FRAME_TYPE_IDR=0x1,       /** IDR frame - h264 only */
+    VIDDEC_FRAME_TYPE_I=0x2,         /** I frame */
+    VIDDEC_FRAME_TYPE_P=0x3,         /** P frame */
+    VIDDEC_FRAME_TYPE_B=0x4,         /** B frame */
+    VIDDEC_FRAME_TYPE_BI=0x5,        /** BI frame - Intracoded B frame - vc1 only */
+    VIDDEC_FRAME_TYPE_SKIP=0x6,      /** Skipped frame - vc1 only */
+    VIDDEC_FRAME_TYPE_D=0x7,         /** D frame - mpeg1 only */
+    VIDDEC_FRAME_TYPE_S=0x8,         /** SVOP frame - mpeg4 only - sprite encoded frame - treat as P */
+    VIDDEC_FRAME_TYPE_MAX,
+} viddec_frame_type_t;
+
+/**
+This structure contains the content size info extracted from the stream.
+*/
+typedef struct viddec_rect_size
+{
+    unsigned int width;
+    unsigned int height;
+} viddec_rect_size_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_mpeg2_frame_center_offset
+{
+    int horz;
+    int vert;
+} viddec_mpeg2_frame_center_offset_t;
+
+/**
+This structure contains the MPEG2 specific frame attributes.
+*/
+typedef struct viddec_mpeg2_frame_attributes
+{
+    /**
+    10 bit unsigned integer corresponding to the display order of each coded picture
+    in the stream (or gop if gop header is present).
+    Refer to "temporal_reference" of the picture header in ITU-T H.262 Specification.
+    */
+    unsigned int temporal_ref;
+
+    /**
+    Pan/Scan rectangle info
+    Refer to the picture display extension in ITU-T H.262 Specification.
+    */
+    viddec_mpeg2_frame_center_offset_t frame_center_offset[VIDDEC_PANSCAN_MAX_OFFSETS];
+    unsigned int number_of_frame_center_offsets;
+
+    /**
+    Top-Field first flag
+    Refer to "top_field_first" of the picture coding extension in ITU-T H.262 Specification.
+    */
+    unsigned int top_field_first;
+
+    /**
+    Progressive frame flag - Indicates if current frame is progressive or not.
+    Refer to "progressive_frame" of the picture coding extension in ITU-T H.262 Specification.
+    */
+    unsigned int progressive_frame;
+
+    /**
+    Frame/field polarity for each coded picture.
+    Refer to Table 6-14 in ITU-T H.262 Specification.
+    */
+    unsigned int picture_struct;
+
+    /**
+    Repeat field/frame flag.
+    Refer to "repeat_first_field" of the picture coding extension in ITU-T H.262 Specification.
+    */
+    unsigned int repeat_first_field;
+
+
+} viddec_mpeg2_frame_attributes_t;
+
+/**
+This structure contains MPEG2 specific pan scan offsets extracted from the stream.
+*/
+typedef struct viddec_vc1_pan_scan_window
+{
+    unsigned int hoffset;
+    unsigned int voffset;
+    unsigned int width;
+    unsigned int height;
+} viddec_vc1_pan_scan_window_t;
+
+/**
+This structure contains the VC1 specific frame attributes.
+*/
+typedef struct viddec_vc1_frame_attributes
+{
+    /**
+    Temporal Reference of frame/field.
+    Refer to "TFCNTR" in the picture layer of the SMPTE VC1 Specification.
+    */
+    unsigned int tfcntr;
+
+    /**
+    Frame/field repeat information in the bitstream.
+    Refer to "RPTFRM", "TFF", "BFF" in the picture layer
+    of the SMPTE VC1 Specification.
+    */
+    unsigned int rptfrm;
+    unsigned int tff;
+    unsigned int rff;
+
+    /**
+    Pan-scan information in the bitstream.
+    Refer to "PANSCAN_FLAG" in the entrypoint layer, "PS_PRESENT", "PS_HOFFSET", "PS_VOFFSET",
+    "PS_WIDTH" and "PS_HEIGHT" in the picture layer of the SMPTE VC1 Specification.
+    */
+    unsigned int panscan_flag;
+    unsigned int ps_present;
+    unsigned int num_of_pan_scan_windows;
+    viddec_vc1_pan_scan_window_t pan_scan_window[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+} viddec_vc1_frame_attributes_t;
+
+/**
+This structure contains the H264 specific frame attributes.
+*/
+typedef struct viddec_h264_frame_attributes
+{
+    /**
+       used_for_reference : 1 means this frame is used as ref frame of others. 0 means no other frame references this frame
+    */
+    unsigned int used_for_reference;
+    /**
+       Picture Order Count for the current frame/field.
+       This value is computed using information from the bitstream.
+       Refer to Section 8.2.1, function 8-1 of the ITU-T H.264 Specification.
+       These fields will be supported in the future
+    */
+    int top_field_poc;
+    int bottom_field_poc;
+
+    /**
+       Display size, which is cropped from content size.
+       Currently, the cont_size is cropped, so this parameter is redundant, but in future, cont_size may be changed
+    */
+    viddec_rect_size_t cropped_size;
+
+    /**
+       top_field_first: 0 means bottom_field_POC is smaller than top_field_POC, else 1
+    */
+    unsigned int top_field_first;
+
+    /**
+       field_pic_flag: 0 means all slices of this frame are frame-based encoded, else 1
+    */
+    unsigned int field_pic_flag;
+
+    /**
+       This data type holds view specific information of current frame.
+       The following information is packed into this data type:
+         view_id(0-9 bits):        Assigned 10 bit value in the encoded stream.
+         priority_id(10-15bits):   Assigned 6 bit priority id.
+         is_base_view(16th bit):   Flag on true indicates current frame belongs to base view, else dependent view.
+     */
+#define viddec_fw_h264_mvc_get_view_id(x)              viddec_fw_bitfields_extract( (x)->view_spcific_info, 0, 0x3FF)
+#define viddec_fw_h264_mvc_set_view_id(x, val)         viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 0, 0x3FF)
+#define viddec_fw_h264_mvc_get_priority_id(x)          viddec_fw_bitfields_extract( (x)->view_spcific_info, 10, 0x3F)
+#define viddec_fw_h264_mvc_set_priority_id(x, val)     viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 10, 0x3F)
+#define viddec_fw_h264_mvc_get_is_base_view(x)         viddec_fw_bitfields_extract( (x)->view_spcific_info, 16, 0x1)
+#define viddec_fw_h264_mvc_set_is_base_view(x, val)    viddec_fw_bitfields_insert( (x)->view_spcific_info, val, 16, 0x1)
+    unsigned int view_spcific_info;
+} viddec_h264_frame_attributes_t;
+
+/**
+This structure contains the MPEG4 specific frame attributes.
+*/
+typedef struct viddec_mpeg4_frame_attributes
+{
+    /**
+    Top-Field first flag
+    Refer to "top_field_first" of the Video Object Plane of the MPEG4 Spec.
+    */
+    unsigned int top_field_first;
+
+} viddec_mpeg4_frame_attributes_t;
+
+/**
+This structure groups all the frame attributes that are exported by the firmware.
+The frame attributes are split into attributes that are common to all codecs and
+that are specific to codec type.
+*/
+typedef struct viddec_frame_attributes
+{
+    /**
+    Content size specified in the stream.
+    For MPEG2, refer to "horizontal_size_value, vertical_size_value" of the sequence header and
+    "horizontal_size_extension, vertical_size_extension" of the sequence extension in ITU-T H.262 Specification.
+    For H264, refer to "pic_width_in_mbs_minus1" and "pic_height_in_map_units_minus1" of the
+    sequence parameter set in ITU-T H.264 Specification.
+    For VC1, refer to "MAX_CODED_WIDTH" and "MAX_CODED_HEIGHT" in the sequence layer,
+    "CODED_SIZE_FLAG", "CODED_WIDTH" and "CODED_HEIGHT" in the entrypoint layer of the SMPTE VC1 Specification.
+    */
+    viddec_rect_size_t cont_size;
+
+    /**
+    Type of frame populated in the workload.
+    frame_type contains the frame type for progressive frame and the field type for the top field for interlaced frames.
+    bottom_field_type contains the field type for the bottom field for interlaced frames.
+    For MPEG2, refer to "picture_coding_type" in picture header (Table 6-12) in ITU-T H.262 Specification.
+    For H264, refer to "slice_type" in slice header (Table 7-6) in ITU-T H.264 Specification.
+    For VC1, refer to "PTYPE" and FPTYPE in the picture layer (Tables 33, 34, 35, 105) in SMPTE VC1 Specification.
+    */
+    viddec_frame_type_t frame_type;
+    viddec_frame_type_t bottom_field_type;
+
+    /** Codec specific attributes */
+    union
+    {
+        viddec_mpeg2_frame_attributes_t  mpeg2;
+        viddec_vc1_frame_attributes_t    vc1;
+        viddec_h264_frame_attributes_t   h264;
+        viddec_mpeg4_frame_attributes_t  mpeg4;
+    };
+
+} viddec_frame_attributes_t;
+
+#endif /* VIDDEC_FW_FRAME_ATTR_H */
diff --git a/mixvbp/include/viddec_fw_item_types.h b/mixvbp/include/viddec_fw_item_types.h
new file mode 100644
index 0000000..472dff2
--- /dev/null
+++ b/mixvbp/include/viddec_fw_item_types.h
@@ -0,0 +1,784 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_ITEM_TYPES_H
+#define VIDDEC_FW_ITEM_TYPES_H
+
+
+/* The following macros are defined to pack data into 32 bit words.
+   mask:    A 32 bit value of N 1 bits starting from lsb where N represents the length of data we are packing.
+   start:   Bit start position of data we want.
+          ex:  If we want to pack Height(16bits), width(16bits) where width is from (1:16) and height is from (17:32), these are
+          the start and mask values for width and height.
+          width: start = 0 mask=0xFFFF
+          Height:start=  16 mask=0xFFFF
+
+   extract: will extract data from x_32 of unsigned integer type from bit start offset(0 to 31) of length based on mask and returns in
+            unsigned integer type.
+   insert:  Will insert N bits from val_32 ,where N is length based on mask, into x_32 from bit offset based on start. val_32 is expected to
+           be a unsigned int of N bits starting with lsb.
+*/
+
+#define viddec_fw_bitfields_extract(x_32, start, mask)     (((x_32) >> (start)) & (mask) )
+#define viddec_fw_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+
+/* Workload items type. Each item here represents data that Parser detected ex:slice data which
+ is used either by host or decoder.*/
+typedef enum workload_item_type
+{
+    VIDDEC_WORKLOAD_INVALID                               =0x0,/* Unknown type */
+    VIDDEC_WORKLOAD_PIXEL_ES                              =0x100,/* Slice data tag */
+    VIDDEC_WORKLOAD_TAG                                   =0x200,/* Frame association tag */
+    VIDDEC_WORKLOAD_USERDATA                              =0x300,/* user data tag */
+
+    VIDDEC_WORKLOAD_IBUF_DONE                             =0x500,/* Es buffer completely used tag */
+    VIDDEC_WORKLOAD_IBUF_CONTINUED                        =0x600,/* Es buffer partially used tag */
+    VIDDEC_WORKLOAD_IBUF_DISCONTINUITY                    =0x700,/* Discontinuity tag on first workload after discontinuity */
+    VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER               =0x800, /* Reorder frames in DPB tag */
+    VIDDEC_WORKLOAD_IBUF_EOS                              =0x900,/* EOS tag on last workload used for current stream */
+    VIDDEC_WORKLOAD_SEQUENCE_INFO                         =0xa00,/* MPEG2 Seq Hdr, H264 SPS, VC1 SeqLayer */
+    VIDDEC_WORKLOAD_DISPLAY_INFO                          =0xb00,/* MPEG2 Seq Disp Ext, H264 VUI */
+    VIDDEC_WORKLOAD_GOP_INFO                              =0xc00,/* MPEG2 GOP, VC1 Entrypoint */
+    VIDDEC_WORKLOAD_SEQ_USER_DATA                         =0xd00,/* MPEG2, VC1 Sequence Level User data */
+    VIDDEC_WORKLOAD_GOP_USER_DATA                         =0xe00,/* MPEG2, VC1 Gop Level User data */
+    VIDDEC_WORKLOAD_FRM_USER_DATA                         =0xf00,/* MPEG2 Picture User data, VC1 Frame User data */
+
+    VIDDEC_WORKLOAD_FLD_USER_DATA                         =0x1000,/* MPEG2, VC1 Field User data */
+    VIDDEC_WORKLOAD_SLC_USER_DATA                         =0x1100,/* VC1 Slice User data */
+    VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA                  =0x1200,/* MPEG4 Visual Object User data */
+    VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C                =0x1200,/* VC1 Only */
+    VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA                   =0x1300,/* MPEG4 Video Object Layer User data */
+    VIDDEC_WORKLOAD_H264_CROPPING                         =0x1400,/* H264 only */
+    VIDDEC_WORKLOAD_H264_PAN_SCAN                         =0x1500,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_PIC_TIMING                        =0x1600,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT                     =0x1700,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED              =0x1800,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED            =0x1900,/* H264 only */
+    VIDDEC_WORKLOAD_SEI_RECOVERY_POINT                    =0x1a00,/* H264 only */
+    VIDDEC_WORKLOAD_MPEG2_SEQ_EXT                         =0x1b00,/* MPEG2 Only - Sequence Extension */
+    VIDDEC_WORKLOAD_H264_MVC_SPS_VIEW_IDS                 =0x1c00,/* H264 only */
+    VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ                  =0x1d00,/* MPEG4 Only - Visual Sequence */
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ                       =0x1e00,/* MPEG4 Only - Video Object Layer */
+    VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ                   =0x1f00,/* MPEG4 Only - Group of Video Object Planes */
+
+    VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT               =0x2000,/* MPEG4 Only - Video Plane with Short Header */
+    VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO                  =0x2100,/* H264 only */
+
+    VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0                    =0x10000,/* required reference frames tag,last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0                   =0x20000,/* release frames tag, last eight bits indicate index in dpb*/
+    VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0                   =0x30000,/* Display order in DPB tag, for H264 */
+    VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0                   =0x40000,/* Release frames but not display, for H264 */
+    VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0                   =0x50000,/* Release list while EOS, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0                   =0x60000,/* Display list while EOS, last eight bits indicate index in dpb */
+    VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0                    =0x70000,/* required for H264 as it needs whole DPB for each frame */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_0                      =0x80000,/* ref list 0 for H264 */
+    VIDDEC_WORKLOAD_H264_REFR_LIST_1                      =0x90000,/* ref list 1 for H264 */
+    VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY                    =0xa0000,/* eos items begin after this */
+
+    VIDDEC_WORKLOAD_DECODER_SPECIFIC                      =0x100000,/* pvt info for decoder tags */
+    VIDDEC_WORKLOAD_MAX,
+} workload_item_type;
+
+struct h264_witem_sps_mvc_id
+{
+    /*
+      0-9:    num_views_minus1
+      10-19:  start index of views in current item.
+      20-23:  Number of valid items.
+    */
+#define viddec_fw_h264_sps_mvc_id_get_num_views_minus1(x)         viddec_fw_bitfields_extract( (x)->num_views, 0, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_set_num_views_minus1(x, val)    viddec_fw_bitfields_insert( (x)->num_views, val, 0, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_get_cur_start_index(x)          viddec_fw_bitfields_extract( (x)->num_views, 10, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_set_cur_start_index(x, val)     viddec_fw_bitfields_insert( (x)->num_views, val, 10, 0x3FF)
+#define viddec_fw_h264_sps_mvc_id_get_num_cur_valid_items(x)      viddec_fw_bitfields_extract( (x)->num_views, 20, 0x7)
+#define viddec_fw_h264_sps_mvc_id_set_num_cur_valid_items(x, val) viddec_fw_bitfields_insert( (x)->num_views, val, 20, 0x7)
+    unsigned int num_views;
+
+    /* We pack six ids into two integers. Each packed_view(integer) contains three 10 bit ids at 0-9, 10-19, 20-29
+       These values can be extracted/set using viddec_fw_h264_sps_mvc_id_get_data_frm_index()
+       and viddec_fw_h264_sps_mvc_id_set_data_frm_index() functions.
+    */
+#define viddec_fw_h264_sps_mvc_id_max_packed_ids        6 /* Max number of packed ids in a workload item */
+    unsigned int packed_view[2];
+};
+
+/* This function extracts a 10 bit view id of index( <6) that was packed into h264_witem_sps_mvc_id structure */
+static inline unsigned int viddec_fw_h264_sps_mvc_id_get_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index)
+{
+    unsigned int start=0, *word;
+
+    start = ((index > 2) ?(index - 3) : index) *10;
+    word = &(data->packed_view[(index > 2) ? 1:0]);
+    return viddec_fw_bitfields_extract(*word, start, 0x3FF);
+}
+
+/* This function packs  a 10 bit view id(val) at index( <6) in h264_witem_sps_mvc_id structure */
+static inline void viddec_fw_h264_sps_mvc_id_set_data_frm_index(struct h264_witem_sps_mvc_id *data, unsigned int index, unsigned int val)
+{
+    unsigned int start=0, *word;
+
+    start = ((index > 2) ?(index - 3) : index) *10;
+    word = &(data->packed_view[(index > 2) ? 1:0]);
+    viddec_fw_bitfields_insert(*word, val, start, 0x3FF);
+}
+
+/* 16-byte workload */
+typedef struct viddec_workload_item
+{
+    enum workload_item_type vwi_type;
+    union
+    {
+        struct
+        {
+            unsigned int    es_phys_addr;
+            unsigned int    es_phys_len;
+            unsigned int    es_flags;
+        } es;
+        struct
+        {
+            unsigned int    tag_phys_addr;
+            unsigned int    tag_phys_len;
+            unsigned int    tag_value;
+        } tag;
+        struct
+        {
+            unsigned int    data_offset;
+            unsigned int    data_payload[2];
+        } data;
+        struct
+        {
+            signed int      reference_id;     /* Assigned by parser */
+            unsigned int    luma_phys_addr;   /* assigned by host, for DM */
+            unsigned int    chroma_phys_addr; /* assigned by host, for DM */
+        } ref_frame;
+        struct   /* when vwi_type == VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER */
+        {
+            signed int      ref_table_offset;    /* Index of first "reordered" */
+            /* index from Current[] for Next[offset+0], Ref[offset+1], Ref[offset+2], Ref[offset+3] */
+            unsigned int    ref_reorder_00010203;
+            /* index from Current[] for Next[offset+4], Ref[offset+5], Ref[offset+6], Ref[offset+7] */
+            unsigned int    ref_reorder_04050607;
+        } ref_reorder;
+        struct
+        {
+            /* we pack a maximum of 11 bytes of user data and 1 byte for size */
+            /* TODO: we can pack 12 bytes and use bottom 8 bits of type to indicate size */
+#define viddec_fw_get_user_data_size(x)  ((x)->user_data.size)
+#define viddec_fw_get_user_data(x)   (unsigned char *)&((x)->user_data.data_payload[0])
+            unsigned char size;
+            unsigned char data_payload[11];
+            /*
+              ITEM TYPES WHICH use this:
+              VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED, VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED,
+              VIDDEC_WORKLOAD_SEQ_USER_DATA, VIDDEC_WORKLOAD_GOP_USER_DATA,
+              VIDDEC_WORKLOAD_FRM_USER_DATA, VIDDEC_WORKLOAD_FLD_USER_DATA, VIDDEC_WORKLOAD_SLC_USER_DATA,
+            */
+        } user_data;
+        struct
+        {
+            // Sequence Header Item I (From LSB):
+            //    - horizontal_size_value    - 12 bits
+            //    - vertical_size_value      - 12 bits
+            //    - aspect_ratio_information - 4 bits
+            //    - frame_rate_code          - 4 bits
+#define viddec_fw_mp2_sh_get_horizontal_size_value(x)    viddec_fw_bitfields_extract( (x)->seq_hdr_item_1,  0, 0xFFF)
+#define viddec_fw_mp2_sh_get_vertical_size_value(x)      viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 12, 0xFFF)
+#define viddec_fw_mp2_sh_get_aspect_ratio_information(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 24, 0xF)
+#define viddec_fw_mp2_sh_get_frame_rate_code(x)          viddec_fw_bitfields_extract( (x)->seq_hdr_item_1, 28, 0xF)
+#define viddec_fw_mp2_sh_set_horizontal_size_value(x, val)    viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val,  0, 0xFFF)
+#define viddec_fw_mp2_sh_set_vertical_size_value(x, val)      viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 12, 0xFFF)
+#define viddec_fw_mp2_sh_set_aspect_ratio_information(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 24, 0xF)
+#define viddec_fw_mp2_sh_set_frame_rate_code(x, val)          viddec_fw_bitfields_insert ( (x)->seq_hdr_item_1, val, 28, 0xF)
+            unsigned int seq_hdr_item_1;
+
+            // Sequence Header Item II (From LSB):
+            //    - bit_rate_value        - 18 bits
+            //    - vbv_buffer_size_value - 10 bits
+            //    - remaining pad bits
+#define viddec_fw_mp2_sh_get_bit_rate_value(x)        viddec_fw_bitfields_extract( (x)->seq_hdr_item_2,  0, 0x3FFFF)
+#define viddec_fw_mp2_sh_get_vbv_buffer_size_value(x) viddec_fw_bitfields_extract( (x)->seq_hdr_item_2, 18, 0x3FF)
+#define viddec_fw_mp2_sh_set_bit_rate_value(x, val)        viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val,  0, 0x3FFFF)
+#define viddec_fw_mp2_sh_set_vbv_buffer_size_value(x, val) viddec_fw_bitfields_insert ( (x)->seq_hdr_item_2, val, 18, 0x3FF)
+            unsigned int seq_hdr_item_2;
+
+            unsigned int pad;
+        } mp2_sh; // mp2 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+        struct
+        {
+            // Sequence Extension Item I (From LSB):
+            //    - profile_and_level_indication - 8 bits
+            //    - progressive_sequence         - 1 bit
+            //    - chroma_format                - 2 bits
+            //    - horizontal_size_extension    - 2 bits
+            //    - vertical_size_extension      - 2 bits
+            //    - bit_rate_extension           - 12 bits
+            //    - remaining pad bits
+#define viddec_fw_mp2_se_get_profile_and_level_indication(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_1,  0, 0xFF)
+#define viddec_fw_mp2_se_get_progressive_sequence(x)         viddec_fw_bitfields_extract( (x)->seq_ext_item_1,  8, 0x1)
+#define viddec_fw_mp2_se_get_chroma_format(x)                viddec_fw_bitfields_extract( (x)->seq_ext_item_1,  9, 0x3)
+#define viddec_fw_mp2_se_get_horizontal_size_extension(x)    viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 11, 0x3)
+#define viddec_fw_mp2_se_get_vertical_size_extension(x)      viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 13, 0x3)
+#define viddec_fw_mp2_se_get_bit_rate_extension(x)           viddec_fw_bitfields_extract( (x)->seq_ext_item_1, 15, 0xFFF)
+#define viddec_fw_mp2_se_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val,  0, 0xFF)
+#define viddec_fw_mp2_se_set_progressive_sequence(x, val)         viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val,  8, 0x1)
+#define viddec_fw_mp2_se_set_chroma_format(x, val)                viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val,  9, 0x3)
+#define viddec_fw_mp2_se_set_horizontal_size_extension(x, val)    viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 11, 0x3)
+#define viddec_fw_mp2_se_set_vertical_size_extension(x, val)      viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 13, 0x3)
+#define viddec_fw_mp2_se_set_bit_rate_extension(x, val)           viddec_fw_bitfields_insert ( (x)->seq_ext_item_1, val, 15, 0xFFF)
+            unsigned int seq_ext_item_1;
+
+            // Sequence Extension Item II (From LSB):
+            //    - vbv_buffer_size_extension - 8 bits
+            //    - frame_rate_extension_n    - 2 bits
+            //    - frame_rate_extension_d    - 5 bits
+            //    - remaining pad bits
+#define viddec_fw_mp2_se_get_vbv_buffer_size_extension(x) viddec_fw_bitfields_extract( (x)->seq_ext_item_2,  0, 0xFF)
+#define viddec_fw_mp2_se_get_frame_rate_extension_n(x)    viddec_fw_bitfields_extract( (x)->seq_ext_item_2,  8, 0x3)
+#define viddec_fw_mp2_se_get_frame_rate_extension_d(x)    viddec_fw_bitfields_extract( (x)->seq_ext_item_2, 10, 0x1F)
+#define viddec_fw_mp2_se_set_vbv_buffer_size_extension(x, val) viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val,  0, 0xFF)
+#define viddec_fw_mp2_se_set_frame_rate_extension_n(x, val)    viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val,  8, 0x3)
+#define viddec_fw_mp2_se_set_frame_rate_extension_d(x, val)    viddec_fw_bitfields_insert ( (x)->seq_ext_item_2, val, 10, 0x1F)
+            unsigned int seq_ext_item_2;
+
+            unsigned int pad;
+        } mp2_se; // mp2 item of type VIDDEC_WORKLOAD_MPEG2_SEQ_EXT
+        struct
+        {
+            // Sequence Display Extension Item I (From LSB):
+            //   - display_horizontal_size - 14 bits
+            //   - display_vertical_size   - 14 bits
+            //   - video_format            - 3 bits
+            //   - color_description       - 1 bit
+#define viddec_fw_mp2_sde_get_display_horizontal_size(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1,  0, 0x3FFF)
+#define viddec_fw_mp2_sde_get_display_vertical_size(x)   viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 14, 0x3FFF)
+#define viddec_fw_mp2_sde_get_video_format(x)            viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 28, 0x7)
+#define viddec_fw_mp2_sde_get_color_description(x)       viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_1, 31, 0x1)
+#define viddec_fw_mp2_sde_set_display_horizontal_size(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val,  0, 0x3FFF)
+#define viddec_fw_mp2_sde_set_display_vertical_size(x, val)   viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 14, 0x3FFF)
+#define viddec_fw_mp2_sde_set_video_format(x, val)            viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 28, 0x7)
+#define viddec_fw_mp2_sde_set_color_description(x, val)       viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_1, val, 31, 0x1)
+            unsigned int seq_disp_ext_item_1;
+
+            // Sequence Display Extension Item II (From LSB):
+            //   - color_primaries - 8 bits
+            //   - transfer_characteristics - 8 bits
+            //   - remaining pad bits
+#define viddec_fw_mp2_sde_get_color_primaries(x)          viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2,  0, 0xFF)
+#define viddec_fw_mp2_sde_get_transfer_characteristics(x) viddec_fw_bitfields_extract( (x)->seq_disp_ext_item_2,  8, 0xFF)
+#define viddec_fw_mp2_sde_set_color_primaries(x, val)          viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val,  0, 0xFF)
+#define viddec_fw_mp2_sde_set_transfer_characteristics(x, val) viddec_fw_bitfields_insert ( (x)->seq_disp_ext_item_2, val,  8, 0xFF)
+            unsigned int seq_disp_ext_item_2;
+
+            unsigned int pad; // unused; pads item payload to 3 words
+        } mp2_sde; // mp2 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+        struct
+        {
+            // Group of Pictures Header Item I (From LSB):
+            //   - closed_gop - 1 bit
+            //   - broken_link - 1 bit
+            //   - remaining pad bits
+#define viddec_fw_mp2_gop_get_closed_gop(x)  viddec_fw_bitfields_extract( (x)->gop_hdr_item_1,  0, 0x1)
+#define viddec_fw_mp2_gop_get_broken_link(x) viddec_fw_bitfields_extract( (x)->gop_hdr_item_1,  1, 0x1)
+#define viddec_fw_mp2_gop_set_closed_gop(x, val)  viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val,  0, 0x1)
+#define viddec_fw_mp2_gop_set_broken_link(x, val) viddec_fw_bitfields_insert ( (x)->gop_hdr_item_1, val,  1, 0x1)
+            unsigned int gop_hdr_item_1;
+
+            unsigned int pad1; // unused; pads item payload to 3 words
+            unsigned int pad2; // unused; pads item payload to 3 words
+        } mp2_gop; // mp2 item of type VIDDEC_WORKLOAD_GOP_INFO
+        struct
+        {
+#define viddec_fw_vc1_get_profile(x) viddec_fw_bitfields_extract((x)->size, 30, 0x3)
+#define viddec_fw_vc1_set_profile(x, val) viddec_fw_bitfields_insert((x)->size, val, 30, 0x3)
+
+#define viddec_fw_vc1_get_level(x) viddec_fw_bitfields_extract((x)->size, 27, 0x7)
+#define viddec_fw_vc1_set_level(x, val) viddec_fw_bitfields_insert((x)->size, val, 27, 0x7)
+
+#define viddec_fw_vc1_get_colordiff_format(x) viddec_fw_bitfields_extract((x)->size, 25, 0x3)
+#define viddec_fw_vc1_set_colordiff_format(x, val) viddec_fw_bitfields_insert((x)->size, val, 25, 0x3)
+
+#define viddec_fw_vc1_get_pulldown(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+#define viddec_fw_vc1_set_pulldown(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+#define viddec_fw_vc1_get_max_coded_width(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+#define viddec_fw_vc1_set_max_coded_width(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+#define viddec_fw_vc1_get_max_coded_height(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+#define viddec_fw_vc1_set_max_coded_height(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+#define viddec_fw_vc1_get_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1F)
+#define viddec_fw_vc1_set_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1F)
+
+#define viddec_fw_vc1_get_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x7)
+#define viddec_fw_vc1_set_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x7)
+
+#define viddec_fw_vc1_get_interlace(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+#define viddec_fw_vc1_set_interlace(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+#define viddec_fw_vc1_get_tfcntrflag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+#define viddec_fw_vc1_set_tfcntrflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+#define viddec_fw_vc1_get_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 2, 0x1)
+#define viddec_fw_vc1_set_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 2, 0x1)
+
+#define viddec_fw_vc1_get_psf(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x1)
+#define viddec_fw_vc1_set_psf(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x1)
+
+#define viddec_fw_vc1_get_display_ext(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+#define viddec_fw_vc1_set_display_ext(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+            uint32_t size;    // packed MSB->LSB: profile:2, level:3, colordiff_format:2, pulldown:1, max_coded_width:12, max_coded_height:12
+            uint32_t flags;   // bits 12..0: bitrtq_postproc:5, frmrtq_postproc:3, interlace:1, tfcntrflag:1, finterpflag:1, psf:1, display_ext:1 (bits 31..13 unused)
+            uint32_t pad;     // unused; pads item payload to 3 words
+        } vc1_sl; // vc1 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+        struct
+        {
+            // This item is populated when display_ext flag is set in the sequence layer
+            // therefore, no need to provide this flag
+#define viddec_fw_vc1_get_disp_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 18, 0x3FFF)
+#define viddec_fw_vc1_set_disp_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 18, 0x3FFF)
+
+#define viddec_fw_vc1_get_disp_vert_size(x) viddec_fw_bitfields_extract((x)->size, 4, 0x3FFF)
+#define viddec_fw_vc1_set_disp_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 4, 0x3FFF)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio_flag(x) viddec_fw_bitfields_extract((x)->size, 3, 0x1)
+#define viddec_fw_vc1_set_disp_aspect_ratio_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 3, 0x1)
+
+#define viddec_fw_vc1_get_disp_color_format_flag(x) viddec_fw_bitfields_extract((x)->size, 2, 0x1)
+#define viddec_fw_vc1_set_disp_color_format_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 2, 0x1)
+
+#define viddec_fw_vc1_get_disp_framerate_flag(x) viddec_fw_bitfields_extract((x)->size, 1, 0x1)
+#define viddec_fw_vc1_set_disp_framerate_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 1, 0x1)
+
+#define viddec_fw_vc1_get_disp_framerateind(x) viddec_fw_bitfields_extract((x)->size, 0, 0x1)
+#define viddec_fw_vc1_set_disp_framerateind(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0x1)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio(x) viddec_fw_bitfields_extract((x)->framerate, 28, 0xF)
+#define viddec_fw_vc1_set_disp_aspect_ratio(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 28, 0xF)
+
+#define viddec_fw_vc1_get_disp_frameratenr(x) viddec_fw_bitfields_extract((x)->framerate, 20, 0xFF)
+#define viddec_fw_vc1_set_disp_frameratenr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 20, 0xFF)
+
+#define viddec_fw_vc1_get_disp_frameratedr(x) viddec_fw_bitfields_extract((x)->framerate, 16, 0xF)
+#define viddec_fw_vc1_set_disp_frameratedr(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 16, 0xF)
+
+#define viddec_fw_vc1_get_disp_framerateexp(x) viddec_fw_bitfields_extract((x)->framerate, 0, 0xFFFF)
+#define viddec_fw_vc1_set_disp_framerateexp(x, val) viddec_fw_bitfields_insert((x)->framerate, val, 0, 0xFFFF)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio_horiz_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 24, 0xFF)
+#define viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 24, 0xFF)
+
+#define viddec_fw_vc1_get_disp_aspect_ratio_vert_size(x) viddec_fw_bitfields_extract((x)->aspectsize, 16, 0xFF)
+#define viddec_fw_vc1_set_disp_aspect_ratio_vert_size(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 16, 0xFF)
+
+#define viddec_fw_vc1_get_disp_color_prim(x) viddec_fw_bitfields_extract((x)->aspectsize, 8, 0xFF)
+#define viddec_fw_vc1_set_disp_color_prim(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 8, 0xFF)
+
+#define viddec_fw_vc1_get_disp_transfer_char(x) viddec_fw_bitfields_extract((x)->aspectsize, 0, 0xFF)
+#define viddec_fw_vc1_set_disp_transfer_char(x, val) viddec_fw_bitfields_insert((x)->aspectsize, val, 0, 0xFF)
+
+            uint32_t size; // packed MSB->LSB: disp_horiz_size:14, disp_vert_size:14, aspect_ratio_flag:1, color_format_flag:1, framerate_flag:1, framerateind:1
+            uint32_t framerate;  // packed MSB->LSB: aspect_ratio:4, frameratenr:8, frameratedr:4, framerateexp:16
+            uint32_t aspectsize; // packed MSB->LSB: aspect_ratio_horiz_size:8, aspect_ratio_vert_size:8, color_prim:8, transfer_char:8
+        } vc1_sl_de; // vc1 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+        struct
+        {
+#define viddec_fw_vc1_get_rcv_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 16, 0xFFFF)
+#define viddec_fw_vc1_set_rcv_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 16, 0xFFFF)
+
+#define viddec_fw_vc1_get_rcv_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFFF)
+#define viddec_fw_vc1_set_rcv_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFFF)
+
+#define viddec_fw_vc1_get_rcv_bitrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 16, 0x1F)
+#define viddec_fw_vc1_set_rcv_bitrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 16, 0x1F)
+
+#define viddec_fw_vc1_get_rcv_frmrtq_postproc(x) viddec_fw_bitfields_extract((x)->flags, 13, 0x7)
+#define viddec_fw_vc1_set_rcv_frmrtq_postproc(x, val) viddec_fw_bitfields_insert((x)->flags, val, 13, 0x7)
+
+#define viddec_fw_vc1_get_rcv_profile(x) viddec_fw_bitfields_extract((x)->flags, 9, 0xF)
+#define viddec_fw_vc1_set_rcv_profile(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0xF)
+
+#define viddec_fw_vc1_get_rcv_level(x) viddec_fw_bitfields_extract((x)->flags, 6, 0x7)
+#define viddec_fw_vc1_set_rcv_level(x, val) viddec_fw_bitfields_insert((x)->flags, val, 6, 0x7)
+
+#define viddec_fw_vc1_get_rcv_cbr(x) viddec_fw_bitfields_extract((x)->flags, 5, 0x1)
+#define viddec_fw_vc1_set_rcv_cbr(x, val) viddec_fw_bitfields_insert((x)->flags, val, 5, 0x1)
+
+#define viddec_fw_vc1_get_rcv_rangered(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x1)
+#define viddec_fw_vc1_set_rcv_rangered(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x1)
+
+#define viddec_fw_vc1_get_rcv_maxbframes(x) viddec_fw_bitfields_extract((x)->flags, 1, 0x7)
+#define viddec_fw_vc1_set_rcv_maxbframes(x, val) viddec_fw_bitfields_insert((x)->flags, val, 1, 0x7)
+
+#define viddec_fw_vc1_get_rcv_finterpflag(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x1)
+#define viddec_fw_vc1_set_rcv_finterpflag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x1)
+
+            uint32_t size;    // horiz_size:16, vert_size:16 (horiz in upper 16 bits, vert in lower 16 bits)
+            uint32_t flags;   // bits 20..0: bitrtq_postproc:5, frmrtq_postproc:3, profile:4, level:3, cbr:1, rangered:1, maxbframes:3, finterpflag:1
+            uint32_t pad;     // unused; pads item payload to 3 words
+        } vc1_sh_struct_a_c; // vc1 item of type VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C
+        struct
+        {
+#define viddec_fw_vc1_get_ep_size_flag(x) viddec_fw_bitfields_extract((x)->size, 24, 0x1)
+#define viddec_fw_vc1_set_ep_size_flag(x, val) viddec_fw_bitfields_insert((x)->size, val, 24, 0x1)
+
+#define viddec_fw_vc1_get_ep_horiz_size(x) viddec_fw_bitfields_extract((x)->size, 12, 0xFFF)
+#define viddec_fw_vc1_set_ep_horiz_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 12, 0xFFF)
+
+#define viddec_fw_vc1_get_ep_vert_size(x) viddec_fw_bitfields_extract((x)->size, 0, 0xFFF)
+#define viddec_fw_vc1_set_ep_vert_size(x, val) viddec_fw_bitfields_insert((x)->size, val, 0, 0xFFF)
+
+#define viddec_fw_vc1_get_ep_broken_link(x) viddec_fw_bitfields_extract((x)->flags, 10, 0x1)
+#define viddec_fw_vc1_set_ep_broken_link(x, val) viddec_fw_bitfields_insert((x)->flags, val, 10, 0x1)
+
+#define viddec_fw_vc1_get_ep_closed_entry(x) viddec_fw_bitfields_extract((x)->flags, 9, 0x1)
+#define viddec_fw_vc1_set_ep_closed_entry(x, val) viddec_fw_bitfields_insert((x)->flags, val, 9, 0x1)
+
+#define viddec_fw_vc1_get_ep_panscan_flag(x) viddec_fw_bitfields_extract((x)->flags, 8, 0x1)
+#define viddec_fw_vc1_set_ep_panscan_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 8, 0x1)
+
+#define viddec_fw_vc1_get_ep_range_mapy_flag(x) viddec_fw_bitfields_extract((x)->flags, 7, 0x1)
+#define viddec_fw_vc1_set_ep_range_mapy_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 7, 0x1)
+
+#define viddec_fw_vc1_get_ep_range_mapy(x) viddec_fw_bitfields_extract((x)->flags, 4, 0x7)
+#define viddec_fw_vc1_set_ep_range_mapy(x, val) viddec_fw_bitfields_insert((x)->flags, val, 4, 0x7)
+
+#define viddec_fw_vc1_get_ep_range_mapuv_flag(x) viddec_fw_bitfields_extract((x)->flags, 3, 0x1)
+#define viddec_fw_vc1_set_ep_range_mapuv_flag(x, val) viddec_fw_bitfields_insert((x)->flags, val, 3, 0x1)
+
+#define viddec_fw_vc1_get_ep_range_mapuv(x) viddec_fw_bitfields_extract((x)->flags, 0, 0x7)
+#define viddec_fw_vc1_set_ep_range_mapuv(x, val) viddec_fw_bitfields_insert((x)->flags, val, 0, 0x7)
+
+            uint32_t size;    // coded_size_flag:1 (bit 24), coded_width:12 (bits 12-23), coded_height:12 (bits 0-11)
+            uint32_t flags;   // bits 10..0: broken_link:1, closed_entry:1, panscan_flag:1, range_mapy_flag:1, range_mapy:3, range_mapuv_flag:1, range_mapuv:3
+            uint32_t pad;     // unused; pads item payload to 3 words
+        } vc1_ep; // vc1 entry-point header; carried as item of type VIDDEC_WORKLOAD_GOP_INFO
+        struct
+        {
+            /*
+              0-7 bits for profile_idc.
+              8-15 bits for level_idc.
+              16-17 bits for chroma_format_idc.
+              18-22 bits for num_ref_frames.
+              23  for gaps_in_frame_num_value_allowed_flag.
+              24 for frame_mbs_only_flag.
+              25 for frame_cropping_flag.
+              26 for vui_parameters_present_flag.
+             */
+#define viddec_fw_h264_sps_get_profile_idc(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 0, 0xFF)
+#define viddec_fw_h264_sps_set_profile_idc(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 0, 0xFF)
+#define viddec_fw_h264_sps_get_level_idc(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 8, 0xFF)
+#define viddec_fw_h264_sps_set_level_idc(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 8, 0xFF)
+#define viddec_fw_h264_sps_get_chroma_format_idc(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 16, 0x3)
+#define viddec_fw_h264_sps_set_chroma_format_idc(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 16, 0x3)
+#define viddec_fw_h264_sps_get_num_ref_frames(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 18, 0x1F)
+#define viddec_fw_h264_sps_set_num_ref_frames(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 18, 0x1F)
+#define viddec_fw_h264_sps_get_gaps_in_frame_num_value_allowed_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 23, 0x1)
+#define viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 23, 0x1)
+#define viddec_fw_h264_sps_get_frame_mbs_only_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 24, 0x1)
+#define viddec_fw_h264_sps_set_frame_mbs_only_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 24, 0x1)
+#define viddec_fw_h264_sps_get_frame_cropping_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 25, 0x1)
+#define viddec_fw_h264_sps_set_frame_cropping_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 25, 0x1)
+#define viddec_fw_h264_sps_get_vui_parameters_present_flag(x)  viddec_fw_bitfields_extract( (x)->sps_messages, 26, 0x1)
+#define viddec_fw_h264_sps_set_vui_parameters_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->sps_messages, val, 26, 0x1)
+            unsigned int sps_messages; // packed per the bit map above
+            unsigned int pic_width_in_mbs_minus1;
+            unsigned int pic_height_in_map_units_minus1;
+        } h264_sps; // h264 item of type VIDDEC_WORKLOAD_SEQUENCE_INFO
+
+        struct h264_witem_sps_mvc_id h264_sps_mvc_id;
+
+        struct
+        {
+#define viddec_fw_h264_cropping_get_left(x)  viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_left(x, val)  viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+            unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+#define viddec_fw_h264_cropping_get_top(x)  viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_cropping_set_top(x, val)  viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_cropping_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+            unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+            unsigned int pad; // unused; pads item payload to 3 words
+        } h264_cropping; // h264 item of type VIDDEC_WORKLOAD_H264_CROPPING
+
+        struct
+        {
+            /* bit 0     : aspect_ratio_info_present_flag
+               bit 1     : video_signal_type_present_flag
+               bit 2     : colour_description_present_flag
+               bit 3     : timing_info_present_flag
+               bit 4     : nal_hrd_parameters_present_flag
+               bit 5     : vcl_hrd_parameters_present_flag
+               bit 6     : fixed_frame_rate_flag
+               bit 7     : pic_struct_present_flag
+               bit 8     : low_delay_hrd_flag
+               bits 9-11 : video_format
+            */
+#define viddec_fw_h264_vui_get_aspect_ratio_info_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 0, 0x1)
+#define viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 0, 0x1)
+#define viddec_fw_h264_vui_get_video_signal_type_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 1, 0x1)
+#define viddec_fw_h264_vui_set_video_signal_type_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 1, 0x1)
+#define viddec_fw_h264_vui_get_colour_description_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 2, 0x1)
+#define viddec_fw_h264_vui_set_colour_description_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 2, 0x1)
+#define viddec_fw_h264_vui_get_timing_info_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 3, 0x1)
+#define viddec_fw_h264_vui_set_timing_info_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 3, 0x1)
+#define viddec_fw_h264_vui_get_nal_hrd_parameters_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 4, 0x1)
+#define viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 4, 0x1)
+#define viddec_fw_h264_vui_get_vcl_hrd_parameters_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 5, 0x1)
+#define viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 5, 0x1)
+#define viddec_fw_h264_vui_get_fixed_frame_rate_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 6, 0x1)
+#define viddec_fw_h264_vui_set_fixed_frame_rate_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 6, 0x1)
+#define viddec_fw_h264_vui_get_pic_struct_present_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 7, 0x1)
+#define viddec_fw_h264_vui_set_pic_struct_present_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 7, 0x1)
+#define viddec_fw_h264_vui_get_low_delay_hrd_flag(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 8, 0x1)
+#define viddec_fw_h264_vui_set_low_delay_hrd_flag(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 8, 0x1)
+#define viddec_fw_h264_vui_get_video_format(x)  viddec_fw_bitfields_extract( (x)->vui_flags_and_format, 9, 0x7)
+#define viddec_fw_h264_vui_set_video_format(x, val)  viddec_fw_bitfields_insert( (x)->vui_flags_and_format, val, 9, 0x7)
+            unsigned int vui_flags_and_format;
+
+#define viddec_fw_h264_vui_get_aspect_ratio_idc(x)  viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 0, 0xFF)
+#define viddec_fw_h264_vui_set_aspect_ratio_idc(x, val)  viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 0, 0xFF)
+#define viddec_fw_h264_vui_get_colour_primaries(x)  viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 8, 0xFF)
+#define viddec_fw_h264_vui_set_colour_primaries(x, val)  viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 8, 0xFF)
+#define viddec_fw_h264_vui_get_transfer_characteristics(x)  viddec_fw_bitfields_extract( (x)->aspc_color_transfer, 16, 0xFF)
+#define viddec_fw_h264_vui_set_transfer_characteristics(x, val)  viddec_fw_bitfields_insert( (x)->aspc_color_transfer, val, 16, 0xFF)
+            /* lower 8 bits for aspect_ratio, next 8bits for color primaries and next 8 bits for transfer characteristics */
+            unsigned int aspc_color_transfer;
+
+#define viddec_fw_h264_vui_get_sar_width(x)  viddec_fw_bitfields_extract( (x)->sar_width_height, 16, 0xFFFF)
+#define viddec_fw_h264_vui_get_sar_height(x) viddec_fw_bitfields_extract( (x)->sar_width_height, 0, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_width(x, val)  viddec_fw_bitfields_insert( (x)->sar_width_height, val, 16, 0xFFFF)
+#define viddec_fw_h264_vui_set_sar_height(x, val) viddec_fw_bitfields_insert( (x)->sar_width_height, val, 0, 0xFFFF)
+            unsigned int sar_width_height; /* Lower 16 for height upper 16 for width */
+        } h264_vui; // h264 item of type VIDDEC_WORKLOAD_DISPLAY_INFO
+        struct
+        {
+#define viddec_fw_h264_vui_get_num_units_in_tick_flag(x)  viddec_fw_bitfields_extract( (x)->num_units_in_tick, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_num_units_in_tick_flag(x, val)  viddec_fw_bitfields_insert( (x)->num_units_in_tick, val, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_get_time_scale_flag(x)  viddec_fw_bitfields_extract( (x)->time_scale, 0, 0xFFFFFFFF)
+#define viddec_fw_h264_vui_set_time_scale_flag(x, val)  viddec_fw_bitfields_insert( (x)->time_scale, val, 0, 0xFFFFFFFF)
+            unsigned int num_units_in_tick; // full 32-bit value; the "_flag" suffix on the accessors above is a misnomer (mask is 0xFFFFFFFF)
+            unsigned int time_scale;        // full 32-bit value
+            unsigned int pad1;              // unused; pads item payload to 3 words
+        } h264_vui_time_info; // VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO
+        struct
+        {
+            unsigned int pic_struct; /* 4 bit length */
+            unsigned int pad1;       /* unused; pads item payload to 3 words */
+            unsigned int pad2;       /* unused; pads item payload to 3 words */
+        } h264_sei_pic_timing; // h264 item of type VIDDEC_WORKLOAD_SEI_PIC_TIMING
+        struct
+        {
+            unsigned int pan_scan_rect_id;
+
+#define viddec_fw_h264_sei_pan_scan_get_cancel_flag(x)  viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_get_cnt_minus1(x) viddec_fw_bitfields_extract( (x)->pan_scan_cancel_and_cnt, 1, 0x3)
+#define viddec_fw_h264_sei_pan_scan_set_cancel_flag(x, val)  viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 0, 0x1)
+#define viddec_fw_h264_sei_pan_scan_set_cnt_minus1(x, val) viddec_fw_bitfields_insert( (x)->pan_scan_cancel_and_cnt, val, 1, 0x3)
+            unsigned int pan_scan_cancel_and_cnt; /* bit 0: cancel_flag, bits 1-2: cnt_minus1 */
+            unsigned int pan_scan_rect_repetition_period;
+        } h264_sei_pan_scan; // h264 item of type VIDDEC_WORKLOAD_H264_PAN_SCAN
+
+        struct
+        {
+
+#define viddec_fw_h264_pan_scan_get_left(x)  viddec_fw_bitfields_extract( (x)->left_right, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_right(x) viddec_fw_bitfields_extract( (x)->left_right, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_left(x, val)  viddec_fw_bitfields_insert( (x)->left_right, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_right(x, val) viddec_fw_bitfields_insert( (x)->left_right, val, 0, 0xFFFF)
+            unsigned int left_right; /* Left in upper 16 bits and right in Lower 16 bits */
+
+#define viddec_fw_h264_pan_scan_get_top(x)  viddec_fw_bitfields_extract( (x)->top_bottom, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_get_bottom(x) viddec_fw_bitfields_extract( (x)->top_bottom, 0, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_top(x, val)  viddec_fw_bitfields_insert( (x)->top_bottom, val, 16, 0xFFFF)
+#define viddec_fw_h264_pan_scan_set_bottom(x, val) viddec_fw_bitfields_insert( (x)->top_bottom, val, 0, 0xFFFF)
+            unsigned int top_bottom; /* top in upper 16 bits and bottom in lower 16 bits */
+
+            unsigned int pad; // unused; pads item payload to 3 words
+        } h264_pan_scan_rect; // h264 item of type VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT
+        struct
+        {
+            unsigned int recovery_frame_cnt;
+#define viddec_fw_h264_h264_sei_recovery_get_exact_match_flag(x)  viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_get_broken_link_flag(x) viddec_fw_bitfields_extract( (x)->broken_and_exctmatch_flags, 1, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(x, val)  viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 0, 0x1)
+#define viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(x, val) viddec_fw_bitfields_insert( (x)->broken_and_exctmatch_flags, val, 1, 0x1)
+            unsigned int broken_and_exctmatch_flags; /* bit 0: exact_match_flag, bit 1: broken_link_flag */
+
+            unsigned int changing_slice_group_idc; /* 2-bit changing_slice_group_idc value */
+
+        } h264_sei_recovery_point; // h264 item of type VIDDEC_WORKLOAD_SEI_RECOVERY_POINT
+
+
+        struct
+        {
+            // Visual Sequence (From LSB):
+            //   - profile_and_level_indication - 8 bits
+#define viddec_fw_mp4_vs_get_profile_and_level_indication(x)      viddec_fw_bitfields_extract( (x)->vs_item,  0, 0xFF)
+#define viddec_fw_mp4_vs_set_profile_and_level_indication(x, val) viddec_fw_bitfields_insert ( (x)->vs_item, val,  0, 0xFF)
+            unsigned int vs_item;
+
+            // Visual Object - video_signal_type
+            //   - video_signal_type - 1b
+            //   - video_format - 3b
+            //   - video_range - 1b
+            //   - colour_description - 1b
+#define viddec_fw_mp4_vo_get_colour_description(x)      viddec_fw_bitfields_extract( (x)->video_signal_type, 5, 0x1)
+#define viddec_fw_mp4_vo_set_colour_description(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 5, 0x1)
+#define viddec_fw_mp4_vo_get_video_range(x)      viddec_fw_bitfields_extract( (x)->video_signal_type, 4, 0x1)
+#define viddec_fw_mp4_vo_set_video_range(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val, 4, 0x1)
+#define viddec_fw_mp4_vo_get_video_format(x)      viddec_fw_bitfields_extract( (x)->video_signal_type,  1, 0x7)
+#define viddec_fw_mp4_vo_set_video_format(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val,  1, 0x7)
+#define viddec_fw_mp4_vo_get_video_signal_type(x)      viddec_fw_bitfields_extract( (x)->video_signal_type,  0, 0x1)
+#define viddec_fw_mp4_vo_set_video_signal_type(x, val) viddec_fw_bitfields_insert ( (x)->video_signal_type, val,  0, 0x1)
+            unsigned int video_signal_type;
+
+            // Visual Object - colour description (color_primaries / transfer_characteristics)
+            //   - color_primaries - 8 bits
+            //   - transfer_characteristics - 8 bits
+#define viddec_fw_mp4_vo_get_transfer_char(x)      viddec_fw_bitfields_extract( (x)->color_desc,  8, 0xFF)
+#define viddec_fw_mp4_vo_set_transfer_char(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val,  8, 0xFF)
+#define viddec_fw_mp4_vo_get_color_primaries(x)      viddec_fw_bitfields_extract( (x)->color_desc,  0, 0xFF)
+#define viddec_fw_mp4_vo_set_color_primaries(x, val) viddec_fw_bitfields_insert ( (x)->color_desc, val,  0, 0xFF)
+            unsigned int color_desc;
+        } mp4_vs_vo; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ
+
+        struct
+        {
+            // Video Object Layer(From LSB):
+            //   - aspect_ratio_info - 4b
+            //   - par_width - 8b
+            //   - par_height - 8b
+            //   - vol_control_param - 1b
+            //   - chroma_format - 2b
+            //   - interlaced - 1b
+            //   - fixed_vop_rate - 1b
+#define viddec_fw_mp4_vol_get_fixed_vop_rate(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 24, 0x1)
+#define viddec_fw_mp4_vol_set_fixed_vop_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 24, 0x1)
+#define viddec_fw_mp4_vol_get_interlaced(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 23, 0x1)
+#define viddec_fw_mp4_vol_set_interlaced(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 23, 0x1)
+#define viddec_fw_mp4_vol_get_chroma_format(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 21, 0x3)
+#define viddec_fw_mp4_vol_set_chroma_format(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 21, 0x3)
+#define viddec_fw_mp4_vol_get_control_param(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 20, 0x1)
+#define viddec_fw_mp4_vol_set_control_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 20, 0x1)
+#define viddec_fw_mp4_vol_get_par_height(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 12, 0xFF)
+#define viddec_fw_mp4_vol_set_par_height(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 12, 0xFF)
+#define viddec_fw_mp4_vol_get_par_width(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 4, 0xFF)
+#define viddec_fw_mp4_vol_set_par_width(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 4, 0xFF)
+#define viddec_fw_mp4_vol_get_aspect_ratio_info(x)      viddec_fw_bitfields_extract( (x)->vol_aspect_ratio, 0, 0xF)
+#define viddec_fw_mp4_vol_set_aspect_ratio_info(x, val) viddec_fw_bitfields_insert ( (x)->vol_aspect_ratio, val, 0, 0xF)
+            unsigned int vol_aspect_ratio;
+
+            // Video Object Layer(From LSB):
+            //   - vbv_parameters - 1b
+            //   - bit_rate - 30b
+#define viddec_fw_mp4_vol_get_bit_rate(x)      viddec_fw_bitfields_extract( (x)->vol_bit_rate, 1, 0x3FFFFFFF)
+#define viddec_fw_mp4_vol_set_bit_rate(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 1, 0x3FFFFFFF)
+#define viddec_fw_mp4_vol_get_vbv_param(x)      viddec_fw_bitfields_extract( (x)->vol_bit_rate, 0, 0x1)
+#define viddec_fw_mp4_vol_set_vbv_param(x, val) viddec_fw_bitfields_insert ( (x)->vol_bit_rate, val, 0, 0x1)
+            unsigned int vol_bit_rate;
+
+            // Video Object Layer(From LSB):
+            //   - fixed_vop_time_increment - 16b
+            //   - vop_time_increment_resolution - 16b
+#define viddec_fw_mp4_vol_get_vop_time_increment_resolution(x) viddec_fw_bitfields_extract((x)->vol_frame_rate, 16, 0xFFFF)
+#define viddec_fw_mp4_vol_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 16, 0xFFFF)
+#define viddec_fw_mp4_vol_get_fixed_vop_time_increment(x)      viddec_fw_bitfields_extract((x)->vol_frame_rate, 1, 0xFFFF)
+#define viddec_fw_mp4_vol_set_fixed_vop_time_increment(x, val) viddec_fw_bitfields_insert((x)->vol_frame_rate, val, 1, 0xFFFF)
+            unsigned int vol_frame_rate; /* NOTE(review): fixed_vop_time_increment accessors use bit offset 1 (bits 1..16), which overlaps vop_time_increment_resolution at bit 16 -- confirm intended packing before changing either macro */
+        } mp4_vol; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ
+
+        struct
+        {
+            // Group of Video Object Planes(From LSB):
+            //   - time_code - 18b
+            //   - closed_gov - 1b
+            //   - broken_link - 1b
+#define viddec_fw_mp4_gvop_get_broken_link(x)      viddec_fw_bitfields_extract((x)->gvop_info, 19, 0x1)
+#define viddec_fw_mp4_gvop_set_broken_link(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 19, 0x1)
+#define viddec_fw_mp4_gvop_get_closed_gov(x)      viddec_fw_bitfields_extract((x)->gvop_info, 18, 0x1)
+#define viddec_fw_mp4_gvop_set_closed_gov(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 18, 0x1)
+#define viddec_fw_mp4_gvop_get_time_code(x)      viddec_fw_bitfields_extract((x)->gvop_info, 0, 0x3FFFF)
+#define viddec_fw_mp4_gvop_set_time_code(x, val) viddec_fw_bitfields_insert((x)->gvop_info, val, 0, 0x3FFFF)
+            unsigned int gvop_info;
+
+            unsigned int pad1; // unused; pads item payload to 3 words
+            unsigned int pad2; // unused; pads item payload to 3 words
+        } mp4_gvop; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ
+
+        struct
+        {
+            // Video Plane with Short Header (From LSB):
+            //   - source_format - 3b
+#define viddec_fw_mp4_vpsh_get_source_format(x)      viddec_fw_bitfields_extract((x)->info, 0, 0x7)
+#define viddec_fw_mp4_vpsh_set_source_format(x, val) viddec_fw_bitfields_insert((x)->info, val, 0, 0x7)
+            unsigned int info;
+
+            unsigned int pad1; // unused; pads item payload to 3 words
+            unsigned int pad2; // unused; pads item payload to 3 words
+        } mp4_vpsh; // mp4 item of type VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT
+
+        unsigned int    vwi_payload[3];
+    };
+} viddec_workload_item_t;
+
+
+
+#endif /* VIDDEC_ITEM_TYPES_H */
diff --git a/mixvbp/include/viddec_fw_parser_host.h b/mixvbp/include/viddec_fw_parser_host.h
new file mode 100644
index 0000000..550cf0a
--- /dev/null
+++ b/mixvbp/include/viddec_fw_parser_host.h
@@ -0,0 +1,237 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+
+#ifndef VIDDEC_FW_PARSER_HOST_H
+#define VIDDEC_FW_PARSER_HOST_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+#include "viddec_fw_common_defs.h"
+
+    /** @weakgroup viddec Fw Parser interface Functions */
+    /** @ingroup viddec_fw_parser */
+    /*@{*/
+
+    /**
+       This function returns the size required for loading fw.
+       @retval  size : Required size.
+    */
+    uint32_t viddec_fw_parser_query_fwsize(void);
+
+    /**
+       This function loads Parser Firmware and initialises necessary state information. This is a synchronous message to FW.
+       @param[in] phys                : Physical address on where firmware should be loaded.
+       @param[in] len                 : Length of data allocated at phys.
+       @retval VIDDEC_FW_SUCCESS      : Successfully loaded firmware.
+       @retval VIDDEC_FW_FAILURE      : Failed to communicate with firmware.
+       @retval VIDDEC_FW_NORESOURCES  : Failed to allocate resources for Loading firmware.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_loadfw(uint32_t phys, uint32_t len);
+
+    /**
+       This function returns the size required for opening a stream. This is a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want information about.
+       @param[out] num_wklds          : Number of wklds required for initialisation.
+       @param[out] size               : Size of memory required for opening a stream.
+    */
+    void viddec_fw_parser_query_streamsize(uint32_t codec_type, uint32_t *num_wklds, uint32_t *size);
+
+    /**
+       This function opens the requested codec. This is a synchronous message to FW.
+       @param[in] codec_type          : Type of codec that we want to open.
+       @param[in] phys                : Physical address of allocated memory for this codec.
+       @param[in] priority            : Priority of stream. 1 for realtime and 0 for background.
+       @param[out] strm_handle        : Handle of the opened stream.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Opened the stream.
+       @retval VIDDEC_FW_FAILURE      : Failed to Open a stream.
+       @retval VIDDEC_FW_NORESOURCES  : Failed to Open a stream as we are out of resources.
+    */
+    uint32_t viddec_fw_parser_openstream(uint32_t codec_type, uint32_t *strm_handle, uint32_t phys, uint32_t priority);
+
+    /**
+       This function closes a stream. This is a synchronous message to FW.
+       For the close stream to be effective, host has to do flush with discard first and then close the stream.
+       @param[in] strm_handle        : Handle of the stream to close.
+    */
+    void viddec_fw_parser_closestream(uint32_t strm_handle);
+
+    /**
+       This function flushes the current stream. This is a synchronous message to FW.
+       Before calling this function the host has to make sure the output queue of the firmware
+       is empty. After this function is executed the FW will read all entries in input
+       es buffer queue into a free or partial workload and push it into output queue.
+       After this operation the host has to read all entries in output queue again to
+       finish the flush operation.
+       @param[in] flush_type          : Type of flush we want to perform.ex:flush and discard.
+       @param[in]  strm_handle        : Handle of the stream we want to flush.
+       @retval VIDDEC_FW_SUCCESS      : Successfully flushed the stream.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+       @retval VIDDEC_FW_NEED_FREE_WKLD  : Failed to flush since a free wkld was not available.
+    */
+    uint32_t viddec_fw_parser_flushstream(uint32_t strm_handle, uint32_t flush_type);
+
+    /**
+       This function sends an input es buffer.
+       @param[in] strm_handle         : The handle of stream that we want to send es buffer to.
+       @param[in] message             : The es buffer we want to send.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_FULL    : Port to fw full, unsuccessful in sending message.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_send(uint32_t strm_handle, ipc_msg_data *message);
+
+    /**
+       This function gets the next processed workload. The host is required to add free workloads
+       to keep the parser busy. The FW will stall when it doesn't have enough workloads(2) to continue.
+       @param[in] strm_handle         : The handle of stream that we want to read workload from.
+       @param[out] message            : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_EMPTY   : Workload port is empty, unsuccessful in reading wkld.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_recv(uint32_t strm_handle, ipc_msg_data *message);
+
+    /**
+       This function adds a free workload to current stream.
+       @param[in] strm_handle         : The handle of stream that we want to write workload to.
+       @param[out] message            : The workload descriptor.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Sent the message.
+       @retval VIDDEC_FW_PORT_FULL    : Workload port is full, unsuccessful in writing wkld.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_addwkld(uint32_t strm_handle, ipc_msg_data *message);
+
+    /**
+       This function enables or disables Interrupts for a stream. By default the FW will always enable interrupts.
+       The driver can disable/enable Interrupts if it needs for this particular stream.
+
+       @param[in] strm_handle         : The handle of stream that we want to get mask from
+       @param[in] mask                : This is read as boolean variable, true to enable, false to disable.
+       @retval VIDDEC_FW_SUCCESS      : Successfully set mask.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_set_interruptmask(uint32_t strm_handle, uint32_t mask);
+    /**
+       This function gets the interrupt status for current stream.
+       When the host gets Interrupted since its a global interrupt it's expected that host will look at all active streams,
+       by calling this function. The status is what the FW thinks the current state of stream is. The status information that
+       FW provides is complete information on all possible events that are defined. The host should only access this information
+       in its ISR at which state FW doesn't modify this information.
+
+       @param[in] strm_handle         : The handle of stream that we want to get mask from
+       @param[out] status             : The status of the stream based on viddec_fw_parser_int_status_t enum.
+       @retval VIDDEC_FW_SUCCESS      : Successfully in reading status.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_getstatus(uint32_t strm_handle, uint32_t *status);
+
+    /**
+       This function allows to set stream attributes that are supported.
+       @param[in] strm_handle         : The handle of stream that we want to set attribute on.
+       @param[in] type                : The type of attribute we want to set, this should be one of items in viddec_fw_stream_attributes_t.
+       @param[in] value               : The value of the type that we want to set.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Set the attribute.
+       @retval VIDDEC_FW_INVALID_PARAM: The input parameters are not valid.
+    */
+    uint32_t viddec_fw_parser_set_stream_attributes(uint32_t strm_handle, uint32_t type, uint32_t value);
+
+    /**
+       This function allows to get current status of all the parser queues. If the current stream is active we return
+       number of input messages that can be written to input queue, no of messages in output queue and number of
+       free available workloads the stream has.
+       Normally this is called when Host receives an interrupt from parser, In which case before releasing the INT
+       Host will try its best to keep the FW busy. We always get a interrupt if we passed the watermark on input or
+       a workload was pushed into output and INT line is free. If host holds onto INT when firmware tries to send an INT
+       FW would send the Interrupt after host releases INT. Since we have EDGE triggered interrupts we cannot guarantee
+       one interrupt per frame, ex: If three frames are generated and after the first frame FW was able to provide an INT
+       to host, but host held on to INT while the FW finished the next two frames, after host releases the INT the FW will
+       give only one INT and host should try to empty output queue.
+       @param[in] strm_handle         : The handle of stream that we want to get status of queues.
+       @param[out] status             : The status of each queue gets updated in here.
+       @retval VIDDEC_FW_SUCCESS      : Successfully Got the status information.
+       @retval VIDDEC_FW_INVALID_PARAM: Invalid parameter in this case an inactive stream.
+    */
+    uint32_t viddec_fw_parser_get_queue_status(uint32_t strm_handle, viddec_fw_q_status_t *status);
+
+    /**
+       This function unloads Parser Firmware and free's the resources allocated in Load fw.
+       If this function is called before load fw it will crash with a segmentation fault.
+    */
+    void viddec_fw_parser_deinit(void);
+
+    /**
+       This function gets the major and minor revision numbers of the loaded firmware.
+       @param[out] major              : The major revision number.
+       @param[out] minor              : The minor revision number.
+       @param[out] build              : The Internal Build number.
+    */
+    void viddec_fw_parser_get_version_number(unsigned int *major, unsigned int *minor, unsigned int *build);
+
+    /**
+       This function clears the global interrupt. This is the last thing host calls before exiting ISR.
+    */
+    void viddec_fw_parser_clear_global_interrupt(void);
+
+    /*@}*/
+#ifdef __cplusplus
+}
+#endif
+
+#endif//#ifndef VIDDEC_FW_PARSER_HOST_H
diff --git a/mixvbp/include/viddec_fw_workload.h b/mixvbp/include/viddec_fw_workload.h
new file mode 100644
index 0000000..3b86270
--- /dev/null
+++ b/mixvbp/include/viddec_fw_workload.h
@@ -0,0 +1,152 @@
+/*
+    This file is provided under a dual BSD/GPLv2 license.  When using or
+    redistributing this file, you may do so under either license.
+
+    GPL LICENSE SUMMARY
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of version 2 of the GNU General Public License as
+    published by the Free Software Foundation.
+
+    This program is distributed in the hope that it will be useful, but
+    WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+    General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
+    The full GNU General Public License is included in this distribution
+    in the file called LICENSE.GPL.
+
+    Contact Information:
+
+    BSD LICENSE
+
+    Copyright(c) 2007-2009 Intel Corporation. All rights reserved.
+    All rights reserved.
+
+    Redistribution and use in source and binary forms, with or without
+    modification, are permitted provided that the following conditions
+    are met:
+
+    * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in
+    the documentation and/or other materials provided with the
+    distribution.
+    * Neither the name of Intel Corporation nor the names of its
+    contributors may be used to endorse or promote products derived
+    from this software without specific prior written permission.
+
+    THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+    "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+    LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+    A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+    OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+    SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+    LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+    DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+    THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+    (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+    OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+*/
+#ifndef VIDDEC_FW_WORKLOAD_H
+#define VIDDEC_FW_WORKLOAD_H
+
+#include <stdint.h>
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_frame_attr.h"
+#include "viddec_fw_common_defs.h"
+
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_FRAME (1 << 0)
+#define VIDDEC_WORKLOAD_FLAGS_ES_START_SLICE (1 << 1)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_SLICE   (1 << 2)
+#define VIDDEC_WORKLOAD_FLAGS_ES_END_FRAME   (1 << 3)
+
+#define VIDDEC_FRAME_REFERENCE_IS_VALID   (0x1<<1)
+// PIP Output Frame request bits
+#define BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE  24
+#define BMSK_VIDDEC_FRAME_REFERENCE_PIP_MODE  (0x3<<BLSB_VIDDEC_FRAME_REFERENCE_PIP_MODE)
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_NORMAL     0x0
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_HALF     0x1
+#define VIDDEC_FRAME_REFERENCE_PIP_MODE_W_QUARTER  0x2
+
+/** Frame reference information to pass to video decoder  when performing a workload (frame decode)  */
+typedef struct viddec_frame_reference
+{
+    signed int   driver_frame_id;
+    unsigned int luma_phys_addr;
+    unsigned int chroma_phys_addr;
+    int internal_id; /* Used by workload manager only */
+} viddec_frame_reference_t;
+
+#define WORKLOAD_REFERENCE_FRAME (1 << 16)
+#define WORKLOAD_SKIPPED_FRAME   (1 << 17)
+/**
+Bitmask to indicate that this workload has range adjustment and needs a range_adjusted_out buffer for successful decode.
+Will be used for VC1 only.
+*/
+#define WORKLOAD_FLAGS_RA_FRAME   (1 << 21)
+#define WORKLOAD_REFERENCE_FRAME_BMASK 0x000000ff
+
+/** This structure contains all the information required  to fully decode one frame of data  */
+/**
+    num_error_mb: This field is populated at the output of the decoder.
+                  Currently, its valid only for MPEG2.
+                  For other codecs, it defaults to 0.
+
+    range_adjusted_out:	Frame buffer needed to store range adjusted frames for VC1 only.
+                        Range adjustment in VC1 requires that the luma/chroma values in the decoded frame be modified
+                        before the frame can be displayed. In this case, we need a new frame buffer to store the adjusted values.
+                        The parser will indicate this requirement by setting the WORKLOAD_FLAGS_RA_FRAME bit in the
+                        is_reference_frame of the workload. The decoder expects this field to be valid when range adjustment
+                        is indicated and populates this frame buffer along with frame_out.
+
+    Expectation from user:
+                        Before feeding workload to the decoder, do the following:
+                           If pip is indicated/needed,
+                              provide the pip_out buffer
+                           If range adjustment is indicated (WORKLOAD_FLAGS_RA_FRAME bit in is_reference_frame is set),
+                              provide range_adjusted_out buffer
+                           Provide frame_out buffer.
+
+                        After workload is returned from the decoder, do the following:
+                           If pip is indicated,
+                              display the pip_out buffer
+                           Else If range adjustment is indicated,
+                              display range_adjusted_out buffer
+                           Else
+                              display frame_out buffer.
+*/
+typedef struct viddec_workload
+{
+    enum viddec_stream_format codec;
+    signed int                is_reference_frame;
+    unsigned int              result;
+    unsigned int              time;
+    unsigned int              num_items;/* number of viddec_workload_item_t in current workload */
+    unsigned int              num_error_mb; /* Number of error macroblocks in the current picture. */
+    viddec_frame_attributes_t attrs;
+
+    viddec_frame_reference_t  frame_out;   /* output frame */
+    viddec_frame_reference_t  range_adjusted_out;   /* for VC1 only */
+    viddec_frame_reference_t  pip_out;     /* PIP Buffer */
+
+    /* Alignment is needed because the packing different between host and vSparc */
+    __attribute__ ((aligned (16))) viddec_workload_item_t   item[1];
+
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+    /* This structure is ALLOC_EXTENDED with workload_items   */
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+    /* ------------------------------------------------------ */
+} viddec_workload_t;
+
+#endif /* VIDDEC_WORKLOAD_H */
diff --git a/mixvbp/vbp_manager/Android.mk b/mixvbp/vbp_manager/Android.mk
new file mode 100755
index 0000000..e5f722d
--- /dev/null
+++ b/mixvbp/vbp_manager/Android.mk
@@ -0,0 +1,62 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+#MIXVBP_LOG_ENABLE := true
+
+LOCAL_SRC_FILES :=			\
+	vbp_h264_parser.c		\
+	vbp_vc1_parser.c		\
+	vbp_loader.c			\
+	vbp_mp42_parser.c		\
+	vbp_utils.c			\
+	viddec_parse_sc.c		\
+	viddec_pm_parser_ops.c		\
+	viddec_pm_utils_bstream.c       \
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES +=			\
+	$(LOCAL_PATH)/include		\
+	$(MIXVBP_DIR)/include		      \
+	$(MIXVBP_DIR)/vbp_plugin/h264/include \
+	$(MIXVBP_DIR)/vbp_plugin/mp2/include  \
+	$(MIXVBP_DIR)/vbp_plugin/mp4/include  \
+	$(MIXVBP_DIR)/vbp_plugin/vc1/include  \
+	$(MIXVBP_DIR)/vbp_plugin/vc1/         \
+	$(MIXVBP_DIR)/vbp_plugin/mp4/         \
+	$(TARGET_OUT_HEADERS)/libva
+
+LOCAL_COPY_HEADERS_TO := libmixvbp
+
+LOCAL_COPY_HEADERS :=	\
+	vbp_loader.h
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp
+
+LOCAL_SHARED_LIBRARIES :=		\
+	libdl				\
+	libcutils
+
+ifeq ($(strip $(MIXVBP_LOG_ENABLE)),true)
+LOCAL_CFLAGS += -DVBP_TRACE
+LOCAL_SHARED_LIBRARIES += liblog
+endif
+
+ifeq ($(USE_HW_VP8),true)
+LOCAL_SRC_FILES += vbp_vp8_parser.c
+LOCAL_C_INCLUDES += $(MIXVBP_DIR)/vbp_plugin/vp8/include
+LOCAL_CFLAGS += -DUSE_HW_VP8
+endif
+
+PLATFORM_SUPPORT_AVC_SHORT_FORMAT := \
+    baytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),)
+LOCAL_CFLAGS += -DUSE_AVC_SHORT_FORMAT
+LOCAL_C_INCLUDES += $(LOCAL_PATH)/secvideo/baytrail/
+LOCAL_SRC_FILES += secvideo/baytrail/vbp_h264secure_parser.c
+endif
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/mixvbp/vbp_manager/include/vbp_common.h b/mixvbp/vbp_manager/include/vbp_common.h
new file mode 100755
index 0000000..ee3d796
--- /dev/null
+++ b/mixvbp/vbp_manager/include/vbp_common.h
@@ -0,0 +1,9 @@
+#ifndef VBP_COMMON
+#define VBP_COMMON
+
+#define SWAP_BYTE(x,y,z)   (( ( (x) >> ((y) << 3))& 0xFF)  << ((z) << 3))
+#define SWAP_WORD(x)      ( SWAP_BYTE((x),0,3) | SWAP_BYTE((x),1,2) |SWAP_BYTE((x),2,1) |SWAP_BYTE((x),3,0))
+
+#define DEB
+
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_parser_ops.h b/mixvbp/vbp_manager/include/viddec_parser_ops.h
new file mode 100755
index 0000000..b7e9984
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_parser_ops.h
@@ -0,0 +1,121 @@
+#ifndef VIDDEC_PARSER_OPS_H
+#define VIDDEC_PARSER_OPS_H
+
+#include "viddec_fw_workload.h"
+#include <stdint.h>
+
+#define VIDDEC_PARSE_INVALID_POS 0xFFFFFFFF
+
+typedef enum
+{
+    VIDDEC_PARSE_EOS = 0x0FFF, /* Dummy start code to force EOS */
+    VIDDEC_PARSE_DISCONTINUITY,  /* Dummy start code to force completion and flush */
+} viddec_parser_inband_messages_t;
+
+typedef struct
+{
+    uint32_t context_size;
+    uint32_t persist_size;
+} viddec_parser_memory_sizes_t;
+
+typedef    void  (*fn_init)(void *ctxt, uint32_t *persist, uint32_t preserve);
+typedef    uint32_t (*fn_parse_sc) (void *ctxt, void *pcxt, void *sc_state);
+typedef    uint32_t (*fn_parse_syntax) (void *parent, void *ctxt);
+typedef    void (*fn_get_cxt_size) (viddec_parser_memory_sizes_t *size);
+typedef    uint32_t (*fn_is_wkld_done)(void *parent, void *ctxt, uint32_t next_sc, uint32_t *codec_specific_errors);
+typedef    uint32_t (*fn_is_frame_start)(void *ctxt);
+typedef    uint32_t (*fn_gen_contrib_tags)(void *parent, uint32_t ignore_partial);
+typedef    uint32_t (*fn_gen_assoc_tags)(void *parent);
+typedef    void (*fn_flush_parser) (void *parent, void *ctxt);
+#ifdef USE_AVC_SHORT_FORMAT
+typedef    uint32_t (*fn_update_data)(void *parent, void *data, uint32_t size);
+#endif
+
+
+typedef struct
+{
+    fn_init init;
+    fn_parse_sc parse_sc;
+    fn_parse_syntax parse_syntax;
+    fn_get_cxt_size get_cxt_size;
+    fn_is_wkld_done is_wkld_done;
+    fn_is_frame_start is_frame_start;
+    fn_gen_contrib_tags gen_contrib_tags;
+    fn_gen_assoc_tags gen_assoc_tags;
+    fn_flush_parser flush;
+#ifdef USE_AVC_SHORT_FORMAT
+    fn_update_data update_data;
+#endif
+} viddec_parser_ops_t;
+
+
+typedef enum
+{
+    VIDDEC_PARSE_ERROR = 0xF0,
+    VIDDEC_PARSE_SUCESS = 0xF1,
+    VIDDEC_PARSE_FRMDONE = 0xF2,
+} viddec_parser_error_t;
+
+/*
+ *
+ *Functions used by Parsers
+ *
+ */
+
+/* This function returns the requested number of bits(<=32) and increments au byte position.
+ */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function returns requested number of bits(<=32) without incrementing au byte position
+ */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+/* This function skips requested number of bits(<=32) by incrementing au byte position.
+ */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits);
+
+/* This function appends a work item to current/next workload.
+ */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next);
+
+/* This function gets current byte and bit positions and information on whether an emulation byte is present after
+current byte.
+ */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+/* This function appends Pixel tag to current work load starting from current position to end of au unit.
+ */
+int32_t viddec_pm_append_pixeldata(void *parent);
+
+/* This function appends Pixel tag to next work load starting from current position to end of au unit.
+ */
+int32_t viddec_pm_append_pixeldata_next(void *parent);
+
+/* This function provides the workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_header(void *parent);
+
+/* This function provides the next workload header for parsers to fill in attribute values
+ */
+viddec_workload_t* viddec_pm_get_next_header(void *parent);
+
+/* Returns the current byte value where offset is on */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte);
+
+/* Tells us if there is more data that need to parse */
+int32_t viddec_pm_is_nomoredata(void *parent);
+
+/* This function appends misc tag to work load starting from start position to end position of au unit */
+int32_t viddec_pm_append_misc_tags(void *parent, uint32_t start, uint32_t end, viddec_workload_item_t *wi, uint32_t using_next);
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error);
+
+void viddec_pm_set_late_frame_detect(void *parent);
+
+static inline void viddec_fw_reset_workload_item(viddec_workload_item_t *wi)
+{
+    wi->vwi_payload[0] = wi->vwi_payload[1] = wi->vwi_payload[2] = 0;
+}
+
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi);
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_pm.h b/mixvbp/vbp_manager/include/viddec_pm.h
new file mode 100755
index 0000000..45b884b
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm.h
@@ -0,0 +1,93 @@
+#ifndef VIDDEC_PM_H
+#define VIDDEC_PM_H
+
+#include <stdint.h>
+#include "viddec_pm_utils_bstream.h"
+#include "viddec_pm_parse.h"
+#include "viddec_parser_ops.h"
+
+#define SC_DETECT_BUF_SIZE 1024
+#define MAX_CODEC_CXT_SIZE 4096
+
+typedef enum
+{
+    PM_SUCCESS = 0,
+    /* Messages to indicate more ES data */
+    PM_NO_DATA = 0x100,
+    /* Messages to indicate SC found */
+    PM_SC_FOUND = 0x200,
+    PM_FIRST_SC_FOUND = 0x201,
+    /* Messages to indicate Frame done */
+    PM_WKLD_DONE = 0x300,
+    /* Messages to indicate Error conditions */
+    PM_OVERFLOW = 0x400,
+    /* Messages to indicate inband conditions */
+    PM_INBAND_MESSAGES = 0x500,
+    PM_EOS = 0x501,
+    PM_DISCONTINUITY = 0x502,
+} pm_parse_state_t;
+
+/* This is a temporary structure for first pass sc parsing. index tells us where we are in list of es buffers
+   cur_es points to current es buffer we are parsing. */
+typedef struct
+{
+    int32_t list_index; /* current index of list */
+    uint32_t cur_offset;
+    uint32_t cur_size;
+    viddec_input_buffer_t *cur_es;
+} viddec_pm_sc_cur_buf_t;
+
+typedef struct
+{
+    uint32_t pending_tags[MAX_IBUFS_PER_SC];
+    uint8_t dummy;
+    uint8_t frame_done;
+    uint8_t first_buf_aligned;
+    uint8_t using_next;
+} vidded_pm_pending_tags_t;
+
+/* This structure holds all necessary data required by parser manager for stream parsing.
+ */
+typedef struct
+{
+    /* Actual buffer where data gets DMA'd. 8 padding bytes for alignment */
+    uint8_t scbuf[SC_DETECT_BUF_SIZE + 8];
+    viddec_sc_parse_cubby_cxt_t parse_cubby;
+    viddec_pm_utils_list_t list;
+    /* Place to store tags to be added to next to next workload */
+    viddec_pm_sc_cur_buf_t cur_buf;
+    //viddec_emitter emitter;
+    viddec_pm_utils_bstream_cxt_t getbits;
+    viddec_sc_prefix_state_t sc_prefix_info;
+    vidded_pm_pending_tags_t pending_tags;
+    uint8_t word_align_dummy;
+    uint8_t late_frame_detect;
+    uint8_t frame_start_found;
+    uint8_t found_fm_st_in_current_au;
+    uint32_t next_workload_error_eos;
+    uint32_t pending_inband_tags;
+#ifdef VBP
+    uint32_t codec_data[MAX_CODEC_CXT_SIZE<<3];
+#else
+    uint32_t codec_data[MAX_CODEC_CXT_SIZE>>2];
+#endif
+} viddec_pm_cxt_t;
+
+/*
+ *
+ * Functions used by Parser kernel
+ *
+ */
+
+/* This is for initialising parser manager context to default values */
+void viddec_pm_init_context(viddec_pm_cxt_t *cxt, uint32_t codec_type, uint32_t *persist_mem, uint32_t clean);
+
+/* This is the main parse function which returns state information that parser kernel can understand.*/
+uint32_t viddec_pm_parse_es_buffer(viddec_pm_cxt_t *cxt, uint32_t codec_type, viddec_input_buffer_t *es_buf);
+
+void viddec_pm_init_ops();
+
+void viddec_pm_update_time(viddec_pm_cxt_t *cxt, uint32_t time);
+
+uint32_t viddec_pm_get_parser_sizes(uint32_t codec_type, viddec_parser_memory_sizes_t *size);
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_pm_parse.h b/mixvbp/vbp_manager/include/viddec_pm_parse.h
new file mode 100755
index 0000000..beca8d7
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_parse.h
@@ -0,0 +1,24 @@
+#ifndef VIDDEC_PM_PARSE_H
+#define VIDDEC_PM_PARSE_H
+
+#include <stdint.h>
+/* This structure is used by first pass parsing(sc detect), the pm passes information on number of bytes
+   that needs to be parsed and if start code found then sc_end_pos contains the index of last sc code byte
+   in the current buffer */
+typedef struct
+{
+    uint32_t size; /* size pointed to by buf */
+    uint8_t *buf;  /* ptr to data */
+    int32_t sc_end_pos; /* return value end position of sc */
+    uint32_t phase; /* phase information(state) for sc */
+} viddec_sc_parse_cubby_cxt_t;
+
+typedef struct
+{
+    uint16_t next_sc;
+    uint8_t  second_scprfx_length;
+    uint8_t  first_sc_detect;
+} viddec_sc_prefix_state_t;
+
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h
new file mode 100755
index 0000000..999a067
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_utils_bstream.h
@@ -0,0 +1,88 @@
+#ifndef VIDDEC_PM_UTILS_BSTREAM_H
+#define VIDDEC_PM_UTILS_BSTREAM_H
+
+#include "viddec_pm_utils_list.h"
+
+#define CUBBY_SIZE 1024
+#define SCRATCH_SIZE 20
+#define MIN_DATA     8
+
+typedef struct
+{
+#ifdef VBP
+    uint8_t *buf;
+#else
+    uint8_t buf[CUBBY_SIZE + 8 + MIN_DATA];/* extra 8 bytes for alignment, extra 8 bytes for old data */
+#endif
+    uint32_t buf_st; /* start pos in buf */
+    uint32_t buf_end; /* first invalid byte in buf */
+    uint32_t buf_index; /* current index in buf */
+    uint32_t buf_bitoff; /* bit offset in current index position */
+} viddec_pm_utils_bstream_buf_cxt_t;
+
+typedef struct
+{
+    uint8_t  buf_scratch[SCRATCH_SIZE];/* scratch for boundary reads*/
+    uint32_t st; /* start index of valid byte */
+    uint32_t size;/* Total number of bytes in current buffer */
+    uint32_t bitoff; /* bit offset in first valid byte */
+} viddec_pm_utils_bstream_scratch_cxt_t;
+
+typedef struct
+{
+#ifdef VBP
+    /* counter of emulation prevention byte */
+    uint32_t emulation_byte_counter;
+#endif
+    /* After first pass of scan we figure out how many bytes are in the current access unit(N bytes). We store
+       the bstream buffer's first valid byte index w.r.t. the access unit in this variable */
+    uint32_t au_pos;
+    /* This is for keeping track of which list item was used to load data last */
+    uint32_t list_off;
+    /* This is for tracking emulation prevention bytes */
+    uint32_t phase;
+    /* This flag tells us whether to look for emulation prevention or not */
+    uint32_t is_emul_reqd;
+    /* A pointer to list of es buffers which contribute to current access unit */
+    viddec_pm_utils_list_t *list;
+    /* scratch buffer to stage data on boundaries and reloads */
+    viddec_pm_utils_bstream_scratch_cxt_t scratch;
+    /* Actual context which has valid data for get bits functionality */
+    viddec_pm_utils_bstream_buf_cxt_t bstrm_buf;
+} viddec_pm_utils_bstream_cxt_t;
+
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul);
+
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits);
+
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip);
+
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte);
+
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt);
+
+void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt);
+
+/*
+  This function gets bit and byte position of where we are in the current AU. We always return the position of next byte to be
+  read.
+  is_emul on true indicates we are on second zero byte in emulation prevention sequence.
+ */
+static inline void viddec_pm_utils_bstream_get_au_offsets(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+    uint32_t phase=cxt->phase;
+
+    *bit = cxt->bstrm_buf.buf_bitoff;
+    *byte = cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st);
+    if (cxt->phase > 0)
+    {
+        phase = phase - ((cxt->bstrm_buf.buf_bitoff != 0)? 1: 0 );
+    }
+    /* Assumption: we will never be parked on 0x3 byte of emulation prevention sequence */
+    *is_emul = (cxt->is_emul_reqd) && (phase > 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index] == 0) &&
+               (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x3);
+}
+#endif
diff --git a/mixvbp/vbp_manager/include/viddec_pm_utils_list.h b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h
new file mode 100755
index 0000000..0e650d5
--- /dev/null
+++ b/mixvbp/vbp_manager/include/viddec_pm_utils_list.h
@@ -0,0 +1,50 @@
+#ifndef VIDDEC_PM_COMMON_LIST_H
+#define VIDDEC_PM_COMMON_LIST_H
+
+/* Limitation: This is the maximum number of es buffers between start codes. Needs to change if we encounter
+   a case where this is not sufficient */
+#ifdef VBP
+#define MAX_IBUFS_PER_SC 512
+#else
+#define MAX_IBUFS_PER_SC 64
+#endif
+
+/* This structure is for storing information on byte position in the current access unit.
+   stpos is the au byte index of first byte in current es buffer.edpos is the au byte index+1 of last
+   valid byte in current es buffer.*/
+typedef struct
+{
+    uint32_t stpos;
+    uint32_t edpos;
+} viddec_pm_utils_au_bytepos_t;
+
+/* this structure is for storing all necessary information for list handling */
+typedef struct
+{
+    uint16_t num_items;                  /* Number of buffers in List */
+    uint16_t first_scprfx_length;        /* Length of first sc prefix in this list */
+    int32_t start_offset;                /* starting offset of unused data including sc prefix in first buffer */
+    int32_t end_offset;                  /* Offset of unused data in last buffer including 2nd sc prefix */
+    //viddec_input_buffer_t sc_ibuf[MAX_IBUFS_PER_SC]; /* Place to store buffer descriptors */
+    viddec_pm_utils_au_bytepos_t data[MAX_IBUFS_PER_SC]; /* place to store au byte positions */
+    int32_t total_bytes;                 /* total bytes for current access unit including first sc prefix*/
+} viddec_pm_utils_list_t;
+
+/* This function initialises the list to default values */
+void viddec_pm_utils_list_init(viddec_pm_utils_list_t *cxt);
+#ifndef VBP
+/* This function adds a new entry to list and will emit tags if needed */
+uint32_t viddec_pm_utils_list_addbuf(viddec_pm_utils_list_t *list, viddec_input_buffer_t *es_buf);
+
+/* This function updates au byte position of the current list. This should be called after sc codes are detected and before
+   syntax parsing as get bits requires this to be initialized. */
+void viddec_pm_utils_list_updatebytepos(viddec_pm_utils_list_t *list, uint8_t sc_prefix_length);
+
+/* This function walks through the list and removes consumed buffers based on total bytes. It then moves
+   unused entries to the top of list. */
+void viddec_pm_utils_list_remove_used_entries(viddec_pm_utils_list_t *list, uint32_t length);
+
+/* this function returns 1 if the requested byte is not found. If found returns list and offset into list */
+uint32_t viddec_pm_utils_list_getbyte_position(viddec_pm_utils_list_t *list, uint32_t byte, uint32_t *list_index, uint32_t *offset);
+#endif
+#endif
diff --git a/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c
new file mode 100644
index 0000000..498cbc4
--- /dev/null
+++ b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.c
@@ -0,0 +1,1830 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264secure_parser.h"
+
+#define TERMINATE_KEY 0xFFFFFFFF
+
+typedef struct vbp_h264secure_parser_private vbp_h264secure_parser_private;
+
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+struct vbp_h264secure_parser_private
+{
+    /* number of bytes used to encode length of NAL payload.  If parser does not receive configuration data
+    and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB
+    byte stream format. */
+    int NAL_length_size;
+
+    /* indicate if stream is length prefixed */
+    int length_prefix_verified;
+
+    H264_BS_PATTERN bitstream_pattern;
+
+    uint8_t* start;
+    int32_t  offset;
+    int32_t  size;
+};
+
+/* default scaling list table */
+static unsigned char Default_4x4_Intra[16] =
+{
+    6,13,20,28,
+    13,20,28,32,
+    20,28,32,37,
+    28,32,37,42
+};
+
+static unsigned char Default_4x4_Inter[16] =
+{
+    10,14,20,24,
+    14,20,24,27,
+    20,24,27,30,
+    24,27,30,34
+};
+
+static unsigned char Default_8x8_Intra[64] =
+{
+    6,10,13,16,18,23,25,27,
+    10,11,16,18,23,25,27,29,
+    13,16,18,23,25,27,29,31,
+    16,18,23,25,27,29,31,33,
+    18,23,25,27,29,31,33,36,
+    23,25,27,29,31,33,36,38,
+    25,27,29,31,33,36,38,40,
+    27,29,31,33,36,38,40,42
+};
+
+static unsigned char Default_8x8_Inter[64] =
+{
+    9,13,15,17,19,21,22,24,
+    13,13,17,19,21,22,24,25,
+    15,17,19,21,22,24,25,27,
+    17,19,21,22,24,25,27,28,
+    19,21,22,24,25,27,28,30,
+    21,22,24,25,27,28,30,32,
+    22,24,25,27,28,30,32,33,
+    24,25,27,28,30,32,33,35
+};
+
+static unsigned char quant_flat[16] =
+{
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16
+};
+
+static unsigned char quant8_flat[64] =
+{
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16
+};
+
+static unsigned char* UseDefaultList[8] =
+{
+    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
+    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
+    Default_8x8_Intra,
+    Default_8x8_Inter
+};
+
+static uint8 h264_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    {4, 3},
+    {3, 2},
+    {2, 1},
+    // reserved
+    {0, 0}
+};
+
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264secure_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264secure_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264secure_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->update_data = dlsym(pcontext->fd_parser, "viddec_h264secure_update");
+    if (NULL == pcontext->parser_ops->update_data)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->is_wkld_done = NULL;
+    pcontext->parser_ops->flush = NULL;
+    pcontext->parser_ops->is_frame_start = NULL;
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+    vbp_data_h264 *query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_h264, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES);
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    pcontext->parser_private = NULL;
+    vbp_h264secure_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_h264secure_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* init the pointer */
+    parser_private->start = 0;
+    parser_private->offset = 0;
+    parser_private->size = 0;
+    parser_private->NAL_length_size = 0;
+    parser_private->length_prefix_verified = 0;
+    parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_h264secure(pcontext);
+
+    return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_h264secure(vbp_context *pcontext)
+{
+    if (NULL != pcontext->parser_private)
+    {
+        free(pcontext->parser_private);
+        pcontext->parser_private = NULL;
+    }
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    int i;
+    vbp_data_h264 *query_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].slc_data);
+            free(query_data->pic_data[i].pic_parms);
+        }
+        free(query_data->pic_data);
+    }
+
+    free(query_data->IQ_matrix_buf);
+    free(query_data->codec_data);
+    free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+static inline uint16_t vbp_utils_ntohs(uint8_t* p)
+{
+    uint16_t i = ((*p) << 8) + ((*(p+1)));
+    return i;
+}
+
+static inline uint32_t vbp_utils_ntohl(uint8_t* p)
+{
+    uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3)));
+    return i;
+}
+
+
+static inline void vbp_set_VAPicture_h264secure(
+    int curr_picture_structure,
+    int bottom_field,
+    frame_store* store,
+    VAPictureH264* pic)
+{
+    if (FRAME == curr_picture_structure)
+    {
+        if (FRAME != viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("Reference picture structure is not frame for current frame picture!");
+        }
+        pic->flags = 0;
+        pic->TopFieldOrderCnt = store->top_field.poc;
+        pic->BottomFieldOrderCnt = store->bottom_field.poc;
+    }
+    else
+    {
+        if (FRAME == viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("reference picture structure is frame for current field picture!");
+        }
+        if (bottom_field)
+        {
+            pic->flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+        else
+        {
+            pic->flags = VA_PICTURE_H264_TOP_FIELD;
+            pic->TopFieldOrderCnt = store->top_field.poc;
+            pic->BottomFieldOrderCnt = store->bottom_field.poc;
+        }
+    }
+}
+
+static inline void vbp_set_slice_ref_list_h264secure(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    int i, j;
+    int num_ref_idx_active = 0;
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    uint8_t* p_list = NULL;
+    VAPictureH264* refPicListX = NULL;
+    frame_store* fs = NULL;
+
+    /* initialize ref picture list, set picture id and flags to invalid. */
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+        for (j = 0; j < 32; j++)
+        {
+            refPicListX->picture_id = VA_INVALID_SURFACE;
+            refPicListX->frame_idx = 0;
+            refPicListX->flags = VA_PICTURE_H264_INVALID;
+            refPicListX->TopFieldOrderCnt = 0;
+            refPicListX->BottomFieldOrderCnt = 0;
+            refPicListX++;
+        }
+    }
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+
+        if ((i == 0) &&
+            ((h264_PtypeB == slice_header->slice_type) ||
+             (h264_PtypeP == slice_header->slice_type)))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
+            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list0;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_0;
+            }
+        }
+        else if ((i == 1) && (h264_PtypeB == slice_header->slice_type))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
+            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list1;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_1;
+            }
+        }
+        else
+        {
+            num_ref_idx_active = 0;
+            p_list = NULL;
+        }
+
+
+        for (j = 0; j < num_ref_idx_active; j++)
+        {
+            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);
+
+            /* bit 5 indicates if reference picture is bottom field */
+            vbp_set_VAPicture_h264secure(
+                h264_parser->info.img.structure,
+                (p_list[j] & 0x20) >> 5,
+                fs,
+                refPicListX);
+
+            refPicListX->frame_idx = fs->frame_num;
+            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            refPicListX++;
+        }
+    }
+}
+
+static inline void vbp_set_pre_weight_table_h264secure(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    int i, j;
+
+    if ((((h264_PtypeP == slice_header->slice_type) ||
+          (h264_PtypeB == slice_header->slice_type)) &&
+          h264_parser->info.active_PPS.weighted_pred_flag) ||
+         ((h264_PtypeB == slice_header->slice_type) &&
+         (1 == h264_parser->info.active_PPS.weighted_bipred_idc)))
+    {
+        slc_parms->luma_log2_weight_denom = slice_header->sh_predwttbl.luma_log2_weight_denom;
+        slc_parms->chroma_log2_weight_denom = slice_header->sh_predwttbl.chroma_log2_weight_denom;
+        slc_parms->luma_weight_l0_flag = slice_header->sh_predwttbl.luma_weight_l0_flag;
+        slc_parms->chroma_weight_l0_flag = slice_header->sh_predwttbl.chroma_weight_l0_flag;
+        slc_parms->luma_weight_l1_flag = slice_header->sh_predwttbl.luma_weight_l1_flag;
+        slc_parms->chroma_weight_l1_flag = slice_header->sh_predwttbl.chroma_weight_l1_flag;
+
+        for (i = 0; i < 32; i++)
+        {
+            slc_parms->luma_weight_l0[i] = slice_header->sh_predwttbl.luma_weight_l0[i];
+            slc_parms->luma_offset_l0[i] = slice_header->sh_predwttbl.luma_offset_l0[i];
+            slc_parms->luma_weight_l1[i] = slice_header->sh_predwttbl.luma_weight_l1[i];
+            slc_parms->luma_offset_l1[i] = slice_header->sh_predwttbl.luma_offset_l1[i];
+
+            for (j = 0; j < 2; j++)
+            {
+                slc_parms->chroma_weight_l0[i][j] = slice_header->sh_predwttbl.chroma_weight_l0[i][j];
+                slc_parms->chroma_offset_l0[i][j] = slice_header->sh_predwttbl.chroma_offset_l0[i][j];
+                slc_parms->chroma_weight_l1[i][j] = slice_header->sh_predwttbl.chroma_weight_l1[i][j];
+                slc_parms->chroma_offset_l1[i][j] = slice_header->sh_predwttbl.chroma_offset_l1[i][j];
+            }
+        }
+    }
+    else
+    {
+        /* default weight table */
+        slc_parms->luma_log2_weight_denom = 5;
+        slc_parms->chroma_log2_weight_denom = 5;
+        slc_parms->luma_weight_l0_flag = 0;
+        slc_parms->luma_weight_l1_flag = 0;
+        slc_parms->chroma_weight_l0_flag = 0;
+        slc_parms->chroma_weight_l1_flag = 0;
+        for (i = 0; i < 32; i++)
+        {
+            slc_parms->luma_weight_l0[i] = 0;
+            slc_parms->luma_offset_l0[i] = 0;
+            slc_parms->luma_weight_l1[i] = 0;
+            slc_parms->luma_offset_l1[i] = 0;
+
+            for (j = 0; j < 2; j++)
+            {
+                slc_parms->chroma_weight_l0[i][j] = 0;
+                slc_parms->chroma_offset_l0[i][j] = 0;
+                slc_parms->chroma_weight_l1[i][j] = 0;
+                slc_parms->chroma_offset_l1[i][j] = 0;
+            }
+        }
+    }
+}
+
+
+static inline void vbp_set_reference_frames_h264secure(
+    struct h264_viddec_parser *parser,
+    VAPictureParameterBufferH264* pic_parms)
+{
+    int buffer_idx;
+    int frame_idx;
+    frame_store* store = NULL;
+    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
+    /* initialize reference frames */
+    for (frame_idx = 0; frame_idx < 16; frame_idx++)
+    {
+        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+    }
+    pic_parms->num_ref_frames = 0;
+
+    frame_idx = 0;
+
+    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer);  */
+    /* set short term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+
+        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
+        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0) */
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    /* set long term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
+        if (!viddec_h264_get_is_long_term(store))
+        {
+            WTRACE("long term frame is not marked as long term.");
+        }
+        /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->long_term_frame_idx;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0)*/
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    pic_parms->num_ref_frames = parser->info.active_SPS.num_ref_frames;
+
+    if (frame_idx > parser->info.active_SPS.num_ref_frames)
+    {
+        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
+               frame_idx, parser->info.active_SPS.num_ref_frames);
+    }
+}
+
+
+static inline void vbp_set_scaling_list_h264secure(
+    struct h264_viddec_parser *parser,
+    VAIQMatrixBufferH264* IQ_matrix_buf)
+{
+    int i;
+    int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 1 : 0);
+
+    if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
+    {
+        for (i = 0; i < lists_to_set; i++)
+        {
+            if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
+            {
+                if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                        ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
+                {
+                    /* use default scaling list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                    }
+                }
+                else
+                {
+                    /* use PPS list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
+                    }
+                }
+            }
+            else /* pic_scaling_list not present */
+            {
+                if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+                {
+                    /* SPS matrix present - use fallback rule B */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
+                               16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
+                               64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        //g_warning("invalid scaling list index.");
+                        break;
+                    }
+                }
+                else /* seq_scaling_matrix not present */
+                {
+                    /* SPS matrix not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                } /* end of seq_scaling_matrix not present */
+            } /* end of  pic_scaling_list not present */
+        } /* for loop for each index from 0 to 7 */
+    } /* end of pic_scaling_matrix present */
+    else
+    {
+        /* PPS matrix not present, use SPS information */
+        if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+        {
+            for (i = 0; i < lists_to_set; i++)
+            {
+                if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
+                {
+                    if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                            ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
+                    {
+                        /* use default scaling list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        }
+                    }
+                    else
+                    {
+                        /* use SPS list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
+                        }
+                    }
+                }
+                else
+                {
+                    /* SPS list not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                }
+            }
+        }
+        else
+        {
+            /* SPS matrix not present - use flat lists */
+            for (i = 0; i < 6; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
+            }
+            for (i = 0; i < 2; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+            }
+        }
+    }
+
+    if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
+            (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
+             parser->info.active_SPS.seq_scaling_matrix_present_flag))
+    {
+        for (i = 0; i < 2; i++)
+        {
+            memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+        }
+    }
+}
+
/**
 * Export the parser's currently active SPS/PPS into query_data->codec_data
 * and update the new/has SPS/PPS status flags.
 *
 * @param parser      h264 parser whose active_SPS/active_PPS are read.
 * @param query_data  output; codec_data fields are overwritten in place and
 *                    new_sps/new_pps/has_sps/has_pps are recomputed.
 */
static void vbp_set_codec_data_h264secure(
    struct h264_viddec_parser *parser,
     vbp_data_h264 *query_data)
{
    vbp_codec_data_h264* codec_data = query_data->codec_data;

    /* Snapshot previous values so new SPS/PPS (or a resolution change)
     * can be detected at the bottom of this function. */
    uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id;
    uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id;
    int frame_width = codec_data->frame_width;
    int frame_height = codec_data->frame_height;

    /* parameter id */
    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;

    /* profile and level */
    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
    codec_data->level_idc = parser->info.active_SPS.level_idc;


    /* constraint flag sets (h.264 Spec v2009); assumes set0..set4 are packed
     * into bits 4..0 of constraint_set_flags -- TODO confirm against the SPS
     * parser that fills this field. */
    codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4;
    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3;
    codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;
    codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1;
    codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1;

    /* reference frames */
    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;

    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
    {
        /* no longer necessary: two fields share the same interlaced surface */
        /* codec_data->num_ref_frames *= 2; */
    }

    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;

    /* frame coding */
    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;

    /* frame dimension in pixels; height doubles for field-coded content
     * (frame_mbs_only_flag == 0 makes the multiplier 2) */
    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16;

    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
                               (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;

    /* cropping information: crop offsets are scaled from luma-sample units
     * by CropUnitX/CropUnitY derived from the chroma format (H.264 7.4.2.1.1) */
    codec_data->crop_left = 0;
    codec_data->crop_right = 0;
    codec_data->crop_top = 0;
    codec_data->crop_bottom = 0;
    if(parser->info.active_SPS.sps_disp.frame_cropping_flag) {
        int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0;
        int ChromaArrayType = 0;
        if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) {
            if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) {
                /* 4:2:0 */
                SubWidthC = 2;
                SubHeightC = 2;
            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) {
                /* 4:2:2 */
                SubWidthC = 2;
                SubHeightC = 1;
            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) {
                /* 4:4:4 */
                SubWidthC = 1;
                SubHeightC = 1;
            }
            ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc;
        }

        if(ChromaArrayType == 0) {
            /* monochrome or separate colour planes: crop in luma units */
            CropUnitX = 1;
            CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
        } else {
            CropUnitX = SubWidthC;
            CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag);
        }

        codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;
        codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1;
        codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;
        codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1;
    }

    /* aspect ratio: idc < 17 indexes the fixed SAR table; 255 (Extended_SAR)
     * carries explicit sar_width/sar_height; anything else is unspecified */
    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
    {
        codec_data->aspect_ratio_idc =
            parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;

        if (codec_data->aspect_ratio_idc < 17)
        {
            codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0];
            codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1];
        }
        else if (codec_data->aspect_ratio_idc == 255)
        {
            codec_data->sar_width =
                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;

            codec_data->sar_height =
                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
        }
        else
        {
            codec_data->sar_width = 0;
            codec_data->sar_height = 0;
        }
    }
    else
    {
        // unspecified
        codec_data->aspect_ratio_idc = 0;
        codec_data->sar_width = 0;
        codec_data->sar_height = 0;
    }

    /* video format */
    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
    {
        codec_data->video_format =
            parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;
    }
    else
    {
        // Unspecified video format
        codec_data->video_format = 5;
    }

    codec_data->video_full_range_flag =
        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag;


    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
    {
        codec_data->matrix_coefficients =
            parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients;
    }
    else
    {
        // Unspecified
        codec_data->matrix_coefficients = 2;
    }

    codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value;

    /* picture order type and count */
    codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
    codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;


    /* update sps and pps status.
     * NOTE(review): new_sps compares the saved SPS id against
     * active_PPS.seq_parameter_set_id (the SPS referenced by the PPS),
     * not active_SPS.seq_parameter_set_id which was saved above --
     * confirm this asymmetry is intentional.
     * 0xff appears to be the "no parameter set seen yet" sentinel. */
    query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0;
    query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0;
    query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff;
    query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff;
    /* any resolution change also forces clients to re-read SPS/PPS */
    if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height)
    {
        query_data->new_sps = 1;
        query_data->new_pps = 1;
    }
}
+
+
/**
 * Start (or continue) a picture: bump num_pictures when a slice with
 * first_mb_in_slice == 0 is seen, then fill the picture's
 * VAPictureParameterBufferH264 from the active SPS/PPS and slice header.
 *
 * @param pcontext    vbp context; query_data->pic_data is updated in place.
 * @param list_index  index into the access-unit list (currently unused here).
 * @return VBP_OK on success; VBP_DATA when the picture count is invalid or
 *         exceeds MAX_NUM_PICTURES.
 */
static uint32_t vbp_add_pic_data_h264secure(vbp_context *pcontext, int list_index)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;

    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
    struct h264_viddec_parser* parser = NULL;
    vbp_picture_data_h264* pic_data = NULL;
    VAPictureParameterBufferH264* pic_parms = NULL;

    parser = (struct h264_viddec_parser *)cxt->codec_data;

    if (0 == parser->info.SliceHeader.first_mb_in_slice)
    {
        /* a new picture is parsed */
        query_data->num_pictures++;
    }

    if (query_data->num_pictures == 0)
    {
        /* partial frame: buffer began mid-picture, treat it as picture 1 */
        query_data->num_pictures = 1;
    }

    if (query_data->num_pictures > MAX_NUM_PICTURES)
    {
        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
        return VBP_DATA;
    }

    int pic_data_index = query_data->num_pictures - 1;
    if (pic_data_index < 0)
    {
        WTRACE("MB address does not start from 0!");
        return VBP_DATA;
    }

    pic_data = &(query_data->pic_data[pic_data_index]);
    pic_parms = pic_data->pic_parms;

    // relax this condition to support partial frame parsing

    /* The guard below was deliberately disabled; the bare block is kept so
     * the parameters are (re)filled for every slice of the picture. */
    //if (parser->info.SliceHeader.first_mb_in_slice == 0)
    {
        /**
        * picture parameter only needs to be set once,
        * even multiple slices may be encoded
        */

        /* VAPictureParameterBufferH264 */
        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
        pic_parms->CurrPic.frame_idx = 0;
        if (parser->info.img.field_pic_flag == 1)
        {
            if (parser->info.img.bottom_field_flag)
            {
                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
            }
            else
            {
                /* also OK set to 0 (from test suite) */
                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
            }
        }
        else
        {
            pic_parms->CurrPic.flags = 0; /* frame picture */
        }
        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;
        /* don't care if current frame is used as long term reference */
        if (parser->info.SliceHeader.nal_ref_idc != 0)
        {
            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        }

        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;

        /* frame height in MBS */
        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
                (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;

        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;


        pic_parms->seq_fields.value = 0;
        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;

        /* new fields in libva 0.31 */
        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag;


        /* referenced from UMG_Moorstown_TestSuites */
        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;

        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
        pic_parms->slice_group_change_rate_minus1 = 0;
        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
        pic_parms->pic_init_qs_minus26 = 0;
        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;

        pic_parms->pic_fields.value = 0;
        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;

        /* new LibVA fields in v0.31*/
        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;

        /* all slices in the picture have the same field_pic_flag */
        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;

        pic_parms->frame_num = parser->info.SliceHeader.frame_num;

        pic_parms->num_ref_idx_l0_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l0_active-1;
        pic_parms->num_ref_idx_l1_default_active_minus1 = parser->info.active_PPS.num_ref_idx_l1_active-1;
    }


    /* set reference frames, and num_ref_frames */
    vbp_set_reference_frames_h264secure(parser, pic_parms);
    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
    {
        /* an IDR picture starts a fresh DPB: invalidate all reference slots */
        int frame_idx;
        for (frame_idx = 0; frame_idx < 16; frame_idx++)
        {
            pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
            pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
            pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
            pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
        }
    }

    return VBP_OK;
}
+
+static uint32_t vbp_add_slice_data_h264secure(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_data->buffer_addr = cxt->parse_cubby.buf;
+    slc_parms = &(slc_data->slc_parms);
+
+    /* byte: how many bytes have been parsed */
+    /* bit: bits parsed within the current parsing position */
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    slc_data->nal_unit_type = h264_parser->info.nal_unit_type;
+
+    slc_parms->slice_data_size = slc_data->slice_size =
+                                     pcontext->parser_cxt->list.data[index].edpos -
+                                     pcontext->parser_cxt->list.data[index].stpos;
+
+    slc_parms->slice_data_offset = 0;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = cxt->list.data[index].stpos;
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &
+            (!(h264_parser->info.SliceHeader.field_pic_flag)))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    pic_data->num_slices++;
+
+    //vbp_update_reference_frames_h264_methodB(pic_data);
+    if (pic_data->num_slices > MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    return VBP_OK;
+}
+
+
+static uint32_t vbp_update_slice_data_h264secure(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private;
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_parms = &(slc_data->slc_parms);
+
+    slc_parms->slice_data_size = parser_private->size;
+    slc_parms->slice_data_offset = parser_private->offset;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = 0;
+    slc_data->buffer_addr  = parser_private->start;
+    slc_data->slice_size = parser_private->size + parser_private->offset;
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &
+            (!(h264_parser->info.SliceHeader.field_pic_flag)))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    pic_data->num_slices++;
+
+    if (pic_data->num_slices > MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    return VBP_OK;
+}
+
+
+
+/**
+* parse decoder configuration data
+*/
+uint32 vbp_parse_init_data_h264secure(vbp_context* pcontext)
+{
+    /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */
+
+    uint8 configuration_version = 0;
+    uint8 AVC_profile_indication = 0;
+    uint8 profile_compatibility = 0;
+    uint8 AVC_level_indication = 0;
+    uint8 length_size_minus_one = 0;
+    uint8 num_of_sequence_parameter_sets = 0;
+    uint8 num_of_picture_parameter_sets = 0;
+    uint16 sequence_parameter_set_length = 0;
+    uint16 picture_parameter_set_length = 0;
+
+    int i = 0;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private;
+    //Enable emulation prevention
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* check if configuration data is start code prefix */
+    viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    int ret = ops->parse_sc((void *)&cubby,
+                            NULL, /* context, not used */
+                            &(cxt->sc_prefix_info));
+    if (ret == 1)
+    {
+        WTRACE("configuration data is start-code prefixed.\n");
+        parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+        return vbp_parse_start_code_h264secure(pcontext);
+    }
+
+
+    uint8* cur_data = cxt->parse_cubby.buf;
+
+
+    if (cxt->parse_cubby.size < 6)
+    {
+        /* need at least 6 bytes to start parsing the structure, see spec 15 */
+        return VBP_DATA;
+    }
+
+    configuration_version = *cur_data++;
+    AVC_profile_indication = *cur_data++;
+
+    /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+    profile_compatibility = *cur_data++;
+    AVC_level_indication = *cur_data++;
+
+    /* ITRACE("Level indication: %d", AVC_level_indication);*/
+    /* 2 bits of length_size_minus_one, 6 bits of reserved (11111) */
+    length_size_minus_one = (*cur_data) & 0x3;
+
+    if (length_size_minus_one != 3)
+    {
+        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+    }
+
+    parser_private->NAL_length_size = length_size_minus_one + 1;
+
+    cur_data++;
+
+    /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+    if (num_of_sequence_parameter_sets > 1)
+    {
+        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+    }
+    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+    {
+        /* this would never happen as MAX_NUM_SPS = 32 */
+        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+    }
+    cur_data++;
+
+    cxt->list.num_items = 0;
+    for (i = 0; i < num_of_sequence_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse sequence_parameter_set_length */
+            ETRACE("Not enough data to parse SPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least sequence_parameter_set_length bytes for SPS */
+            ETRACE("Not enough data to parse SPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += sequence_parameter_set_length;
+    }
+
+    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+    {
+        /* need at least one more byte to parse num_of_picture_parameter_sets */
+        ETRACE("Not enough data to parse number of PPS.");
+        return VBP_DATA;
+    }
+
+    num_of_picture_parameter_sets = *cur_data++;
+    if (num_of_picture_parameter_sets > 1)
+    {
+        /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+    }
+
+    for (i = 0; i < num_of_picture_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse picture_parameter_set_length */
+            ETRACE("Not enough data to parse PPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least picture_parameter_set_length bytes for PPS */
+            ETRACE("Not enough data to parse PPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += picture_parameter_set_length;
+    }
+
+    if ((cur_data - cxt->parse_cubby.buf) !=  cxt->parse_cubby.size)
+    {
+        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+               cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+    }
+
+    parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED;
+    return VBP_OK;
+}
+
/**
 * Read the big-endian NAL length prefix at p, whose width in bytes is
 * *NAL_length_size (1-4). An invalid width is corrected to 4 in place.
 *
 * @param p                points at the length prefix bytes.
 * @param NAL_length_size  in/out prefix width; reset to 4 when invalid.
 * @return the decoded NAL unit length.
 */
static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
{
    switch (*NAL_length_size)
    {
    case 4:
        return vbp_utils_ntohl(p);

    case 3:
    {
        uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
        return i;
    }

    case 2:
        return vbp_utils_ntohs(p);

    case 1:
        return *p;

    default:
        /* Fix: the original passed the pointer itself to the %d conversion;
         * dereference to log the actual (invalid) size value. */
        WTRACE("invalid NAL_length_size: %d.", *NAL_length_size);
        /* default to 4 bytes for length */
        *NAL_length_size = 4;
        return vbp_utils_ntohl(p);
    }
}
+
+/**
+** H.264 elementary stream does not have start code.
+* instead, it is comprised of size of NAL unit and payload
+* of NAL unit. See spec 15 (Sample format)
+*/
+
+/* Start code prefix is 001 which is 3 bytes. */
+#define H264_SC_SIZE 3
/**
 * Locate every NAL unit in the current sample buffer and record its
 * [stpos, edpos) extent in cxt->list, resetting per-sample query data first.
 * Handles three bitstream patterns (tracked in parser_private):
 *   - H264_BS_LENGTH_PREFIXED: each NAL preceded by a length field,
 *   - H264_BS_SC_PREFIXED:     Annex-B style 00 00 01 start codes,
 *   - H264_BS_SINGLE_NAL:      the whole buffer is one bare NAL.
 * A length-prefixed stream that fails alignment is probed once for start
 * codes and, if found, reclassified permanently.
 *
 * @param pcontext  vbp context; query_data and cxt->list are rewritten.
 * @return VBP_OK (scan problems are logged, not fatal).
 */
uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext)
{
    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *)pcontext->parser_private;

    /* reset query data for the new sample buffer */
    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
    int i;

    for (i = 0; i < MAX_NUM_PICTURES; i++)
    {
        query_data->pic_data[i].num_slices = 0;
    }
    query_data->num_pictures = 0;

    cxt->list.num_items = 0;

    /* reset start position of first item to 0 in case there is only one item */
    cxt->list.data[0].stpos = 0;

    /* start code emulation prevention byte is present in NAL */
    cxt->getbits.is_emul_reqd = 1;

    if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED)
    {
        viddec_sc_parse_cubby_cxt_t* cubby = NULL;
        int32_t size_left = 0;
        int32_t size_parsed = 0;
        int32_t NAL_length = 0;

        cubby = &(cxt->parse_cubby);

        size_left = cubby->size;

        /* walk the buffer: each iteration consumes one length prefix plus
         * its NAL payload and appends one list item */
        while (size_left >= parser_private->NAL_length_size)
        {
            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size);
            if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size)
            {
                ETRACE("Invalid NAL_length parsed.");
                break;
            }

            size_parsed += parser_private->NAL_length_size;
            cxt->list.data[cxt->list.num_items].stpos = size_parsed;
            size_parsed += NAL_length; /* skip NAL bytes */
            /* end position is exclusive */
            cxt->list.data[cxt->list.num_items].edpos = size_parsed;
            cxt->list.num_items++;
            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
            {
                ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
                break;
            }

            size_left = cubby->size - size_parsed;
        }

        if (size_left != 0 && parser_private->length_prefix_verified == 0)
        {
            WTRACE("Elementary stream is not aligned (%d).", size_left);

            /* attempt to correct length prefix to start-code prefix only once, if it succeeds, we will
                    * always treat bit stream as start-code prefixed; otherwise, treat bit stream as length prefixed
                    */
            parser_private->length_prefix_verified = 1;
            /* probe a copy so cxt->parse_cubby itself is not modified */
            viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;

            viddec_parser_ops_t *ops = pcontext->parser_ops;
            int ret = ops->parse_sc((void *)&temp_cubby,
                                    NULL, /* context, not used */
                                    &(cxt->sc_prefix_info));

            /* found start code */
            if (ret == 1)
            {
                WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
                parser_private->NAL_length_size = 0;
                parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
                /* reset parsing data */
                for (i = 0; i < MAX_NUM_PICTURES; i++)
                {
                    query_data->pic_data[i].num_slices = 0;
                }
                query_data->num_pictures = 0;
                cxt->list.num_items = 0;
            }
        }
    }

    /* no 'else': a stream reclassified above falls through to this scan */
    if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED)
    {
        viddec_sc_parse_cubby_cxt_t cubby;
        /*  memory copy without updating cxt->parse_cubby */
        cubby = cxt->parse_cubby;
        viddec_parser_ops_t *ops = pcontext->parser_ops;
        int ret = 0;

        while (1)
        {
            ret = ops->parse_sc((void *)&cubby,
                                NULL, /* context, not used */
                                &(cxt->sc_prefix_info));
            if (ret == 1)
            {
                if (cxt->list.num_items == 0)
                {
                    cxt->list.data[0].stpos = cubby.sc_end_pos;
                }
                else
                {
                    /* sc_end_pos is relative to the previous item's start;
                     * the previous NAL ends H264_SC_SIZE bytes before this
                     * start code's end */
                    cxt->list.data[cxt->list.num_items].stpos =
                        cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
                    cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE;
                }

                /* advance the scan window past the newly found NAL start */
                cubby.phase = 0;
                cubby.buf = cxt->parse_cubby.buf +
                            cxt->list.data[cxt->list.num_items].stpos;

                cubby.size = cxt->parse_cubby.size -
                             cxt->list.data[cxt->list.num_items].stpos;

                cxt->list.num_items++;
                if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
                {
                    WTRACE("Num items exceeds the limit!");
                    /* not fatal, just stop parsing */
                    break;
                }
            }
            else
            {
                if (cxt->list.num_items == 0)
                {
                    cxt->list.num_items = 1;
                    parser_private->bitstream_pattern = H264_BS_SINGLE_NAL;
                    WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
                }
                /* close out the final NAL at the end of the buffer */
                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
                break;
            }
        }

    }

    if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL)
    {
        /* whole buffer is one NAL unit */
        cxt->list.num_items = 1;
        cxt->list.data[0].stpos = 0;
        cxt->list.data[0].edpos = cxt->parse_cubby.size;
    }

    return VBP_OK;
}
+
+/**
+*
+* process parsing result after a NAL unit is parsed
+*
+*/
+uint32 vbp_process_parsing_result_h264secure( vbp_context *pcontext, int i)
+{
+    /* Post-process the result of parsing one NAL unit (list item i).
+       Slice and IDR NALs populate picture- and slice-level query data;
+       all other NAL types are only traced.  Returns VBP_PARM on a bad
+       index, VBP_MULTI when the picture buffer is full, or the error
+       from the add-pic/add-slice helpers. */
+    if (i >= MAX_NUM_SLICES)
+    {
+        return VBP_PARM;
+    }
+
+    uint32 error = VBP_OK;
+
+    /* codec-specific parser state lives at the head of codec_data */
+    struct h264_viddec_parser* parser = NULL;
+    parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+    vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data;
+    switch (parser->info.nal_unit_type)
+    {
+    case h264_NAL_UNIT_TYPE_SLICE:
+        VTRACE("slice header is parsed.");
+        /* picture-level data may need to be (re)captured before the
+           slice parameters are recorded */
+        error = vbp_add_pic_data_h264secure(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264secure(pcontext, i);
+        }
+        break;
+
+    case  h264_NAL_UNIT_TYPE_IDR:
+        VTRACE("IDR header is parsed.");
+        /* IDR slices carry the same picture/slice data as regular slices */
+        error = vbp_add_pic_data_h264secure(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264secure(pcontext, i);
+        }
+        break;
+    case h264_NAL_UNIT_TYPE_SEI:
+        //ITRACE("SEI header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_SPS:
+        VTRACE("SPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_PPS:
+        VTRACE("PPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        VTRACE("ACC unit delimiter is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+        ITRACE("EOSeq is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOstream:
+        ITRACE("EOStream is parsed");
+        break;
+
+    default:
+        WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+        break;
+    }
+
+    /* Signal VBP_MULTI once the buffer already holds the maximum number of
+       (frame) pictures so the caller can flush before continuing. */
+    if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1)
+    {
+        WTRACE("more than one frame in the buffer is found(%d)", query_data->num_pictures);
+        return (error == VBP_OK ? VBP_MULTI : error);
+    }
+    return error;
+}
+
+/*
+*
+* fill query data structure after sample buffer is parsed
+*
+*/
+uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext)
+{
+    /*
+     * Fill the query data structure after the sample buffer is parsed.
+     * Always returns VBP_OK.
+     *
+     * Fix: the local 'private' pointer was fetched from parser_private but
+     * never used (and it used the non-secure vbp_h264_parser_private_t
+     * type); the dead local has been removed.
+     */
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+
+    /* codec-level data (profile, dimensions, cropping, ...) */
+    vbp_set_codec_data_h264secure(parser, query_data);
+
+    /* number of buffers parsed so far */
+    query_data->buf_number = buffer_counter;
+
+    /* VAIQMatrixBufferH264 */
+    vbp_set_scaling_list_h264secure(parser, query_data->IQ_matrix_buf);
+
+    if (query_data->num_pictures == 0)
+    {
+        /* No slice was seen: add a dummy picture that carries the picture
+           parameters parsed from SPS and PPS. */
+        vbp_add_pic_data_h264secure(pcontext, 0);
+    }
+
+    return VBP_OK;
+}
+
+uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size)
+{
+    /*
+     * Update the parsing result with externally supplied slice records.
+     *
+     * newdata layout, repeated per slice until TERMINATE_KEY:
+     *   uint32 key | uint32 start | int32 offset | int32 size | slice header blob
+     * where the blob is sizeof(slice_header_t) + sizeof(dec_ref_pic_marking_t)
+     * bytes and is handed verbatim to the parser's update_data op.
+     *
+     * Fixes: arithmetic on void* (GNU extension) replaced with a uint8_t*
+     * cursor; unused local 'j' removed; magic '4' literals replaced with
+     * sizeof(uint32_t); bounds checks added so a truncated buffer cannot
+     * cause out-of-bounds reads (it now falls through to the VBP_DATA path).
+     */
+    uint32 error = VBP_OK;
+    uint32 offset = 0;
+    uint32 key = 0;
+    uint32 i;
+
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+
+    /* reset per-picture results before repopulating them */
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    vbp_h264secure_parser_private *parser_private = (vbp_h264secure_parser_private *) pcontext->parser_private;
+
+    uint8_t *data = (uint8_t *)newdata;  /* byte cursor; void* arithmetic is non-standard */
+    int32_t  sliceheadersize = sizeof(slice_header_t) + sizeof(dec_ref_pic_marking_t);
+    uint32_t slice_num = 0;
+    while (offset < size) {
+        /* never read the key past the end of a truncated buffer */
+        if (offset + sizeof(uint32_t) > size) {
+            break;
+        }
+        memcpy(&key, data + offset, sizeof(uint32_t));
+        if (key == TERMINATE_KEY) {
+            break;
+        }
+        slice_num++;
+        offset += sizeof(uint32_t);
+
+        /* each record carries start/offset/size before the slice header */
+        if (offset + 3 * sizeof(uint32_t) > size) {
+            break;
+        }
+        memcpy(&parser_private->start, data + offset, sizeof(uint32_t));
+        offset += sizeof(uint32_t);
+
+        memcpy(&parser_private->offset, data + offset, sizeof(int32_t));
+        offset += sizeof(uint32_t);
+
+        memcpy(&parser_private->size, data + offset, sizeof(int32_t));
+        offset += sizeof(uint32_t);
+
+        if (offset + (uint32)sliceheadersize > size) {
+            break;
+        }
+        error = pcontext->parser_ops->update_data(pcontext->parser_cxt,
+                                                  data + offset, sliceheadersize);
+        offset += sliceheadersize;
+        if (error != VBP_OK)
+        {
+            ETRACE("update_data error = 0x%x",error);
+            return error;
+        }
+
+        error = vbp_add_pic_data_h264secure(pcontext, slice_num);
+        if (error != VBP_OK)
+        {
+            ETRACE("vbp_add_pic_data_h264secure error = 0x%x",error);
+            return error;
+        }
+
+        error = vbp_update_slice_data_h264secure(pcontext, slice_num);
+        if (error != VBP_OK)
+        {
+            ETRACE("vbp_add_slice_data_h264secure error = 0x%x",error);
+            return error;
+        }
+    }
+    /* stream must end with the terminate key and contain at least one slice */
+    if (key != TERMINATE_KEY)
+    {
+        ETRACE("Don't find a terminated key 0xFFFFFF!");
+        return VBP_DATA;
+    } else {
+        if (slice_num < 1) {
+            ETRACE("Don't find a valid slice header!");
+            return VBP_DATA;
+        }
+    }
+    error = vbp_populate_query_data_h264secure(pcontext);
+
+    if (error != VBP_OK)
+    {
+        ETRACE("vbp_populate_query_data_h264secure error = 0x%x",error);
+        return error;
+    }
+    return error;
+}
diff --git a/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h
new file mode 100644
index 0000000..a55c07c
--- /dev/null
+++ b/mixvbp/vbp_manager/secvideo/baytrail/vbp_h264secure_parser.h
@@ -0,0 +1,70 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_H264SECURE_PARSER_H
+#define VBP_H264SECURE_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264secure(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264secure(vbp_context *pcontext);
+
+/*
+ * parse start code. Only support length prefixed mode. Start
+ * code prefixed is not supported.
+ */
+uint32 vbp_parse_start_code_h264secure(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264secure(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264secure(vbp_context *pcontext);
+
+/*
+ * update the parsing result with extra data
+ */
+uint32 vbp_update_data_h264secure(vbp_context *pcontext, void *newdata, uint32 size);
+
+#endif /*VBP_H264SECURE_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_h264_parser.c b/mixvbp/vbp_manager/vbp_h264_parser.c
new file mode 100755
index 0000000..3f6400d
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_h264_parser.c
@@ -0,0 +1,1751 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_h264_parser.h"
+
+typedef struct vbp_h264_parser_private_t vbp_h264_parser_private;
+
+/* How NAL units are delimited in the incoming bitstream. */
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,  /* each NAL preceded by a NAL_length_size-byte length field */
+    H264_BS_SC_PREFIXED,      /* Annex B style: NALs separated by start codes */
+    H264_BS_SINGLE_NAL        /* buffer holds exactly one NAL unit */
+} H264_BS_PATTERN;
+
+/* Parser-private state hung off vbp_context::parser_private. */
+struct vbp_h264_parser_private_t
+{
+    /* number of bytes used to encode length of NAL payload.  If parser does not receive configuration data
+    and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB
+    byte stream format. */
+    int NAL_length_size;
+
+    /* indicate if stream is length prefixed; set once after the first
+       alignment check so the prefix correction is attempted only once */
+    int length_prefix_verified;
+
+    H264_BS_PATTERN bitstream_pattern;
+};
+
+/* default scaling list table */
+
+/* Default intra 4x4 scaling list, used when the stream selects the
+   default matrix (see UseDefaultList / vbp_set_scaling_list_h264). */
+unsigned char Default_4x4_Intra[16] =
+{
+    6,13,20,28,
+    13,20,28,32,
+    20,28,32,37,
+    28,32,37,42
+};
+
+/* Default inter 4x4 scaling list. */
+unsigned char Default_4x4_Inter[16] =
+{
+    10,14,20,24,
+    14,20,24,27,
+    20,24,27,30,
+    24,27,30,34
+};
+
+/* Default intra 8x8 scaling list. */
+unsigned char Default_8x8_Intra[64] =
+{
+    6,10,13,16,18,23,25,27,
+    10,11,16,18,23,25,27,29,
+    13,16,18,23,25,27,29,31,
+    16,18,23,25,27,29,31,33,
+    18,23,25,27,29,31,33,36,
+    23,25,27,29,31,33,36,38,
+    25,27,29,31,33,36,38,40,
+    27,29,31,33,36,38,40,42
+};
+
+/* Default inter 8x8 scaling list. */
+unsigned char Default_8x8_Inter[64] =
+{
+    9,13,15,17,19,21,22,24,
+    13,13,17,19,21,22,24,25,
+    15,17,19,21,22,24,25,27,
+    17,19,21,22,24,25,27,28,
+    19,21,22,24,25,27,28,30,
+    21,22,24,25,27,28,30,32,
+    22,24,25,27,28,30,32,33,
+    24,25,27,28,30,32,33,35
+};
+
+/* Flat (identity) 4x4 quantization matrix. */
+unsigned char quant_flat[16] =
+{
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16,
+    16,16,16,16
+};
+
+/* Flat (identity) 8x8 quantization matrix. */
+unsigned char quant8_flat[64] =
+{
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16,
+    16,16,16,16,16,16,16,16
+};
+
+/* Default-list lookup indexed by scaling list number: 0-2 intra 4x4,
+   3-5 inter 4x4, 6 intra 8x8, 7 inter 8x8. */
+unsigned char* UseDefaultList[8] =
+{
+    Default_4x4_Intra, Default_4x4_Intra, Default_4x4_Intra,
+    Default_4x4_Inter, Default_4x4_Inter, Default_4x4_Inter,
+    Default_8x8_Intra,
+    Default_8x8_Inter
+};
+
+/* Sample aspect ratio {numerator, denominator} pairs indexed by
+   aspect_ratio_idc; presumably follows H.264 Annex E Table E-1, with
+   {0,0} for unspecified/reserved entries -- TODO confirm against spec. */
+static uint8 h264_aspect_ratio_table[][2] =
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    {4, 3},
+    {3, 2},
+    {2, 1},
+    // reserved
+    {0, 0}
+};
+
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext)
+{
+    /*
+     * Resolve the H.264 parser entry points from the shared library handle
+     * pcontext->fd_parser.  Returns VBP_PARM if the ops table is missing,
+     * VBP_LOAD if a required symbol cannot be resolved, VBP_OK on success.
+     *
+     * Fix: stray double semicolon after the flush dlsym call removed.
+     */
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_h264_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    /* start-code scanning uses the manager's built-in implementation */
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_h264_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_h264_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    /* workload-done callback is not used in the VBP build */
+    pcontext->parser_ops->is_wkld_done = NULL;
+#else
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_h264_wkld_done");
+    if (NULL == pcontext->parser_ops->is_wkld_done)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+
+    pcontext->parser_ops->flush = dlsym(pcontext->fd_parser, "viddec_h264_flush");
+    if (NULL == pcontext->parser_ops->flush)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->is_frame_start = NULL;
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext)
+{
+    /* Allocate query_data (picture/slice arrays, IQ matrix, codec data) and
+       the parser-private state.  On any allocation failure everything
+       allocated so far is released via vbp_free_query_data_h264 and VBP_MEM
+       is returned.  Returns VBP_PARM if query_data is already set. */
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+    vbp_data_h264 *query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_h264, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer early so the cleanup path can free it */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_h264, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    /* per-picture parameter and slice-data arrays */
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferH264, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_h264, MAX_NUM_SLICES)
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferH264, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_h264, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    pcontext->parser_private = NULL;
+    vbp_h264_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_h264_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* defaults: no length prefix known yet; assume start-code (Annex B) input */
+    parser_private->NAL_length_size = 0;
+
+    parser_private->length_prefix_verified = 0;
+
+    parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_h264(pcontext);
+
+    return VBP_MEM;
+}
+
+uint32 vbp_free_query_data_h264(vbp_context *pcontext)
+{
+    /* Release the parser-private state and every buffer hung off query_data.
+       Safe on a partially constructed context: this is also the cleanup
+       path of vbp_allocate_query_data_h264.  Always returns VBP_OK. */
+    free(pcontext->parser_private);
+    pcontext->parser_private = NULL;
+
+    vbp_data_h264 *data = (vbp_data_h264 *)pcontext->query_data;
+    if (data == NULL)
+    {
+        return VBP_OK;
+    }
+
+    if (data->pic_data)
+    {
+        int idx;
+        for (idx = 0; idx < MAX_NUM_PICTURES; idx++)
+        {
+            free(data->pic_data[idx].slc_data);
+            free(data->pic_data[idx].pic_parms);
+        }
+        free(data->pic_data);
+    }
+
+    free(data->IQ_matrix_buf);
+    free(data->codec_data);
+    free(data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+static inline uint16_t vbp_utils_ntohs(uint8_t* p)
+{
+    /* Read a 16-bit big-endian (network order) value from a possibly
+       unaligned byte pointer. */
+    uint16_t i = ((*p) << 8) + ((*(p+1)));
+    return i;
+}
+
+static inline uint32_t vbp_utils_ntohl(uint8_t* p)
+{
+    /* Read a 32-bit big-endian (network order) value from a possibly
+       unaligned byte pointer. */
+    uint32_t i = ((*p) << 24) + ((*(p+1)) << 16) + ((*(p+2)) << 8) + ((*(p+3)));
+    return i;
+}
+
+
+static inline void vbp_set_VAPicture_h264(
+    int curr_picture_structure,
+    int bottom_field,
+    frame_store* store,
+    VAPictureH264* pic)
+{
+    /* Populate a VAPictureH264 from a DPB frame store.  The two POC fields
+       are always taken per-field from the store; only the flags depend on
+       the current picture structure.  Structure mismatches between the
+       current picture and the reference are traced but not treated as
+       errors. */
+    if (FRAME == curr_picture_structure)
+    {
+        if (FRAME != viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("Reference picture structure is not frame for current frame picture!");
+        }
+        pic->flags = 0;
+    }
+    else
+    {
+        if (FRAME == viddec_h264_get_dec_structure(store))
+        {
+            WTRACE("reference picture structure is frame for current field picture!");
+        }
+        pic->flags = bottom_field ? VA_PICTURE_H264_BOTTOM_FIELD
+                                  : VA_PICTURE_H264_TOP_FIELD;
+    }
+
+    pic->TopFieldOrderCnt = store->top_field.poc;
+    pic->BottomFieldOrderCnt = store->bottom_field.poc;
+}
+
+static inline void vbp_set_slice_ref_list_h264(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    /* Build RefPicList0/RefPicList1 in the VA slice parameters from the
+       parser's DPB.  List 0 is filled for P and B slices, list 1 only for
+       B slices; any unused entries keep the INVALID marker set below. */
+    int i, j;
+    int num_ref_idx_active = 0;
+    h264_Slice_Header_t* slice_header = &(h264_parser->info.SliceHeader);
+    uint8_t* p_list = NULL;
+    VAPictureH264* refPicListX = NULL;
+    frame_store* fs = NULL;
+
+    /* initialize ref picture lists, set picture id and flags to invalid */
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+        for (j = 0; j < 32; j++)
+        {
+            refPicListX->picture_id = VA_INVALID_SURFACE;
+            refPicListX->frame_idx = 0;
+            refPicListX->flags = VA_PICTURE_H264_INVALID;
+            refPicListX->TopFieldOrderCnt = 0;
+            refPicListX->BottomFieldOrderCnt = 0;
+            refPicListX++;
+        }
+    }
+
+    for (i = 0; i < 2; i++)
+    {
+        refPicListX = (i == 0) ? &(slc_parms->RefPicList0[0]) : &(slc_parms->RefPicList1[0]);
+
+        /* pick the source list: the reordered slice list when the slice
+           carries reordering commands, otherwise the default DPB list */
+        if ((i == 0) &&
+            ((h264_PtypeB == slice_header->slice_type) ||
+             (h264_PtypeP == slice_header->slice_type)))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l0_active;
+            if (slice_header->sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list0;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_0;
+            }
+        }
+        else if ((i == 1) && (h264_PtypeB == slice_header->slice_type))
+        {
+            num_ref_idx_active = slice_header->num_ref_idx_l1_active;
+            if (slice_header->sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = h264_parser->info.slice_ref_list1;
+            }
+            else
+            {
+                p_list = h264_parser->info.dpb.listX_1;
+            }
+        }
+        else
+        {
+            num_ref_idx_active = 0;
+            p_list = NULL;
+        }
+
+
+        for (j = 0; j < num_ref_idx_active; j++)
+        {
+            /* low 5 bits of the list entry index the DPB frame store */
+            fs = &(h264_parser->info.dpb.fs[(p_list[j] & 0x1f)]);
+
+            /* bit 5 indicates if reference picture is bottom field */
+            vbp_set_VAPicture_h264(
+                h264_parser->info.img.structure,
+                (p_list[j] & 0x20) >> 5,
+                fs,
+                refPicListX);
+
+            refPicListX->frame_idx = fs->frame_num;
+            refPicListX->flags |= viddec_h264_get_is_long_term(fs) ? VA_PICTURE_H264_LONG_TERM_REFERENCE : VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            refPicListX++;
+        }
+    }
+}
+
+static inline void vbp_set_pre_weight_table_h264(
+    struct h264_viddec_parser* h264_parser,
+    VASliceParameterBufferH264 *slc_parms)
+{
+    /* Copy the explicit prediction weight table from the slice header into
+       the VA slice parameters, or install the default weights (denominator
+       5, all weights/offsets zero) when the slice does not use weighted
+       prediction. */
+    h264_Slice_Header_t* hdr = &(h264_parser->info.SliceHeader);
+    int idx, c;
+
+    /* explicit weights apply to P/B slices with weighted_pred_flag, or to
+       B slices with explicit weighted_bipred_idc (== 1) */
+    int explicit_weights =
+        (((h264_PtypeP == hdr->slice_type) ||
+          (h264_PtypeB == hdr->slice_type)) &&
+          h264_parser->info.active_PPS.weighted_pred_flag) ||
+         ((h264_PtypeB == hdr->slice_type) &&
+          (1 == h264_parser->info.active_PPS.weighted_bipred_idc));
+
+    if (explicit_weights)
+    {
+        slc_parms->luma_log2_weight_denom = hdr->sh_predwttbl.luma_log2_weight_denom;
+        slc_parms->chroma_log2_weight_denom = hdr->sh_predwttbl.chroma_log2_weight_denom;
+        slc_parms->luma_weight_l0_flag = hdr->sh_predwttbl.luma_weight_l0_flag;
+        slc_parms->chroma_weight_l0_flag = hdr->sh_predwttbl.chroma_weight_l0_flag;
+        slc_parms->luma_weight_l1_flag = hdr->sh_predwttbl.luma_weight_l1_flag;
+        slc_parms->chroma_weight_l1_flag = hdr->sh_predwttbl.chroma_weight_l1_flag;
+
+        for (idx = 0; idx < 32; idx++)
+        {
+            slc_parms->luma_weight_l0[idx] = hdr->sh_predwttbl.luma_weight_l0[idx];
+            slc_parms->luma_offset_l0[idx] = hdr->sh_predwttbl.luma_offset_l0[idx];
+            slc_parms->luma_weight_l1[idx] = hdr->sh_predwttbl.luma_weight_l1[idx];
+            slc_parms->luma_offset_l1[idx] = hdr->sh_predwttbl.luma_offset_l1[idx];
+
+            for (c = 0; c < 2; c++)
+            {
+                slc_parms->chroma_weight_l0[idx][c] = hdr->sh_predwttbl.chroma_weight_l0[idx][c];
+                slc_parms->chroma_offset_l0[idx][c] = hdr->sh_predwttbl.chroma_offset_l0[idx][c];
+                slc_parms->chroma_weight_l1[idx][c] = hdr->sh_predwttbl.chroma_weight_l1[idx][c];
+                slc_parms->chroma_offset_l1[idx][c] = hdr->sh_predwttbl.chroma_offset_l1[idx][c];
+            }
+        }
+    }
+    else
+    {
+        /* default weight table */
+        slc_parms->luma_log2_weight_denom = 5;
+        slc_parms->chroma_log2_weight_denom = 5;
+        slc_parms->luma_weight_l0_flag = 0;
+        slc_parms->luma_weight_l1_flag = 0;
+        slc_parms->chroma_weight_l0_flag = 0;
+        slc_parms->chroma_weight_l1_flag = 0;
+        for (idx = 0; idx < 32; idx++)
+        {
+            slc_parms->luma_weight_l0[idx] = 0;
+            slc_parms->luma_offset_l0[idx] = 0;
+            slc_parms->luma_weight_l1[idx] = 0;
+            slc_parms->luma_offset_l1[idx] = 0;
+
+            for (c = 0; c < 2; c++)
+            {
+                slc_parms->chroma_weight_l0[idx][c] = 0;
+                slc_parms->chroma_offset_l0[idx][c] = 0;
+                slc_parms->chroma_weight_l1[idx][c] = 0;
+                slc_parms->chroma_offset_l1[idx][c] = 0;
+            }
+        }
+    }
+}
+
+
+static inline void vbp_set_reference_frames_h264(
+    struct h264_viddec_parser *parser,
+    VAPictureParameterBufferH264* pic_parms)
+{
+    /* Fill pic_parms->ReferenceFrames from the DPB: short-term references
+       first, then long-term references.  Entries never written keep the
+       VA_PICTURE_H264_INVALID marker from the initialization loop. */
+    int buffer_idx;
+    int frame_idx;
+    frame_store* store = NULL;
+    h264_DecodedPictureBuffer* dpb = &(parser->info.dpb);
+    /* initialize reference frames */
+    for (frame_idx = 0; frame_idx < 16; frame_idx++)
+    {
+        pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+        pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+        pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+        pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+        pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+    }
+    pic_parms->num_ref_frames = 0;
+
+    frame_idx = 0;
+
+    /* ITRACE("short term frame in dpb %d", dpb->ref_frames_in_buffer);  */
+    /* set short term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+
+        store = &dpb->fs[dpb->fs_ref_idc[buffer_idx]];
+        /* if (store->is_used == 3 && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = store->frame_num;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0) */
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        /* NOTE(review): frame_idx advances even when the store is not in
+           use, which can leave an INVALID entry between valid ones and
+           inflate num_ref_frames -- confirm this is intended. */
+        frame_idx++;
+    }
+
+    /* set long term reference frames */
+    for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
+    {
+        if (frame_idx >= 16 || buffer_idx >= 16)
+        {
+            WTRACE("Frame index is out of bound.");
+            break;
+        }
+        store = &dpb->fs[dpb->fs_ltref_idc[buffer_idx]];
+        if (!viddec_h264_get_is_long_term(store))
+        {
+            WTRACE("long term frame is not marked as long term.");
+        }
+        /*if (store->is_used == 3 && store->is_long_term && store->frame.used_for_reference == 3) */
+        if (viddec_h264_get_is_used(store))
+        {
+            /* NOTE(review): unlike the short-term loop above, .frame_idx is
+               not populated for long-term references -- verify against the
+               VA-API expectation (long_term_frame_idx). */
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_LONG_TERM_REFERENCE;
+            if (FRAME == parser->info.img.structure)
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->frame.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->frame.poc;
+            }
+            else
+            {
+                pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = store->top_field.poc;
+                pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = store->bottom_field.poc;
+                if (store->top_field.used_for_reference && store->bottom_field.used_for_reference)
+                {
+                    /* if both fields are used for reference, just set flag to be frame (0)*/
+                }
+                else
+                {
+                    if (store->top_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_TOP_FIELD;
+                    if (store->bottom_field.used_for_reference)
+                        pic_parms->ReferenceFrames[frame_idx].flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+                }
+            }
+        }
+        frame_idx++;
+    }
+
+    pic_parms->num_ref_frames = frame_idx;
+
+    if (frame_idx > parser->info.active_SPS.num_ref_frames)
+    {
+        WTRACE("actual num_ref_frames (%d) exceeds the value in the sequence header (%d).",
+               frame_idx, parser->info.active_SPS.num_ref_frames);
+    }
+}
+
+
+/**
+ * Fill the VA IQ matrix buffer with the H.264 scaling lists.
+ *
+ * Source selection follows the H.264 spec:
+ *  - PPS matrix present: use the PPS lists, falling back per "rule B"
+ *    (SPS-derived) when an individual list is absent.
+ *  - Only SPS matrix present: use the SPS lists, falling back per "rule A"
+ *    (defaults / previous list) when an individual list is absent.
+ *  - Neither present: flat (all-16) lists.
+ *
+ * @param parser         parser context holding active SPS/PPS
+ * @param IQ_matrix_buf  output VAIQMatrixBufferH264 (4x4 and 8x8 lists)
+ */
+static inline void vbp_set_scaling_list_h264(
+    struct h264_viddec_parser *parser,
+    VAIQMatrixBufferH264* IQ_matrix_buf)
+{
+    int i;
+    /* 6 4x4 lists always; 2 extra 8x8 lists only when 8x8 transform is on */
+    int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 1 : 0);
+
+    if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
+    {
+        for (i = 0; i < lists_to_set; i++)
+        {
+            if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
+            {
+                if (((i < 6) && parser->info.active_PPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                        ((i >= 6) && parser->info.active_PPS.UseDefaultScalingMatrix8x8Flag[i-6]))
+                {
+                    /* use default scaling list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                    }
+                }
+                else
+                {
+                    /* use PPS list */
+                    if (i < 6)
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_PPS.ScalingList4x4[i], 16);
+                    }
+                    else
+                    {
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_PPS.ScalingList8x8[i - 6], 64);
+                    }
+                }
+            }
+            else /* pic_scaling_list not present */
+            {
+                if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+                {
+                    /* SPS matrix present - use fallback rule B */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        /* NOTE(review): rule B copies from active_PPS lists gated on the
+                         * SPS present flag; the spec's fallback is the SPS list itself --
+                         * confirm this matches the parser's list-propagation behavior. */
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList4x4[i] : UseDefaultList[i],
+                               16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6],
+                               parser->info.active_SPS.seq_scaling_list_present_flag[i] ? parser->info.active_PPS.ScalingList8x8[i - 6] : UseDefaultList[i],
+                               64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        /* intra/inter chroma lists fall back to the previous 4x4 list */
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        /* unreachable: i is bounded by lists_to_set (max 8) */
+                        break;
+                    }
+                }
+                else /* seq_scaling_matrix not present */
+                {
+                    /* SPS matrix not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                } /* end of seq_scaling_matrix not present */
+            } /* end of  pic_scaling_list not present */
+        } /* for loop for each index from 0 to 7 */
+    } /* end of pic_scaling_matrix present */
+    else
+    {
+        /* PPS matrix not present, use SPS information */
+        if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
+        {
+            for (i = 0; i < lists_to_set; i++)
+            {
+                if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
+                {
+                    if (((i < 6) && parser->info.active_SPS.UseDefaultScalingMatrix4x4Flag[i]) ||
+                            ((i >= 6) && parser->info.active_SPS.UseDefaultScalingMatrix8x8Flag[i - 6]))
+                    {
+                        /* use default scaling list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        }
+                    }
+                    else
+                    {
+                        /* use SPS list */
+                        if (i < 6)
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList4x4[i], parser->info.active_SPS.ScalingList4x4[i], 16);
+                        }
+                        else
+                        {
+                            memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], parser->info.active_SPS.ScalingList8x8[i - 6], 64);
+                        }
+                    }
+                }
+                else
+                {
+                    /* SPS list not present - use fallback rule A */
+                    switch (i)
+                    {
+                    case 0:
+                    case 3:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i], UseDefaultList[i], 16);
+                        break;
+
+                    case 6:
+                    case 7:
+                        memcpy(IQ_matrix_buf->ScalingList8x8[i - 6], UseDefaultList[i], 64);
+                        break;
+
+                    case 1:
+                    case 2:
+                    case 4:
+                    case 5:
+                        memcpy(IQ_matrix_buf->ScalingList4x4[i],
+                               IQ_matrix_buf->ScalingList4x4[i - 1],
+                               16);
+                        break;
+
+                    default:
+                        WTRACE("invalid scaling list index.");
+                        break;
+                    }
+                }
+            }
+        }
+        else
+        {
+            /* SPS matrix not present - use flat lists */
+            for (i = 0; i < 6; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList4x4[i], quant_flat, 16);
+            }
+            for (i = 0; i < 2; i++)
+            {
+                memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+            }
+        }
+    }
+
+    /* 8x8 transform disabled but a matrix was signalled: force flat 8x8 lists */
+    if ((0 == parser->info.active_PPS.transform_8x8_mode_flag) &&
+            (parser->info.active_PPS.pic_scaling_matrix_present_flag ||
+             parser->info.active_SPS.seq_scaling_matrix_present_flag))
+    {
+        for (i = 0; i < 2; i++)
+        {
+            memcpy(IQ_matrix_buf->ScalingList8x8[i], quant8_flat, 64);
+        }
+    }
+}
+
+/**
+ * Copy stream-level information from the active SPS/PPS into the query
+ * codec-data block: profile/level, constraint flags, frame geometry,
+ * cropping, aspect ratio, VUI colour/video format fields, and POC info.
+ * Also raises new_sps/new_pps flags when parameter-set ids or the frame
+ * dimensions changed since the previous call.
+ */
+static void vbp_set_codec_data_h264(
+    struct h264_viddec_parser *parser,
+     vbp_data_h264 *query_data)
+{
+    vbp_codec_data_h264* codec_data = query_data->codec_data;
+
+    /* The following variables are used to detect if there is new SPS or PPS */
+    uint8 seq_parameter_set_id = codec_data->seq_parameter_set_id;
+    uint8 pic_parameter_set_id = codec_data->pic_parameter_set_id;
+    int frame_width = codec_data->frame_width;
+    int frame_height = codec_data->frame_height;
+
+    /* parameter id */
+    codec_data->seq_parameter_set_id = parser->info.active_SPS.seq_parameter_set_id;
+    codec_data->pic_parameter_set_id = parser->info.active_PPS.pic_parameter_set_id;
+
+    /* profile and level */
+    codec_data->profile_idc = parser->info.active_SPS.profile_idc;
+    codec_data->level_idc = parser->info.active_SPS.level_idc;
+
+
+    /*constraint flag sets (h.264 Spec v2009)*/
+    codec_data->constraint_set0_flag = (parser->info.active_SPS.constraint_set_flags & 0x10) >> 4;
+    codec_data->constraint_set1_flag = (parser->info.active_SPS.constraint_set_flags & 0x8) >> 3;
+    codec_data->constraint_set2_flag = (parser->info.active_SPS.constraint_set_flags & 0x4) >> 2;
+    codec_data->constraint_set3_flag = (parser->info.active_SPS.constraint_set_flags & 0x2) >> 1;
+    codec_data->constraint_set4_flag = parser->info.active_SPS.constraint_set_flags & 0x1;
+
+    /* reference frames */
+    codec_data->num_ref_frames = parser->info.active_SPS.num_ref_frames;
+
+    if (!parser->info.active_SPS.sps_disp.frame_mbs_only_flag &&
+        !parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag)
+    {
+        /* no longer necessary: two fields share the same interlaced surface */
+        /* codec_data->num_ref_frames *= 2; */
+    }
+
+    codec_data->gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+
+    /* frame coding */
+    codec_data->frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+    codec_data->mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+
+    /* frame dimension in pixels (16 per macroblock; height doubled for fields) */
+    codec_data->frame_width = (parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1 ) * 16;
+
+    codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+                               (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;
+
+    /* cropping information (crop units depend on chroma sampling, spec 7.4.2.1.1) */
+    codec_data->crop_left = 0;
+    codec_data->crop_right = 0;
+    codec_data->crop_top = 0;
+    codec_data->crop_bottom = 0;
+    if(parser->info.active_SPS.sps_disp.frame_cropping_flag) {
+        int CropUnitX = 0, CropUnitY = 0, SubWidthC = 0, SubHeightC = 0;
+        int ChromaArrayType = 0;
+        if(parser->info.active_SPS.sps_disp.separate_colour_plane_flag == 0) {
+            if(parser->info.active_SPS.sps_disp.chroma_format_idc == 1) {
+                SubWidthC = 2;
+                SubHeightC = 2;
+            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 2) {
+                SubWidthC = 2;
+                SubHeightC = 1;
+            } else if( parser->info.active_SPS.sps_disp.chroma_format_idc == 3) {
+                SubWidthC = 1;
+                SubHeightC = 1;
+            }
+            ChromaArrayType = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        }
+
+        if(ChromaArrayType == 0) {
+            CropUnitX = 1;
+            CropUnitY = 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        } else {
+            CropUnitX = SubWidthC;
+            CropUnitY = SubHeightC * ( 2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag);
+        }
+
+        codec_data->crop_left = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;
+        codec_data->crop_right = CropUnitX * parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset; // + 1;
+        codec_data->crop_top = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;
+        codec_data->crop_bottom = CropUnitY * parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset; // + 1;
+    }
+
+    /* aspect ratio: table lookup for idc < 17, explicit SAR for Extended_SAR (255) */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+    {
+        codec_data->aspect_ratio_idc =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;
+
+        if (codec_data->aspect_ratio_idc < 17)
+        {
+            codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0];
+            codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1];
+        }
+        else if (codec_data->aspect_ratio_idc == 255)
+        {
+            codec_data->sar_width =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;
+
+            codec_data->sar_height =
+                parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
+        }
+        else
+        {
+            codec_data->sar_width = 0;
+            codec_data->sar_height = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        codec_data->aspect_ratio_idc = 0;
+        codec_data->sar_width = 0;
+        codec_data->sar_height = 0;
+    }
+
+    /* video format */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+    {
+        codec_data->video_format =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format = 5;
+    }
+
+    codec_data->video_full_range_flag =
+        parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag;
+
+
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+    {
+        codec_data->matrix_coefficients =
+            parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients;
+    }
+    else
+    {
+        // Unspecified
+        codec_data->matrix_coefficients = 2;
+    }
+
+    codec_data->bit_rate = parser->info.active_SPS.sps_disp.vui_seq_parameters.bit_rate_value;
+
+    /* picture order type and count */
+    codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+    codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+
+
+    /* update sps and pps status */
+    /* NOTE(review): the previous SPS id is compared against
+     * active_PPS.seq_parameter_set_id (not active_SPS.seq_parameter_set_id) --
+     * confirm this is intentional (the PPS records which SPS it refers to). */
+    query_data->new_sps = (seq_parameter_set_id != parser->info.active_PPS.seq_parameter_set_id) ? 1 : 0;
+    query_data->new_pps = (pic_parameter_set_id != parser->info.active_PPS.pic_parameter_set_id) ? 1 : 0;
+    query_data->has_sps = parser->info.active_SPS.seq_parameter_set_id != 0xff;
+    query_data->has_pps = parser->info.active_PPS.seq_parameter_set_id != 0xff;
+    /* a resolution change implies new parameter sets even if ids match */
+    if ( frame_width != codec_data->frame_width || frame_height != codec_data->frame_height)
+    {
+        query_data->new_sps = 1;
+        query_data->new_pps = 1;
+    }
+}
+
+
+/**
+ * Populate the VAPictureParameterBufferH264 for the picture that the
+ * current slice belongs to, advancing query_data->num_pictures when the
+ * slice starts a new picture (first_mb_in_slice == 0).
+ *
+ * @param pcontext    parser context
+ * @param list_index  index into the item list; currently unused in this body
+ * @return VBP_OK on success, VBP_DATA on inconsistent picture state
+ */
+static uint32_t vbp_add_pic_data_h264(vbp_context *pcontext, int list_index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    struct h264_viddec_parser* parser = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+    VAPictureParameterBufferH264* pic_parms = NULL;
+
+    parser = (struct h264_viddec_parser *)cxt->codec_data;
+
+    if (0 == parser->info.SliceHeader.first_mb_in_slice)
+    {
+        /* a new picture is parsed */
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures == 0)
+    {
+        /* partial frame */
+        query_data->num_pictures = 1;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("num of pictures exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        WTRACE("MB address does not start from 0!");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+    pic_parms = pic_data->pic_parms;
+
+    // relax this condition to support partial frame parsing
+
+    //if (parser->info.SliceHeader.first_mb_in_slice == 0)
+    {
+        /**
+        * picture parameter only needs to be set once,
+        * even multiple slices may be encoded
+        */
+
+        /* VAPictureParameterBufferH264 */
+        pic_parms->CurrPic.picture_id = VA_INVALID_SURFACE;
+        pic_parms->CurrPic.frame_idx = 0;
+        if (parser->info.img.field_pic_flag == 1)
+        {
+            if (parser->info.img.bottom_field_flag)
+            {
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_BOTTOM_FIELD;
+            }
+            else
+            {
+                /* also OK set to 0 (from test suite) */
+                pic_parms->CurrPic.flags = VA_PICTURE_H264_TOP_FIELD;
+            }
+        }
+        else
+        {
+            pic_parms->CurrPic.flags = 0; /* frame picture */
+        }
+        pic_parms->CurrPic.TopFieldOrderCnt = parser->info.img.toppoc;
+        pic_parms->CurrPic.BottomFieldOrderCnt = parser->info.img.bottompoc;
+        pic_parms->CurrPic.frame_idx = parser->info.SliceHeader.frame_num;
+
+        /* don't care if current frame is used as long term reference */
+        if (parser->info.SliceHeader.nal_ref_idc != 0)
+        {
+            pic_parms->CurrPic.flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+        }
+
+        pic_parms->picture_width_in_mbs_minus1 = parser->info.active_SPS.sps_disp.pic_width_in_mbs_minus1;
+
+        /* frame height in MBS */
+        pic_parms->picture_height_in_mbs_minus1 = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) *
+                (parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) - 1;
+
+        pic_parms->bit_depth_luma_minus8 = parser->info.active_SPS.bit_depth_luma_minus8;
+        pic_parms->bit_depth_chroma_minus8 = parser->info.active_SPS.bit_depth_chroma_minus8;
+
+
+        pic_parms->seq_fields.value = 0;
+        pic_parms->seq_fields.bits.chroma_format_idc = parser->info.active_SPS.sps_disp.chroma_format_idc;
+        pic_parms->seq_fields.bits.residual_colour_transform_flag = parser->info.active_SPS.residual_colour_transform_flag;
+        pic_parms->seq_fields.bits.frame_mbs_only_flag = parser->info.active_SPS.sps_disp.frame_mbs_only_flag;
+        pic_parms->seq_fields.bits.mb_adaptive_frame_field_flag = parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag;
+        pic_parms->seq_fields.bits.direct_8x8_inference_flag = parser->info.active_SPS.sps_disp.direct_8x8_inference_flag;
+
+        /* new fields in libva 0.31 */
+        pic_parms->seq_fields.bits.gaps_in_frame_num_value_allowed_flag = parser->info.active_SPS.gaps_in_frame_num_value_allowed_flag;
+        pic_parms->seq_fields.bits.log2_max_frame_num_minus4 = parser->info.active_SPS.log2_max_frame_num_minus4;
+        pic_parms->seq_fields.bits.pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+        pic_parms->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+        pic_parms->seq_fields.bits.delta_pic_order_always_zero_flag =parser->info.active_SPS.delta_pic_order_always_zero_flag;
+
+
+        /* referened from UMG_Moorstown_TestSuites */
+        pic_parms->seq_fields.bits.MinLumaBiPredSize8x8 = (parser->info.active_SPS.level_idc > 30) ? 1 : 0;
+
+        pic_parms->num_slice_groups_minus1 = parser->info.active_PPS.num_slice_groups_minus1;
+        pic_parms->slice_group_map_type = parser->info.active_PPS.slice_group_map_type;
+        pic_parms->slice_group_change_rate_minus1 = 0;
+        pic_parms->pic_init_qp_minus26 = parser->info.active_PPS.pic_init_qp_minus26;
+        pic_parms->pic_init_qs_minus26 = 0;
+        pic_parms->chroma_qp_index_offset = parser->info.active_PPS.chroma_qp_index_offset;
+        pic_parms->second_chroma_qp_index_offset = parser->info.active_PPS.second_chroma_qp_index_offset;
+
+        pic_parms->pic_fields.value = 0;
+        pic_parms->pic_fields.bits.entropy_coding_mode_flag = parser->info.active_PPS.entropy_coding_mode_flag;
+        pic_parms->pic_fields.bits.weighted_pred_flag = parser->info.active_PPS.weighted_pred_flag;
+        pic_parms->pic_fields.bits.weighted_bipred_idc = parser->info.active_PPS.weighted_bipred_idc;
+        pic_parms->pic_fields.bits.transform_8x8_mode_flag = parser->info.active_PPS.transform_8x8_mode_flag;
+
+        /* new LibVA fields in v0.31*/
+        pic_parms->pic_fields.bits.pic_order_present_flag = parser->info.active_PPS.pic_order_present_flag;
+        pic_parms->pic_fields.bits.deblocking_filter_control_present_flag = parser->info.active_PPS.deblocking_filter_control_present_flag;
+        pic_parms->pic_fields.bits.redundant_pic_cnt_present_flag = parser->info.active_PPS.redundant_pic_cnt_present_flag;
+        pic_parms->pic_fields.bits.reference_pic_flag = parser->info.SliceHeader.nal_ref_idc != 0;
+
+        /* all slices in the pciture have the same field_pic_flag */
+        pic_parms->pic_fields.bits.field_pic_flag = parser->info.SliceHeader.field_pic_flag;
+        pic_parms->pic_fields.bits.constrained_intra_pred_flag = parser->info.active_PPS.constrained_intra_pred_flag;
+
+        pic_parms->frame_num = parser->info.SliceHeader.frame_num;
+    }
+
+
+    /* set reference frames, and num_ref_frames */
+    vbp_set_reference_frames_h264(parser, pic_parms);
+    if (parser->info.nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        /* IDR: invalidate all 16 reference slots */
+        int frame_idx;
+        for (frame_idx = 0; frame_idx < 16; frame_idx++)
+        {
+            pic_parms->ReferenceFrames[frame_idx].picture_id = VA_INVALID_SURFACE;
+            pic_parms->ReferenceFrames[frame_idx].frame_idx = 0;
+            pic_parms->ReferenceFrames[frame_idx].flags = VA_PICTURE_H264_INVALID;
+            pic_parms->ReferenceFrames[frame_idx].TopFieldOrderCnt = 0;
+            pic_parms->ReferenceFrames[frame_idx].BottomFieldOrderCnt = 0;
+        }
+        /* num of reference frame is 0 if current picture is IDR */
+        pic_parms->num_ref_frames = 0;
+    }
+    else
+    {
+        /* actual num_ref_frames is set in vbp_set_reference_frames_h264 */
+    }
+
+    return VBP_OK;
+}
+
+/**
+ * Fill a VASliceParameterBufferH264 for the slice at list item 'index' of
+ * the current picture, compute the bit offset from the NAL start to the
+ * slice data (compensating for emulation-prevention bytes), and bump the
+ * picture's slice count.
+ *
+ * @param pcontext  parser context
+ * @param index     item-list index of this slice's NAL unit
+ * @return VBP_OK on success, VBP_DATA on invalid picture index or too
+ *         many slices
+ */
+static uint32_t vbp_add_slice_data_h264(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit, byte;
+    uint8 is_emul;
+
+    vbp_data_h264 *query_data = (vbp_data_h264 *)pcontext->query_data;
+    VASliceParameterBufferH264 *slc_parms = NULL;
+    vbp_slice_data_h264 *slc_data = NULL;
+    struct h264_viddec_parser* h264_parser = NULL;
+    h264_Slice_Header_t* slice_header = NULL;
+    vbp_picture_data_h264* pic_data = NULL;
+
+
+    h264_parser = (struct h264_viddec_parser *)cxt->codec_data;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("invalid picture data index.");
+        return VBP_DATA;
+    }
+
+    pic_data = &(query_data->pic_data[pic_data_index]);
+
+    slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    slc_data->buffer_addr = cxt->parse_cubby.buf;
+    slc_parms = &(slc_data->slc_parms);
+
+    /* byte: how many bytes have been parsed */
+    /* bit: bits parsed within the current parsing position */
+    viddec_pm_get_au_pos(cxt, &bit, &byte, &is_emul);
+
+    slc_data->nal_unit_type = h264_parser->info.nal_unit_type;
+
+    slc_parms->slice_data_size = slc_data->slice_size =
+                                     pcontext->parser_cxt->list.data[index].edpos -
+                                     pcontext->parser_cxt->list.data[index].stpos;
+
+    /* the offset to the NAL start code for this slice */
+    slc_data->slice_offset = cxt->list.data[index].stpos;
+    slc_parms->slice_data_offset = 0;
+
+    /* whole slice is in this buffer */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* bit offset from NAL start code to the beginning of slice data */
+    slc_parms->slice_data_bit_offset = bit + byte * 8;
+
+    if (is_emul)
+    {
+        WTRACE("next byte is emulation prevention byte.");
+        /*slc_parms->slice_data_bit_offset += 8; */
+    }
+
+    /* subtract bits consumed by emulation-prevention (0x03) bytes */
+    if (cxt->getbits.emulation_byte_counter != 0)
+    {
+        slc_parms->slice_data_bit_offset -= cxt->getbits.emulation_byte_counter * 8;
+    }
+
+    slice_header = &(h264_parser->info.SliceHeader);
+    slc_parms->first_mb_in_slice = slice_header->first_mb_in_slice;
+
+    /* MBAFF frame: slice header addresses MB pairs, so halve the address.
+     * NOTE(review): bitwise '&' where '&&' is presumably intended; both
+     * operands are 0/1 flags so the result is the same -- confirm. */
+    if (h264_parser->info.active_SPS.sps_disp.mb_adaptive_frame_field_flag &
+            (!(h264_parser->info.SliceHeader.field_pic_flag)))
+    {
+        slc_parms->first_mb_in_slice /= 2;
+    }
+
+    slc_parms->slice_type = slice_header->slice_type;
+
+    slc_parms->direct_spatial_mv_pred_flag = slice_header->direct_spatial_mv_pred_flag;
+
+    /* I slices reference no lists; P uses list 0; B uses lists 0 and 1 */
+    slc_parms->num_ref_idx_l0_active_minus1 = 0;
+    slc_parms->num_ref_idx_l1_active_minus1 = 0;
+    if (slice_header->slice_type == h264_PtypeI)
+    {
+    }
+    else if (slice_header->slice_type == h264_PtypeP)
+    {
+        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+    }
+    else if (slice_header->slice_type == h264_PtypeB)
+    {
+        slc_parms->num_ref_idx_l0_active_minus1 = slice_header->num_ref_idx_l0_active - 1;
+        slc_parms->num_ref_idx_l1_active_minus1 = slice_header->num_ref_idx_l1_active - 1;
+    }
+    else
+    {
+        WTRACE("slice type %d is not supported.", slice_header->slice_type);
+    }
+
+    slc_parms->cabac_init_idc = slice_header->cabac_init_idc;
+    slc_parms->slice_qp_delta = slice_header->slice_qp_delta;
+    slc_parms->disable_deblocking_filter_idc = slice_header->disable_deblocking_filter_idc;
+    slc_parms->slice_alpha_c0_offset_div2 = slice_header->slice_alpha_c0_offset_div2;
+    slc_parms->slice_beta_offset_div2 = slice_header->slice_beta_offset_div2;
+
+
+    vbp_set_pre_weight_table_h264(h264_parser, slc_parms);
+    vbp_set_slice_ref_list_h264(h264_parser, slc_parms);
+
+
+    pic_data->num_slices++;
+
+    //vbp_update_reference_frames_h264_methodB(pic_data);
+    if (pic_data->num_slices > MAX_NUM_SLICES)
+    {
+        ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    /*if (pic_data->num_slices > 1)
+    {
+        ITRACE("number of slices per picture is %d.", pic_data->num_slices);
+    }*/
+    return VBP_OK;
+}
+
+/**
+ * Parse decoder configuration data.
+ *
+ * Walks an AVCDecoderConfigurationRecord (MPEG-4 Part 15): version,
+ * profile/level, NAL length-field size, then the embedded SPS and PPS
+ * arrays, registering each parameter set as an item in cxt->list.
+ * If the buffer is actually start-code prefixed, delegates to
+ * vbp_parse_start_code_h264 instead.
+ *
+ * @param pcontext  parser context whose parse_cubby holds the config data
+ * @return VBP_OK on success, VBP_DATA when the buffer is truncated
+ */
+uint32 vbp_parse_init_data_h264(vbp_context* pcontext)
+{
+    /* parsing AVCDecoderConfigurationRecord structure (see MPEG-4 part 15 spec) */
+
+    uint8 configuration_version = 0;
+    uint8 AVC_profile_indication = 0;
+    uint8 profile_compatibility = 0;
+    uint8 AVC_level_indication = 0;
+    uint8 length_size_minus_one = 0;
+    uint8 num_of_sequence_parameter_sets = 0;
+    uint8 num_of_picture_parameter_sets = 0;
+    uint16 sequence_parameter_set_length = 0;
+    uint16 picture_parameter_set_length = 0;
+
+    int i = 0;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;
+    //Enable emulation prevention
+    cxt->getbits.is_emul_reqd = 1;
+
+    /* check if configuration data is start code prefix */
+    viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    int ret = ops->parse_sc((void *)&cubby,
+                            NULL, /* context, not used */
+                            &(cxt->sc_prefix_info));
+    if (ret == 1)
+    {
+        WTRACE("configuration data is start-code prefixed.\n");
+        parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+        return vbp_parse_start_code_h264(pcontext);
+    }
+
+
+    uint8* cur_data = cxt->parse_cubby.buf;
+
+
+    if (cxt->parse_cubby.size < 6)
+    {
+        /* need at least 6 bytes to start parsing the structure, see spec 15 */
+        return VBP_DATA;
+    }
+
+    configuration_version = *cur_data++;
+    AVC_profile_indication = *cur_data++;
+
+    /*ITRACE("Profile indication: %d", AVC_profile_indication); */
+
+    profile_compatibility = *cur_data++;
+    AVC_level_indication = *cur_data++;
+
+    /* ITRACE("Level indication: %d", AVC_level_indication);*/
+    /* 2 bits of length_size_minus_one, 6 bits of reserved (11111) */
+    length_size_minus_one = (*cur_data) & 0x3;
+
+    if (length_size_minus_one != 3)
+    {
+        WTRACE("length size (%d) is not equal to 4.", length_size_minus_one + 1);
+    }
+
+    parser_private->NAL_length_size = length_size_minus_one + 1;
+
+    cur_data++;
+
+    /* 3 bits of reserved (111) and 5 bits of num_of_sequence_parameter_sets */
+    num_of_sequence_parameter_sets = (*cur_data) & 0x1f;
+    if (num_of_sequence_parameter_sets > 1)
+    {
+        WTRACE("num_of_sequence_parameter_sets is %d.", num_of_sequence_parameter_sets);
+    }
+    if (num_of_sequence_parameter_sets > MAX_NUM_SPS)
+    {
+        /* this would never happen as MAX_NUM_SPS = 32 */
+        WTRACE("num_of_sequence_parameter_sets (%d) exceeds the limit (%d).", num_of_sequence_parameter_sets, MAX_NUM_SPS);
+    }
+    cur_data++;
+
+    /* collect each SPS as a list item for the downstream parser */
+    cxt->list.num_items = 0;
+    for (i = 0; i < num_of_sequence_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse sequence_parameter_set_length */
+            ETRACE("Not enough data to parse SPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        sequence_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least sequence_parameter_set_length bytes for SPS */
+            ETRACE("Not enough data to parse SPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + sequence_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += sequence_parameter_set_length;
+    }
+
+    if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
+    {
+        /* need at least one more byte to parse num_of_picture_parameter_sets */
+        ETRACE("Not enough data to parse number of PPS.");
+        return VBP_DATA;
+    }
+
+    /* collect each PPS as a list item, same layout as the SPS loop above */
+    num_of_picture_parameter_sets = *cur_data++;
+    if (num_of_picture_parameter_sets > 1)
+    {
+        /* g_warning("num_of_picture_parameter_sets is %d.", num_of_picture_parameter_sets); */
+    }
+
+    for (i = 0; i < num_of_picture_parameter_sets; i++)
+    {
+        if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
+        {
+            /* need at least 2 bytes to parse picture_parameter_set_length */
+            ETRACE("Not enough data to parse PPS length.");
+            return VBP_DATA;
+        }
+
+        /* 16 bits */
+        picture_parameter_set_length = vbp_utils_ntohs(cur_data);
+
+        cur_data += 2;
+
+        if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
+        {
+            /* need at least picture_parameter_set_length bytes for PPS */
+            ETRACE("Not enough data to parse PPS.");
+            return VBP_DATA;
+        }
+
+        cxt->list.data[cxt->list.num_items].stpos = cur_data - cxt->parse_cubby.buf;
+
+        /* end pos is exclusive */
+        cxt->list.data[cxt->list.num_items].edpos =
+            cxt->list.data[cxt->list.num_items].stpos + picture_parameter_set_length;
+
+        cxt->list.num_items++;
+
+        cur_data += picture_parameter_set_length;
+    }
+
+    if ((cur_data - cxt->parse_cubby.buf) !=  cxt->parse_cubby.size)
+    {
+        WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
+               cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
+    }
+
+    parser_private->bitstream_pattern = H264_BS_LENGTH_PREFIXED;
+    return VBP_OK;
+}
+
+/**
+ * Read the NAL unit length prefix pointed to by p.
+ *
+ * @param p                first byte of the big-endian length field
+ * @param NAL_length_size  in/out: size of the length field in bytes (1-4);
+ *                         reset to 4 when an invalid size is encountered
+ * @return the NAL unit length in bytes
+ */
+static inline uint32_t vbp_get_NAL_length_h264(uint8_t* p, int *NAL_length_size)
+{
+    switch (*NAL_length_size)
+    {
+    case 4:
+        return vbp_utils_ntohl(p);
+
+    case 3:
+    {
+        uint32_t i = ((*p) << 16) + ((*(p+1)) << 8) + ((*(p+2)));
+        return i;
+    }
+
+    case 2:
+        return vbp_utils_ntohs(p);
+
+    case 1:
+        return *p;
+
+    default:
+        /* bug fix: the pointer itself was passed to %d (format/argument
+         * mismatch, undefined behavior); log the pointed-to value instead */
+        WTRACE("invalid NAL_length_size: %d.", *NAL_length_size);
+        /* default to 4 bytes for length */
+        *NAL_length_size = 4;
+        return vbp_utils_ntohl(p);
+    }
+}
+
+/**
+ * An H.264 elementary stream in this format does not carry start codes;
+ * instead, each NAL unit is preceded by its size (length-prefixed), as
+ * defined by the MPEG-4 Part 15 sample format.
+ */
+
+/* Start code prefix is 001 which is 3 bytes. */
+#define H264_SC_SIZE 3
+/**
+ * Scan one sample buffer and populate cxt->list with the byte ranges
+ * (stpos/edpos, edpos exclusive) of the NAL units it contains.  Handles
+ * three bitstream patterns and may downgrade the pattern at run time when
+ * the data contradicts the expected layout:
+ *   H264_BS_LENGTH_PREFIXED -> H264_BS_SC_PREFIXED  (length fields do not
+ *   line up with the buffer size but a start code is found), and
+ *   H264_BS_SC_PREFIXED -> H264_BS_SINGLE_NAL  (no start code found).
+ * Always returns VBP_OK.
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vbp_h264_parser_private *parser_private = (vbp_h264_parser_private *)pcontext->parser_private;
+
+    /* reset query data for the new sample buffer */
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+    int i;
+
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    cxt->list.num_items = 0;
+
+    /* reset start position of first item to 0 in case there is only one item */
+    cxt->list.data[0].stpos = 0;
+
+    /* start code emulation prevention byte is present in NAL */
+    cxt->getbits.is_emul_reqd = 1;
+
+    if (parser_private->bitstream_pattern == H264_BS_LENGTH_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+        int32_t size_left = 0;
+        int32_t size_parsed = 0;
+        int32_t NAL_length = 0;
+
+        cubby = &(cxt->parse_cubby);
+
+        size_left = cubby->size;
+
+        /* walk the buffer: each NAL is <length field><payload> */
+        while (size_left >= parser_private->NAL_length_size)
+        {
+            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed, &parser_private->NAL_length_size);
+            if (NAL_length <= 0 || NAL_length > size_left - parser_private->NAL_length_size)
+            {
+                ETRACE("Invalid NAL_length parsed.");
+                break;
+            }
+
+            size_parsed += parser_private->NAL_length_size;
+            cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+            size_parsed += NAL_length; /* skip NAL bytes */
+            /* end position is exclusive */
+            cxt->list.data[cxt->list.num_items].edpos = size_parsed;
+            cxt->list.num_items++;
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+                break;
+            }
+
+            size_left = cubby->size - size_parsed;
+        }
+
+        /* size_left != 0 means the length fields did not walk exactly to the
+         * end of the buffer -> the stream is probably not length prefixed */
+        if (size_left != 0 && parser_private->length_prefix_verified == 0)
+        {
+            WTRACE("Elementary stream is not aligned (%d).", size_left);
+
+            /* attempt to correct length prefix to start-code prefix only once;
+             * if it succeeds, we will always treat the bit stream as
+             * start-code prefixed, otherwise as length prefixed */
+            parser_private->length_prefix_verified = 1;
+            viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;
+
+            viddec_parser_ops_t *ops = pcontext->parser_ops;
+            int ret = ops->parse_sc((void *)&temp_cubby,
+                                    NULL, /* context, not used */
+                                    &(cxt->sc_prefix_info));
+
+            /* found start code */
+            if (ret == 1)
+            {
+                WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
+                parser_private->NAL_length_size = 0;
+                parser_private->bitstream_pattern = H264_BS_SC_PREFIXED;
+                /* reset parsing data */
+                for (i = 0; i < MAX_NUM_PICTURES; i++)
+                {
+                    query_data->pic_data[i].num_slices = 0;
+                }
+                query_data->num_pictures = 0;
+                cxt->list.num_items = 0;
+            }
+        }
+    }
+
+
+    if (parser_private->bitstream_pattern == H264_BS_SC_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t cubby;
+        /*  memory copy without updating cxt->parse_cubby */
+        cubby = cxt->parse_cubby;
+        viddec_parser_ops_t *ops = pcontext->parser_ops;
+        int ret = 0;
+
+        /* repeatedly scan for the next start code; each hit closes the
+         * previous item (edpos) and opens a new one (stpos) */
+        while (1)
+        {
+            ret = ops->parse_sc((void *)&cubby,
+                                NULL, /* context, not used */
+                                &(cxt->sc_prefix_info));
+            if (ret == 1)
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.data[0].stpos = cubby.sc_end_pos;
+                }
+                else
+                {
+                    /* sc_end_pos is relative to the rebased cubby.buf below,
+                     * so offset it by the previous item's start */
+                    cxt->list.data[cxt->list.num_items].stpos =
+                        cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+                    cxt->list.data[cxt->list.num_items - 1].edpos = cxt->list.data[cxt->list.num_items].stpos - H264_SC_SIZE;
+                }
+
+                /* rebase the scan window to just after the found start code */
+                cubby.phase = 0;
+                cubby.buf = cxt->parse_cubby.buf +
+                            cxt->list.data[cxt->list.num_items].stpos;
+
+                cubby.size = cxt->parse_cubby.size -
+                             cxt->list.data[cxt->list.num_items].stpos;
+
+                cxt->list.num_items++;
+                if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+                {
+                    WTRACE("Num items exceeds the limit!");
+                    /* not fatal, just stop parsing */
+                    break;
+                }
+            }
+            else
+            {
+                /* no (further) start code: close the last item at buffer end */
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.num_items = 1;
+                    parser_private->bitstream_pattern = H264_BS_SINGLE_NAL;
+                    WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
+                }
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+        }
+
+    }
+
+    if (parser_private->bitstream_pattern == H264_BS_SINGLE_NAL)
+    {
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+
+    return VBP_OK;
+}
+
+/**
+*
+* process parsing result after a NAL unit is parsed
+*
+*/
+/**
+ * Process the parsing result after NAL unit i is parsed: slice and IDR
+ * NALs contribute picture/slice data to the query data; header NALs are
+ * only logged.
+ * @param pcontext: parser context.
+ * @param i: index of the parsed item in the workload list.
+ * @return VBP_PARM if i is out of range; VBP_MULTI when more than one
+ *         frame is found in the buffer; otherwise the result of the
+ *         add-pic/add-slice helpers (VBP_OK on success).
+ */
+uint32 vbp_process_parsing_result_h264( vbp_context *pcontext, int i)
+{
+    if (i >= MAX_NUM_SLICES)
+    {
+        return VBP_PARM;
+    }
+
+    uint32 error = VBP_OK;
+
+    struct h264_viddec_parser* parser = NULL;
+    parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+    vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data;
+    switch (parser->info.nal_unit_type)
+    {
+    case h264_NAL_UNIT_TYPE_SLICE:
+    case h264_NAL_UNIT_TYPE_IDR:
+        /* regular and IDR slices get identical handling (the two case
+         * bodies were previously duplicated verbatim) */
+        error = vbp_add_pic_data_h264(pcontext, i);
+        if (VBP_OK == error)
+        {
+            error = vbp_add_slice_data_h264(pcontext, i);
+        }
+        break;
+
+    case h264_NAL_UNIT_TYPE_SEI:
+        //ITRACE("SEI header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_SPS:
+        ITRACE("SPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_PPS:
+        ITRACE("PPS header is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        //ITRACE("ACC unit delimiter is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+        ITRACE("EOSeq is parsed.");
+        break;
+
+    case h264_NAL_UNIT_TYPE_EOstream:
+        ITRACE("EOStream is parsed");
+        break;
+
+    default:
+        WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type);
+        break;
+    }
+
+    /* picture store full while parsing a frame-coded picture means the
+     * buffer holds more than one frame */
+    if (query_data->num_pictures == MAX_NUM_PICTURES && parser->info.img.field_pic_flag != 1)
+    {
+        WTRACE("more than one frame in the buffer is found(%d)", query_data->num_pictures);
+        return (error == VBP_OK ? VBP_MULTI : error);
+    }
+    return error;
+}
+
+/*
+*
+* fill query data structure after sample buffer is parsed
+*
+*/
+/**
+ * Fill the query data structure after a sample buffer is parsed:
+ * codec data, buffer counter, IQ matrix, and - when no picture was
+ * parsed from this buffer - a dummy picture built from SPS/PPS.
+ * Always returns VBP_OK.
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext)
+{
+    vbp_data_h264 *query_data = NULL;
+    struct h264_viddec_parser *parser = NULL;
+
+    parser = (struct h264_viddec_parser *)pcontext->parser_cxt->codec_data;
+    query_data = (vbp_data_h264 *)pcontext->query_data;
+    /* removed: an unused local that cast pcontext->parser_private; it was
+     * never read, and its name ("private") is a C++ keyword */
+
+    vbp_set_codec_data_h264(parser, query_data);
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* VAIQMatrixBufferH264 */
+    vbp_set_scaling_list_h264(parser, query_data->IQ_matrix_buf);
+
+    /* when num_pictures > 0 the picture and slice parameter buffers have
+     * already been populated during parsing; nothing to do */
+    if (query_data->num_pictures == 0)
+    {
+        /* add a dummy picture that contains picture parameters parsed
+         * from SPS and PPS */
+        vbp_add_pic_data_h264(pcontext, 0);
+    }
+
+    return VBP_OK;
+}
+
+
+
diff --git a/mixvbp/vbp_manager/vbp_h264_parser.h b/mixvbp/vbp_manager/vbp_h264_parser.h
new file mode 100755
index 0000000..0094edb
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_h264_parser.h
@@ -0,0 +1,67 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_H264_PARSER_H
+#define VBP_H264_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_h264(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_h264(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_h264(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_h264(vbp_context *pcontext);
+
+/*
+ * parse start code. Both length-prefixed and start-code prefixed
+ * bitstreams are handled (single-NAL buffers are detected as a
+ * fallback).
+ */
+uint32 vbp_parse_start_code_h264(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_h264(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_h264(vbp_context *pcontext);
+
+
+
+#endif /*VBP_H264_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_loader.c b/mixvbp/vbp_manager/vbp_loader.c
new file mode 100755
index 0000000..972ab2d
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_loader.c
@@ -0,0 +1,205 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+
+/**
+ * Create a parser context for the requested media type and return it
+ * through hcontext.  On failure *hcontext is left NULL.
+ * @param parser_type: one of the #vbp_parser_type values.
+ * @param hcontext: receives the new context handle.
+ * @return VBP_OK on success, anything else on failure.
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext)
+{
+    uint32 status;
+
+    if (hcontext == NULL)
+    {
+        return VBP_PARM;
+    }
+
+    /* prepare for failure */
+    *hcontext = NULL;
+
+    // TODO: check if vbp context has been created.
+
+    status = vbp_utils_create_context(parser_type, (vbp_context **)hcontext);
+    if (status != VBP_OK)
+    {
+        ETRACE("Failed to create context: %d.", status);
+    }
+
+    return status;
+}
+
+/**
+ * Destroy a parser context previously created by vbp_open.
+ * @param hcontext: VBP context handle.
+ * @return VBP_OK on success, VBP_PARM/VBP_INIT on a bad handle,
+ *         anything else on destroy failure.
+ */
+uint32 vbp_close(Handle hcontext)
+{
+    vbp_context *context = (vbp_context *)hcontext;
+    uint32 status;
+
+    if (context == NULL)
+    {
+        return VBP_PARM;
+    }
+
+    if (context->identifier != MAGIC_NUMBER)
+    {
+        /* not a valid vbp context. */
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    status = vbp_utils_destroy_context(context);
+    if (status != VBP_OK)
+    {
+        ETRACE("Failed to destroy context: %d.", status);
+    }
+
+    return status;
+}
+
+
+/**
+ * Parse one bitstream buffer.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_data_flag: 1 if buffer contains configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag)
+{
+    vbp_context *context = (vbp_context *)hcontext;
+    uint32 status;
+
+    if (context == NULL || data == NULL || size == 0)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    if (context->identifier != MAGIC_NUMBER)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    status = vbp_utils_parse_buffer(context, data, size, init_data_flag);
+    if (status != VBP_OK)
+    {
+        ETRACE("Failed to parse buffer: %d.", status);
+    }
+
+    return status;
+}
+
+/**
+ * Return the data blob holding the most recent parsing result; its
+ * layout depends on the media type the context was opened with.
+ * @param hcontext: handle to VBP context.
+ * @param data: receives a pointer to the result blob.
+ * @return VBP_OK on success, anything else on failure.
+ */
+uint32 vbp_query(Handle hcontext, void **data)
+{
+    vbp_context *context = (vbp_context *)hcontext;
+    uint32 status;
+
+    if (context == NULL || data == NULL)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    if (context->identifier != MAGIC_NUMBER)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    status = vbp_utils_query(context, data);
+    if (status != VBP_OK)
+    {
+        ETRACE("Failed to query parsing result: %d.", status);
+    }
+
+    return status;
+}
+
+/**
+ * Flush any un-parsed bitstream held by the context.
+ * @param hcontext: handle to VBP context.
+ * @return VBP_OK on success, anything else on failure.
+ */
+uint32 vbp_flush(Handle hcontext)
+{
+    vbp_context *context = (vbp_context *)hcontext;
+
+    if (context == NULL)
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    if (context->identifier != MAGIC_NUMBER)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    return vbp_utils_flush(context);
+}
+
+#ifdef USE_AVC_SHORT_FORMAT
+/**
+ * Update the vbp context with new data (AVC short format only) and
+ * return the refreshed parsing result through data.
+ * @param hcontext: handle to VBP context.
+ * @param newdata: pointer to the new data buffer.
+ * @param size: size of the new data buffer.
+ * @param data: receives a pointer to the result blob.
+ * @return VBP_OK on success, anything else on failure.
+ */
+uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data)
+{
+    vbp_context *pcontext;
+    uint32 error = VBP_OK;
+
+    if ((NULL == hcontext) || (NULL == newdata) || (0 == size) || (NULL == data))
+    {
+        ETRACE("Invalid input parameters.");
+        return VBP_PARM;
+    }
+
+    pcontext = (vbp_context *)hcontext;
+
+    if (MAGIC_NUMBER != pcontext->identifier)
+    {
+        ETRACE("context is not initialized");
+        return VBP_INIT;
+    }
+
+    error = vbp_utils_update(pcontext, newdata, size, data);
+
+    if (VBP_OK != error)
+    {
+        /* bug fix: message previously said "query", copy-pasted from vbp_query */
+        ETRACE("Failed to update context: %d.", error);
+    }
+    return error;
+}
+#endif
diff --git a/mixvbp/vbp_manager/vbp_loader.h b/mixvbp/vbp_manager/vbp_loader.h
new file mode 100755
index 0000000..ad4b106
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_loader.h
@@ -0,0 +1,476 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_LOADER_H
+#define VBP_LOADER_H
+
+#include <va/va.h>
+
+#ifdef USE_HW_VP8
+#include <va/va_dec_vp8.h>
+#endif
+
+#ifndef TRUE
+#define TRUE 1
+#endif
+
+#ifndef FALSE
+#define FALSE 0
+#endif
+
+
+#ifndef uint8
+typedef unsigned char uint8;
+#endif
+#ifndef uint16
+typedef unsigned short uint16;
+#endif
+#ifndef uint32
+typedef unsigned int uint32;
+#endif
+
+typedef void *Handle;
+
+/*
+ * MPEG-4 Part 2 data structure
+ */
+
+typedef struct _vbp_codec_data_mp42
+{
+    uint8  profile_and_level_indication;
+    uint32 video_object_layer_width;
+    uint32 video_object_layer_height;
+
+    // 0 for unspecified, PAL/NTSC/SECAM
+    uint8  video_format;
+
+    // 0 short range, 1 full range
+    uint8  video_range;
+
+    // default 2 (unspecified), 1 for BT709.
+    uint8  matrix_coefficients;
+
+    uint8  short_video_header;
+
+    // always exist for mpeg-4,
+    uint8   aspect_ratio_info;
+    uint8   par_width;
+    uint8   par_height;
+
+    // bit rate
+    int bit_rate;
+} vbp_codec_data_mp42;
+
+typedef struct _vbp_slice_data_mp42
+{
+    uint8* buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferMPEG4 slice_param;
+} vbp_slice_data_mp42;
+
+typedef struct _vbp_picture_data_mp42 vbp_picture_data_mp42;
+
+struct _vbp_picture_data_mp42
+{
+    uint8 vop_coded;
+    uint16 vop_time_increment;
+    /* indicates if current buffer contains parameter for the first slice of the picture */
+    uint8 new_picture_flag;
+    VAPictureParameterBufferMPEG4 picture_param;
+    vbp_slice_data_mp42 slice_data;
+
+    vbp_picture_data_mp42* next_picture_data;
+};
+
+typedef struct _vbp_data_mp42
+{
+    vbp_codec_data_mp42 codec_data;
+    VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+
+    uint32 number_picture_data;
+    uint32 number_pictures;
+
+    vbp_picture_data_mp42 *picture_data;
+
+} vbp_data_mp42;
+
+/*
+ * H.264 data structure
+ */
+
+typedef struct _vbp_codec_data_h264
+{
+    uint8 pic_parameter_set_id;
+    uint8 seq_parameter_set_id;
+
+    uint8 profile_idc;
+    uint8 level_idc;
+    /*constraint flag sets (h.264 Spec v2009)*/
+    uint8 constraint_set0_flag;
+    uint8 constraint_set1_flag;
+    uint8 constraint_set2_flag;
+    uint8 constraint_set3_flag;
+    uint8 constraint_set4_flag;
+
+    uint8 num_ref_frames;
+    uint8 gaps_in_frame_num_value_allowed_flag;
+
+    uint8 frame_mbs_only_flag;
+    uint8 mb_adaptive_frame_field_flag;
+
+    int frame_width;
+    int frame_height;
+
+    uint8 vui_parameters_present_flag;
+
+    /* aspect ratio */
+    uint8 aspect_ratio_idc;
+    uint16 sar_width;
+    uint16 sar_height;
+
+    /* cropping information */
+    int crop_top;
+    int crop_bottom;
+    int crop_left;
+    int crop_right;
+
+    /* video format */
+
+    // default 5 unspecified
+    uint8 video_format;
+    uint8 video_full_range_flag;
+
+    // default 2 unspecified
+    uint8 matrix_coefficients;
+
+    uint8 pic_order_cnt_type;
+    int log2_max_pic_order_cnt_lsb_minus4;
+
+    int bit_rate;
+
+} vbp_codec_data_h264;
+
+typedef struct _vbp_slice_data_h264
+{
+    uint8* buffer_addr;
+
+    uint32 slice_offset; /* slice data offset */
+
+    uint32 slice_size; /* slice data size */
+
+    uint8 nal_unit_type;
+
+    VASliceParameterBufferH264 slc_parms;
+
+} vbp_slice_data_h264;
+
+
+typedef struct _vbp_picture_data_h264
+{
+    VAPictureParameterBufferH264* pic_parms;
+
+    uint32 num_slices;
+
+    vbp_slice_data_h264* slc_data;
+
+} vbp_picture_data_h264;
+
+
+typedef struct _vbp_data_h264
+{
+    /* rolling counter of buffers sent by vbp_parse */
+    uint32 buf_number;
+
+    uint32 num_pictures;
+
+    /* if SPS has been received */
+    uint8  has_sps;
+
+    /* if PPS has been received */
+    uint8  has_pps;
+
+    uint8  new_sps;
+
+    uint8  new_pps;
+
+    vbp_picture_data_h264* pic_data;
+
+    /**
+        * do we need to send matrix to VA for each picture? If not, we need
+        * a flag indicating whether it is updated.
+        */
+    VAIQMatrixBufferH264* IQ_matrix_buf;
+
+    vbp_codec_data_h264* codec_data;
+
+} vbp_data_h264;
+
+/*
+ * vc1 data structure
+ */
+typedef struct _vbp_codec_data_vc1
+{
+    /* Sequence layer. */
+    uint8  PROFILE;
+    uint8  LEVEL;
+    uint8  POSTPROCFLAG;
+    uint8  PULLDOWN;
+    uint8  INTERLACE;
+    uint8  TFCNTRFLAG;
+    uint8  FINTERPFLAG;
+    uint8  PSF;
+
+    // default 2: unspecified
+    uint8  MATRIX_COEF;
+
+    /* Entry point layer. */
+    uint8  BROKEN_LINK;
+    uint8  CLOSED_ENTRY;
+    uint8  PANSCAN_FLAG;
+    uint8  REFDIST_FLAG;
+    uint8  LOOPFILTER;
+    uint8  FASTUVMC;
+    uint8  EXTENDED_MV;
+    uint8  DQUANT;
+    uint8  VSTRANSFORM;
+    uint8  OVERLAP;
+    uint8  QUANTIZER;
+    uint16 CODED_WIDTH;
+    uint16 CODED_HEIGHT;
+    uint8  EXTENDED_DMV;
+    uint8  RANGE_MAPY_FLAG;
+    uint8  RANGE_MAPY;
+    uint8  RANGE_MAPUV_FLAG;
+    uint8  RANGE_MAPUV;
+
+    /* Others. */
+    uint8  RANGERED;
+    uint8  MAXBFRAMES;
+    uint8  MULTIRES;
+    uint8  SYNCMARKER;
+    uint8  RNDCTRL;
+    uint8  REFDIST;
+    uint16 widthMB;
+    uint16 heightMB;
+
+    uint8  INTCOMPFIELD;
+    uint8  LUMSCALE2;
+    uint8  LUMSHIFT2;
+
+    // aspect ratio
+
+    // default unspecified
+    uint8 ASPECT_RATIO;
+
+    uint8 ASPECT_HORIZ_SIZE;
+    uint8 ASPECT_VERT_SIZE;
+    // bit rate
+    int bit_rate;
+} vbp_codec_data_vc1;
+
+typedef struct _vbp_slice_data_vc1
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVC1 slc_parms;     /* pointer to slice parms */
+} vbp_slice_data_vc1;
+
+
+typedef struct _vbp_picture_data_vc1
+{
+    uint32 picture_is_skipped;                /* VC1_PTYPE_SKIPPED is PTYPE is skipped. */
+    VAPictureParameterBufferVC1 *pic_parms;   /* current parsed picture header */
+    uint32 size_bitplanes;                    /* based on number of MBs */
+    uint8 *packed_bitplanes;                  /* contains up to three bitplanes packed for libVA */
+    uint32 num_slices;                        /* number of slices.  always at least one */
+    vbp_slice_data_vc1 *slc_data;             /* pointer to array of slice data */
+} vbp_picture_data_vc1;
+
+typedef struct _vbp_data_vc1
+{
+    uint32 buf_number;                        /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vc1 *se_data;              /* parsed SH/EPs */
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vc1* pic_data;
+} vbp_data_vc1;
+
+#ifdef USE_HW_VP8
+typedef struct _vbp_codec_data_vp8
+{
+    uint8 frame_type;
+    uint8 version_num;
+    int show_frame;
+
+    uint32 frame_width;
+    uint32 frame_height;
+
+    int refresh_alt_frame;
+    int refresh_golden_frame;
+    int refresh_last_frame;
+
+    /* cropping information */
+    int crop_top;
+    int crop_bottom;
+    int crop_left;
+    int crop_right;
+
+    int golden_copied;
+    int altref_copied;
+} vbp_codec_data_vp8;
+
+typedef struct _vbp_slice_data_vp8
+{
+    uint8 *buffer_addr;
+    uint32 slice_offset;
+    uint32 slice_size;
+    VASliceParameterBufferVP8 slc_parms;     /* pointer to slice parms */
+} vbp_slice_data_vp8;
+
+typedef struct _vbp_picture_data_vp8
+{
+    VAPictureParameterBufferVP8* pic_parms;   /* current parsed picture header */
+
+    uint32 num_slices;                        /* number of slices.  always one for VP8 */
+    vbp_slice_data_vp8 *slc_data;             /* pointer to array of slice data */
+} vbp_picture_data_vp8;
+
+typedef struct _vbp_data_vp8
+{
+    uint32 buf_number;                        /* rolling counter of buffers sent by vbp_parse */
+    vbp_codec_data_vp8 *codec_data;
+
+    uint32 num_pictures;
+
+    vbp_picture_data_vp8* pic_data;
+
+    VAProbabilityDataBufferVP8* prob_data;
+    VAIQMatrixBufferVP8* IQ_matrix_buf;
+} vbp_data_vp8;
+#endif
+
+enum _picture_type
+{
+    VC1_PTYPE_I,
+    VC1_PTYPE_P,
+    VC1_PTYPE_B,
+    VC1_PTYPE_BI,
+    VC1_PTYPE_SKIPPED
+};
+
+enum _vbp_parser_error
+{
+    VBP_OK,
+    VBP_TYPE,
+    VBP_LOAD,
+    VBP_INIT,
+    VBP_DATA,
+    VBP_DONE,
+    VBP_MEM,
+    VBP_PARM,
+    VBP_PARTIAL,
+    VBP_MULTI
+};
+
+enum _vbp_parser_type
+{
+    VBP_VC1,
+    VBP_MPEG2,
+    VBP_MPEG4,
+    VBP_H264,
+#ifdef USE_HW_VP8
+    VBP_VP8,
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+    VBP_H264SECURE,
+#endif
+};
+
+
+/*
+ * open video bitstream parser to parse a specific media type.
+ * @param  parser_type: one of the types defined in #vbp_parser_type
+ * @param  hcontext: pointer to hold returned VBP context handle.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_open(uint32 parser_type, Handle *hcontext);
+
+/*
+ * close video bitstream parser.
+ * @param hcontext: VBP context handle.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_close(Handle hcontext);
+
+/*
+ * parse bitstream.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to bitstream buffer.
+ * @param size: size of bitstream buffer.
+ * @param init_flag: 1 if buffer contains bitstream configuration data, 0 otherwise.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_parse(Handle hcontext, uint8 *data, uint32 size, uint8 init_data_flag);
+
+/*
+ * query parsing result.
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * Structure of data blob is determined by the media type.
+ * @return VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_query(Handle hcontext, void **data);
+
+
+/*
+ * flush any un-parsed bitstream.
+ * @param hcontext: handle to VBP context.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+ */
+uint32 vbp_flush(Handle hcontent);
+
+
+#ifdef USE_AVC_SHORT_FORMAT
+/*
+ * update the the vbp context using the new data
+ * @param hcontext: handle to VBP context.
+ * @param data: pointer to the new data buffer.
+ * @param size: size of new data buffer.
+ * @param data: pointer to hold a data blob that contains parsing result.
+ * @returns VBP_OK on success, anything else on failure.
+ *
+*/
+uint32 vbp_update(Handle hcontext, void *newdata, uint32 size, void **data);
+#endif
+
+#endif /* VBP_LOADER_H */
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.c b/mixvbp/vbp_manager/vbp_mp42_parser.c
new file mode 100755
index 0000000..6eff5a0
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.c
@@ -0,0 +1,1483 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+
+#include <dlfcn.h>
+
+#include <string.h>
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_mp42_parser.h"
+#include "vbp_common.h"
+#include "viddec_mp4_parse.h"
+
+
+
+typedef struct vbp_mp42_parser_private_t vbp_mp42_parser_private;
+
+// Private, per-context state of the MPEG-4 parser manager.
+struct vbp_mp42_parser_private_t
+{
+    bool short_video_header;   // true while the stream uses the H.263-style short video header
+};
+
+// Pixel aspect ratios {par_width, par_height} indexed by the bitstream's
+// aspect_ratio_info field (values 0-5 are table entries; 15 means extended
+// PAR and is handled separately in vbp_fill_codec_data).
+static uint8 mp4_aspect_ratio_table[][2] =
+{
+    // forbidden
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+
+    // reserved
+    {0, 0}
+};
+
+
+/*
+ * Some divX avi files contains 2 frames in one gstbuffer.
+ */
+
+
+// Forward declarations for the helpers defined later in this file.
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf,
+    uint32 length,
+    uint32 *sc_end_pos,
+    uint8 *is_normal_sc,
+    uint8* resync_marker,
+    const bool svh_search);
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
+void vbp_fill_codec_data(vbp_context *pcontext);
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data);
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext);
+
+// Sprite (GMC) helpers; defined as static inline further below.
+static inline uint32 vbp_sprite_trajectory_mp42(
+    void *parent,
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane);
+
+
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length);
+
+
+/**
+ *
+ */
+uint32 vbp_init_parser_entries_mp42( vbp_context *pcontext)
+{
+    // Resolve the MPEG-4 plugin entry points from the already-loaded
+    // parser library handle (pcontext->fd_parser) into parser_ops.
+    // Returns VBP_PARM if ops storage is missing, VBP_LOAD if any
+    // mandatory symbol cannot be resolved, VBP_OK otherwise.
+    if (NULL == pcontext->parser_ops)
+    {
+        // absolutely impossible, just sanity check
+        return VBP_PARM;
+    }
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
+    if (pcontext->parser_ops->init == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    // In the VBP build the manager scans for start codes itself
+    // (vbp_parse_start_code_mp42), so no plugin hook is required.
+    pcontext->parser_ops->parse_sc = NULL;
+#else
+    pcontext->parser_ops->parse_sc = dlsym(pcontext->fd_parser, "viddec_parse_sc_mp4");
+    if (pcontext->parser_ops->parse_sc == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_mp4_parse");
+    if (pcontext->parser_ops->parse_syntax == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size =dlsym(pcontext->fd_parser, "viddec_mp4_get_context_size");
+    if (pcontext->parser_ops->get_cxt_size == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#ifdef VBP
+    // Workload bookkeeping is likewise unused in the VBP build.
+    pcontext->parser_ops->is_wkld_done = NULL;
+#else
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_mp4_wkld_done");
+    if (pcontext->parser_ops->is_wkld_done == NULL)
+    {
+        ETRACE ("Failed to set entry point." );
+        return VBP_LOAD;
+    }
+#endif
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+
+/*
+ * For the codec_data passed by gstreamer
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
+{
+    /* Codec configuration data is laid out like an elementary stream,
+     * so scanning it for start codes is the only initialization parsing
+     * required. */
+    return vbp_parse_start_code_mp42(pcontext);
+}
+
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index)
+{
+    // Translate the outcome of one parsed workload item into query data.
+    // The start code that was just consumed decides what gets filled in:
+    // sequence headers update codec data, VOPs produce picture entries.
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_mp4_parser_t *parser =
+        (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+    uint8 is_svh = 0;
+    uint32 current_sc = parser->current_sc;
+    // cur_sc_prefix was set by vbp_parse_start_code_mp42: non-zero means a
+    // normal 00 00 01 prefix, zero means a short-video-header start code.
+    is_svh = parser->cur_sc_prefix ? false : true;
+
+    if (!is_svh)
+    {
+        // remove prefix from current_sc
+        current_sc &= 0x0FF;
+        switch (current_sc)
+        {
+        case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+            VTRACE ("Visual Object Sequence is parsed.\n");
+            query_data->codec_data.profile_and_level_indication
+                    = parser->info.profile_and_level_indication;
+            VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication);
+            break;
+
+        case MP4_SC_VIDEO_OBJECT_PLANE:
+            //VTRACE ("Video Object Plane is parsed.\n");
+            vbp_on_vop_mp42(pcontext, list_index);
+            break;
+
+        default:
+            if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) &&
+                (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX))
+            {
+                VTRACE ("Video Object Layer is parsed\n");
+                parser_private->short_video_header = FALSE;
+                vbp_fill_codec_data(pcontext);
+            }
+            else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX &&
+                     current_sc >= MP4_SC_VIDEO_OBJECT_MIN)
+            {
+                // A short video header inside a normal start-code stream
+                // is inconsistent input; report a type error.
+                if (parser->sc_seen == MP4_SC_SEEN_SVH)
+                {
+                    // this should never happen!!!!
+                    WTRACE ("Short video header is parsed.\n");
+                    // vbp_on_vop_svh_mp42(pcontext, list_index);
+                    return VBP_TYPE;
+                }
+            }
+            break;
+        }
+    }
+    else
+    {
+        if (parser->sc_seen == MP4_SC_SEEN_SVH)
+        {
+            //VTRACE ("Short video header is parsed.\n");
+            vbp_on_vop_svh_mp42(pcontext, list_index);
+        }
+    }
+
+    return VBP_OK;
+}
+
+
+
+/*
+* partial frame handling:
+*
+* h.263: picture header is lost if the first GOB is discarded, a redundant pic header must be
+* conveyed in the packet  (RFC 4629) for each following GOB, otherwise,
+* picture can't be decoded.
+*
+* MPEG4:  VideoObjectPlane header is lost if the first slice is discarded. However, picture
+* is still decodable as long as the header_extension_code is 1 in video_packet_header.
+*
+*MPEG-4 with short header:   video_plane_with_short_header is lost if the first GOB
+* is discarded. As this header is not duplicated (RFC 3016), picture is not decodable.
+*
+* In sum:
+* If buffer contains the 32-bit start code (0x000001xx), proceed  as normal.
+*
+* If buffer contains 22-bits of "0000 0000 0000 0000 1000 00", which indicates h.263
+* picture start code or short_video_start_marker, proceed as normal.
+*
+* If buffer contains 22-bits of "0000 0000 0000 0000 1XXX XX", (when XXX XX starts from 000 01), which
+* indicates  h.263 Group Start code or gob_resync_marker of gob_layer in MPEG-4 with
+* short header, we should report packet as a partial frame - no more parsing is needed.
+*
+* If buffer contains a string of 0 between 16 bits and 22 bits, followed by 1-bit of '1', which indicates a resync-marker,
+* the buffer will be immediately parsed and num_items is set to 0.
+*/
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
+{
+    // Scan the whole input cubby for start codes and build cxt->list:
+    // one item per start-code-delimited chunk.  Returns VBP_OK,
+    // VBP_PARTIAL (GOB-only h.263 buffer), VBP_DATA (neither start code
+    // nor resync marker found) or the result of video-packet processing.
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8 *buf = NULL;
+    uint32 size = 0;
+    uint32 sc_end_pos = -1;
+    uint32 bytes_parsed = 0;
+    viddec_mp4_parser_t *pinfo = NULL;
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+
+    // reset query data for the new sample buffer
+    query_data->number_picture_data= 0;
+    query_data->number_pictures = 0;
+
+    // emulation prevention byte is not needed
+    cxt->getbits.is_emul_reqd = 0;
+
+    cxt->list.num_items = 0;
+    cxt->list.data[0].stpos = 0;
+    cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+    buf = cxt->parse_cubby.buf;
+    size = cxt->parse_cubby.size;
+
+    pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
+
+    uint8 is_normal_sc = 0;
+    uint8 resync_marker = 0;
+    uint32 found_sc = 0;
+    uint32 ret = VBP_OK;
+
+    while (1)
+    {
+        found_sc = vbp_get_sc_pos_mp42(
+                        buf + bytes_parsed,
+                        size - bytes_parsed,
+                        &sc_end_pos,
+                        &is_normal_sc,
+                        &resync_marker,
+                        parser_private->short_video_header);
+
+        if (found_sc)
+        {
+            // sc_end_pos is one byte past the start code; back up by 3
+            // so the item begins at the start-code prefix itself.
+            cxt->list.data[cxt->list.num_items].stpos = bytes_parsed + sc_end_pos - 3;
+            if (cxt->list.num_items != 0)
+            {
+                // the previous item ends where this one begins
+                cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed + sc_end_pos - 3;
+            }
+            bytes_parsed += sc_end_pos;
+
+            cxt->list.num_items++;
+            pinfo->cur_sc_prefix = is_normal_sc;
+        }
+        else
+        {
+            if (cxt->list.num_items != 0)
+            {
+                // no further start code: close the last item at buffer end
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+            else
+            {
+                WTRACE ("No start-code is found in cubby buffer! The size of cubby is %d\n", size);
+                cxt->list.num_items = 1;
+                cxt->list.data[0].stpos = 0;
+                cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+                if (resync_marker)
+                {
+                    // either the first slice (GOB) is lost or parser receives a single slice (GOB)
+                    if (parser_private->short_video_header)
+                    {
+                        // TODO: revisit if HW supportd GOB layer decoding for h.263
+                        WTRACE("Partial frame: GOB buffer.\n");
+                        ret = VBP_PARTIAL;
+                    }
+                    else
+                    {
+                        WTRACE("Partial frame: video packet header buffer.\n");
+                        ret =  vbp_process_video_packet_mp42(pcontext);
+                    }
+
+                    // set num_items to 0 so buffer will not be parsed again
+                    cxt->list.num_items = 0;
+                }
+                else
+                {
+                    ETRACE("Invalid data received.\n");
+                    cxt->list.num_items = 0;
+                    return VBP_DATA;
+                }
+
+                break;
+            }
+        }
+    }
+
+    return ret;
+}
+
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext)
+{
+    // All query data is filled in incrementally while each workload item
+    // is processed (see vbp_process_parsing_result_mp42), so there is
+    // nothing left to populate here.  The previous '#if 0' debug hook
+    // (vbp_dump_query_data) was dead code and has been removed.
+    (void) pcontext;  // unused; signature mandated by the common parser interface
+    return VBP_OK;
+}
+
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data)
+{
+    /* Walk the singly-linked picture list and return its last active
+     * entry (entry number_picture_data, counting from 1). */
+    vbp_picture_data_mp42 *last = query_data->picture_data;
+    int remaining;
+
+    for (remaining = query_data->number_picture_data; remaining > 1; remaining--)
+    {
+        last = last->next_picture_data;
+    }
+
+    return last;
+}
+
+void vbp_fill_codec_data(vbp_context *pcontext)
+{
+    // Mirror the sequence/VOL level parameters parsed so far into the
+    // codec_data section of the query data handed back by vbp_query().
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_codec_data_mp42* codec_data = &(query_data->codec_data);
+    vbp_mp42_parser_private *parser_private = (vbp_mp42_parser_private *)pcontext->parser_private;
+
+    codec_data->bit_rate = parser->info.VisualObject.VideoObject.VOLControlParameters.bit_rate;
+
+    codec_data->profile_and_level_indication
+            = parser->info.profile_and_level_indication;
+
+    codec_data->video_object_layer_width =
+            parser->info.VisualObject.VideoObject.video_object_layer_width;
+
+    codec_data->video_object_layer_height =
+            parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+    if (parser->info.VisualObject.VideoSignalType.is_video_signal_type)
+    {
+        codec_data->video_format =
+                parser->info.VisualObject.VideoSignalType.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format =  5;
+    }
+
+    codec_data->video_range =
+            parser->info.VisualObject.VideoSignalType.video_range;
+
+    // Pick matrix coefficients: explicit from the bitstream when given,
+    // otherwise a default that depends on the stream flavour.
+    if (parser->info.VisualObject.VideoSignalType.is_colour_description)
+    {
+        codec_data->matrix_coefficients =
+                parser->info.VisualObject.VideoSignalType.matrix_coefficients;
+    }
+    else if (parser_private->short_video_header)
+    {
+        // SMPTE 170M
+        codec_data->matrix_coefficients = 6;
+    }
+    else
+    {
+        // ITU-R Recommendation BT.709
+        codec_data->matrix_coefficients = 1;
+    }
+
+    codec_data->short_video_header = parser_private->short_video_header;
+
+    // aspect ratio: indices 0-5 come from mp4_aspect_ratio_table,
+    // 15 (extended PAR) carries explicit width/height in the bitstream,
+    // everything else is reserved and reported as 0/0.
+    codec_data->aspect_ratio_info = parser->info.VisualObject.VideoObject.aspect_ratio_info;
+    if (codec_data->aspect_ratio_info < 6)
+    {
+        codec_data->par_width = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][0];
+        codec_data->par_height = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][1];
+    }
+    else if (codec_data->aspect_ratio_info == 15)
+    {
+        codec_data->par_width = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_width;
+        codec_data->par_height = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_height;
+    }
+    else
+    {
+        codec_data->par_width = 0;
+        codec_data->par_height = 0;
+    }
+}
+
+void vbp_fill_slice_data(vbp_context *pcontext, int list_index)
+{
+    /* Dispatch slice extraction according to the stream flavour the
+     * parser detected: H.263-style short video header vs. regular
+     * MPEG-4 elementary stream. */
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+
+    if (parser->info.VisualObject.VideoObject.short_video_header)
+    {
+        vbp_process_slices_svh_mp42(pcontext, list_index);
+    }
+    else
+    {
+        vbp_process_slices_mp42(pcontext, list_index);
+    }
+}
+
+void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag)
+{
+    // Append (or start) a vbp_picture_data_mp42 entry in the query data
+    // and populate its VAPictureParameterBufferMPEG4 from the freshly
+    // parsed VOL/VOP state.  new_picture_flag is non-zero when this VOP
+    // begins a new output picture (bumps number_pictures).
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    VAPictureParameterBufferMPEG4 *picture_param = NULL;
+
+    if (new_picture_flag)
+    {
+        query_data->number_pictures++;
+    }
+
+    picture_data = query_data->picture_data;
+    if (picture_data == NULL || query_data->number_picture_data == 0)
+    {
+        // first entry
+        if (picture_data == NULL)
+        {
+            picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1);
+            query_data->picture_data = picture_data;
+            if (picture_data == NULL) {
+                query_data->number_picture_data = 0;
+                return;
+            }
+        }
+        query_data->number_picture_data = 1;
+    }
+    else
+    {
+        // find the last active one
+        int i = query_data->number_picture_data;
+        while (i > 1)
+        {
+            picture_data = picture_data->next_picture_data;
+            i--;
+        }
+        // list nodes are reused across buffers; extend only when needed
+        if (picture_data->next_picture_data == NULL)
+        {
+            picture_data->next_picture_data = vbp_malloc_set0(vbp_picture_data_mp42, 1);
+            if (picture_data->next_picture_data == NULL) {
+                return;
+            }
+        }
+
+        query_data->number_picture_data++;
+
+        picture_data = picture_data->next_picture_data;
+    }
+
+    picture_param = &(picture_data->picture_param);
+
+    uint8 idx = 0;
+
+    picture_data->new_picture_flag = new_picture_flag;
+
+    picture_data->vop_coded
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded;
+
+
+
+    picture_data->vop_time_increment =
+            parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment;
+
+    // fill picture_param
+
+
+    /*
+     * NOTE: for short video header, the parser saves vop_width and vop_height
+     * to VOL->video_object_layer_width and VOL->video_object_layer_height
+     */
+    picture_param->vop_width
+            = parser->info.VisualObject.VideoObject.video_object_layer_width;
+    picture_param->vop_height
+            = parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+    // reference surfaces are assigned later by the decoder, not here
+    picture_param->forward_reference_picture = VA_INVALID_SURFACE;
+    picture_param->backward_reference_picture = VA_INVALID_SURFACE;
+
+    // Fill VAPictureParameterBufferMPEG4::vol_fields
+
+    picture_param->vol_fields.bits.short_video_header
+            = parser->info.VisualObject.VideoObject.short_video_header;
+    picture_param->vol_fields.bits.chroma_format
+            = parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format;
+
+    // TODO: find out why testsuite always set this value to be 0
+    picture_param->vol_fields.bits.chroma_format = 0;
+
+    picture_param->vol_fields.bits.interlaced
+            = parser->info.VisualObject.VideoObject.interlaced;
+    picture_param->vol_fields.bits.obmc_disable
+            = parser->info.VisualObject.VideoObject.obmc_disable;
+    picture_param->vol_fields.bits.sprite_enable
+            = parser->info.VisualObject.VideoObject.sprite_enable;
+    picture_param->vol_fields.bits.sprite_warping_accuracy
+            = parser->info.VisualObject.VideoObject.sprite_info.sprite_warping_accuracy;
+    picture_param->vol_fields.bits.quant_type
+            = parser->info.VisualObject.VideoObject.quant_type;
+    picture_param->vol_fields.bits.quarter_sample
+            = parser->info.VisualObject.VideoObject.quarter_sample;
+    picture_param->vol_fields.bits.data_partitioned
+            = parser->info.VisualObject.VideoObject.data_partitioned;
+    picture_param->vol_fields.bits.reversible_vlc
+            = parser->info.VisualObject.VideoObject.reversible_vlc;
+    picture_param->vol_fields.bits.resync_marker_disable
+            = parser->info.VisualObject.VideoObject.resync_marker_disable;
+    picture_param->no_of_sprite_warping_points
+            = parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points;
+
+    // GMC sprite trajectory vectors (up to 3 warping points)
+    for (idx = 0; idx < 3; idx++)
+    {
+        picture_param->sprite_trajectory_du[idx]
+                = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx];
+        picture_param->sprite_trajectory_dv[idx]
+                = parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_dv[idx];
+    }
+
+    picture_param->quant_precision
+            = parser->info.VisualObject.VideoObject.quant_precision;
+
+    // fill VAPictureParameterBufferMPEG4::vop_fields
+
+
+    // The coding type lives in different structures for the two stream
+    // flavours: VideoObjectPlane vs. VideoObjectPlaneH263.
+    if (!parser->info.VisualObject.VideoObject.short_video_header)
+    {
+        picture_param->vop_fields.bits.vop_coding_type
+                = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type;
+    }
+    else
+    {
+        picture_param->vop_fields.bits.vop_coding_type
+                = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type;
+    }
+
+    // TODO: fill picture_param->vop_fields.bits.backward_reference_vop_coding_type
+    // This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7
+
+    if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        picture_param->vop_fields.bits.backward_reference_vop_coding_type
+                = picture_param->vop_fields.bits.vop_coding_type;
+    }
+
+    picture_param->vop_fields.bits.vop_rounding_type
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_rounding_type;
+    picture_param->vop_fields.bits.intra_dc_vlc_thr
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.intra_dc_vlc_thr;
+    picture_param->vop_fields.bits.top_field_first
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.top_field_first;
+    picture_param->vop_fields.bits.alternate_vertical_scan_flag
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.alternate_vertical_scan_flag;
+
+    picture_param->vop_fcode_forward
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_forward;
+    picture_param->vop_fcode_backward
+            = parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_fcode_backward;
+    picture_param->vop_time_increment_resolution
+            = parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
+
+    // short header related
+    picture_param->num_gobs_in_vop
+            = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
+    picture_param->num_macroblocks_in_gob
+            = parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
+
+    // for direct mode prediction
+    picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
+    picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
+}
+
+void vbp_fill_iq_matrix_buffer(vbp_context *pcontext)
+{
+    /* Copy the inverse-quantization matrices parsed from the VOL header
+     * into the VA-API IQ matrix buffer of the query data. */
+    viddec_mp4_parser_t *parser =
+            (viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    mp4_VOLQuant_mat_t *quant_mat_info =
+            &(parser->info.VisualObject.VideoObject.quant_mat_info);
+    VAIQMatrixBufferMPEG4 *iq_matrix = &(query_data->iq_matrix_buffer);
+
+    /* Both matrices are always flagged as loaded, regardless of the
+     * load_* flags carried in the bitstream. */
+    iq_matrix->load_intra_quant_mat = 1;
+    iq_matrix->load_non_intra_quant_mat = 1;
+    memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
+    memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64);
+}
+
+
+void vbp_on_vop_mp42(vbp_context *pcontext, int list_index)
+{
+    // A complete Video Object Plane was parsed: refresh the codec data,
+    // open a new picture entry, then attach IQ matrices and slice data.
+    // Order matters: slice data is appended to the picture created by
+    // vbp_fill_picture_param.
+    vbp_fill_codec_data(pcontext);
+    vbp_fill_picture_param(pcontext, 1);
+    vbp_fill_iq_matrix_buffer(pcontext);
+    vbp_fill_slice_data(pcontext, list_index);
+}
+
+void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    // Short-video-header VOP: same fill sequence as vbp_on_vop_mp42;
+    // the vbp_fill_* helpers branch internally on short_video_header.
+    vbp_fill_codec_data(pcontext);
+    vbp_fill_picture_param(pcontext, 1);
+    vbp_fill_iq_matrix_buffer(pcontext);
+    vbp_fill_slice_data(pcontext, list_index);
+}
+
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf,
+    uint32 length,
+    uint32 *sc_end_pos,
+    uint8 *is_normal_sc,
+    uint8 *resync_marker,
+    const bool svh_search)
+{
+    // Scan buf for the next start code.  Returns 1 when one is found:
+    // *sc_end_pos is then the offset one byte past the start code and
+    // *is_normal_sc distinguishes a normal 00 00 01 xx code from a
+    // short-video-header code (searched only when svh_search is set).
+    // *resync_marker is raised whenever at least 16 zero bits were seen,
+    // which may indicate a GOB start code or a resync marker even if no
+    // full start code follows.
+    uint8 *ptr = buf;
+    uint32 size;
+    uint32 data_left = 0, phase = 0, ret = 0;
+    size = 0;
+
+    data_left = length;
+    *sc_end_pos = -1;
+
+    /* parse until there is more data and start code not found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are byte aligned & phase=0, if thats the case we can check
+         work at a time instead of byte*/
+        // NOTE(review): the (uint32) pointer casts below truncate on a
+        // 64-bit build; only the low 2 bits are used so the alignment
+        // test still behaves, but uintptr_t would be the clean type --
+        // confirm the target ABI.
+        if (((((uint32) ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32 data;
+                char mask1 = 0, mask2 = 0;
+
+                data = *((uint32 *) ptr);
+#ifndef MFDBIGENDIAN
+                data = SWAP_WORD(data);
+#endif
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+                 two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {
+                    /* Success so skip 4 bytes and start over */
+                    ptr += 4;
+                    size += 4;
+                    data_left -= 4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+         two zero bytes in the word so we look one byte at a time*/
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {
+                /* Phase can be 3 only if third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    phase = 2;
+
+                    // already saw two zero bytes: fast-skip whole zero
+                    // words while aligned
+                    if ((((uint32) ptr) & 0x3) == 0)
+                    {
+                        while (data_left > 3)
+                        {
+                            if (*((uint32 *) ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr += 4;
+                            size += 4;
+                            data_left -= 4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                uint8 normal_sc = 0, short_sc = 0;
+                if (phase == 2)
+                {
+                    normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+                    if (svh_search)
+                    {
+                       // short video header start code: upper 6 bits match
+                       short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
+                    }
+                    *is_normal_sc = normal_sc;
+
+                    // at least 16-bit 0, may be GOB start code or
+                    // resync marker.
+                    *resync_marker = 1;
+                }
+
+                if (!(normal_sc | short_sc))
+                {
+                    phase = 0;
+                }
+                else
+                {
+                    /* Match for start code so update context with byte position */
+                    *sc_end_pos = size;
+                    phase = 3;
+                }
+                ptr++;
+                size++;
+                data_left--;
+            }
+        }
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        // step past the last start-code byte so *sc_end_pos is exclusive
+        (*sc_end_pos)++;
+        phase++;
+        ret = 1;
+    }
+
+    // Return 1 only if phase is 4, else always return 0
+    return ret;
+}
+
+
+uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
+{
+    /* Number of bits needed to code a macroblock number in the range
+     * [0, numOfMbs - 1], i.e. ceil(log2(numOfMbs)) with a minimum of 1.
+     *
+     * Guard against numOfMbs == 0: the unchecked decrement would wrap
+     * to 0xFFFFFFFF and report 32 bits; treat it like a single
+     * macroblock instead. */
+    uint32 length = 0;
+
+    if (numOfMbs == 0)
+    {
+        return 1;
+    }
+
+    numOfMbs--;
+    do
+    {
+        numOfMbs >>= 1;
+        length++;
+    }
+    while (numOfMbs);
+    return length;
+}
+
+uint32 vbp_parse_video_packet_header_mp42(
+    void *parent,
+    viddec_mp4_parser_t *parser_cxt,
+    uint16_t *quant_scale,
+    uint32 *macroblock_number)
+{
+    // Parse an MPEG-4 video_packet_header, positioned right after the
+    // resync marker.  Outputs the quantizer and the first macroblock
+    // number of the packet; when the optional header extension is
+    // present, the VOP fields in parser_cxt are refreshed from it.
+    // Returns VBP_OK on success, VBP_DATA on malformed or unsupported
+    // input (non-rectangular shape, newpred, bit-read failures).
+    uint32 ret = VBP_DATA;
+    mp4_Info_t *pInfo = &(parser_cxt->info);
+    mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vidObjPlane =
+            &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    uint32 code = 0;
+    int32_t getbits = 0;
+
+    uint16_t _quant_scale = 0;
+    uint32 _macroblock_number = 0;
+    uint32 header_extension_codes = 0;
+    uint8 vop_coding_type = vidObjPlane->vop_coding_type;
+
+    // only rectangular VOPs are supported
+    if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+    {
+        return VBP_DATA;
+    }
+
+    do
+    {
+        // get macroblock_number
+        // field width depends on the total macroblock count of the frame
+        uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
+        uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
+        uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y);
+
+        getbits = viddec_pm_get_bits(parent, &code, length);
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        _macroblock_number = code;
+
+        // quant_scale
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            _quant_scale = code;
+        }
+
+        // header_extension_codes
+        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            header_extension_codes = code;
+        }
+
+        if (header_extension_codes)
+        {
+            // modulo time base: string of '1' bits terminated by a '0'
+            do
+            {
+                getbits = viddec_pm_get_bits(parent, &code, 1);
+                BREAK_GETBITS_FAIL(getbits, ret);
+            } while (code);
+
+            // marker_bit
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            // vop_time_increment
+            uint32 numbits = 0;
+            numbits = vidObjLay->vop_time_increment_resolution_bits;
+            if (numbits == 0)
+            {
+                // ?? defensive: never read zero bits
+                numbits = 1;
+            }
+            getbits = viddec_pm_get_bits(parent, &code, numbits);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            vidObjPlane->vop_time_increment = code;
+
+
+            // marker_bit
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            // vop_coding_type
+            getbits = viddec_pm_get_bits(parent, &code, 2);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            vop_coding_type = code & 0x3;
+            vidObjPlane->vop_coding_type = vop_coding_type;
+
+
+            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+            {
+                // intra_dc_vlc_thr
+                getbits = viddec_pm_get_bits(parent, &code, 3);
+                BREAK_GETBITS_FAIL(getbits, ret);
+
+                vidObjPlane->intra_dc_vlc_thr = code;
+                // GMC S-VOP: re-read the sprite trajectory
+                if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) &&
+                    (vop_coding_type == MP4_VOP_TYPE_S) &&
+                    (vidObjLay->sprite_info.no_of_sprite_warping_points> 0))
+                {
+                    if (vbp_sprite_trajectory_mp42(parent, vidObjLay, vidObjPlane) != VBP_OK)
+                    {
+                        break;
+                    }
+                }
+
+                if (vidObjLay->reduced_resolution_vop_enable &&
+                   (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+                   ((vop_coding_type == MP4_VOP_TYPE_I) ||
+                    (vop_coding_type == MP4_VOP_TYPE_P)))
+                {
+                    // vop_reduced_resolution (read and discarded)
+                    getbits = viddec_pm_get_bits(parent, &code, 1);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                }
+
+                if (vop_coding_type != MP4_VOP_TYPE_I)
+                {
+                    // vop_fcode_forward
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                    vidObjPlane->vop_fcode_forward = code;
+                }
+
+                if (vop_coding_type == MP4_VOP_TYPE_B)
+                {
+                    // vop_fcode_backward
+                    getbits = viddec_pm_get_bits(parent, &code, 3);
+                    BREAK_GETBITS_FAIL(getbits, ret);
+                    vidObjPlane->vop_fcode_backward = code;
+                }
+            }
+        }
+
+        if (vidObjLay->newpred_enable)
+        {
+            // New pred mode not supported in HW, but, does libva support this?
+            ret = VBP_DATA;
+            break;
+        }
+
+        // commit outputs only after the whole header parsed cleanly
+        *quant_scale = _quant_scale;
+        *macroblock_number = _macroblock_number;
+
+        ret = VBP_OK;
+    }
+    while (0);
+    return ret;
+}
+
+// Return the length in bits of the video-packet resync marker for the
+// current VOP. Per MPEG-4 Part 2, the marker is a run of zeros terminated
+// by a single '1' bit, and its length depends on the VOP coding type and
+// the fcode value(s) carried in the VOP header.
+uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt)
+{
+    mp4_Info_t *pInfo = &(parser_cxt->info);
+    mp4_VideoObjectPlane_t *vidObjPlane =
+            &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    uint32 resync_marker_length = 0;
+    if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I)
+    {
+        // I-VOP: fixed 17-bit marker (16 zeros followed by a one)
+        resync_marker_length = 17;
+    }
+    else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B)
+    {
+        // B-VOP: length is driven by the larger of the two fcodes
+        uint8 fcode_max = vidObjPlane->vop_fcode_forward;
+        if (fcode_max < vidObjPlane->vop_fcode_backward)
+        {
+            fcode_max = vidObjPlane->vop_fcode_backward;
+        }
+        resync_marker_length = 16 + fcode_max;
+
+        // resync_marker is max(15+fcode,17) zeros followed by a one
+        if (resync_marker_length < 18)
+            resync_marker_length = 18;
+    }
+    else
+    {
+        // P/S-VOP: 16 + vop_fcode_forward bits
+        resync_marker_length = 16 + vidObjPlane->vop_fcode_forward;
+    }
+    return resync_marker_length;
+}
+
+// Fill in the slice data/parameter structures for a short-video-header
+// (H.263-style) bitstream. Short video header streams carry exactly one
+// slice per item, so no resync-marker scan is required here.
+// Returns VBP_OK (always; the fill operations cannot fail).
+uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index)
+{
+    uint32 ret = VBP_OK;
+
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt =
+            (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+    vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data);
+    vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data);
+    VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param);
+
+    uint8 is_emul = 0;
+    uint32 bit_offset = 0;
+    uint32 byte_offset = 0;
+
+    // The offsets are relative to parent->parse_cubby.buf
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+    slice_data->buffer_addr = parent->parse_cubby.buf;
+
+    // Slice spans from the current parse position to the end of this item.
+    slice_data->slice_offset =
+            byte_offset + parent->list.data[list_index].stpos;
+    slice_data->slice_size =
+            parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+    slice_param->slice_data_size = slice_data->slice_size;
+    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+    slice_param->slice_data_offset = 0;
+    slice_param->macroblock_offset = bit_offset;
+    slice_param->macroblock_number = 0;
+    // Quantizer comes from the H.263 (short header) VOP structure.
+    slice_param->quant_scale
+            = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant;
+
+    return ret;
+}
+// When defined, resync markers are located with a byte-oriented fast scan
+// over the raw buffer instead of bit-by-bit peeking (see #ifndef below).
+#define SEARCH_SYNC_OPT
+// Build slice data for a regular (non-short-header) MPEG-4 item. Fills the
+// first slice covering the whole item, then — unless resync markers are
+// disabled — scans for resync markers, parsing a video_packet_header and
+// emitting an additional slice for each marker found.
+// Returns VBP_OK on success, VBP_DATA on bitstream errors.
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    vbp_slice_data_mp42 *slice_data = NULL;
+    VASliceParameterBufferMPEG4* slice_param = NULL;
+
+    uint32 ret = VBP_OK;
+
+    uint8 is_emul = 0;
+    uint32 bit_offset = 0;
+    uint32 byte_offset = 0;
+
+    uint32 code = 0;
+    int32_t getbits = 0;
+    uint32 resync_marker_length = 0;
+
+    /* The offsets are relative to parent->parse_cubby.buf */
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+    picture_data = vbp_get_mp42_picture_data(query_data);
+    slice_data = &(picture_data->slice_data);
+    slice_param = &(slice_data->slice_param);
+
+    slice_data->buffer_addr = parent->parse_cubby.buf;
+
+    // First slice initially covers the remainder of the item; it is trimmed
+    // below if a resync marker is found.
+    slice_data->slice_offset = byte_offset + parent->list.data[list_index].stpos;
+    slice_data->slice_size =
+            parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+    slice_param->slice_data_size = slice_data->slice_size;
+    slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+    slice_param->slice_data_offset = 0;
+    slice_param->macroblock_offset = bit_offset;
+    slice_param->macroblock_number = 0;
+    slice_param->quant_scale
+            = parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
+
+    if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable)
+    {
+        // no resync_marker: the single slice above is the whole picture
+        return VBP_OK;
+    }
+
+    // scan for resync_marker
+    viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+    if (bit_offset)
+    {
+        // not byte-aligned yet: consume the remaining bits of this byte
+        // so the marker scan starts on a byte boundary
+        getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+        if (getbits == -1)
+        {
+            return VBP_DATA;
+        }
+    }
+
+    // get resync_marker_length
+    resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    while (1)
+    {
+#ifndef SEARCH_SYNC_OPT
+        getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+
+        // return VBP_OK as resync_marker may not be present
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        if (code != 1)
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 8);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            continue;
+        }
+#else
+
+        // read 3 bytes since resync_marker_length is between 17 bits and 23 bits
+        if (parent->getbits.bstrm_buf.buf_index + 3 > parent->getbits.bstrm_buf.buf_end)
+        {
+            break;
+        }
+
+        code = parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index] << 16 |
+                parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+1] << 8 |
+                parent->getbits.bstrm_buf.buf[parent->getbits.bstrm_buf.buf_index+2];
+
+        if (code >> (24-resync_marker_length) != 1)
+        {
+            // No marker here: advance by 1..3 bytes depending on where the
+            // trailing zero bytes are, so we never step past a possible
+            // marker start.
+            int byte0 = code & 0xff;
+            int byte1 = (code >> 8) & 0xff;
+            if (byte0 != 0)
+            {
+                parent->getbits.bstrm_buf.buf_index += 3;
+            }
+            else if (byte1 != 0)
+            {
+                parent->getbits.bstrm_buf.buf_index += 2;
+            }
+            else
+            {
+                parent->getbits.bstrm_buf.buf_index += 1;
+            }
+            continue;
+        }
+#endif
+        // We found resync_marker
+        viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        // update slice data as we found resync_marker
+        slice_data->slice_size -=
+                (parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset);
+        slice_param->slice_data_size = slice_data->slice_size;
+
+        // skip resync marker
+        getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+
+        // return VBP_DATA, this should never happen!
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        // parse video_packet_header
+        ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt,
+                &quant_scale, &macroblock_number);
+
+        if (ret != VBP_OK)
+        {
+            ETRACE("Failed to parse video packet header.\n");
+            return ret;
+        }
+
+        // new_picture_flag = 0, this is not the first slice of a picture
+        vbp_fill_picture_param(pcontext, 0);
+
+        // Re-fetch the current picture/slice pointers: vbp_fill_picture_param
+        // advances the picture data in query_data.
+        picture_data = vbp_get_mp42_picture_data(query_data);
+        slice_data = &(picture_data->slice_data);
+        slice_param = &(slice_data->slice_param);
+
+
+        viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        slice_data->buffer_addr = parent->parse_cubby.buf;
+
+        slice_data->slice_offset =
+                    byte_offset + parent->list.data[list_index].stpos;
+        slice_data->slice_size =
+                    parent->list.data[list_index].edpos - parent->list.data[list_index].stpos - byte_offset;
+
+        slice_param->slice_data_size = slice_data->slice_size;
+        slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+        slice_param->slice_data_offset = 0;
+        slice_param->macroblock_offset = bit_offset;
+        slice_param->macroblock_number = macroblock_number;
+        slice_param->quant_scale = quant_scale;
+
+        if (bit_offset)
+        {
+            // byte-align parsing position
+            getbits = viddec_pm_skip_bits(parent,  8 - bit_offset);
+            if (getbits == -1)
+            {
+                ETRACE("Failed to align parser to byte position.\n");
+                return VBP_DATA;
+            }
+        }
+
+    }
+
+    return VBP_OK;
+}
+
+// Process a buffer that begins directly with a video packet (no VOP start
+// code): set up the bitstream reader over the whole cubby, locate and skip
+// the leading resync marker, parse the video_packet_header, then hand off
+// to vbp_process_slices_mp42 for the remaining slices.
+// Returns VBP_OK on success, VBP_DATA if no resync marker is found or the
+// bitstream runs out.
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+    viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+    uint32 code = 0;
+    int32_t getbits = 0;
+
+    uint32 ret = VBP_DATA;
+
+
+    // setup bitstream parser
+    parent->getbits.list = &(parent->list);
+
+    parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf;
+    parent->getbits.bstrm_buf.buf_index = 0;
+    parent->getbits.bstrm_buf.buf_st = 0;
+    parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size;
+    parent->getbits.bstrm_buf.buf_bitoff = 0;
+
+    parent->getbits.au_pos = 0;
+    parent->getbits.list_off = 0;
+    parent->getbits.phase = 0;
+    parent->getbits.emulation_byte_counter = 0;
+
+    parent->list.start_offset = 0;
+    parent->list.end_offset = parent->parse_cubby.size;
+    parent->list.total_bytes = parent->parse_cubby.size;
+
+
+    // skip leading zero-byte
+    while (code == 0)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 8);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        getbits = viddec_pm_peek_bits(parent, &code, 8);
+        BREAK_GETBITS_FAIL(getbits, ret);
+    }
+
+    // getbits != 0 here means the loop above broke on a read failure
+    if (getbits != 0)
+    {
+        return VBP_DATA;
+    }
+
+    // resync-marker is represented as 17-23 bits. (16-22 bits of 0)
+    // as 16-bit '0' has been skipped, we try to parse buffer bit by bit
+    // until bit 1 is encountered or up to 7 bits are parsed.
+    code = 0;
+    uint8 count = 0;
+    while (code == 0  && count < 7)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        count++;
+    }
+
+    if (code == 0 || getbits != 0)
+    {
+        ETRACE("no resync-marker in the buffer.\n");
+        return ret;
+    }
+
+    // resync marker is skipped
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    // parse video_packet_header
+    // NOTE(review): the return value is ignored here, unlike the checked
+    // call in vbp_process_slices_mp42 — on failure quant_scale and
+    // macroblock_number stay 0. Confirm this is intentional.
+    vbp_parse_video_packet_header_mp42(parent, parser_cxt, &quant_scale, &macroblock_number);
+
+    // new_picture_flag = 0, this is not the first slice of a picture
+    vbp_fill_picture_param(pcontext, 0);
+
+    vbp_picture_data_mp42 *picture_data = NULL;
+    vbp_slice_data_mp42 *slice_data = NULL;
+    VASliceParameterBufferMPEG4* slice_param = NULL;
+
+    picture_data = vbp_get_mp42_picture_data(query_data);
+    slice_data = &(picture_data->slice_data);
+    slice_param = &(slice_data->slice_param);
+
+    ret = vbp_process_slices_mp42(pcontext, 0);
+
+    // update slice's QP and macro_block number as it is set to 0 by default.
+    slice_param->macroblock_number = macroblock_number;
+    slice_param->quant_scale = quant_scale;
+
+    // VOP must be coded!
+    picture_data->vop_coded = 1;
+    return ret;
+
+}
+
+
+// Decode the dmv_length VLC that precedes each sprite warping motion
+// vector component (MPEG-4 Part 2 sprite trajectory coding).
+// On success writes the decoded length (in bits) to *dmv_length and
+// returns VBP_OK; returns VBP_DATA on a bitstream read failure.
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length)
+{
+    uint32 code, skip;
+    int32_t getbits = 0;
+    uint32 ret = VBP_DATA;
+    *dmv_length = 0;
+    skip = 3;
+    do
+    {
+        getbits = viddec_pm_peek_bits(parent, &code, skip);
+        BREAK_GETBITS_FAIL(getbits, ret);
+
+        if (code == 7)
+        {
+            // Long codeword: length 6.. encoded as a run of leading ones
+            viddec_pm_skip_bits(parent, skip);
+            getbits = viddec_pm_peek_bits(parent, &code, 9);
+            BREAK_GETBITS_FAIL(getbits, ret);
+
+            skip = 1;
+            while ((code & 256) != 0)
+            {
+                // count number of 1 bits
+                code <<= 1;
+                skip++;
+            }
+            *dmv_length = 5 + skip;
+        }
+        else
+        {
+            // Short codeword: 2 bits for codes 0/1, otherwise 3 bits
+            skip = (code <= 1) ? 2 : 3;
+            *dmv_length = code - 1;
+        }
+        viddec_pm_skip_bits(parent, skip);
+        ret = VBP_OK;
+
+    }
+    while (0);
+    return ret;
+}
+
+
+// Parse the sprite trajectory: for each warping point decode the du and dv
+// differential motion vector components (dmv_length VLC, then the signed
+// value, then a 1-bit marker which must be '1').
+// Fills vidObjPlane->warping_mv_code_du/dv. Returns VBP_OK on success,
+// VBP_DATA on a bitstream error or a missing marker bit.
+static inline uint32 vbp_sprite_trajectory_mp42(
+    void *parent,
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane)
+{
+    uint32 code, i;
+    int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
+    uint32 ret = VBP_OK;
+    for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++)
+    {
+        ret = VBP_DATA;
+        ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+        if (ret != VBP_OK)
+        {
+            break;
+        }
+        if (dmv_length <= 0)
+        {
+            dmv_code = 0;
+        }
+        else
+        {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            // Sign-extend: a leading 0 bit means a negative value,
+            // stored as (value + 2^len - 1).
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+            {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        // marker bit after du must be '1'
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1)
+        {
+            ret = VBP_DATA;
+            break;
+        }
+        vidObjPlane->warping_mv_code_du[i] = dmv_code;
+        // TODO: create another inline function to avoid code duplication
+        ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
+        if (ret != VBP_OK)
+        {
+            break;
+        }
+        // reset return value in case early break
+        ret = VBP_DATA;
+        if (dmv_length <= 0)
+        {
+            dmv_code = 0;
+        }
+        else
+        {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            dmv_code = (int32_t) code;
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+            {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        // marker bit after dv must be '1'
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        if (code != 1)
+        {
+            break;
+        }
+        vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+        // set to VBP_OK
+        ret = VBP_OK;
+
+    }
+    return ret;
+}
+
+
+/*
+ * Free the vbp_data_mp42 structure, its linked list of picture data,
+ * and the MP4 parser's private data. Safe to call when either the
+ * private data or the query data is already NULL. Always returns VBP_OK.
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+    vbp_picture_data_mp42* current = NULL;
+    vbp_picture_data_mp42* next = NULL;
+
+    if (pcontext->parser_private)
+    {
+        free(pcontext->parser_private);
+        pcontext->parser_private = NULL;
+    }
+    if (query_data)
+    {
+        // walk and free the singly-linked picture data list
+        current = query_data->picture_data;
+        while (current != NULL)
+        {
+            next = current->next_picture_data;
+            free(current);
+            current = next;
+        }
+
+        free(query_data);
+    }
+
+    pcontext->query_data = NULL;
+    return VBP_OK;
+}
+
+/*
+ * Allocate memory for vbp_data_mp42 structure and the MP4 parser private
+ * data (picture data entries are allocated lazily elsewhere).
+ * Returns VBP_OK on success, or VBP_MEM after freeing any partial
+ * allocations on failure.
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext)
+{
+    vbp_data_mp42 *query_data;
+    pcontext->query_data = NULL;
+
+    query_data = vbp_malloc_set0(vbp_data_mp42, 1);
+    if (query_data == NULL)
+    {
+        goto cleanup;
+    }
+
+    pcontext->query_data = (void *) query_data;
+    query_data->picture_data = NULL;
+    query_data->number_picture_data = 0;
+    query_data->number_pictures = 0;
+
+    pcontext->parser_private = NULL;
+    vbp_mp42_parser_private *parser_private = NULL;
+
+    parser_private = vbp_malloc_set0(vbp_mp42_parser_private, 1);
+    if (NULL == parser_private)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->parser_private = (void *)parser_private;
+
+    /* default to short video header until the stream proves otherwise */
+    parser_private->short_video_header = TRUE;
+    return VBP_OK;
+
+cleanup:
+
+    // frees whatever was allocated before the failure
+    vbp_free_query_data_mp42(pcontext);
+
+    return VBP_MEM;
+}
diff --git a/mixvbp/vbp_manager/vbp_mp42_parser.h b/mixvbp/vbp_manager/vbp_mp42_parser.h
new file mode 100755
index 0000000..93416b7
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_mp42_parser.h
@@ -0,0 +1,66 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_MP42_PARSER_H
+#define VBP_MP42_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+
+uint32 vbp_init_parser_entries_mp42(vbp_context *pcontext);
+
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_mp42(vbp_context *pcontext);
+
+/*
+ * parse start code.
+ */
+uint32 vbp_parse_start_code_mp42(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_mp42(vbp_context *pcontext);
+
+#endif /*VBP_MP42_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_utils.c b/mixvbp/vbp_manager/vbp_utils.c
new file mode 100755
index 0000000..72548f0
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_utils.c
@@ -0,0 +1,618 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#include <dlfcn.h>
+
+#include "vc1.h"
+#include "h264.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+#include "vbp_h264_parser.h"
+#include "vbp_mp42_parser.h"
+#ifdef USE_HW_VP8
+#include "vbp_vp8_parser.h"
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+#include "vbp_h264secure_parser.h"
+#endif
+
+
+/* rolling count of sample buffers parsed (incremented in
+ * vbp_utils_parse_buffer for non-init buffers; diagnostics only) */
+uint32 buffer_counter = 0;
+
+
+// malloc() wrapper that zero-fills the allocation; returns NULL on failure.
+void* vbp_try_malloc0(uint32 size) {
+    void* pMem = malloc(size);
+    if (pMem)
+        memset(pMem, 0, size);
+    return pMem;
+}
+
+/**
+ *
+ * uninitialize parser context: free the parser ops table and unload the
+ * parser shared library. NULL-safe; always returns VBP_OK.
+ *
+ */
+static uint32 vbp_utils_uninitialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    if (NULL == pcontext)
+    {
+        return error;
+    }
+
+    /* no need to reset parser entry points. */
+
+    free(pcontext->parser_ops);
+    pcontext->parser_ops = NULL;
+
+
+    if (pcontext->fd_parser)
+    {
+        dlclose(pcontext->fd_parser);
+        pcontext->fd_parser = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * initialize parser context: dlopen() the codec-specific parser library,
+ * allocate the parser ops table, wire up the per-codec function pointers,
+ * and invoke the parser's entry-point initializer.
+ * Returns VBP_OK, or VBP_TYPE / VBP_LOAD / VBP_MEM on failure (context is
+ * uninitialized again on any failure path).
+ *
+ */
+static uint32 vbp_utils_initialize_context(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+    char *parser_name;
+
+    // map parser type to the shared library name (.so.0 on non-Android)
+    switch (pcontext->parser_type)
+    {
+    case VBP_VC1:
+#ifndef ANDROID
+        parser_name = "libmixvbp_vc1.so.0";
+#else
+        parser_name = "libmixvbp_vc1.so";
+#endif
+        break;
+
+        /* MPEG-2 parser is not supported. */
+
+        /*  case VBP_MPEG2:
+        parser_name = "libmixvbp_mpeg2.so.0";
+        break;*/
+
+    case VBP_MPEG4:
+#ifndef ANDROID
+        parser_name = "libmixvbp_mpeg4.so.0";
+#else
+        parser_name = "libmixvbp_mpeg4.so";
+#endif
+        break;
+
+    case VBP_H264:
+#ifndef ANDROID
+        parser_name = "libmixvbp_h264.so.0";
+#else
+        parser_name = "libmixvbp_h264.so";
+#endif
+        break;
+#ifdef USE_HW_VP8
+    case VBP_VP8:
+#ifndef ANDROID
+        parser_name = "libmixvbp_vp8.so.0";
+#else
+        parser_name = "libmixvbp_vp8.so";
+#endif
+        break;
+#endif
+
+#ifdef USE_AVC_SHORT_FORMAT
+    case VBP_H264SECURE:
+        parser_name = "libmixvbp_h264secure.so";
+        break;
+#endif
+
+    default:
+        WTRACE("Unsupported parser type!");
+        return VBP_TYPE;
+    }
+
+    pcontext->fd_parser = dlopen(parser_name, RTLD_LAZY);
+    if (NULL == pcontext->fd_parser)
+    {
+        ETRACE("Failed to load parser %s.", parser_name);
+        error =  VBP_LOAD;
+        goto cleanup;
+    }
+
+    pcontext->parser_ops = vbp_malloc(viddec_parser_ops_t, 1);
+    if (NULL == pcontext->parser_ops)
+    {
+        ETRACE("Failed to allocate memory");
+        error =  VBP_MEM;
+        goto cleanup;
+    }
+
+// binds the seven vbp_*_<codec> entry points for parser type X / suffix Y
+#define SET_FUNC_POINTER(X, Y)\
+    case X:\
+    pcontext->func_init_parser_entries = vbp_init_parser_entries_##Y;\
+    pcontext->func_allocate_query_data = vbp_allocate_query_data_##Y;\
+    pcontext->func_free_query_data = vbp_free_query_data_##Y;\
+    pcontext->func_parse_init_data = vbp_parse_init_data_##Y;\
+    pcontext->func_parse_start_code = vbp_parse_start_code_##Y;\
+    pcontext->func_process_parsing_result = vbp_process_parsing_result_##Y;\
+    pcontext->func_populate_query_data = vbp_populate_query_data_##Y;\
+    break;
+
+    switch (pcontext->parser_type)
+    {
+        SET_FUNC_POINTER(VBP_VC1, vc1);
+        SET_FUNC_POINTER(VBP_MPEG4, mp42);
+        SET_FUNC_POINTER(VBP_H264, h264);
+#ifdef USE_HW_VP8
+        SET_FUNC_POINTER(VBP_VP8, vp8);
+#endif
+#ifdef USE_AVC_SHORT_FORMAT
+        SET_FUNC_POINTER(VBP_H264SECURE, h264secure);
+#endif
+    }
+#ifdef USE_AVC_SHORT_FORMAT
+    if (pcontext->parser_type == VBP_H264SECURE) {
+        pcontext->func_update_data = vbp_update_data_h264secure;
+    }
+#endif
+
+    /* set entry points for parser operations:
+     *   init
+     *   parse_sc
+     *   parse_syntax
+     *   get_cxt_size
+     *   is_wkld_done
+     *   is_frame_start
+     */
+    error = pcontext->func_init_parser_entries(pcontext);
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        /* no need to log error.  the loader would have done so already. */
+        vbp_utils_uninitialize_context(pcontext);
+    }
+
+    return error;
+}
+
+/**
+*
+* free allocated memory: format-specific query data, both workloads,
+* persistent parser memory and the parser context. NULL-safe (both for
+* pcontext and for each member; free(NULL) is a no-op). Always VBP_OK.
+*
+*/
+static uint32 vbp_utils_free_parser_memory(vbp_context *pcontext)
+{
+    if (NULL == pcontext)
+    {
+        return VBP_OK;
+    }
+
+    if (pcontext->func_free_query_data)
+    {
+        pcontext->func_free_query_data(pcontext);
+    }
+
+    free(pcontext->workload2);
+    pcontext->workload2 = NULL;
+
+    free(pcontext->workload1);
+    pcontext->workload1 = NULL;
+
+    free(pcontext->persist_mem);
+    pcontext->persist_mem = NULL;
+
+    free(pcontext->parser_cxt);
+    pcontext->parser_cxt = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * allocate memory: parser context, persistent parser memory (mandatory for
+ * H.264, optional for VC-1/MPEG-2/MPEG-4/VP8), two workloads, and the
+ * format-specific query data. On any failure everything allocated so far
+ * is freed and the error code (VBP_MEM or VBP_TYPE) is returned.
+ *
+ */
+static uint32 vbp_utils_allocate_parser_memory(vbp_context *pcontext)
+{
+    /* pcontext is guaranteed to be valid input. */
+    uint32 error = VBP_OK;
+    viddec_parser_memory_sizes_t sizes;
+
+    pcontext->parser_cxt = vbp_malloc(viddec_pm_cxt_t, 1);
+    if (NULL == pcontext->parser_cxt)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* invoke parser entry to get context size */
+    /* no return value, should always succeed. */
+    pcontext->parser_ops->get_cxt_size(&sizes);
+
+    /* allocate persistent memory for parser */
+    if (sizes.persist_size)
+    {
+        pcontext->persist_mem = malloc(sizes.persist_size);
+        if (NULL == pcontext->persist_mem)
+        {
+            ETRACE("Failed to allocate memory");
+            error = VBP_MEM;
+            goto cleanup;
+        }
+    }
+    else
+    {
+        /* OK for VC-1, MPEG2 and MPEG4. */
+        if ((VBP_VC1 == pcontext->parser_type) ||
+            (VBP_MPEG2 == pcontext->parser_type) ||
+            (VBP_MPEG4 == pcontext->parser_type)
+#ifdef USE_HW_VP8
+            || (VBP_VP8 == pcontext->parser_type)
+#endif
+)
+        {
+            pcontext->persist_mem = NULL;
+        }
+        else
+        {
+            /* mandatory for H.264 */
+            ETRACE("Failed to allocate memory");
+            error =  VBP_TYPE;
+            goto cleanup;
+        }
+    }
+
+    /* allocate a new workload with 1000 items. */
+    pcontext->workload1 = malloc(sizeof(viddec_workload_t) +
+                                       (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload1)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate a second workload with 1000 items. */
+    pcontext->workload2 = malloc(sizeof(viddec_workload_t) +
+                                       (MAX_WORKLOAD_ITEMS * sizeof(viddec_workload_item_t)));
+    if (NULL == pcontext->workload2)
+    {
+        ETRACE("Failed to allocate memory");
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    /* allocate format-specific query data */
+    error = pcontext->func_allocate_query_data(pcontext);
+
+cleanup:
+    if (error != VBP_OK)
+    {
+        vbp_utils_free_parser_memory(pcontext);
+    }
+    return error;
+}
+
+
+
+/**
+ *
+ * parse the elementary sample buffer or codec configuration data:
+ * run the start-code (or init-data) scan to populate the item list, then
+ * for each item set up the bitstream window, invoke the codec parser's
+ * parse_syntax entry, and process the parsing result.
+ * Returns VBP_OK on success or the first processing error encountered
+ * (VBP_MULTI from result processing is treated as success).
+ *
+ */
+static uint32 vbp_utils_parse_es_buffer(vbp_context *pcontext, uint8 init_data_flag)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    uint32 error = VBP_OK;
+    int i;
+
+    /* reset list number. func_parse_init_data or func_parse_start_code will
+    * set it equal to number of sequence headers, picture headers or slices headers
+    * found in the sample buffer
+    */
+    cxt->list.num_items = 0;
+
+    /**
+    * READ THIS NOTE: cxt->getbits.is_emul_reqd must be set to 1
+    * for H.264 and MPEG-4, VC1 advanced profile and set to 0
+    * for VC1 simple or main profile when parsing the frame
+    * buffer. When parsing the sequence header, it must be set to 1
+    * always.
+    *
+    * PARSER IMPLEMENTOR: set this flag in the parser.
+    */
+
+    /*
+    if ((codec_type == VBP_H264)  || (codec_type == VBP_MPEG4))
+    {
+    	cxt->getbits.is_emul_reqd = 1;
+    }
+    */
+
+
+    /* populate the list.*/
+    if (init_data_flag)
+    {
+        error = pcontext->func_parse_init_data(pcontext);
+    }
+    else
+    {
+        error = pcontext->func_parse_start_code(pcontext);
+    }
+
+    if (VBP_OK != error)
+    {
+        ETRACE("Failed to parse the start code!");
+        return error;
+    }
+
+    /* set up bitstream buffer */
+    cxt->getbits.list = &(cxt->list);
+
+    /* setup buffer pointer */
+    cxt->getbits.bstrm_buf.buf = cxt->parse_cubby.buf;
+
+    // TODO: check if cxt->getbits.is_emul_reqd is set properly
+
+    for (i = 0; i < cxt->list.num_items; i++)
+    {
+        /* setup bitstream parser to cover exactly this item's byte range */
+        cxt->getbits.bstrm_buf.buf_index = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_st = cxt->list.data[i].stpos;
+        cxt->getbits.bstrm_buf.buf_end = cxt->list.data[i].edpos;
+
+        /* It is possible to end up with buf_offset not equal zero. */
+        cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+        cxt->getbits.au_pos = 0;
+        cxt->getbits.list_off = 0;
+        cxt->getbits.phase = 0;
+        cxt->getbits.emulation_byte_counter = 0;
+
+        cxt->list.start_offset = cxt->list.data[i].stpos;
+        cxt->list.end_offset = cxt->list.data[i].edpos;
+        cxt->list.total_bytes = cxt->list.data[i].edpos - cxt->list.data[i].stpos;
+
+        /* invoke parse entry point to parse the buffer */
+        error = ops->parse_syntax((void *)cxt, (void *)&(cxt->codec_data[0]));
+
+        /* can't return error for now. Neet further investigation */
+#if 0
+        if (0 != error)
+        {
+            ETRACE("failed to parse the syntax: %d!", error);
+            return error;
+        }
+#endif
+
+        /* process parsing result */
+        error = pcontext->func_process_parsing_result(pcontext, i);
+
+        if (VBP_MULTI == error) {
+            // parser signalled "multiple pictures complete" — stop early, OK
+            return VBP_OK;
+        }
+        else if (0 != error)
+        {
+            ETRACE("Failed to process parsing result.");
+            return error;
+        }
+    }
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * create the parser context: allocate the context, load the codec parser
+ * library, allocate parser memory, initialize the bitstream utilities and
+ * the codec parser, and hand the context back via *ppcontext.
+ * On failure *ppcontext stays NULL and all partial state is released.
+ *
+ */
+uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext)
+{
+    uint32 error = VBP_OK;
+    vbp_context *pcontext = NULL;
+
+    /* prevention from the failure */
+    *ppcontext =  NULL;
+
+    pcontext = vbp_malloc_set0(vbp_context, 1);
+    if (NULL == pcontext)
+    {
+        error = VBP_MEM;
+        goto cleanup;
+    }
+
+    pcontext->parser_type = parser_type;
+
+    /* load parser, initialize parser operators and entry points */
+    error = vbp_utils_initialize_context(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    /* allocate parser context, persistent memory, query data and workload */
+    error = vbp_utils_allocate_parser_memory(pcontext);
+    if (VBP_OK != error)
+    {
+        goto cleanup;
+    }
+
+    viddec_pm_utils_bstream_init(&(pcontext->parser_cxt->getbits), NULL, 0);
+    pcontext->parser_cxt->cur_buf.list_index = -1;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    /* invoke the entry point to initialize the parser. */
+    pcontext->parser_ops->init(
+        (uint32_t *)pcontext->parser_cxt->codec_data,
+        (uint32_t *)pcontext->persist_mem,
+        FALSE);
+
+    /* set up to find the first start code. */
+    pcontext->parser_cxt->sc_prefix_info.first_sc_detect = 1;
+
+    /* indicates initialized OK. */
+    pcontext->identifier = MAGIC_NUMBER;
+    *ppcontext = pcontext;
+    error = VBP_OK;
+
+cleanup:
+
+    if (VBP_OK != error)
+    {
+        // all three calls are NULL-safe, so this is valid on every path
+        vbp_utils_free_parser_memory(pcontext);
+        vbp_utils_uninitialize_context(pcontext);
+        free(pcontext);
+        pcontext = NULL;
+    }
+
+    return error;
+}
+
+/**
+ *
+ * destroy the context: release parser memory, unload the parser library,
+ * and free the context itself. Always returns VBP_OK.
+ *
+ */
+uint32 vbp_utils_destroy_context(vbp_context *pcontext)
+{
+    /* entry point, not need to validate input parameters. */
+    vbp_utils_free_parser_memory(pcontext);
+    vbp_utils_uninitialize_context(pcontext);
+    free(pcontext);
+    // NOTE(review): nulling the local parameter has no effect on the
+    // caller's pointer; callers must not reuse their pointer after this.
+    pcontext = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ * parse the sample buffer or parser configuration data.
+ * data/size describe the ES buffer; init_data_flag nonzero means codec
+ * configuration data. Returns the result of vbp_utils_parse_es_buffer.
+ *
+ */
+uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size,  uint8 init_data_flag)
+{
+    /* entry point, not need to validate input parameters. */
+
+    uint32 error = VBP_OK;
+
+    //ITRACE("buffer counter: %d",buffer_counter);
+
+    /* reset bit offset */
+    pcontext->parser_cxt->getbits.bstrm_buf.buf_bitoff = 0;
+
+
+    /* set up cubby. */
+    pcontext->parser_cxt->parse_cubby.buf = data;
+    pcontext->parser_cxt->parse_cubby.size = size;
+    pcontext->parser_cxt->parse_cubby.phase = 0;
+
+    error = vbp_utils_parse_es_buffer(pcontext, init_data_flag);
+
+    /* rolling count of buffers. */
+    if (0 == init_data_flag)
+    {
+        buffer_counter++;
+    }
+    return error;
+}
+
+/**
+ *
+ * provide query data back to the consumer: populate the format-specific
+ * query data and return it via *data (*data is NULL on failure).
+ *
+ */
+uint32 vbp_utils_query(vbp_context *pcontext, void **data)
+{
+    /* entry point, not need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_populate_query_data(pcontext);
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+
+/**
+ *
+ * flush parsing buffer. Delegates to the codec parser's optional flush
+ * entry point. Currently always succeeds.
+ *
+ */
+uint32 vbp_utils_flush(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    if (ops->flush != NULL) {
+        ops->flush((void *)cxt, (void *)&(cxt->codec_data[0]));
+    }
+    return VBP_OK;
+}
+
+
+#ifdef USE_AVC_SHORT_FORMAT
+/**
+ *
+ * update the parser with additional data (AVC short format only) via
+ * func_update_data, then return the query data to the consumer
+ * (*data is NULL on failure).
+ *
+ */
+uint32 vbp_utils_update(vbp_context *pcontext, void *newdata, uint32 size, void **data)
+{
+    /* entry point, not need to validate input parameters. */
+    uint32 error = VBP_OK;
+
+    error = pcontext->func_update_data(pcontext,newdata,size);
+
+    if (VBP_OK == error)
+    {
+        *data = pcontext->query_data;
+    }
+    else
+    {
+        *data = NULL;
+    }
+    return error;
+}
+#endif
diff --git a/mixvbp/vbp_manager/vbp_utils.h b/mixvbp/vbp_manager/vbp_utils.h
new file mode 100755
index 0000000..7761c26
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_utils.h
@@ -0,0 +1,140 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_UTILS_H
+#define VBP_UTILS_H
+
+#include "viddec_parser_ops.h"
+#include "viddec_pm_parse.h"
+#include "viddec_pm.h"
+#include "vbp_trace.h"
+#include <stdlib.h>
+
+#define MAGIC_NUMBER 0x0DEADBEEF
+#define MAX_WORKLOAD_ITEMS 1000
+
+/* maximum 256 slices per sample buffer */
+#define MAX_NUM_SLICES 256
+
+/* maximum two pictures per sample buffer */
+#define MAX_NUM_PICTURES 2
+
+
/*
 * Allocation helpers.  The count argument is parenthesized so expressions
 * such as "n + 1" multiply correctly inside the expansion.
 * vbp_malloc does NOT zero the memory; vbp_malloc_set0 does (it goes
 * through vbp_try_malloc0).
 */
#define vbp_malloc(struct_type, n_structs) \
    ((struct_type *) malloc(sizeof(struct_type) * (n_structs)))

#define vbp_malloc_set0(struct_type, n_structs) \
    ((struct_type *) vbp_try_malloc0(sizeof(struct_type) * (n_structs)))
+
+
+
+extern uint32 viddec_parse_sc(void *in, void *pcxt, void *sc_state);
+
+/* rolling counter of sample buffer */
+extern uint32 buffer_counter;
+
typedef struct vbp_context_t vbp_context;

/* Per-format entry points.  Each format specific parser module
 * (vbp_vc1_parser.c, vbp_h264_parser.c, ...) installs its own
 * implementations into vbp_context at context creation time. */
typedef uint32 (*function_init_parser_entries)(vbp_context* cxt);
typedef uint32 (*function_allocate_query_data)(vbp_context* cxt);
typedef uint32 (*function_free_query_data)(vbp_context* cxt);
typedef uint32 (*function_parse_init_data)(vbp_context* cxt);
typedef uint32 (*function_parse_start_code)(vbp_context* cxt);
typedef uint32 (*function_process_parsing_result)(vbp_context* cxt, int i);
typedef uint32 (*function_populate_query_data)(vbp_context* cxt);
#ifdef USE_AVC_SHORT_FORMAT
typedef uint32 (*function_update_data)(vbp_context* cxt, void *newdata, uint32 size);
#endif

struct vbp_context_t
{
    /* magic number (MAGIC_NUMBER), used to validate the handle */
    uint32 identifier;

    /* parser type, eg, MPEG-2, MPEG-4, H.264, VC1 */
    uint32 parser_type;

    /* handle to parser (shared object), as returned by dlopen */
    void *fd_parser;

    /* parser (shared object) entry points */
    viddec_parser_ops_t *parser_ops;

    /* parser context */
    viddec_pm_cxt_t *parser_cxt;

    /* work load */
    viddec_workload_t *workload1, *workload2;

    /* persistent memory for parser */
    uint32 *persist_mem;

    /* format specific query data, returned to the consumer by
     * vbp_utils_query(); concrete type depends on parser_type
     * (e.g. vbp_data_vc1 for VC-1) */
    void *query_data;

    /* parser type specific data*/
    void *parser_private;

    /* format specific hooks, see typedefs above */
    function_init_parser_entries func_init_parser_entries;
    function_allocate_query_data func_allocate_query_data;
    function_free_query_data func_free_query_data;
    function_parse_init_data func_parse_init_data;
    function_parse_start_code func_parse_start_code;
    function_process_parsing_result func_process_parsing_result;
    function_populate_query_data func_populate_query_data;
#ifdef USE_AVC_SHORT_FORMAT
    function_update_data func_update_data;
#endif
};
+
+
/* malloc that zero-fills the allocation; returns NULL on failure */
void* vbp_try_malloc0(uint32 size);

/**
 * create VBP context
 */
uint32 vbp_utils_create_context(uint32 parser_type, vbp_context **ppcontext);

/*
 * destroy VBP context
 */
uint32 vbp_utils_destroy_context(vbp_context *pcontext);

/*
 * parse bitstream; init_data_flag non-zero means the buffer carries codec
 * initialization (configuration) data rather than elementary stream data
 */
uint32 vbp_utils_parse_buffer(vbp_context *pcontext, uint8 *data, uint32 size, uint8 init_data_flag);

/*
 * query parsing result
 */
uint32 vbp_utils_query(vbp_context *pcontext, void **data);

/*
 * flush un-parsed bitstream
 */
uint32 vbp_utils_flush(vbp_context *pcontext);
+
+#endif /* VBP_UTILS_H */
diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.c b/mixvbp/vbp_manager/vbp_vc1_parser.c
new file mode 100755
index 0000000..4a8d6d3
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vc1_parser.c
@@ -0,0 +1,1126 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009, 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <dlfcn.h>
+#include <string.h>
+
+#include "vc1.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vc1_parser.h"
+
+/* maximum number of Macroblock divided by 2, see va.h */
+#define MAX_BITPLANE_SIZE 16384
+
+/* Start code prefix is 001 which is 3 bytes. */
+#define PREFIX_SIZE 3
+
/* Maps a BFRACTION numerator/denominator pair to the fraction index
 * assigned to pic_parms->b_picture_fraction (see vbp_map_bfraction).
 * Zero entries mark numerator/denominator combinations that are not
 * valid BFRACTION values. */
static uint32 b_fraction_table[][9] = {
    /* num       0  1  2  3  4  5   6   7   8   den */
    /* 0 */    { 0, 0, 0, 0, 0, 0,  0,  0,  0 },
    /* 1 */    { 0, 0, 0, 1, 3, 5,  9, 11, 17 },
    /* 2 */    { 0, 0, 0, 2, 0, 6,  0, 12,  0 },
    /* 3 */    { 0, 0, 0, 0, 4, 7,  0, 13, 18 },
    /* 4 */    { 0, 0, 0, 0, 0, 8,  0, 14,  0 },
    /* 5 */    { 0, 0, 0, 0, 0, 0, 10, 15, 19 },
    /* 6 */    { 0, 0, 0, 0, 0, 0,  0, 16,  0 },
    /* 7 */    { 0, 0, 0, 0, 0, 0,  0,  0, 20 }
};
+
+
/* {horizontal, vertical} pixel aspect ratio for ASPECT_RATIO codes
 * 0..13; code 14 is reserved (last entry), code 15 means an explicit
 * ASPECT_HORIZ_SIZE/ASPECT_VERT_SIZE pair follows in the bitstream
 * (handled separately in vbp_populate_query_data_vc1). */
static uint8 vc1_aspect_ratio_table[][2] =
{
    {0, 0},
    {1, 1},
    {12, 11},
    {10, 11},
    {16, 11},
    {40, 33},
    {24, 11},
    {20, 11},
    {32, 11},
    {80, 33},
    {18, 11},
    {15, 11},
    {64, 33},
    {160, 99},

    // reserved
    {0, 0}
};
+
+
+
+/**
+ * set parser entry points
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vc1_init");
+    if (NULL == pcontext->parser_ops->init)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->parse_sc = viddec_parse_sc;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vc1_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vc1_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_wkld_done = dlsym(pcontext->fd_parser, "viddec_vc1_wkld_done");
+    if (NULL == pcontext->parser_ops->is_wkld_done)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    pcontext->parser_ops->is_frame_start = dlsym(pcontext->fd_parser, "viddec_vc1_is_start_frame");
+    if (NULL == pcontext->parser_ops->is_frame_start)
+    {
+        ETRACE ("Failed to set entry point.");
+        return VBP_LOAD;
+    }
+
+    /* entry point not needed */
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+}
+
+/**
+ * allocate query data structure
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        /* impossible, just sanity check */
+        return VBP_PARM;
+    }
+
+    pcontext->query_data = NULL;
+
+    vbp_data_vc1 *query_data = NULL;
+    query_data = vbp_malloc_set0(vbp_data_vc1, 1);
+    if (NULL == query_data)
+    {
+        return VBP_MEM;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->se_data = vbp_malloc_set0(vbp_codec_data_vc1, 1);
+    if (NULL == query_data->se_data)
+    {
+        goto cleanup;
+    }
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vc1, MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVC1, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].packed_bitplanes = vbp_try_malloc0(MAX_BITPLANE_SIZE);
+        if (NULL == query_data->pic_data[i].packed_bitplanes)
+        {
+            goto cleanup;
+        }
+
+        query_data->pic_data[i].slc_data = vbp_try_malloc0(MAX_NUM_SLICES * sizeof(vbp_slice_data_vc1));
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vc1(pcontext);
+
+    return VBP_MEM;
+}
+
+
+/**
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext)
+{
+    vbp_data_vc1 *query_data = NULL;
+
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].slc_data);
+            free(query_data->pic_data[i].packed_bitplanes);
+            free(query_data->pic_data[i].pic_parms);
+        }
+    }
+
+    free(query_data->pic_data);
+
+    free(query_data->se_data);
+
+    free(query_data);
+
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
/**
 * We want to create a list of buffer segments where each segment is a start
 * code followed by all the data up to the next start code or to the end of
 * the buffer.  In VC-1, it is common to get buffers with no start codes.  The
 * parser proper, doesn't really handle the situation where there are no SCs.
 * In this case, I will bypass the stripping of the SC code and assume a frame.
 *
 * Output: cxt->list is filled with {stpos, edpos} segment boundaries
 * relative to cxt->parse_cubby.buf.  Always returns VBP_OK.
 */
static uint32 vbp_parse_start_code_helper_vc1(
    viddec_pm_cxt_t *cxt,
    viddec_parser_ops_t *ops,
    int init_data_flag)
{
    uint32_t ret = VBP_OK;
    viddec_sc_parse_cubby_cxt_t cubby;

    /* make copy of cubby */
    /* this doesn't copy the buffer, merely the structure that holds the buffer */
    /* pointer.  Below, where we call parse_sc() the code starts the search for */
    /* SCs at the beginning of the buffer pointed to by the cubby, so in our */
    /* cubby copy we increment the pointer as we move through the buffer.  If */
    /* you think of each start code followed either by another start code or the */
    /* end of the buffer, then parse_sc() is returning information relative to */
    /* current segment. */

    cubby = cxt->parse_cubby;

    cxt->list.num_items = 0;
    cxt->list.data[0].stpos = 0;
    cxt->getbits.is_emul_reqd = 1;

    /* codec initialization data is always start code prefixed. (may not start at position 0)
     * sample buffer for AP has three start code patterns here:
     * pattern 0: no start code at all, the whole buffer is a single segment item
     * pattern 1: start codes for all segment items
     * pattern 2: no start code for the first segment item, start codes for the rest segment items
     */

    bool is_pattern_two = FALSE;

    unsigned char start_code = 0;

    while (1)
    {
        /* parse the created buffer for sc; ret == 1 means a start code
         * was found in the current cubby window */
        ret = ops->parse_sc((void *)&cubby, (void *)&(cxt->codec_data[0]), &(cxt->sc_prefix_info));
        if (ret == 1)
        {
            cubby.phase = 0;
            /* byte right after the 001 prefix is the start code type */
            start_code = *(unsigned char*)(cubby.buf + cubby.sc_end_pos);
#if 1
            if (0 == init_data_flag &&
                    PREFIX_SIZE != cubby.sc_end_pos &&
                    0 == cxt->list.num_items)
            {
                /* buffer does not have start code at the beginning */
                vc1_viddec_parser_t *parser = NULL;
                vc1_metadata_t *seqLayerHeader = NULL;

                parser = (vc1_viddec_parser_t *)cxt->codec_data;
                seqLayerHeader = &(parser->info.metadata);
                if (1 == seqLayerHeader->INTERLACE)
                {
                    /* this is a hack for interlaced field coding */
                    /* handle field interlace coding. One sample contains two fields, where:
                     * the first field does not have start code prefix,
                     * the second field has start code prefix.
                     */
                    cxt->list.num_items = 1;
                    cxt->list.data[0].stpos = 0;
                    is_pattern_two = TRUE;
                }
            }
#endif
            if (cxt->list.num_items == 0)  /* found first SC. */
            {
                /* sc_end_pos gets us to the SC type.  We need to back up to the first zero */
                cxt->list.data[0].stpos = cubby.sc_end_pos - PREFIX_SIZE;
            }
            else
            {
                /* First we set the end position of the last segment. */
                /* Since the SC parser searches from SC type to SC type and the */
                /* sc_end_pos is relative to this segment only, we merely add */
                /* sc_end_pos to the start to find the end. */
                cxt->list.data[cxt->list.num_items - 1].edpos =
                    cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;

                /* Then we set the start position of the current segment. */
                /* So I need to subtract 1 ??? */
                cxt->list.data[cxt->list.num_items].stpos =
                    cxt->list.data[cxt->list.num_items - 1].edpos;

                if (is_pattern_two)
                {
                    cxt->list.data[cxt->list.num_items].stpos -= PREFIX_SIZE;
                    /* restore to normal pattern */
                    is_pattern_two = FALSE;
                }
            }
            /* We need to set up the cubby buffer for the next time through parse_sc(). */
            /* But even though we want the list to contain a segment as described */
            /* above, we want the cubby buffer to start just past the prefix, or it will */
            /* find the same SC again.  So I bump the cubby buffer past the prefix. */
            cubby.buf = /*cubby.buf +*/
                cxt->parse_cubby.buf +
                cxt->list.data[cxt->list.num_items].stpos +
                PREFIX_SIZE;

            cubby.size = cxt->parse_cubby.size -
                         cxt->list.data[cxt->list.num_items].stpos -
                         PREFIX_SIZE;

            if (start_code >= 0x0A && start_code <= 0x0F)
            {
                /* only put known start code to the list
                 * 0x0A: end of sequence
                 * 0x0B: slice header
                 * 0x0C: frame header
                 * 0x0D: field header
                 * 0x0E: entry point header
                 * 0x0F: sequence header
                 */
                cxt->list.num_items++;
            }
            else
            {
                ITRACE("skipping unknown start code :%d", start_code);
            }

            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
            {
                WTRACE("Num items exceeds the limit!");
                /* not fatal, just stop parsing */
                break;
            }
        }
        else
        {
            /* we get here, if we reach the end of the buffer while looking or a SC. */
            /* If we never found a SC, then num_items will never get incremented. */
            if (cxt->list.num_items == 0)
            {
                /* If we don't find a SC we probably still have a frame of data. */
                /* So let's bump the num_items or else later we will not parse the */
                /* frame.   */
                cxt->list.num_items = 1;
            }
            /* now we can set the end position of the last segment. */
            cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
            break;
        }
    }
    return VBP_OK;
}
+
+/*
+* parse initialization data (decoder configuration data)
+* for VC1 advanced profile, data is sequence header and
+* entry pointer header.
+* for VC1 main/simple profile, data format
+* is defined in VC1 spec: Annex J, (Decoder initialization metadata
+* structure 1 and structure 3
+*/
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext)
+{
+    /**
+    * init data (aka decoder configuration data) must
+    * be start-code prefixed
+    */
+
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+    return vbp_parse_start_code_helper_vc1(cxt, ops, 1);
+}
+
+
+
+/**
+* Parse start codes, VC1 main/simple profile does not have start code;
+* VC1 advanced may not have start code either.
+*/
+uint32_t vbp_parse_start_code_vc1(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    viddec_parser_ops_t *ops = pcontext->parser_ops;
+
+    vc1_viddec_parser_t *parser = NULL;
+    vc1_metadata_t *seqLayerHeader = NULL;
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *) pcontext->query_data;
+
+    /* Reset query data for the new sample buffer */
+    int i = 0;
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->num_pictures = 0;
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].picture_is_skipped = 0;
+    }
+
+    parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    seqLayerHeader = &(parser->info.metadata);
+
+
+    /* WMV codec data will have a start code, but the WMV picture data won't. */
+    if (VC1_PROFILE_ADVANCED == seqLayerHeader->PROFILE)
+    {
+        return vbp_parse_start_code_helper_vc1(cxt, ops, 0);
+    }
+    else
+    {
+        /* WMV: vc1 simple or main profile. No start code present. */
+
+        /* must set is_emul_reqd to 0! */
+        cxt->getbits.is_emul_reqd = 0;
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+    return VBP_OK;
+}
+
+
+/**
+ *
+ */
+static inline uint8 vbp_get_bit_vc1(uint32 *data, uint32 *current_word, uint32 *current_bit)
+{
+    uint8 value;
+
+    value = (data[*current_word] >> *current_bit) & 1;
+
+    /* Fix up bit/byte offsets.  endianess?? */
+    if (*current_bit < 31)
+    {
+        ++(*current_bit);
+    }
+    else
+    {
+        ++(*current_word);
+        *current_bit = 0;
+    }
+
+    return value;
+}
+
+
+/**
+ *
+ */
+static uint32 vbp_pack_bitplane_vc1(
+    uint32 *from_plane,
+    uint8 *to_plane,
+    uint32 width,
+    uint32 height,
+    uint32 nibble_shift)
+{
+    uint32 error = VBP_OK;
+    uint32 current_word = 0;
+    uint32 current_bit = 0;  /* must agree with number in vbp_get_bit_vc1 */
+    uint32 i, j, n;
+    uint8 value;
+    uint32 stride = 0;
+
+    stride = 32 * ((width + 31) / 32);
+
+    for (i = 0, n = 0; i < height; i++)
+    {
+        for (j = 0; j < stride; j++)
+        {
+            if (j < width)
+            {
+                value = vbp_get_bit_vc1(
+                            from_plane,
+                            &current_word,
+                            &current_bit);
+
+                to_plane[n / 2] |= value << (nibble_shift + ((n % 2) ? 0 : 4));
+                n++;
+            }
+            else
+            {
+                break;
+            }
+        }
+        if (stride > width)
+        {
+            current_word++;
+            current_bit = 0;
+        }
+    }
+
+    return error;
+}
+
+
+/**
+ *
+ */
+static inline uint32 vbp_map_bfraction(uint32 numerator, uint32 denominator)
+{
+    uint32 b_fraction = 0;
+
+    if ((numerator < 8) && (denominator < 9))
+    {
+        b_fraction = b_fraction_table[numerator][denominator];
+    }
+
+    return b_fraction;
+}
+
/**
 * Pack the raw bit-planes decoded for the current picture into the
 * nibble-packed layout stored in pic_data->packed_bitplanes, selecting
 * which planes apply based on the picture type.
 * index: picture index within the sample buffer (not used in this body).
 * Returns VBP_OK, or VBP_TYPE if a bit-plane inconsistent with the
 * picture type is present.
 */
static uint32 vbp_pack_bitplanes_vc1(
    vbp_context *pcontext,
    int index,
    vbp_picture_data_vc1* pic_data)
{
    uint32 error = VBP_OK;
    if (0 == pic_data->pic_parms->bitplane_present.value)
    {
        /* return if bitplane is not present */
        pic_data->size_bitplanes = 0;
        memset(pic_data->packed_bitplanes, 0, MAX_BITPLANE_SIZE);
        return error;
    }

    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);


    /* set bit plane size: one nibble per macroblock, two per byte */
    pic_data->size_bitplanes = ((seqLayerHeader->widthMB * seqLayerHeader->heightMB) + 1) / 2;


    memset(pic_data->packed_bitplanes, 0, pic_data->size_bitplanes);

    /* see libva library va.h for nibble bit */
    switch (picLayerHeader->PTYPE)
    {
    case VC1_I_FRAME:
    case VC1_BI_FRAME:
        if (picLayerHeader->OVERFLAGS.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->OVERFLAGS.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                2);
        }
        if (picLayerHeader->ACPRED.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->ACPRED.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                1);
        }
        if (picLayerHeader->FIELDTX.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->FIELDTX.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                0);
        }
        /* sanity check: these planes must not appear in I/BI pictures */
        if (picLayerHeader->MVTYPEMB.imode ||
                picLayerHeader->DIRECTMB.imode ||
                picLayerHeader->SKIPMB.imode ||
                picLayerHeader->FORWARDMB.imode)
        {
            ETRACE("Unexpected bit-plane type.");
            error = VBP_TYPE;
        }
        break;

    case VC1_P_FRAME:
        if (picLayerHeader->MVTYPEMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->MVTYPEMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                2);
        }
        if (picLayerHeader->SKIPMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->SKIPMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                1);
        }
        if (picLayerHeader->DIRECTMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->DIRECTMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                0);
        }
        /* sanity check: these planes must not appear in P pictures */
        if (picLayerHeader->FIELDTX.imode ||
                picLayerHeader->FORWARDMB.imode ||
                picLayerHeader->ACPRED.imode ||
                picLayerHeader->OVERFLAGS.imode )
        {
            ETRACE("Unexpected bit-plane type.");
            error = VBP_TYPE;
        }
        break;

    case VC1_B_FRAME:
        if (picLayerHeader->FORWARDMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->FORWARDMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                2);
        }
        if (picLayerHeader->SKIPMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->SKIPMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                1);
        }
        if (picLayerHeader->DIRECTMB.imode)
        {
            vbp_pack_bitplane_vc1(
                picLayerHeader->DIRECTMB.databits,
                pic_data->packed_bitplanes,
                seqLayerHeader->widthMB,
                seqLayerHeader->heightMB,
                0);
        }
        /* sanity check: these planes must not appear in B pictures */
        if (picLayerHeader->MVTYPEMB.imode ||
                picLayerHeader->FIELDTX.imode ||
                picLayerHeader->ACPRED.imode ||
                picLayerHeader->OVERFLAGS.imode)
        {
            ETRACE("Unexpected bit-plane type.");
            error = VBP_TYPE;
        }
        break;
    }
    /* no default: other PTYPE values (e.g. skipped pictures) carry no
     * bit-planes and fall through with error == VBP_OK */
    return error;
}
+
+
+/**
+ * fill the query data structure after sequence header, entry point header
+ * or a complete frame is parsed.
+ * NOTE: currently partial frame is not handled properly
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)pcontext->parser_cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    /* first we get the SH/EP data.  Can we cut down on this? */
+    vbp_codec_data_vc1 *se_data = query_data->se_data;
+
+
+    uint32_t curHrdNum = seqLayerHeader->HRD_NUM_LEAKY_BUCKETS;
+
+    se_data->bit_rate = curHrdNum ?
+                        seqLayerHeader->hrd_initial_state.sLeakyBucket[curHrdNum -1].HRD_RATE :
+                        seqLayerHeader->hrd_initial_state.sLeakyBucket[0].HRD_RATE;
+
+    se_data->PROFILE = seqLayerHeader->PROFILE;
+    se_data->LEVEL = seqLayerHeader->LEVEL;
+    se_data->POSTPROCFLAG = seqLayerHeader->POSTPROCFLAG;
+    se_data->PULLDOWN = seqLayerHeader->PULLDOWN;
+    se_data->INTERLACE = seqLayerHeader->INTERLACE;
+    se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG;
+    se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG;
+    se_data->PSF = seqLayerHeader->PSF;
+
+    // color matrix
+    if (seqLayerHeader->COLOR_FORMAT_FLAG)
+    {
+        se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF;
+    }
+    else
+    {
+        //ITU-R BT. 601-5.
+        se_data->MATRIX_COEF = 6;
+    }
+
+    // aspect ratio
+    if (seqLayerHeader->ASPECT_RATIO_FLAG == 1)
+    {
+        se_data->ASPECT_RATIO = seqLayerHeader->ASPECT_RATIO;
+        if (se_data->ASPECT_RATIO < 14)
+        {
+            se_data->ASPECT_HORIZ_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][0];
+            se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1];
+        }
+        else if (se_data->ASPECT_RATIO == 15)
+        {
+            se_data->ASPECT_HORIZ_SIZE = seqLayerHeader->ASPECT_HORIZ_SIZE;
+            se_data->ASPECT_VERT_SIZE = seqLayerHeader->ASPECT_VERT_SIZE;
+        }
+        else  // se_data->ASPECT_RATIO == 14
+        {
+            se_data->ASPECT_HORIZ_SIZE = 0;
+            se_data->ASPECT_VERT_SIZE = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        se_data->ASPECT_RATIO = 0;
+        se_data->ASPECT_HORIZ_SIZE = 0;
+        se_data->ASPECT_VERT_SIZE = 0;
+    }
+
+    se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK;
+    se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY;
+    se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG;
+    se_data->REFDIST_FLAG = seqLayerHeader->REFDIST_FLAG;
+    se_data->LOOPFILTER = seqLayerHeader->LOOPFILTER;
+    se_data->FASTUVMC = seqLayerHeader->FASTUVMC;
+    se_data->EXTENDED_MV = seqLayerHeader->EXTENDED_MV;
+    se_data->DQUANT = seqLayerHeader->DQUANT;
+    se_data->VSTRANSFORM = seqLayerHeader->VSTRANSFORM;
+    se_data->OVERLAP = seqLayerHeader->OVERLAP;
+    se_data->QUANTIZER = seqLayerHeader->QUANTIZER;
+    se_data->CODED_WIDTH = (seqLayerHeader->width + 1) << 1;
+    se_data->CODED_HEIGHT = (seqLayerHeader->height + 1) << 1;
+    se_data->EXTENDED_DMV = seqLayerHeader->EXTENDED_DMV;
+    se_data->RANGE_MAPY_FLAG = seqLayerHeader->RANGE_MAPY_FLAG;
+    se_data->RANGE_MAPY = seqLayerHeader->RANGE_MAPY;
+    se_data->RANGE_MAPUV_FLAG = seqLayerHeader->RANGE_MAPUV_FLAG;
+    se_data->RANGE_MAPUV = seqLayerHeader->RANGE_MAPUV;
+    se_data->RANGERED = seqLayerHeader->RANGERED;
+    se_data->MAXBFRAMES = seqLayerHeader->MAXBFRAMES;
+    se_data->MULTIRES = seqLayerHeader->MULTIRES;
+    se_data->SYNCMARKER = seqLayerHeader->SYNCMARKER;
+    se_data->RNDCTRL = seqLayerHeader->RNDCTRL;
+    se_data->REFDIST = seqLayerHeader->REFDIST;
+    se_data->widthMB = seqLayerHeader->widthMB;
+    se_data->heightMB = seqLayerHeader->heightMB;
+    se_data->INTCOMPFIELD = seqLayerHeader->INTCOMPFIELD;
+    se_data->LUMSCALE2 = seqLayerHeader->LUMSCALE2;
+    se_data->LUMSHIFT2 = seqLayerHeader->LUMSHIFT2;
+
+    /* update buffer number */
+    query_data->buf_number = buffer_counter;
+
+    if (query_data->num_pictures > 2)
+    {
+        WTRACE("sampe buffer contains %d pictures", query_data->num_pictures);
+    }
+    return error;
+}
+
+
+
+/**
+ * Translate the parsed VC-1 sequence, entry-point and picture layer
+ * headers into the VAPictureParameterBufferVC1 for the current picture.
+ * Only pic_data->pic_parms (and picture_is_skipped for skipped frames)
+ * is written here; bitplanes and slices are packed by separate helpers.
+ */
+static void vbp_pack_picture_params_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    vc1_metadata_t *seqLayerHeader = &(parser->info.metadata);
+    vc1_PictureLayerHeader *picLayerHeader = &(parser->info.picLayerHeader);
+
+
+    VAPictureParameterBufferVC1 *pic_parms = pic_data->pic_parms;
+
+    /* Then we get the picture header data.  Picture type need translation. */
+    /* Reference surfaces are resolved by the decoder later; start invalid. */
+    pic_parms->forward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->backward_reference_picture = VA_INVALID_SURFACE;
+    pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
+
+    /* Sequence-layer flags copied straight from the parsed metadata. */
+    pic_parms->sequence_fields.value = 0;
+    pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN;
+    pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
+    pic_parms->sequence_fields.bits.tfcntrflag =  seqLayerHeader->TFCNTRFLAG;
+    pic_parms->sequence_fields.bits.finterpflag = seqLayerHeader->FINTERPFLAG;
+    pic_parms->sequence_fields.bits.psf = seqLayerHeader->PSF;
+    pic_parms->sequence_fields.bits.multires = seqLayerHeader->MULTIRES;
+    pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
+    pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
+    pic_parms->sequence_fields.bits.rangered = seqLayerHeader->RANGERED;
+    pic_parms->sequence_fields.bits.max_b_frames = seqLayerHeader->MAXBFRAMES;
+
+    /* Coded size is stored divided by two minus one in the sequence header. */
+    pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
+    pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;
+
+    pic_parms->entrypoint_fields.value = 0;
+    pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
+    pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
+    pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;
+    pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG;
+
+    pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
+    pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;
+
+    pic_parms->range_mapping_fields.value = 0;
+    pic_parms->range_mapping_fields.bits.luma_flag = seqLayerHeader->RANGE_MAPY_FLAG;
+    pic_parms->range_mapping_fields.bits.luma = seqLayerHeader->RANGE_MAPY;
+    pic_parms->range_mapping_fields.bits.chroma_flag = seqLayerHeader->RANGE_MAPUV_FLAG;
+    pic_parms->range_mapping_fields.bits.chroma = seqLayerHeader->RANGE_MAPUV;
+
+    pic_parms->b_picture_fraction =
+        vbp_map_bfraction(picLayerHeader->BFRACTION_NUM, picLayerHeader->BFRACTION_DEN);
+
+    pic_parms->cbp_table = picLayerHeader->CBPTAB;
+    pic_parms->mb_mode_table = picLayerHeader->MBMODETAB;
+    pic_parms->range_reduction_frame = picLayerHeader->RANGEREDFRM;
+    pic_parms->rounding_control = picLayerHeader->RNDCTRL;
+    pic_parms->post_processing = picLayerHeader->POSTPROC;
+    /* fix this.  Add RESPIC to parser.  */
+    pic_parms->picture_resolution_index = 0;
+    pic_parms->luma_scale = picLayerHeader->LUMSCALE;
+    pic_parms->luma_shift = picLayerHeader->LUMSHIFT;
+
+    /* Translate the parser's PTYPE into the VA picture type; skipped
+     * frames are flagged on pic_data instead of the picture fields. */
+    pic_parms->picture_fields.value = 0;
+    switch (picLayerHeader->PTYPE)
+    {
+    case VC1_I_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_I;
+        break;
+
+    case VC1_P_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_P;
+        break;
+
+    case VC1_B_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_B;
+        break;
+
+    case VC1_BI_FRAME:
+        pic_parms->picture_fields.bits.picture_type = VC1_PTYPE_BI;
+        break;
+
+    case VC1_SKIPPED_FRAME:
+        pic_data->picture_is_skipped = VC1_PTYPE_SKIPPED;
+        break;
+
+    default:
+        // TODO: handle this case
+        break;
+    }
+    pic_parms->picture_fields.bits.frame_coding_mode = picLayerHeader->FCM;
+    if (0 == seqLayerHeader->PROFILE || 1 == seqLayerHeader->PROFILE)
+    {
+        /* simple or main profile, top field flag is not present, default to 1.*/
+        pic_parms->picture_fields.bits.top_field_first = 1;
+    }
+    else
+    {
+        pic_parms->picture_fields.bits.top_field_first = picLayerHeader->TFF;
+    }
+
+    pic_parms->picture_fields.bits.is_first_field = !(picLayerHeader->CurrField);
+    /* This seems to be set based on the MVMODE and MVMODE2 syntax. */
+    /* This is a hack.  Probably will need refining. */
+    if ((VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE) ||
+            (VC1_MVMODE_INTENSCOMP == picLayerHeader->MVMODE2))
+    {
+        pic_parms->picture_fields.bits.intensity_compensation = 1;
+    }
+    else
+    {
+        pic_parms->picture_fields.bits.intensity_compensation = picLayerHeader->INTCOMP;
+    }
+
+    /* Lets store the raw-mode BP bits. */
+    pic_parms->raw_coding.value = 0;
+    pic_parms->raw_coding.flags.mv_type_mb = picLayerHeader->raw_MVTYPEMB;
+    pic_parms->raw_coding.flags.direct_mb = picLayerHeader->raw_DIRECTMB;
+    pic_parms->raw_coding.flags.skip_mb = picLayerHeader->raw_SKIPMB;
+    pic_parms->raw_coding.flags.field_tx = picLayerHeader->raw_FIELDTX;
+    pic_parms->raw_coding.flags.forward_mb = picLayerHeader->raw_FORWARDMB;
+    pic_parms->raw_coding.flags.ac_pred = picLayerHeader->raw_ACPRED;
+    pic_parms->raw_coding.flags.overflags = picLayerHeader->raw_OVERFLAGS;
+
+    /* imode 1/0 indicates bitmap presence in Pic Hdr. */
+    /* A bitplane is "present" when either coded raw or carried as a bitmap
+     * (imode != 0); vbp_pack_bitplanes_vc1 relies on these flags. */
+    pic_parms->bitplane_present.value = 0;
+
+    pic_parms->bitplane_present.flags.bp_mv_type_mb =
+        pic_parms->raw_coding.flags.mv_type_mb ? 1 :
+        (picLayerHeader->MVTYPEMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_direct_mb =
+        pic_parms->raw_coding.flags.direct_mb ? 1 :
+        (picLayerHeader->DIRECTMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_skip_mb =
+        pic_parms->raw_coding.flags.skip_mb ? 1 :
+        (picLayerHeader->SKIPMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_field_tx =
+        pic_parms->raw_coding.flags.field_tx ? 1 :
+        (picLayerHeader->FIELDTX.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_forward_mb =
+        pic_parms->raw_coding.flags.forward_mb ? 1 :
+        (picLayerHeader->FORWARDMB.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_ac_pred =
+        pic_parms->raw_coding.flags.ac_pred ? 1 :
+        (picLayerHeader->ACPRED.imode ? 1: 0);
+
+    pic_parms->bitplane_present.flags.bp_overflags =
+        pic_parms->raw_coding.flags.overflags ? 1 :
+        (picLayerHeader->OVERFLAGS.imode ? 1: 0);
+
+    pic_parms->reference_fields.value = 0;
+    pic_parms->reference_fields.bits.reference_distance_flag =
+        seqLayerHeader->REFDIST_FLAG;
+
+    pic_parms->reference_fields.bits.reference_distance =
+        seqLayerHeader->REFDIST;
+
+    pic_parms->reference_fields.bits.num_reference_pictures =
+        picLayerHeader->NUMREF;
+
+    pic_parms->reference_fields.bits.reference_field_pic_indicator =
+        picLayerHeader->REFFIELD;
+
+    /* Motion-vector mode and range parameters. */
+    pic_parms->mv_fields.value = 0;
+    pic_parms->mv_fields.bits.mv_mode = picLayerHeader->MVMODE;
+    pic_parms->mv_fields.bits.mv_mode2 = picLayerHeader->MVMODE2;
+
+    pic_parms->mv_fields.bits.mv_table = picLayerHeader->MVTAB;
+    pic_parms->mv_fields.bits.two_mv_block_pattern_table = picLayerHeader->MV2BPTAB;
+    pic_parms->mv_fields.bits.four_mv_switch = picLayerHeader->MV4SWITCH;
+    pic_parms->mv_fields.bits.four_mv_block_pattern_table = picLayerHeader->MV4BPTAB;
+    pic_parms->mv_fields.bits.extended_mv_flag = seqLayerHeader->EXTENDED_MV;
+    pic_parms->mv_fields.bits.extended_mv_range = picLayerHeader->MVRANGE;
+    pic_parms->mv_fields.bits.extended_dmv_flag = seqLayerHeader->EXTENDED_DMV;
+    pic_parms->mv_fields.bits.extended_dmv_range = picLayerHeader->DMVRANGE;
+
+    /* Quantizer parameters. */
+    pic_parms->pic_quantizer_fields.value = 0;
+    pic_parms->pic_quantizer_fields.bits.dquant = seqLayerHeader->DQUANT;
+    pic_parms->pic_quantizer_fields.bits.quantizer = seqLayerHeader->QUANTIZER;
+    pic_parms->pic_quantizer_fields.bits.half_qp = picLayerHeader->HALFQP;
+    pic_parms->pic_quantizer_fields.bits.pic_quantizer_scale = picLayerHeader->PQUANT;
+    pic_parms->pic_quantizer_fields.bits.pic_quantizer_type = picLayerHeader->UniformQuant;
+    pic_parms->pic_quantizer_fields.bits.dq_frame = picLayerHeader->DQUANTFRM;
+    pic_parms->pic_quantizer_fields.bits.dq_profile = picLayerHeader->DQPROFILE;
+    pic_parms->pic_quantizer_fields.bits.dq_sb_edge = picLayerHeader->DQSBEDGE;
+    pic_parms->pic_quantizer_fields.bits.dq_db_edge = picLayerHeader->DQDBEDGE;
+    pic_parms->pic_quantizer_fields.bits.dq_binary_level = picLayerHeader->DQBILEVEL;
+    pic_parms->pic_quantizer_fields.bits.alt_pic_quantizer = picLayerHeader->ALTPQUANT;
+
+    /* Transform parameters; TRANSACFRM/TRANSACFRM2 are 1-based when set. */
+    pic_parms->transform_fields.value = 0;
+    pic_parms->transform_fields.bits.variable_sized_transform_flag =
+        seqLayerHeader->VSTRANSFORM;
+
+    pic_parms->transform_fields.bits.mb_level_transform_type_flag = picLayerHeader->TTMBF;
+    pic_parms->transform_fields.bits.frame_level_transform_type = picLayerHeader->TTFRM;
+
+    pic_parms->transform_fields.bits.transform_ac_codingset_idx1 =
+        (picLayerHeader->TRANSACFRM > 0) ? picLayerHeader->TRANSACFRM - 1 : 0;
+
+    pic_parms->transform_fields.bits.transform_ac_codingset_idx2 =
+        (picLayerHeader->TRANSACFRM2 > 0) ? picLayerHeader->TRANSACFRM2 - 1 : 0;
+
+    pic_parms->transform_fields.bits.intra_transform_dc_table = picLayerHeader->TRANSDCTAB;
+    pic_parms->sequence_fields.bits.profile = seqLayerHeader->PROFILE;
+}
+
+
+/* Append one VA slice parameter entry for parser list item 'index'. */
+static void vbp_pack_slice_data_vc1(
+    vbp_context *pcontext,
+    int index,
+    vbp_picture_data_vc1* pic_data)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 bit_pos;
+    uint32 byte_pos;
+    uint8 emul_flag;
+    uint32 data_size = cxt->list.data[index].edpos - cxt->list.data[index].stpos;
+
+    /* Current access-unit position: byte/bit offset of the first MB. */
+    viddec_pm_get_au_pos(cxt, &bit_pos, &byte_pos, &emul_flag);
+
+    vbp_slice_data_vc1 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+    VASliceParameterBufferVC1 *slc_parms = &(slc_data->slc_parms);
+
+    slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos;
+    slc_data->slice_size = data_size;
+    slc_data->slice_offset = 0;
+
+    slc_parms->slice_data_size = slc_data->slice_size;
+    slc_parms->slice_data_offset = 0;
+
+    /* TODO: support partial slices instead of always passing everything. */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    slc_parms->macroblock_offset = byte_pos * 8 + bit_pos;
+
+    /* TODO: derive slice_vertical_position from the bitstream. */
+    slc_parms->slice_vertical_position = pic_data->num_slices;
+
+    pic_data->num_slices++;
+}
+
+/**
+ * Process the parsing result for list item 'index'.
+ *
+ * Only frame, field and slice start codes are handled; anything else is
+ * ignored. A frame/field start code opens a new picture (packing picture
+ * parameters and bitplanes); every handled item then appends one slice.
+ *
+ * Returns VBP_OK on success, VBP_DATA when picture/slice limits are
+ * exceeded, or the error from bitplane packing.
+ */
+uint32_t vbp_process_parsing_result_vc1(vbp_context *pcontext, int index)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint32 error = VBP_OK;
+
+    vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)cxt->codec_data;
+    if (parser->start_code != VC1_SC_FRM &&
+        parser->start_code != VC1_SC_FLD &&
+        parser->start_code != VC1_SC_SLC)
+    {
+        /* only handle frame data, field data and slice data here */
+        return VBP_OK;
+    }
+    vbp_data_vc1 *query_data = (vbp_data_vc1 *)pcontext->query_data;
+
+    /* A frame or field start code opens a new picture. */
+    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+    {
+        query_data->num_pictures++;
+    }
+
+    if (query_data->num_pictures > MAX_NUM_PICTURES)
+    {
+        ETRACE("Num of pictures per sample buffer exceeds the limit (%d).", MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    if (query_data->num_pictures == 0)
+    {
+        ETRACE("Unexpected num of pictures.");
+        return VBP_DATA;
+    }
+
+    /* start packing data */
+    int picture_index = query_data->num_pictures - 1;
+    vbp_picture_data_vc1* pic_data = &(query_data->pic_data[picture_index]);
+
+    if (parser->start_code == VC1_SC_FRM || parser->start_code == VC1_SC_FLD)
+    {
+        /* setup picture parameter first*/
+        vbp_pack_picture_params_vc1(pcontext, index, pic_data);
+
+        /* setup bitplane after setting up picture parameter (so that bitplane_present is updated) */
+        error = vbp_pack_bitplanes_vc1(pcontext, index, pic_data);
+        if (VBP_OK != error)
+        {
+            ETRACE("Failed to pack bitplane.");
+            return error;
+        }
+    }
+
+    /* Always pack slice parameter. The first macroblock in the picture CANNOT
+     * be preceded by a slice header, so we will have first slice parsed always.
+     */
+    if (pic_data->num_slices >= MAX_NUM_SLICES)
+    {
+        ETRACE("Num of slices exceeds the limit (%d).", MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    /* set up slice parameter */
+    vbp_pack_slice_data_vc1(pcontext, index, pic_data);
+
+    return VBP_OK;
+}
diff --git a/mixvbp/vbp_manager/vbp_vc1_parser.h b/mixvbp/vbp_manager/vbp_vc1_parser.h
new file mode 100755
index 0000000..aec7a56
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vc1_parser.h
@@ -0,0 +1,70 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2009 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef VBP_VC1_PARSER_H
+#define VBP_VC1_PARSER_H
+
+
+/*
+ * setup parser's entry pointer
+ */
+uint32 vbp_init_parser_entries_vc1(vbp_context *pcontext);
+
+/*
+ * allocate query data structure - vbp_vc1_data
+ */
+uint32 vbp_allocate_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * free query data structure
+ */
+uint32 vbp_free_query_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream configuration data
+ */
+uint32 vbp_parse_init_data_vc1(vbp_context *pcontext);
+
+/*
+ * parse bitstream start code and fill the viddec_input_buffer_t list.
+ * WMV has no start code so the whole buffer will be treated as a single frame.
+ * For VC1 progressive, if start code is not found, the whole buffer will be treated as a
+ * single frame as well.
+ * For VC1 interlace, the first field is not start code prefixed, but the second field
+ * is always start code prefixed.
+ */
+uint32 vbp_parse_start_code_vc1(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_vc1(vbp_context *pcontext, int list_index);
+
+/*
+ * populate query data structure
+ */
+uint32 vbp_populate_query_data_vc1(vbp_context *pcontext);
+
+
+#endif /*VBP_VC1_PARSER_H*/
diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.c b/mixvbp/vbp_manager/vbp_vp8_parser.c
new file mode 100755
index 0000000..72dcfa9
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.c
@@ -0,0 +1,532 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include <dlfcn.h>
+
+#include "vp8.h"
+#include "vbp_loader.h"
+#include "vbp_utils.h"
+#include "vbp_vp8_parser.h"
+
+/* Resolve the VP8 parser entry points from the loaded plugin library.
+ * Returns VBP_PARM when no parser_ops table exists, VBP_LOAD when a
+ * required symbol cannot be resolved, VBP_OK otherwise. */
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->parser_ops)
+    {
+        return VBP_PARM;
+    }
+
+    pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_vp8_init");
+    if (NULL == pcontext->parser_ops->init)
+        goto load_error;
+
+    /* VP8 is length-delimited; no start-code scanner is installed. */
+    pcontext->parser_ops->parse_sc = NULL;
+
+    pcontext->parser_ops->parse_syntax = dlsym(pcontext->fd_parser, "viddec_vp8_parse");
+    if (NULL == pcontext->parser_ops->parse_syntax)
+        goto load_error;
+
+    pcontext->parser_ops->get_cxt_size = dlsym(pcontext->fd_parser, "viddec_vp8_get_context_size");
+    if (NULL == pcontext->parser_ops->get_cxt_size)
+        goto load_error;
+
+    /* Entry points not needed for VP8. */
+    pcontext->parser_ops->is_wkld_done = NULL;
+    pcontext->parser_ops->is_frame_start = NULL;
+    pcontext->parser_ops->flush = NULL;
+
+    return VBP_OK;
+
+load_error:
+    ETRACE ("Failed to set entry point." );
+    return VBP_LOAD;
+}
+
+/* Allocate the full VP8 query-data tree: per-picture parameter and slice
+ * buffers, codec data, probability table and IQ matrix. On any allocation
+ * failure the partially built tree is released via vbp_free_query_data_vp8
+ * and VBP_MEM is returned; VBP_PARM is returned if query data already
+ * exists. pcontext->query_data is assigned early on purpose so the
+ * cleanup path can find everything allocated so far. */
+uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext)
+{
+    if (NULL != pcontext->query_data)
+    {
+        return VBP_PARM;
+    }
+
+    vbp_data_vp8 *query_data = vbp_malloc_set0(vbp_data_vp8, 1);
+    if (NULL == query_data)
+    {
+        goto cleanup;
+    }
+
+    /* assign the pointer */
+    pcontext->query_data = (void *)query_data;
+
+    query_data->pic_data = vbp_malloc_set0(vbp_picture_data_vp8, VP8_MAX_NUM_PICTURES);
+    if (NULL == query_data->pic_data)
+    {
+        goto cleanup;
+    }
+
+    int i = 0;
+    for (i = 0; i < VP8_MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].pic_parms = vbp_malloc_set0(VAPictureParameterBufferVP8, 1);
+        if (NULL == query_data->pic_data[i].pic_parms)
+        {
+            goto cleanup;
+        }
+        query_data->pic_data[i].num_slices = 0;
+        query_data->pic_data[i].slc_data = vbp_malloc_set0(vbp_slice_data_vp8, VP8_MAX_NUM_SLICES);
+        if (NULL == query_data->pic_data[i].slc_data)
+        {
+            goto cleanup;
+        }
+    }
+
+    query_data->codec_data = vbp_malloc_set0(vbp_codec_data_vp8, 1);
+    if (NULL == query_data->codec_data)
+    {
+        goto cleanup;
+    }
+
+    query_data->prob_data = vbp_malloc_set0(VAProbabilityDataBufferVP8, 1);
+    if (NULL == query_data->prob_data)
+    {
+        goto cleanup;
+    }
+
+    query_data->IQ_matrix_buf = vbp_malloc_set0(VAIQMatrixBufferVP8, 1);
+    if (NULL == query_data->IQ_matrix_buf)
+    {
+        goto cleanup;
+    }
+
+    /* VP8 keeps no extra parser-private state. */
+    pcontext->parser_private = NULL;
+
+    return VBP_OK;
+
+cleanup:
+    vbp_free_query_data_vp8(pcontext);
+
+    return VBP_MEM;
+}
+
+/* Release the VP8 query data built by vbp_allocate_query_data_vp8.
+ * Safe to call on a partially constructed tree: free(NULL) is a no-op,
+ * so the redundant per-pointer NULL guards have been dropped. Pointers
+ * are reset to NULL to defend against double-free/use-after-free.
+ * Always returns VBP_OK. */
+uint32 vbp_free_query_data_vp8(vbp_context *pcontext)
+{
+    if (NULL == pcontext->query_data)
+    {
+        return VBP_OK;
+    }
+
+    vbp_data_vp8 *query_data = (vbp_data_vp8 *)pcontext->query_data;
+    if (query_data->pic_data)
+    {
+        int i = 0;
+        for (i = 0; i < VP8_MAX_NUM_PICTURES; i++)
+        {
+            free(query_data->pic_data[i].pic_parms);
+            query_data->pic_data[i].pic_parms = NULL;
+            free(query_data->pic_data[i].slc_data);
+            query_data->pic_data[i].slc_data = NULL;
+        }
+        free(query_data->pic_data);
+        query_data->pic_data = NULL;
+    }
+
+    free(query_data->codec_data);
+    query_data->codec_data = NULL;
+
+    free(query_data->prob_data);
+    query_data->prob_data = NULL;
+
+    free(query_data->IQ_matrix_buf);
+    query_data->IQ_matrix_buf = NULL;
+
+    free(query_data);
+    pcontext->query_data = NULL;
+
+    return VBP_OK;
+}
+
+
+/**
+ * Parse decoder configuration data.
+ *
+ * VP8 carries no out-of-band configuration, so there is nothing to do;
+ * this stub only exists to satisfy the common parser interface.
+ */
+uint32 vbp_parse_init_data_vp8(vbp_context* pcontext)
+{
+    (void)pcontext;  /* unused: VP8 has no init data */
+    return VBP_OK;
+}
+
+/* VP8 frames are length-delimited, so the whole parse cubby is treated
+ * as a single item; key frames get their 3-byte sync code validated. */
+uint32 vbp_parse_start_code_vp8(vbp_context *pcontext)
+{
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+    uint8 *data = cxt->parse_cubby.buf;
+    uint32 size = cxt->parse_cubby.size;
+
+    if (size < 3)
+    {
+        return VBP_DATA;
+    }
+
+    /* Key frames (low bit of the first frame-tag byte clear) carry the
+     * sync code 0x9d 0x01 0x2a at offset 3; check it when enough data
+     * is available. */
+    if ((size >= 10) && !(data[0] & 0x01))
+    {
+        uint8 *sync = data + 3;
+        if ((sync[0] != 0x9d) || (sync[1] != 0x01) || (sync[2] != 0x2a))
+        {
+            return VBP_PARM;
+        }
+    }
+
+    /* The buffer always yields exactly one list item. */
+    cxt->list.num_items = 1;
+
+    vbp_data_vp8 *query_data = (vbp_data_vp8*)pcontext->query_data;
+    query_data->num_pictures = 0;
+
+    return VBP_OK;
+}
+
+/**
+ * Process the parsing result after a frame is parsed: bump the decoded
+ * frame counter for key and inter frames; skipped frames are left alone.
+ * Always returns VBP_OK.
+ */
+uint32 vbp_process_parsing_result_vp8( vbp_context *pcontext, int i)
+{
+    vp8_viddec_parser *parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+
+    switch (parser->info.frame_tag.frame_type)
+    {
+    case KEY_FRAME:
+    case INTER_FRAME:
+        /* Both frame kinds produce decoded output. */
+        parser->info.decoded_frame_number++;
+        break;
+    case SKIPPED_FRAME:
+        WTRACE("This is skipped frame. We have done nothing.");
+        break;
+    default:
+        ETRACE("Unknown frame type %d", parser->info.frame_tag.frame_type);
+        break;
+    }
+
+    return VBP_OK;
+}
+
+/* Derive the per-segment quantizer indices for the VA IQ matrix buffer.
+ * Column 0 is the luma AC index (per segment when segmentation is on);
+ * columns 1-5 are fixed deltas applied to it. */
+static void vbp_add_quantization_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    VAIQMatrixBufferVP8 *IQ_buf = query_data->IQ_matrix_buf;
+    int seg;
+
+    for (seg = 0; seg < MAX_MB_SEGMENTS; seg++)
+    {
+        int q;
+        if (!pi->Segmentation.Enabled)
+        {
+            q = pi->Quantization.Y1_AC;
+        }
+        else if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta)
+        {
+            /* Absolute mode: the segment data is the index itself. */
+            q = pi->Segmentation.FeatureData[MB_LVL_ALT_Q][seg];
+        }
+        else
+        {
+            /* Delta mode: offset from the frame base, clamped to range. */
+            q = pi->Quantization.Y1_AC + pi->Segmentation.FeatureData[MB_LVL_ALT_Q][seg];
+            if (q < 0)
+                q = 0;
+            else if (q > MAX_QINDEX)
+                q = MAX_QINDEX;
+        }
+        IQ_buf->quantization_index[seg][0] = q;
+    }
+
+    for (seg = 0; seg < MAX_MB_SEGMENTS; seg++)
+    {
+        IQ_buf->quantization_index[seg][1] = IQ_buf->quantization_index[seg][0] + pi->Quantization.Y1_DC_Delta;
+        IQ_buf->quantization_index[seg][2] = IQ_buf->quantization_index[seg][0] + pi->Quantization.Y2_DC_Delta;
+        IQ_buf->quantization_index[seg][3] = IQ_buf->quantization_index[seg][0] + pi->Quantization.Y2_AC_Delta;
+        IQ_buf->quantization_index[seg][4] = IQ_buf->quantization_index[seg][0] + pi->Quantization.UV_DC_Delta;
+        IQ_buf->quantization_index[seg][5] = IQ_buf->quantization_index[seg][0] + pi->Quantization.UV_AC_Delta;
+    }
+}
+
+/* Export the token probability tables to the VA probability buffer. */
+static void vbp_add_probs_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    FrameContextData *fc = &(parser->info.FrameContext);
+    VAProbabilityDataBufferVP8 *prob_data = query_data->prob_data;
+
+    /* DCT coefficients probability */
+    /* 4 block types x 8 coeff bands x 3 contexts x 11 tokens, one byte
+     * each — the coeff_probs dimensions from the VP8 spec (RFC 6386). */
+    memcpy(prob_data->dct_coeff_probs, fc->DCT_Coefficients, 4*8*3*11*sizeof(uint8_t));
+}
+
+/* Copy frame-level attributes from the parser into the codec data. */
+static void vbp_set_codec_data_vp8(vp8_viddec_parser *parser, vbp_codec_data_vp8* codec_data)
+{
+    vp8_Info *info = &(parser->info);
+
+    /* Frame tag. */
+    codec_data->frame_type = info->frame_tag.frame_type;
+    codec_data->version_num = info->frame_tag.version;
+    codec_data->show_frame = info->frame_tag.show_frame;
+
+    /* Dimensions rounded up to whole 16x16 macroblocks; the crop values
+     * recover the original picture size. */
+    codec_data->frame_width = ((info->width + 15) / 16) * 16;
+    codec_data->frame_height = ((info->height + 15) / 16) * 16;
+    codec_data->crop_top = 0;
+    codec_data->crop_left = 0;
+    codec_data->crop_bottom = codec_data->frame_height - info->height;
+    codec_data->crop_right = codec_data->frame_width - info->width;
+
+    /* Reference-buffer refresh and copy flags. */
+    codec_data->refresh_alt_frame = info->refresh_af;
+    codec_data->refresh_golden_frame = info->refresh_gf;
+    codec_data->refresh_last_frame = info->refresh_lf;
+    codec_data->golden_copied = info->golden_copied;
+    codec_data->altref_copied = info->altref_copied;
+}
+
+/* Populate one VAPictureParameterBufferVP8 from the parsed frame header.
+ * Returns VBP_OK, or VBP_DATA when the picture count is out of range. */
+static uint32_t vbp_add_pic_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    query_data->num_pictures++;
+
+    /* Guard with the same bound the pic_data array was allocated with
+     * (VP8_MAX_NUM_PICTURES), so the check matches the error message
+     * instead of the previous hard-coded "> 1". */
+    if (query_data->num_pictures > VP8_MAX_NUM_PICTURES)
+    {
+        ETRACE("Num of pictures (%d) per sample buffer exceeds the limit %d.", query_data->num_pictures, VP8_MAX_NUM_PICTURES);
+        return VBP_DATA;
+    }
+
+    int i = 0;
+    int pic_data_index = query_data->num_pictures - 1;
+    if (pic_data_index < 0)
+    {
+        ETRACE("MB address does not start from 0!");
+        return VBP_DATA;
+    }
+
+    vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_data_index]);
+    VAPictureParameterBufferVP8 *pic_parms = pic_data->pic_parms;
+
+    pic_parms->frame_width = pi->width;
+    pic_parms->frame_height = pi->height;
+
+    pic_parms->pic_fields.value = 0;
+    /* NOTE(review): frame_type is stored directly in the key_frame bit;
+     * assumes the parser uses 0 == key frame per the VP8 frame tag —
+     * confirm against vp8.h. */
+    pic_parms->pic_fields.bits.key_frame = pi->frame_tag.frame_type;
+    pic_parms->pic_fields.bits.version = pi->frame_tag.version;
+
+    /* Segmentation */
+    pic_parms->pic_fields.bits.segmentation_enabled = pi->Segmentation.Enabled;
+    pic_parms->pic_fields.bits.update_mb_segmentation_map = pi->Segmentation.UpdateMap;
+    pic_parms->pic_fields.bits.update_segment_feature_data = pi->Segmentation.UpdateData;
+    memcpy(pic_parms->mb_segment_tree_probs, pi->Segmentation.TreeProbs, sizeof(unsigned char) * MB_FEATURE_TREE_PROBS);
+
+    /* Loop filter data */
+    pic_parms->pic_fields.bits.filter_type = pi->LoopFilter.Type;
+    pic_parms->pic_fields.bits.sharpness_level = pi->LoopFilter.Sharpness;
+    pic_parms->pic_fields.bits.loop_filter_adj_enable = pi->LoopFilter.DeltaEnabled;
+    pic_parms->pic_fields.bits.mode_ref_lf_delta_update = pi->LoopFilter.DeltaUpdate;
+
+    /* Per-segment filter level: absolute or delta from the frame level. */
+    int baseline_filter_level[MAX_MB_SEGMENTS];
+    if (pi->Segmentation.Enabled)
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            if (SEGMENT_ABSDATA == pi->Segmentation.AbsDelta)
+            {
+                baseline_filter_level[i] = pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i];
+            }
+            else
+            {
+                baseline_filter_level[i] = pi->LoopFilter.Level + pi->Segmentation.FeatureData[MB_LVL_ALT_LF][i];
+                baseline_filter_level[i] = (baseline_filter_level[i] >= 0) ? ((baseline_filter_level[i] <= MAX_LOOP_FILTER) ? baseline_filter_level[i] : MAX_LOOP_FILTER) : 0;  /* Clamp to valid range */
+            }
+        }
+    }
+    else
+    {
+        for (i = 0; i < MAX_MB_SEGMENTS; i++)
+        {
+            baseline_filter_level[i] = pi->LoopFilter.Level;
+        }
+    }
+    for (i = 0; i < MAX_MB_SEGMENTS; i++)
+    {
+        pic_parms->loop_filter_level[i] = baseline_filter_level[i];
+    }
+    /* NOTE(review): loop_filter_disable is set when level > 0, which reads
+     * inverted — confirm the intended VA semantics before changing. */
+    if ((pic_parms->pic_fields.bits.version == 0) || (pic_parms->pic_fields.bits.version == 1))
+    {
+        pic_parms->pic_fields.bits.loop_filter_disable = pic_parms->loop_filter_level[0] > 0 ? true : false;
+    }
+    memcpy(pic_parms->loop_filter_deltas_ref_frame, pi->LoopFilter.DeltasRef, sizeof(char) * MAX_REF_LF_DELTAS);
+    memcpy(pic_parms->loop_filter_deltas_mode, pi->LoopFilter.DeltasMode, sizeof(char) * MAX_MODE_LF_DELTAS);
+
+    pic_parms->pic_fields.bits.sign_bias_golden = pi->sign_bias_golden;
+    pic_parms->pic_fields.bits.sign_bias_alternate = pi->sign_bias_alternate;
+
+    pic_parms->pic_fields.bits.mb_no_coeff_skip = pi->mb_no_coeff_skip;
+    pic_parms->pic_fields.bits.mb_skip_coeff = pi->mb_skip_coeff;
+
+    /* Mode/reference probabilities used by the boolean decoder. */
+    pic_parms->prob_skip_false = pi->prob_skip_false;
+    pic_parms->prob_intra = pi->prob_intra;
+    pic_parms->prob_last = pi->prob_lf;
+    pic_parms->prob_gf = pi->prob_gf;
+
+    FrameContextData *fc = &(parser->info.FrameContext);
+    memcpy(pic_parms->y_mode_probs, fc->Y_Mode_Prob, sizeof(unsigned char) * 4);
+    memcpy(pic_parms->uv_mode_probs, fc->UV_Mode_Prob, sizeof(unsigned char) * 3);
+    /* Motion vector context */
+    for (i = 0; i < 2; i++)
+    {
+        memcpy(pic_parms->mv_probs[i], fc->MVContext[i], sizeof(unsigned char) * 19);
+    }
+
+    /* Bool coder */
+    pic_parms->bool_coder_ctx.range = pi->bool_coder.range;
+    pic_parms->bool_coder_ctx.value = (pi->bool_coder.value >> 24) & 0xFF;
+    pic_parms->bool_coder_ctx.count = pi->bool_coder.count;
+
+    /* Reference surfaces are resolved by the decoder; start invalid. */
+    pic_parms->last_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->golden_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->alt_ref_frame = VA_INVALID_SURFACE;
+    pic_parms->out_of_loop_frame = VA_INVALID_SURFACE; //Reserved for future use
+
+    /* specify the slice number */
+    pic_data->num_slices = 0;
+
+    return VBP_OK;
+}
+
+/* Append one slice entry describing the whole VP8 frame payload.
+ * Returns VBP_OK, or VBP_DATA on an invalid picture index or when the
+ * per-picture slice array is already full. */
+static uint32_t vbp_add_slice_data_vp8(vp8_viddec_parser *parser, vbp_data_vp8 *query_data)
+{
+    vp8_Info *pi = &(parser->info);
+    uint32_t part_index = 0;
+
+    /* The previous "(uint32_t)(num_pictures - 1) < 0" check was always
+     * false because the index is unsigned; validate the count itself. */
+    if (query_data->num_pictures < 1)
+    {
+        ETRACE("Invalid picture data index.");
+        return VBP_DATA;
+    }
+    uint32_t pic_index = query_data->num_pictures - 1;
+
+    vbp_picture_data_vp8 *pic_data = &(query_data->pic_data[pic_index]);
+
+    /* Bound-check BEFORE writing: the old code filled slc_data[] first and
+     * only then tested the count, overflowing the array when the picture
+     * already held VP8_MAX_NUM_SLICES slices. */
+    if (pic_data->num_slices >= VP8_MAX_NUM_SLICES) {
+        ETRACE("Number of slices (%d) per picture exceeds the limit (%d).", pic_data->num_slices, VP8_MAX_NUM_SLICES);
+        return VBP_DATA;
+    }
+
+    vbp_slice_data_vp8 *slc_data = &(pic_data->slc_data[pic_data->num_slices]);
+
+    slc_data->buffer_addr = pi->source;
+    slc_data->slice_offset = 0;
+    slc_data->slice_size = pi->source_sz;
+
+    VASliceParameterBufferVP8 *slc_parms = &(slc_data->slc_parms);
+    /* number of bytes in the slice data buffer for this slice */
+    slc_parms->slice_data_size = slc_data->slice_size;
+
+    /* the offset to the first byte of slice data */
+    slc_parms->slice_data_offset = 0;
+
+    /* see VA_SLICE_DATA_FLAG_XXX definitions */
+    slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+
+    /* the offset to the first bit of MB from the first byte of slice data */
+    slc_parms->macroblock_offset = pi->header_bits;
+
+    /* Token partitions: entry 0 is the first-partition size from the frame
+     * tag; the remaining entries come from the parsed partition sizes. */
+    slc_parms->num_of_partitions = pi->partition_count;
+    slc_parms->partition_size[0] = pi->frame_tag.first_part_size;
+    for (part_index = 1; part_index < 9; part_index++)
+    {
+        slc_parms->partition_size[part_index] = pi->partition_size[part_index - 1];
+    }
+
+    pic_data->num_slices++;
+    return VBP_OK;
+}
+
+/*
+ * Fill the query data structure after the sample buffer is parsed:
+ * picture parameters, slice parameters, codec data, probability table
+ * and quantization indices. Returns the first error encountered.
+ */
+uint32 vbp_populate_query_data_vp8(vbp_context *pcontext)
+{
+    uint32 error = VBP_OK;
+
+    vp8_viddec_parser *parser = (vp8_viddec_parser *)pcontext->parser_cxt->codec_data;
+    vbp_data_vp8 *query_data = (vbp_data_vp8 *)pcontext->query_data;
+
+    /* buffer number */
+    query_data->buf_number = buffer_counter;
+
+    /* Populate picture data; previously a failure here was silently
+     * discarded and VBP_OK returned with stale picture data. */
+    error = vbp_add_pic_data_vp8(parser, query_data);
+    if (error != VBP_OK)
+        return error;
+
+    /* Populate slice data */
+    error = vbp_add_slice_data_vp8(parser, query_data);
+    if (error != VBP_OK)
+        return error;
+
+    /* Populate codec data */
+    vbp_set_codec_data_vp8(parser, query_data->codec_data);
+
+    /* Populate probability table */
+    vbp_add_probs_data_vp8(parser, query_data);
+
+    /* Populate quantization */
+    vbp_add_quantization_data_vp8(parser, query_data);
+
+    return VBP_OK;
+}
diff --git a/mixvbp/vbp_manager/vbp_vp8_parser.h b/mixvbp/vbp_manager/vbp_vp8_parser.h
new file mode 100755
index 0000000..3b6407e
--- /dev/null
+++ b/mixvbp/vbp_manager/vbp_vp8_parser.h
@@ -0,0 +1,67 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+
+#ifndef VBP_VP8_PARSER_H
+#define VBP_VP8_PARSER_H
+
+/*
+ * setup parser's entry points
+ */
+uint32 vbp_init_parser_entries_vp8(vbp_context *pcontext);
+
+/*
+ * allocate query data
+ */
+uint32 vbp_allocate_query_data_vp8(vbp_context *pcontext);
+
+/*
+ * free query data
+ */
+uint32 vbp_free_query_data_vp8(vbp_context *pcontext);
+
+/*
+ * parse initialization data
+ */
+uint32 vbp_parse_init_data_vp8(vbp_context *pcontext);
+
+/*
+ * parse start code. Only length-prefixed mode is supported;
+ * start-code-prefixed streams are not supported.
+ */
+uint32 vbp_parse_start_code_vp8(vbp_context *pcontext);
+
+/*
+ * process parsing result
+ */
+uint32 vbp_process_parsing_result_vp8(vbp_context *pcontext, int list_index);
+
+/*
+ * query parsing result
+ */
+uint32 vbp_populate_query_data_vp8(vbp_context *pcontext);
+
+
+
+#endif /*VBP_VP8_PARSER_H*/
diff --git a/mixvbp/vbp_manager/viddec_parse_sc.c b/mixvbp/vbp_manager/viddec_parse_sc.c
new file mode 100755
index 0000000..b3f8d0b
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_parse_sc.c
@@ -0,0 +1,218 @@
+#include "viddec_pm_parse.h"
+
+#ifndef MFDBIGENDIAN
+/* Scan for the start-code pattern 0x00 0x00 0xXX in the input cubby.
+   Returns 1 when a start code was found, 0 otherwise; on success
+   cxt->sc_end_pos is set to the offset of the byte after the two
+   zero bytes and the state is updated with that byte. */
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
+{
+    uint8_t *ptr;
+    uint32_t data_left=0, phase = 0, ret = 0;
+    /* Next-phase lookup indexed by [current phase][byte value 0 or 1];
+       bytes >= 2 always reset the phase (see table further below). */
+    uint32_t single_byte_table[3][2] = {{1, 0}, {2, 0}, {2, 3}};
+    viddec_sc_parse_cubby_cxt_t *cxt;
+    /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this.
+       Any time a '0' is found its incremented by 1 (up to 2) and reset to '0' if a zero not found.
+       if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern
+       we are looking for. Its incremented to 4 once we see a byte after this pattern */
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    pcxt=pcxt;
+
+    /* parse until there is more data and start code not found */
+    while ((data_left > 0) && (phase < 3))
+    {
+        /* Check if we are 16-byte aligned & phase=0 & more than 16 bytes left;
+           if that's the case we can scan 16 bytes at a time instead of one byte */
+
+        if (((((uint32_t)ptr) & 0xF) == 0) && (phase == 0) && (data_left > 0xF))
+        {
+            // 15 14 13 12 11 10 09 08 07 06 05 04 03 02 01 00      -- check 16 bytes at one time
+            // 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ?? 00 ??      -- if no 00 at byte position: 15,13,11,09,07,05,03,01
+            // it is impossible to have 0x010000 at these 16 bytes.
+            // so we could drop 16 bytes at one time (increase ptr, decrease data_left and keep phase = 0)
+            __asm__(
+            //Data input
+            "movl %1, %%ecx\n\t"                   //ptr-->ecx
+            "movl %0, %%eax\n\t"                   //data_left-->eax
+
+            //Main compare loop
+            "MATCH_8_ZERO:\n\t"
+            "pxor %%xmm0,%%xmm0\n\t"               //0 --> xmm0
+            "pcmpeqb (%%ecx),%%xmm0\n\t"           //uint128_data[ptr] eq xmm0 --> xmm0 , For each byte do calculation,  (byte == 0x00)?0xFF:0x00
+            "pmovmskb %%xmm0, %%edx\n\t"           //xmm0(128)-->edx(32), edx[0]=xmm0[7], edx[1]=xmm0[15], ... , edx[15]=xmm0[127], edx[31-16]=0x0000
+            "test $0xAAAA, %%edx\n\t"              //edx& 1010 1010 1010 1010b
+            "jnz DATA_RET\n\t"                     //Not equal to zero means that at least one byte is 0x00.
+
+            "PREPARE_NEXT_MATCH:\n\t"
+            "add $0x10, %%ecx\n\t"                 //16 + ecx --> ecx
+            "sub $0x10, %%eax\n\t"                 //eax-16 --> eax
+            "cmp $0x10, %%eax\n\t"                 //eax >= 16?
+            "jge MATCH_8_ZERO\n\t"                 //search next 16 bytes
+
+            "DATA_RET:\n\t"
+            "movl %%ecx, %1\n\t"                   //ecx --> ptr
+            "movl %%eax, %0\n\t"                   //eax --> data_left
+            : "+m"(data_left), "+m"(ptr)           //data_left --> eax, ptr -> ecx
+            :
+            :"eax", "ecx", "edx", "xmm0"
+            );
+
+            if (data_left <= 0)
+            {
+                 break;
+            }
+        }
+
+        //check byte one by one
+        //  (*ptr)    0       1      >=2
+        // phase=0    1       0      0
+        // phase=1    2       0      0
+        // phase=2    2       3      0
+        if (*ptr >= 2)
+        {
+            phase = 0;
+        }
+        else
+        {
+            phase = single_byte_table[phase][*ptr];
+        }
+        ptr ++;
+        data_left --;
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        /* Start code found: record its end position and the byte that
+           follows the 00 00 prefix. */
+        viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+        cxt->sc_end_pos = cxt->size - data_left;
+        state->next_sc = cxt->buf[cxt->sc_end_pos];
+        state->second_scprfx_length = 3;
+        phase++;
+        ret = 1;
+    }
+    cxt->phase = phase;
+    /* Return SC found only if phase is 4, else always success */
+    return ret;
+}
+
+#else
+#define FIRST_STARTCODE_BYTE        0x00
+#define SECOND_STARTCODE_BYTE       0x00
+#define THIRD_STARTCODE_BYTE        0x01
+
+/* Masks selecting the second and fourth byte of a 32-bit word loaded
+   from the bytestream.  If neither masked byte is zero, the word
+   cannot contain two consecutive zero bytes starting a 00 00 xx
+   start-code pattern.
+   NOTE(review): these are described as little-endian masks although
+   this is the MFDBIGENDIAN branch -- confirm the byte order this
+   build actually targets. */
+#define SC_BYTE_MASK0               0x00ff0000  /* second byte of the word */
+#define SC_BYTE_MASK1               0x000000ff  /* fourth byte of the word */
+
+/* Parse for SC code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success.
+   The context is updated with current phase and sc_code position in the buffer.
+*/
+uint32_t viddec_parse_sc(void *in, void *pcxt, void *sc_state)
+{
+    uint8_t *ptr;
+    uint32_t size;
+    uint32_t data_left=0, phase = 0, ret = 0;
+    viddec_sc_parse_cubby_cxt_t *cxt;
+    /* What is phase?: phase is a value between [0-4], we keep track of consecutive '0's with this.
+       Any time a '0' is found its incremented by 1 (up to 2) and reset to '0' if a zero not found.
+       if 0xXX code is found and current phase is 2, its changed to 3 which means we found the pattern
+       we are looking for. Its incremented to 4 once we see a byte after this pattern */
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    size = 0;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    pcxt=pcxt;
+
+    /* parse until there is more data and start code not found */
+    while ((data_left > 0) &&(phase < 3))
+    {
+        /* Check if we are word aligned & phase=0; if that's the case we can
+           scan a word at a time instead of one byte */
+        if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32_t data;
+                char mask1 = 0, mask2=0;
+
+                data = *((uint32_t *)ptr);
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+                   two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {/* Success so skip 4 bytes and start over */
+                    ptr+=4;
+                    size+=4;
+                    data_left-=4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+           two zero bytes in the word so we look one byte at a time*/
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {/* Phase can be 3 only if third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    /* Clamp at 2: extra leading zeros collapse into the
+                       two-zero prefix.  Word-skip runs of zero words. */
+                    phase = 2;
+
+                    if ( (((uint32_t)ptr) & 0x3) == 0 )
+                    {
+                        while ( data_left > 3 )
+                        {
+                            if (*((uint32_t *)ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr+=4;
+                            size+=4;
+                            data_left-=4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                if ((*ptr == THIRD_STARTCODE_BYTE) && (phase == 2))
+                {/* Match for start code so update context with byte position */
+                    phase = 3;
+                    cxt->sc_end_pos = size;
+                }
+                else
+                {
+                    phase = 0;
+                }
+                ptr++;
+                size++;
+                data_left--;
+            }
+        }
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+        cxt->sc_end_pos++;
+        state->next_sc = cxt->buf[cxt->sc_end_pos];
+        state->second_scprfx_length = 3;
+        phase++;
+        ret = 1;
+    }
+    cxt->phase = phase;
+    /* Return SC found only if phase is 4, else always success */
+    return ret;
+}
+#endif
diff --git a/mixvbp/vbp_manager/viddec_pm_parser_ops.c b/mixvbp/vbp_manager/viddec_pm_parser_ops.c
new file mode 100755
index 0000000..6879a6a
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_pm_parser_ops.c
@@ -0,0 +1,97 @@
+#include <stdint.h>
+#include <vbp_common.h>
+#include "viddec_pm.h"
+#include "viddec_parser_ops.h"
+#include "viddec_pm_utils_bstream.h"
+
+/* Read num_bits (<= 32) from the stream into *data, advancing the
+   bitstream position.  Returns the underlying peekbits status
+   (-1 on failure). */
+int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+    viddec_pm_cxt_t *pm = (viddec_pm_cxt_t *)parent;
+    int32_t status = viddec_pm_utils_bstream_peekbits(&(pm->getbits), data, num_bits, 1);
+
+    if (status == -1)
+    {
+        DEB("FAILURE!!!! getbits returned %d\n", status);
+    }
+    return status;
+}
+
+/* Read num_bits (<= 32) from the stream into *data WITHOUT advancing
+   the bitstream position. */
+int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits)
+{
+    viddec_pm_cxt_t *pm = (viddec_pm_cxt_t *)parent;
+    return viddec_pm_utils_bstream_peekbits(&(pm->getbits), data, num_bits, 0);
+}
+
+/* Skip num_bits (<= 32) in the stream; delegates to the bstream
+   utility and forwards its status. */
+int32_t viddec_pm_skip_bits(void *parent, uint32_t num_bits)
+{
+    viddec_pm_cxt_t *pm = (viddec_pm_cxt_t *)parent;
+    return viddec_pm_utils_bstream_skipbits(&(pm->getbits), num_bits);
+}
+
+/* Report the current access-unit position (byte offset, bit offset)
+   and whether the current byte is an emulation-prevention byte.
+   Skips a pending emulation byte first so the reported position is
+   never on one.  Always returns 1. */
+int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, uint8_t *is_emul)
+{
+    viddec_pm_cxt_t *pm = (viddec_pm_cxt_t *)parent;
+
+    viddec_pm_utils_skip_if_current_is_emulation(&(pm->getbits));
+    viddec_pm_utils_bstream_get_au_offsets(&(pm->getbits), bit, byte, is_emul);
+    return 1;
+}
+
+/* Returns nonzero when no more RBSP data remains for the current
+   access unit. */
+int32_t viddec_pm_is_nomoredata(void *parent)
+{
+    viddec_pm_cxt_t *pm = (viddec_pm_cxt_t *)parent;
+    return viddec_pm_utils_bstream_nomorerbspdata(&(pm->getbits));
+}
+
+/* Fetch the byte at the current stream position into *byte.
+   NOTE(review): the helper returns int32_t (-1/1) while this wrapper
+   is declared uint32_t; -1 becomes 0xFFFFFFFF after conversion --
+   confirm callers treat the result accordingly. */
+uint32_t viddec_pm_get_cur_byte(void *parent, uint8_t *byte)
+{
+    viddec_pm_cxt_t *pm = (viddec_pm_cxt_t *)parent;
+    return viddec_pm_utils_bstream_get_current_byte(&(pm->getbits), byte);
+}
+
+void viddec_pm_set_next_frame_error_on_eos(void *parent, uint32_t error)
+{
+    viddec_pm_cxt_t *cxt;
+    cxt = (viddec_pm_cxt_t *)parent;
+    cxt->next_workload_error_eos = error;
+}
+
+/* Arm late-frame detection on this parser context. */
+void viddec_pm_set_late_frame_detect(void *parent)
+{
+    ((viddec_pm_cxt_t *)parent)->late_frame_detect = true;
+}
+
+/* Stub for the VBP build: workload items are not appended here.
+   Always reports success (1). */
+int32_t viddec_pm_append_workitem(void *parent, viddec_workload_item_t *item, uint32_t next)
+{
+    return 1;
+}
+
+/* Stub for the VBP build: no user-data setup is performed.
+   The self-assignment only silences the unused-parameter warning. */
+void viddec_pm_setup_userdata(viddec_workload_item_t *wi)
+{
+    wi=wi;
+}
diff --git a/mixvbp/vbp_manager/viddec_pm_utils_bstream.c b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
new file mode 100755
index 0000000..6939fef
--- /dev/null
+++ b/mixvbp/vbp_manager/viddec_pm_utils_bstream.c
@@ -0,0 +1,500 @@
+#include <stdint.h>
+#include <vbp_common.h>
+#include "viddec_pm_utils_bstream.h"
+
+/* Internal data structure for calculating required bits.
+   Up to 8 bytes are filled one at a time and then read back as two
+   32-bit words for shifting/masking. */
+typedef union
+{
+    uint8_t byte[8];
+    uint32_t word[2];
+} viddec_pm_utils_getbits_t;
+
+/* Forward declarations; the definitions live in the non-VBP section
+   below.  cp_using_dma is provided by the platform DMA layer. */
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt);
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+
+/* Bytes left in cubby buffer which were not consumed yet.
+   buf_end is the first invalid index, buf_index the next byte to read. */
+static inline uint32_t viddec_pm_utils_bstream_bytesincubby(viddec_pm_utils_bstream_buf_cxt_t *cxt)
+{
+    return (cxt->buf_end - cxt->buf_index);
+}
+
+/*
+  This function checks to see if we are at the last valid byte for current access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomorerbspdata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t data_remaining = 0;
+    uint8_t ret = 0;
+
+    /* How much data is remaining including current byte to be processed.*/
+    data_remaining = cxt->list->total_bytes - (cxt->au_pos + (cxt->bstrm_buf.buf_index - cxt->bstrm_buf.buf_st));
+
+    /* Start code prefix can be 000001 or 0000001. We always only check for 000001.
+       data_remaining should be 1 for 000001, as we don't count the sc prefix and 1 represents the current byte.
+       data_remaining should be 2 for 0000001, as we don't count the sc prefix, the current byte and the extra 00 since we check for 000001.
+       NOTE: This is used for H264 only.
+    */
+    switch (data_remaining)
+    {
+    case 2:
+        /* If next byte is 0 and its the last byte in access unit */
+        ret = (cxt->bstrm_buf.buf[cxt->bstrm_buf.buf_index+1] == 0x0);
+        break;
+    case 1:
+        /* if the current byte is last byte */
+        ret = 1;
+        break;
+    default:
+        break;
+    }
+    return ret;
+}
+
+#ifndef VBP
+/*
+  This function returns true if cubby buffer has the last byte of access unit.
+*/
+uint8_t viddec_pm_utils_bstream_nomoredata(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    uint32_t last_byte_offset_plus_one=0;
+    uint8_t ret = 0;
+    /* Check to see if the last byte access-unit offset is the last byte for current access unit.
+     End represents the first invalid byte, so (end - st) will give number of bytes.*/
+    last_byte_offset_plus_one = cxt->au_pos + (cxt->bstrm_buf.buf_end - cxt->bstrm_buf.buf_st);
+    if ((int32_t)last_byte_offset_plus_one >= cxt->list->total_bytes)
+    {
+        ret = 1;
+    }
+    return ret;
+}
+#endif
+
+/* Reset the scratch-buffer bookkeeping (start, size and bit offset);
+   the scratch buffer stages already-read data across DMA reloads. */
+static inline void viddec_pm_utils_bstream_scratch_init(viddec_pm_utils_bstream_scratch_cxt_t *cxt)
+{
+    cxt->st = 0;
+    cxt->size = 0;
+    cxt->bitoff = 0;
+}
+#ifndef VBP
+/* This function tells us how much more data is in the current es buffer from current position.
+   Its used to figure out if we need to go to next es buffer.
+   Returns 0 when 'offset' is at or beyond the (clamped) buffer end. */
+static inline uint32_t viddec_pm_utils_bstream_datafromindex(viddec_pm_utils_list_t *list, uint32_t index, uint32_t offset)
+{
+    uint32_t ret=0;
+    int32_t val=0;
+    /* Clamp the buffer end position to the access unit's total size. */
+    val = (list->data[index].edpos <= (uint32_t)list->total_bytes) ? list->data[index].edpos: (uint32_t)list->total_bytes;
+    val = val - (int32_t)offset;
+    if (val > 0) ret = (uint32_t)val;
+    /* BUG FIX: previously returned 'val' directly, so a negative value
+       (offset past the buffer end) wrapped to a huge unsigned count;
+       return the clamped 'ret' instead. */
+    return ret;
+}
+
+/* This function seeks to byte offset position starting from lst_index, if more data is present in current ES buffer pointed by
+ lst_index returns the remaining data in current buffer along with physical address of byte offset. The lst_index parameter
+ at returns index of ES buffer in list which has byte_offset
+*/
+static inline uint32_t viddec_pm_utils_bstream_maxbytes_from_index(viddec_pm_utils_bstream_cxt_t *cxt,
+        uint32_t *lst_index,
+        uint32_t byte_offset,
+        uint32_t *physaddr)
+{
+    viddec_pm_utils_list_t *list;
+    uint32_t last_byte_offst=0, bytes_left=0;/* default return value is 0 bytes */
+
+    list = cxt->list;
+    /* Walk the ES buffer list forward from *lst_index looking for the
+       buffer that contains byte_offset. */
+    while (*lst_index < list->num_items)
+    {
+        /* Check to see if we reached the buffer with last valid byte of current access unit, List can have data beyond current access unit */
+        last_byte_offst = (list->data[*lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[*lst_index].edpos: (uint32_t)list->total_bytes;
+        if (byte_offset < last_byte_offst)
+        {/* Found a match so return with data remaining */
+            bytes_left = viddec_pm_utils_bstream_datafromindex(list, *lst_index, byte_offset);
+            *physaddr = viddec_pm_utils_bstream_getphys(cxt, byte_offset, *lst_index);
+            break;
+        }
+        *lst_index+=1;
+    }
+    return bytes_left;
+}
+
+/* Append num_bytes trailing bytes of the cubby bitstream buffer into
+   the scratch buffer, growing its recorded size as we go. */
+static inline void viddec_pm_utils_bstream_scratch_copyto(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data, uint32_t num_bytes)
+{
+    uint32_t pos;
+    for (pos = 0; pos < num_bytes; pos++)
+    {
+        cxt->buf_scratch[pos] = data[pos];
+        cxt->size++;
+    }
+}
+
+/* Copy the previously staged scratch bytes back into the bitstream
+   buffer at 'data'. */
+static inline void viddec_pm_utils_bstream_scratch_copyfrom(viddec_pm_utils_bstream_scratch_cxt_t *cxt, uint8_t *data)
+{
+    uint32_t pos;
+    for (pos = 0; pos < cxt->size; pos++)
+    {
+        data[pos] = cxt->buf_scratch[pos];
+    }
+}
+#endif
+
+/* This function populates requested number of bytes into data parameter, skips emulation prevention bytes if needed.
+   Returns 1 on success, -1 if the read ran past the end of the buffer
+   (the caller must then discard the result). */
+static inline int32_t viddec_pm_utils_getbytes(viddec_pm_utils_bstream_buf_cxt_t *bstream,
+        viddec_pm_utils_getbits_t *data,/* gets populated with read bytes*/
+        uint32_t *act_bytes, /* actual number of bytes read can be more due to emulation prev bytes*/
+        uint32_t *phase,    /* Phase for emulation */
+        uint32_t num_bytes,/* requested number of bytes*/
+        uint32_t emul_reqd, /* On true we look for emulation prevention */
+        uint8_t is_offset_zero /* Are we on aligned byte position for first byte*/
+                                              )
+{
+    int32_t ret = 1;
+    uint8_t cur_byte = 0, valid_bytes_read = 0;
+    *act_bytes = 0;
+
+    /* Keep reading until num_bytes non-emulation bytes are collected. */
+    while (valid_bytes_read < num_bytes)
+    {
+        cur_byte = bstream->buf[bstream->buf_index + *act_bytes];
+        if (emul_reqd && (cur_byte == 0x3) &&(*phase == 2))
+        {/* skip emulation byte. we update the phase only if emulation prevention is enabled */
+            *phase = 0;
+        }
+        else
+        {
+            data->byte[valid_bytes_read] = cur_byte;
+            /*
+              We only update phase for first byte if bit offset is 0. If its not 0 then it was already accounted for in the past.
+              From second byte onwards we always look to update phase.
+             */
+            if ((*act_bytes != 0) || (is_offset_zero))
+            {
+                if (cur_byte == 0)
+                {
+                    /* Update phase only if emulation prevention is required */
+                    *phase +=( ((*phase < 2) && emul_reqd ) ? 1: 0 );
+                }
+                else
+                {
+                    *phase=0;
+                }
+            }
+            valid_bytes_read++;
+        }
+        *act_bytes +=1;
+    }
+    /* Check to see if we reached end during above operation. We might be out of range buts it safe since our array
+       has at least MIN_DATA extra bytes and the maximum out of bounds we will go is 5 bytes */
+    if ((bstream->buf_index + *act_bytes -1) >= bstream->buf_end)
+    {
+        ret = -1;
+    }
+    return ret;
+}
+
+/*
+  This function checks to see if we have minimum amount of data else tries to reload as much as it can.
+  Always returns the data left in current buffer in parameter.
+*/
+static inline void viddec_pm_utils_check_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *data_left)
+{
+#ifdef VBP
+    /* VBP builds keep the whole access unit in the cubby, so no DMA
+       reload is needed -- just report what is left. */
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+#else
+    uint8_t isReload=0;
+
+    *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+    /* If we have minimum data we should continue, else try to read more data */
+    if (*data_left <MIN_DATA)
+    {
+        /* Check to see if we already read last byte of current access unit */
+        isReload = !(viddec_pm_utils_bstream_nomoredata(cxt) == 1);
+        while (isReload)
+        {
+            /* We have more data in access unit so keep reading until we get at least minimum data */
+            viddec_pm_utils_bstream_reload(cxt);
+            *data_left = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+            /* Break out of loop if we reached last byte or we have enough data */
+            isReload = !((*data_left >= MIN_DATA) || (viddec_pm_utils_bstream_nomoredata(cxt) == 1));
+        }
+    }
+#endif
+}
+
+/*
+  Advance the stream position by 'bits' total bits, which consumed
+  'bytes' bytes of buffer (the two can differ because of emulation
+  prevention bytes).  When the new position is byte aligned all
+  consumed bytes are complete; otherwise the last byte is only
+  partially read, so the index stops one short and the remainder is
+  recorded in buf_bitoff.
+*/
+static inline void viddec_pm_utils_update_skipoffsets(viddec_pm_utils_bstream_buf_cxt_t *bstream, uint32_t bits, uint32_t bytes)
+{
+    uint32_t rem_bits = bits & 0x7;
+
+    bstream->buf_bitoff = rem_bits;
+    bstream->buf_index += (rem_bits == 0) ? bytes : (bytes - 1);
+}
+
+/*
+  This function skips emulation byte if necessary.
+  During Normal flow we skip emulation byte only if we read at least one bit after the the two zero bytes.
+  However in some cases we might send data to HW without reading the next bit, in which case we are on
+  emulation byte. To avoid sending invalid data, this function has to be called first to skip.
+*/
+
+void viddec_pm_utils_skip_if_current_is_emulation(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf);
+
+    /* Skip only when emulation prevention is enabled, two zero bytes
+       were already seen (phase >= 2), we are byte aligned and the
+       current byte is the 0x03 emulation-prevention byte. */
+    if (cxt->is_emul_reqd &&
+            (cxt->phase >= 2) &&
+            (bstream->buf_bitoff == 0) &&
+            (bstream->buf[bstream->buf_index] == 0x3) )
+    {
+        bstream->buf_index += 1;
+        cxt->phase = 0;
+    }
+}
+
+#ifndef VBP
+/*
+  This function gets physical address of the requested au offset(pos).
+*/
+
+uint32_t viddec_pm_utils_bstream_getphys(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t pos, uint32_t lst_index)
+{
+    uint32_t ret = 0, last_byte_offst=0;
+    viddec_pm_utils_list_t *list;
+
+    list = cxt->list;
+    /* Find the ES buffer containing access-unit offset 'pos' and
+       translate that offset into the buffer's address space. */
+    while (lst_index < list->num_items)
+    {
+        last_byte_offst = (list->data[lst_index].edpos <= (uint32_t)list->total_bytes) ? list->data[lst_index].edpos: (uint32_t)list->total_bytes;
+        if (pos < last_byte_offst)
+        {
+#ifndef MFDBIGENDIAN
+            ret = (uint32_t)list->sc_ibuf[lst_index].buf;
+#else
+            ret = list->sc_ibuf[lst_index].phys;
+#endif
+            ret +=(pos - list->data[lst_index].stpos);
+            /* The first buffer may begin mid-way through its backing store. */
+            if (lst_index == 0) ret+=list->start_offset;
+            break;
+        }
+        lst_index++;
+    }
+    return ret;
+}
+
+/*
+  Actual reload function which uses dma to refill bitstream buffer.
+*/
+void viddec_pm_utils_bstream_reload(viddec_pm_utils_bstream_cxt_t *cxt)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+
+    /* Update current offset positions */
+    cxt->au_pos +=  (bstream->buf_index - bstream->buf_st);
+    bstream->buf_st = bstream->buf_index;
+    /* copy leftover bytes into scratch */
+    {
+        int32_t cur_bytes=0;
+        viddec_pm_utils_bstream_scratch_init(&(cxt->scratch));
+        cur_bytes = viddec_pm_utils_bstream_bytesincubby(&(cxt->bstrm_buf));
+        if (cur_bytes > 0)
+        {
+            viddec_pm_utils_bstream_scratch_copyto(&(cxt->scratch), &(bstream->buf[bstream->buf_index]), cur_bytes);
+            cxt->scratch.bitoff = bstream->buf_bitoff;
+        }
+    }
+    /* Initiate DMA and copyback scratch data */
+    {
+        uint32_t data_left = 0, ddr_mask=0;
+        /* calculate necessary alignments and copy data */
+        {
+            uint32_t ddr_addr=0, data_wrote=0;
+            uint32_t byte_pos;
+            /* byte pos points to the position from where we want to read data.*/
+            byte_pos = cxt->au_pos + cxt->scratch.size;
+            data_left = viddec_pm_utils_bstream_maxbytes_from_index(cxt, &(cxt->list_off), byte_pos, &ddr_addr);
+            if (data_left > CUBBY_SIZE)
+            {
+                data_left = CUBBY_SIZE;
+            }
+            if (data_left != 0)
+            {
+                /* DMA source must be word aligned; remember the slack. */
+                ddr_mask = ddr_addr & 0x3;
+                ddr_addr = ddr_addr & ~0x3;
+                data_wrote = cp_using_dma(ddr_addr, (uint32_t)&(bstream->buf[MIN_DATA]), (data_left + ddr_mask), 0, 1);
+            }
+        }
+        /* copy scratch data back to buffer and update offsets */
+        {
+            uint32_t index=0;
+            index = MIN_DATA + ddr_mask;
+            index -= cxt->scratch.size;
+            viddec_pm_utils_bstream_scratch_copyfrom(&(cxt->scratch), &(bstream->buf[index]));
+            bstream->buf_st = bstream->buf_index = index;
+            bstream->buf_end = data_left + cxt->scratch.size + bstream->buf_st;
+            bstream->buf_bitoff = cxt->scratch.bitoff;
+        }
+    }
+}
+#endif
+
+/*
+  Init function called by parser manager after sc code detected.
+  Resets all bitstream bookkeeping for a new access unit.
+*/
+void viddec_pm_utils_bstream_init(viddec_pm_utils_bstream_cxt_t *cxt, viddec_pm_utils_list_t *list, uint32_t is_emul)
+{
+    viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf);
+
+#ifdef VBP
+    cxt->emulation_byte_counter = 0;
+#endif
+    cxt->au_pos = 0;
+    cxt->list = list;
+    cxt->list_off = 0;
+    cxt->phase = 0;
+    cxt->is_emul_reqd = is_emul;
+    bstream->buf_st = 0;
+    bstream->buf_end = 0;
+    bstream->buf_index = 0;
+    bstream->buf_bitoff = 0;
+}
+
+/* Get the byte at the current stream position.  If the byte is not in
+   the cubby a reload is attempted first.
+   Limitation: once we seek forward we can't return back.
+   Returns 1 on success, -1 when no data is available. */
+int32_t viddec_pm_utils_bstream_get_current_byte(viddec_pm_utils_bstream_cxt_t *cxt, uint8_t *byte)
+{
+    uint32_t remaining = 0;
+    viddec_pm_utils_bstream_buf_cxt_t *bstream = &(cxt->bstrm_buf);
+
+    viddec_pm_utils_check_bstream_reload(cxt, &remaining);
+    if (remaining == 0)
+    {
+        return -1;
+    }
+    *byte = bstream->buf[bstream->buf_index];
+    return 1;
+}
+
+/*
+  Function to skip N bits ( N<= 32).
+  Returns 1 on success, -1 when the request is out of range or not
+  enough data remains.
+*/
+int32_t viddec_pm_utils_bstream_skipbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t num_bits)
+{
+    int32_t ret = -1;
+    uint32_t data_left=0;
+    viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+    bstream = &(cxt->bstrm_buf);
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+    if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+    {
+        uint8_t bytes_required=0;
+
+        /* Bytes spanned by the skip, including the partial first byte. */
+        bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+        if (bytes_required <= data_left)
+        {
+            viddec_pm_utils_getbits_t data;
+            uint32_t act_bytes =0;
+            if (viddec_pm_utils_getbytes(bstream, &data,  &act_bytes, &(cxt->phase), bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+            {
+                uint32_t total_bits=0;
+                total_bits=num_bits+bstream->buf_bitoff;
+                viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+                ret=1;
+
+#ifdef VBP
+                /* Track how many emulation-prevention bytes were skipped. */
+                if (act_bytes > bytes_required)
+                {
+                    cxt->emulation_byte_counter = act_bytes - bytes_required;
+                }
+#endif
+            }
+        }
+    }
+    return ret;
+}
+
+/*
+  Function to get N bits ( N<= 32).  When 'skip' is nonzero the stream
+  position is advanced as well.  Returns 1 on success, -1 on failure.
+*/
+int32_t viddec_pm_utils_bstream_peekbits(viddec_pm_utils_bstream_cxt_t *cxt, uint32_t *out, uint32_t num_bits, uint8_t skip)
+{
+    uint32_t data_left=0;
+    int32_t ret = -1;
+    /* STEP 1: Make sure that we have at least minimum data before we calculate bits */
+    viddec_pm_utils_check_bstream_reload(cxt, &data_left);
+
+    if ((num_bits <= 32) && (num_bits > 0) && (data_left != 0))
+    {
+        uint32_t bytes_required=0;
+        viddec_pm_utils_bstream_buf_cxt_t *bstream;
+
+        bstream = &(cxt->bstrm_buf);
+        bytes_required = (bstream->buf_bitoff + num_bits + 7)>>3;
+
+        /* Step 2: Make sure we have bytes for requested bits */
+        if (bytes_required <= data_left)
+        {
+            uint32_t act_bytes, phase;
+            viddec_pm_utils_getbits_t data;
+            phase = cxt->phase;
+            /* Step 3: Due to emulation prevention bytes sometimes the bytes_required > actual_required bytes */
+            if (viddec_pm_utils_getbytes(bstream, &data, &act_bytes, &phase, bytes_required, cxt->is_emul_reqd, (bstream->buf_bitoff == 0)) != -1)
+            {
+                uint32_t total_bits=0;
+                uint32_t shift_by=0;
+                /* zero out upper bits */
+                /* LIMITATION:For some reason compiler is optimizing it to NOP if i do both shifts
+                   in single statement */
+                data.byte[0] <<= bstream->buf_bitoff;
+                data.byte[0] >>= bstream->buf_bitoff;
+
+#ifndef MFDBIGENDIAN
+                data.word[0] = SWAP_WORD(data.word[0]);
+                data.word[1] = SWAP_WORD(data.word[1]);
+#endif
+                total_bits = num_bits+bstream->buf_bitoff;
+                if (total_bits > 32)
+                {
+                    /* We have to use both the words to get required data */
+                    shift_by = total_bits - 32;
+                    data.word[0] = (data.word[0] << shift_by) | ( data.word[1] >> (32 - shift_by));
+                }
+                else
+                {
+                    shift_by = 32 - total_bits;
+                    data.word[0] = data.word[0] >> shift_by;
+                }
+                *out = data.word[0];
+                if (skip)
+                {
+                    /* update au byte position if needed */
+                    viddec_pm_utils_update_skipoffsets(bstream, total_bits, act_bytes);
+                    cxt->phase = phase;
+
+#ifdef VBP
+                    /* Track how many emulation-prevention bytes were consumed. */
+                    if (act_bytes > bytes_required)
+                    {
+                        cxt->emulation_byte_counter += act_bytes - bytes_required;
+                    }
+#endif
+                }
+
+                ret =1;
+            }
+        }
+    }
+    return ret;
+}
diff --git a/mixvbp/vbp_plugin/common/README b/mixvbp/vbp_plugin/common/README
new file mode 100644
index 0000000..938f24a
--- /dev/null
+++ b/mixvbp/vbp_plugin/common/README
@@ -0,0 +1 @@
+Some common code will be added here.
diff --git a/mixvbp/vbp_plugin/h264/Android.mk b/mixvbp/vbp_plugin/h264/Android.mk
new file mode 100755
index 0000000..f015988
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/Android.mk
@@ -0,0 +1,66 @@
+# Build rules for the MIX VBP H.264 parser plugin (libmixvbp_h264) and, on
+# platforms supporting the AVC short format, the secure variant
+# (libmixvbp_h264secure).
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+# Core H.264 syntax parsers plus the viddec glue and firmware stubs.
+LOCAL_SRC_FILES :=			\
+	h264parse.c			\
+	h264parse_bsd.c			\
+	h264parse_math.c		\
+	h264parse_mem.c			\
+	h264parse_sei.c			\
+	h264parse_sh.c			\
+	h264parse_pps.c			\
+	h264parse_sps.c			\
+	h264parse_dpb.c			\
+	viddec_h264_parse.c		\
+	mix_vbp_h264_stubs.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES :=							   \
+	$(LOCAL_PATH)/include		   \
+	$(MIXVBP_DIR)/include		   \
+	$(MIXVBP_DIR)/vbp_manager/include	   \
+	$(MIXVBP_DIR)/vbp_manager/h264/include
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_h264
+
+LOCAL_SHARED_LIBRARIES :=		\
+	libmixvbp
+
+include $(BUILD_SHARED_LIBRARY)
+
+# Secure (short-format) variant: same parser sources, but the secure
+# viddec entry point replaces viddec_h264_parse.c, and USE_AVC_SHORT_FORMAT
+# is defined.  Built only for the listed platforms.
+include $(CLEAR_VARS)
+PLATFORM_SUPPORT_AVC_SHORT_FORMAT := baytrail
+
+ifneq ($(filter $(TARGET_BOARD_PLATFORM),$(PLATFORM_SUPPORT_AVC_SHORT_FORMAT)),)
+LOCAL_SRC_FILES := \
+        h264parse.c \
+        h264parse_bsd.c \
+        h264parse_math.c \
+        h264parse_mem.c \
+        h264parse_sei.c \
+        h264parse_pps.c \
+        h264parse_sps.c \
+        h264parse_dpb.c \
+        h264parse_sh.c \
+        secvideo/baytrail/viddec_h264secure_parse.c \
+        mix_vbp_h264_stubs.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY -DUSE_AVC_SHORT_FORMAT
+
+LOCAL_C_INCLUDES :=   \
+	$(LOCAL_PATH)/include   \
+	$(MIXVBP_DIR)/include    \
+	$(MIXVBP_DIR)/vbp_manager/include   \
+	$(MIXVBP_DIR)/vbp_manager/h264/include
+
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_h264secure
+LOCAL_SHARED_LIBRARIES := libmixvbp
+
+include $(BUILD_SHARED_LIBRARY)
+
+endif
diff --git a/mixvbp/vbp_plugin/h264/h264parse.c b/mixvbp/vbp_plugin/h264/h264parse.c
new file mode 100755
index 0000000..9e36b7c
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse.c
@@ -0,0 +1,795 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    h264 parser
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+
+/**
+ * h264_Scaling_List - parse one quantizer scaling list from the bitstream.
+ *
+ * Implements scaling_list() of the H.264 spec (clause 7.3.2.1.1.1): reads a
+ * delta-coded list of 16 (4x4) or 64 (8x8) scale values in zig-zag order.
+ *
+ * parent                  - bitstream context handed to h264_GetVLCElement()
+ * scalingList             - output array, written in raster order
+ * sizeOfScalingList       - 16 for 4x4 lists, 64 for 8x8 lists
+ * UseDefaultScalingMatrix - set to 1 when the stream signals the default
+ *                           matrix (first delta makes nextScale == 0)
+ * pInfo                   - parser state, passed through to the VLC reader
+ *
+ * Returns H264_STATUS_OK (VLC read errors are reported out of band).
+ */
+h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t *UseDefaultScalingMatrix, h264_Info* pInfo)
+{
+    int32_t j, scanj;
+    int32_t delta_scale, lastScale, nextScale;
+
+    /* Zig-zag scan order for 4x4 blocks. */
+    const uint8_t ZZ_SCAN[16]  =
+        {  0,  1,  4,  8,  5,  2,  3,  6,  9, 12, 13, 10,  7, 11, 14, 15
+        };
+
+    /* Zig-zag scan order for 8x8 blocks. */
+    const uint8_t ZZ_SCAN8[64] =
+        {  0,  1,  8, 16,  9,  2,  3, 10, 17, 24, 32, 25, 18, 11,  4,  5,
+           12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13,  6,  7, 14, 21, 28,
+           35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
+           58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
+        };
+
+    lastScale      = 8;
+    nextScale      = 8;
+    scanj = 0;
+
+    for (j=0; j<sizeOfScalingList; j++)
+    {
+        scanj = (sizeOfScalingList==16)?ZZ_SCAN[j]:ZZ_SCAN8[j];
+
+        if (nextScale!=0)
+        {
+            /* delta_scale is a signed Exp-Golomb element; scales wrap mod 256. */
+            delta_scale = h264_GetVLCElement(parent, pInfo, true);
+            nextScale = (lastScale + delta_scale + 256) % 256;
+            *UseDefaultScalingMatrix = (uint8_t) (scanj==0 && nextScale==0);
+        }
+
+        /* nextScale == 0 means "repeat the last value for the rest of the list". */
+        scalingList[scanj] = (nextScale==0) ? lastScale:nextScale;
+        lastScale = scalingList[scanj];
+    }
+
+    return H264_STATUS_OK;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/**
+ * h264_active_par_set - (re)activate the SPS/PPS referenced by a slice.
+ *
+ * Reloads the active PPS (and, when it points at a different or updated SPS,
+ * the active SPS) at the start of a frame or whenever the slice references a
+ * new pic_parameter_set_id, then refreshes the derived picture dimensions.
+ *
+ * Returns H264_STATUS_OK, or H264_PPS_INVALID_PIC_ID when the (re)loaded
+ * parameter sets carry an out-of-range seq_parameter_set_id.
+ */
+h264_Status h264_active_par_set(h264_Info*pInfo,h264_Slice_Header_t* SliceHeader)
+{
+    ///////////////////////////////////////////////////
+    // Reload SPS/PPS while
+    // 1) Start of Frame (in case of context switch)
+    // 2) PPS id changed
+    ///////////////////////////////////////////////////
+    if ((SliceHeader->first_mb_in_slice == 0) || (SliceHeader->pic_parameter_id != pInfo->active_PPS.pic_parameter_set_id))
+    {
+#ifndef WIN32
+        h264_Parse_Copy_Pps_From_DDR(pInfo, &pInfo->active_PPS, SliceHeader->pic_parameter_id);
+
+        if (pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS)
+        {
+            return H264_PPS_INVALID_PIC_ID;   /// Invalid PPS detected
+        }
+
+        /* Reload the SPS when the PPS references a different one, or when the
+         * referenced SPS was updated in DDR since it was last copied.  The
+         * short-circuit keeps the updated-flag check restricted to the
+         * "same id" case, exactly as the original two branches did. */
+        if ((pInfo->active_PPS.seq_parameter_set_id != pInfo->active_SPS.seq_parameter_set_id) ||
+            h264_Parse_Check_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id))
+        {
+            pInfo->Is_SPS_updated = 1;
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &pInfo->active_SPS, pInfo->active_PPS.seq_parameter_set_id);
+            h264_Parse_Clear_Sps_Updated_Flag(pInfo, pInfo->active_PPS.seq_parameter_set_id);
+        }
+#else
+        pInfo->active_PPS = PPS_GL[SliceHeader->pic_parameter_id];
+        pInfo->active_SPS = SPS_GL[pInfo->active_PPS.seq_parameter_set_id];
+#endif
+
+        if (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS)
+        {
+            return H264_PPS_INVALID_PIC_ID;   //// Invalid SPS detected
+        }
+    }
+    else
+    {
+        if ((pInfo->active_PPS.seq_parameter_set_id >= MAX_NUM_SPS) || (pInfo->active_SPS.seq_parameter_set_id >= MAX_NUM_SPS))
+        {
+            return H264_PPS_INVALID_PIC_ID;   /// Invalid PPS detected
+        }
+    }
+
+    /* Derived picture geometry: height in map units doubles when the stream
+     * is not frame-only coded (field pictures or MBAFF). */
+    pInfo->img.PicWidthInMbs    = (pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1 + 1);
+    pInfo->img.FrameHeightInMbs = pInfo->active_SPS.sps_disp.frame_mbs_only_flag ?
+                                  (pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) :
+                                  ((pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) << 1);
+
+    return H264_STATUS_OK;
+}   //// End of h264_active_par_set
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////
+// Parse slice header info
+//////////////////////////////////////////////////
+/**
+ * h264_Parse_Slice_Layer_Without_Partitioning_RBSP - parse a full slice header.
+ *
+ * Stage 1 parses the parameter-set independent fields; on success the
+ * referenced SPS/PPS pair is activated, profile-implied PPS fields are
+ * forced, and stages 2 and 3 parse the parameter-set dependent fields.
+ * SliceHeader->sh_error accumulates a bitmask of the failing stage
+ * (1 = stage 1 / parameter-set activation, 2 = stage 2, 4 = stage 3).
+ *
+ * Returns the status of stage 1 + parameter-set activation; stage 2/3
+ * failures are reported only through sh_error.
+ */
+h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status retStatus = H264_STATUS_ERROR;
+
+    ////////////////////////////////////////////////////
+    //// Parse slice header info
+    //// Part1: not depend on the active PPS/SPS
+    //// Part2/3: depend on the active parset
+    //////////////////////////////////////////////////
+
+    //retStatus = h264_Parse_Slice_Header_1(pInfo);
+
+    SliceHeader->sh_error = 0;
+
+    if (h264_Parse_Slice_Header_1(parent, pInfo, SliceHeader) == H264_STATUS_OK)
+    {
+        //////////////////////////////////////////
+        //// Active parameter set for this slice
+        //////////////////////////////////////////
+        retStatus = h264_active_par_set(pInfo, SliceHeader);
+    }
+
+    if (retStatus == H264_STATUS_OK) {
+        /* Baseline/Main/Extended never carry 8x8 transforms or scaling
+         * matrices; force the PPS fields to their implied values.
+         * The cases intentionally fall through to default. */
+        switch (pInfo->active_SPS.profile_idc)
+        {
+        case h264_ProfileBaseline:
+        case h264_ProfileMain:
+        case h264_ProfileExtended:
+            pInfo->active_PPS.transform_8x8_mode_flag=0;
+            pInfo->active_PPS.pic_scaling_matrix_present_flag =0;
+            pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset;
+
+        default:
+            break;
+        }
+
+        if ( h264_Parse_Slice_Header_2(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+        {
+            SliceHeader->sh_error |= 2;
+        }
+        else	if ( h264_Parse_Slice_Header_3(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+        {
+            SliceHeader->sh_error |= 4;
+        }
+
+    } else 	{
+        SliceHeader->sh_error |= 1;
+    }
+
+
+    //if(SliceHeader->sh_error) {
+    //pInfo->wl_err_flag |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    //}
+
+
+
+    //////////////////////////////////
+    //// Parse slice data (MB loop)
+    //////////////////////////////////
+    //retStatus = h264_Parse_Slice_Data(pInfo);
+    {
+        //uint32_t data = 0;
+        //if( viddec_pm_peek_bits(parent, &data, 32) == -1)
+        //retStatus = H264_STATUS_ERROR;
+    }
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+
+    return retStatus;
+}
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/**
+ * h264_Parse_NAL_Unit - read the NAL unit header.
+ *
+ * Extracts nal_ref_idc (2 bits) and nal_unit_type (5 bits) from the NAL
+ * header byte (spec clause 7.3.1).  Under VBP the 24-bit start code has
+ * already been stripped, so only the single header byte is consumed;
+ * otherwise a 32-bit read also swallows the start code and the
+ * forbidden_zero_bit.
+ *
+ * Returns H264_STATUS_OK on success, H264_STATUS_ERROR when the bitstream
+ * runs dry.  (An #if 0'd, syntactically invalid variant of this read
+ * sequence has been deleted; behavior is unchanged.)
+ */
+h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc)
+{
+    h264_Status ret = H264_STATUS_ERROR;
+    uint32_t code;
+
+#ifdef VBP
+    if ( viddec_pm_get_bits(parent, &code, 8) != -1)
+#else
+    //// 24bit SC, 1 bit: forbidden_zero_bit, 2 bits: nal_ref_idc, 5 bits: nal_unit_type
+    if ( viddec_pm_get_bits(parent, &code, 32) != -1)
+#endif
+    {
+        *nal_ref_idc = (uint8_t)((code>>5)&0x3);
+        pInfo->nal_unit_type = (uint8_t)((code>>0)&0x1f);
+        ret = H264_STATUS_OK;
+    }
+
+    return ret;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/*!
+ ************************************************************************
+ * \brief
+ *    set defaults for old_slice
+ *    NAL unit of a picture"
+ ************************************************************************
+ */
+/* Fallback limits for toolchains where <limits.h> is unavailable.
+ * FIX: the previous definitions were swapped -- INT_MAX was 0xFFFFFFFF
+ * (which is -1 as a signed 32-bit value) and UINT_MAX was 0x7FFFFFFF.
+ * Both are used below only as "impossible" sentinel values, but they now
+ * carry their standard meanings and match <limits.h> when it is present. */
+#ifndef INT_MAX
+#define INT_MAX 0x7FFFFFFF
+#endif
+
+#ifndef UINT_MAX
+#define UINT_MAX 0xFFFFFFFF
+#endif
+
+/* Reset the frame-boundary comparison state ("old slice") to sentinel
+ * values that can never match a real slice header, so the first slice of
+ * a stream is always detected as the start of a new picture. */
+void h264_init_old_slice(h264_Info* pInfo)
+{
+    h264_Slice_Header_t *sh = &pInfo->SliceHeader;
+
+    sh->field_pic_flag   = 0;
+    sh->pic_parameter_id = 0xFF;
+    sh->frame_num        = INT_MAX;
+    sh->nal_ref_idc      = 0xFF;
+    sh->idr_flag         = 0;
+
+    sh->pic_order_cnt_lsb          = UINT_MAX;
+    sh->delta_pic_order_cnt_bottom = INT_MAX;
+    sh->delta_pic_order_cnt[0]     = INT_MAX;
+    sh->delta_pic_order_cnt[1]     = INT_MAX;
+}
+
+
+/* Zero out the per-picture image parameter state. */
+void h264_init_img(h264_Info* pInfo)
+{
+    h264_memset(&pInfo->img, 0x0, sizeof(h264_img_par));
+}
+
+
+/**
+ * h264_init_sps_pps - lay out persistent memory and invalidate all SPS/PPS.
+ *
+ * Partitions the caller-supplied persistent buffer into the SPS table, PPS
+ * table and the two reference-frame POC offset tables, then writes an
+ * invalid parameter-set id (0xff) into every stored slot so stale data is
+ * never mistaken for an activated parameter set.
+ *
+ * NOTE(review): persist_mem is truncated through (uint32_t); this assumes
+ * 32-bit pointers (or low physical addresses) -- confirm for 64-bit builds.
+ */
+void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem)
+{
+    int32_t i;
+
+    h264_Info * pInfo = &(parser->info);
+
+    parser->sps_pps_ddr_paddr = (uint32_t)persist_mem;
+
+    /* Layout: [SPS table][PPS table][POC offset table][scratch POC table]. */
+    pInfo->SPS_PADDR_GL = parser->sps_pps_ddr_paddr;
+    pInfo->PPS_PADDR_GL = pInfo->SPS_PADDR_GL + MAX_NUM_SPS * sizeof(seq_param_set_all);
+    pInfo->OFFSET_REF_FRAME_PADDR_GL = pInfo->PPS_PADDR_GL + MAX_NUM_PPS * sizeof(pic_param_set);
+    pInfo->TMP_OFFSET_REFFRM_PADDR_GL = pInfo->OFFSET_REF_FRAME_PADDR_GL +
+                                        MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+
+    h264_memset( &(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used) );
+    h264_memset( &(pInfo->active_PPS), 0x0, sizeof(pic_param_set) );
+
+    /* Global for SPS   & PPS */
+    for (i=0; i<MAX_NUM_SPS; i++)
+    {
+        pInfo->active_SPS.seq_parameter_set_id = 0xff;   // 0xff == "slot never written"
+        h264_Parse_Copy_Sps_To_DDR (pInfo, &(pInfo->active_SPS), i);
+    }
+    for (i=0; i<MAX_NUM_PPS; i++)
+    {
+        pInfo->active_PPS.seq_parameter_set_id = 0xff;
+        h264_Parse_Copy_Pps_To_DDR (pInfo, &(pInfo->active_PPS), i);
+    }
+
+    pInfo->active_SPS.seq_parameter_set_id = 0xff;
+    pInfo->sps_valid = 0;
+    pInfo->got_start = 0;
+
+    return;
+}
+
+
+/**
+ * h264_init_Info_under_sps_pps_level - reset per-stream decode state while
+ * leaving the stored SPS/PPS tables in DDR untouched.
+ *
+ * Clears the DPB, slice headers, SEI state and image parameters, zeroes the
+ * prediction-weight-table bookkeeping and reference lists, re-arms the
+ * frame-boundary / workload flags, then re-runs the old-slice, DPB and SEI
+ * initialisers.
+ */
+void h264_init_Info_under_sps_pps_level(h264_Info* pInfo)
+{
+    int32_t i=0;
+
+    h264_memset( &(pInfo->dpb), 0x0, sizeof(h264_DecodedPictureBuffer) );
+    h264_memset( &(pInfo->SliceHeader), 0x0, sizeof(h264_Slice_Header_t) );
+    h264_memset( &(pInfo->old_slice), 0x0, sizeof(OldSliceParams) );
+    h264_memset( &(pInfo->sei_information), 0x0, sizeof(sei_info) );
+    h264_memset( &(pInfo->img), 0x0, sizeof(h264_img_par) );
+
+    pInfo->h264_list_replacement = 0;
+
+    /* Prediction weight table byte/bit window, captured during slice parse. */
+    pInfo->h264_pwt_start_byte_offset = 0;
+    pInfo->h264_pwt_start_bit_offset = 0;
+    pInfo->h264_pwt_end_byte_offset = 0;
+    pInfo->h264_pwt_end_bit_offset = 0;
+    pInfo->h264_pwt_enabled = 0;
+
+    for (i=0; i<32; i++)
+    {
+        pInfo->slice_ref_list0[i] = 0;
+        pInfo->slice_ref_list1[i] = 0;
+    }
+
+    pInfo->qm_present_list = 0;
+
+    /* 0xff / 255 below are "nothing seen yet" sentinels. */
+    pInfo->nal_unit_type = 0;
+    pInfo->old_nal_unit_type = 0xff;
+
+    pInfo->push_to_cur = 0;
+    pInfo->Is_first_frame_in_stream = 1;
+    pInfo->Is_SPS_updated = 0;
+    pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+
+    pInfo->is_frame_boundary_detected_by_non_slice_nal = 0;
+    pInfo->is_frame_boundary_detected_by_slice_nal = 0;
+    pInfo->is_current_workload_done = 0;
+
+    pInfo->sei_rp_received = 0;
+    pInfo->last_I_frame_idc = 255;
+    pInfo->wl_err_curr = 0;
+    pInfo->wl_err_next = 0;
+
+    pInfo->primary_pic_type_plus_one = 0;
+    pInfo->sei_b_state_ready = 0;
+
+    /* Init old slice structure  */
+    h264_init_old_slice(pInfo);
+
+    /* init_dpb */
+    h264_init_dpb(&(pInfo->dpb));
+
+    /* init_sei	*/
+    h264_sei_stream_initialise(pInfo);
+
+}
+
+/* Full reset of the parser info block to its power-on state. */
+void h264_init_Info(h264_Info* pInfo)
+{
+    h264_memset(pInfo, 0x0, sizeof(h264_Info));
+
+    /* Sentinel fields that must not stay at the memset value. */
+    pInfo->old_nal_unit_type        = 0xff;
+    pInfo->Is_first_frame_in_stream = 1;
+    pInfo->img.frame_count          = 0;
+    pInfo->last_I_frame_idc         = 255;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/////////////////////////////////////////////////////
+//
+// Judge whether the current field slice is the second
+// field of the picture being decoded
+//
+/////////////////////////////////////////////////////
+/**
+ * h264_is_second_field - is the current field slice the second field of the
+ * picture currently being assembled in the DPB?
+ *
+ * Returns 1 when the current field pairs with the previously decoded field
+ * (opposite parity, same frame_num or an IDR, and matching reference
+ * status), otherwise 0.
+ *
+ * Rewrite notes: the original copied both slice-header structs by value on
+ * every call and evaluated viddec_h264_get_is_used() twice; it now reads
+ * through const pointers and caches the is_used bits.  The guard-clause
+ * order preserves the original short-circuit evaluation exactly.
+ */
+int32_t h264_is_second_field(h264_Info * pInfo)
+{
+    const h264_Slice_Header_t *cur_slice = &pInfo->SliceHeader;
+    const OldSliceParams      *old_slice = &pInfo->old_slice;
+    int32_t is_used;
+
+    /* Only a field picture can be a second field. */
+    if (cur_slice->structure == FRAME)
+        return 0;
+
+    /* The frame store under construction must already hold exactly one
+     * field: is_used 1 or 2, i.e. neither empty (0) nor complete (3). */
+    if (MPD_DPB_FS_NULL_IDC == pInfo->dpb.fs_dec_idc)
+        return 0;
+
+    is_used = viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc]));
+    if ((3 == is_used) || (0 == is_used))
+        return 0;
+
+    /* Same frame_num (or an IDR picture) ... */
+    if (!((cur_slice->frame_num == old_slice->frame_num) || cur_slice->idr_flag))
+        return 0;
+
+    /* ... with a different structure ... */
+    if (old_slice->structure == cur_slice->structure)
+        return 0;
+
+    /* ... of opposite parity and matching reference status
+     * (both reference or both non-reference). */
+    if (((cur_slice->structure == TOP_FIELD && old_slice->structure == BOTTOM_FIELD) ||
+         (old_slice->structure == TOP_FIELD && cur_slice->structure == BOTTOM_FIELD)) &&
+        ((old_slice->nal_ref_idc == 0 && cur_slice->nal_ref_idc == 0) ||
+         (old_slice->nal_ref_idc != 0 && cur_slice->nal_ref_idc != 0)))
+    {
+        return 1;
+    }
+
+    return 0;
+} //// End of h264_is_second_field
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+/**
+ * h264_is_new_picture_start - does the current slice start a new picture?
+ *
+ * Implements the "first VCL NAL unit of a new picture" comparison of H.264
+ * spec clause 7.4.1.2.4: ORs together every mismatch between the previous
+ * and current slice headers that mandates a picture boundary.  A pending
+ * boundary already signalled by a preceding non-slice NAL forces the result.
+ *
+ * Both headers are passed by value (interface preserved).  Returns non-zero
+ * when a new picture starts.
+ */
+int32_t h264_is_new_picture_start(h264_Info * pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice)
+{
+    int result = 0;
+
+    /* An AUD/SPS/PPS/SEI already told us the previous picture ended. */
+    if (pInfo->number_of_first_au_info_nal_before_first_slice)
+    {
+        pInfo->number_of_first_au_info_nal_before_first_slice = 0;
+        return 1;
+    }
+
+
+
+    result |= (old_slice.pic_parameter_id != cur_slice.pic_parameter_id);
+    result |= (old_slice.frame_num != cur_slice.frame_num);
+    result |= (old_slice.field_pic_flag != cur_slice.field_pic_flag);
+    if (cur_slice.field_pic_flag && old_slice.field_pic_flag)
+    {
+        result |= (old_slice.bottom_field_flag != cur_slice.bottom_field_flag);
+    }
+
+    /* nal_ref_idc differences only matter when one slice is non-reference. */
+    result |= (old_slice.nal_ref_idc != cur_slice.nal_ref_idc) && \
+              ((old_slice.nal_ref_idc == 0) || (cur_slice.nal_ref_idc == 0));
+    result |= ( old_slice.idr_flag != cur_slice.idr_flag);
+
+    if (cur_slice.idr_flag && old_slice.idr_flag)
+    {
+        result |= (old_slice.idr_pic_id != cur_slice.idr_pic_id);
+    }
+
+    /* Picture-order-count fields, according to the active SPS's POC type. */
+    if (pInfo->active_SPS.pic_order_cnt_type == 0)
+    {
+        result |=  (old_slice.pic_order_cnt_lsb          != cur_slice.pic_order_cnt_lsb);
+        result |=  (old_slice.delta_pic_order_cnt_bottom != cur_slice.delta_pic_order_cnt_bottom);
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 1)
+    {
+        result |= (old_slice.delta_pic_order_cnt[0] != cur_slice.delta_pic_order_cnt[0]);
+        result |= (old_slice.delta_pic_order_cnt[1] != cur_slice.delta_pic_order_cnt[1]);
+    }
+
+    return result;
+}
+
+
+/**
+ * h264_check_previous_frame_end - detect a frame boundary from a non-slice NAL.
+ *
+ * If the previous NAL was a coded slice (IDR or non-IDR) and the current
+ * NAL is one that may only begin a new access unit (AUD, SPS, PPS, SEI,
+ * end-of-sequence, end-of-stream, or a reserved type), the previous frame
+ * is complete -- provided a whole frame or the second field was received.
+ * Marks the current workload done and returns 1 at a boundary, else 0.
+ */
+int32_t h264_check_previous_frame_end(h264_Info * pInfo)
+{
+    int result = 0;
+
+    if ( (h264_NAL_UNIT_TYPE_SLICE==pInfo->old_nal_unit_type)||(h264_NAL_UNIT_TYPE_IDR==pInfo->old_nal_unit_type) )
+    {
+
+        switch ( pInfo->nal_unit_type )
+        {
+        case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+        case h264_NAL_UNIT_TYPE_SPS:
+        case h264_NAL_UNIT_TYPE_PPS:
+        case h264_NAL_UNIT_TYPE_SEI:
+        case h264_NAL_UNIT_TYPE_EOSeq:
+        case h264_NAL_UNIT_TYPE_EOstream:
+        case h264_NAL_UNIT_TYPE_Reserved1:
+        case h264_NAL_UNIT_TYPE_Reserved2:
+        case h264_NAL_UNIT_TYPE_Reserved3:
+        case h264_NAL_UNIT_TYPE_Reserved4:
+        case h264_NAL_UNIT_TYPE_Reserved5:
+        {
+            pInfo->img.current_slice_num = 0;
+
+            /* A boundary only counts if the picture was fully received:
+             * either a frame picture or the second of a field pair. */
+            if ((pInfo->img.structure == FRAME) || (pInfo->img.second_field)) {
+                pInfo->is_frame_boundary_detected_by_non_slice_nal =1;
+                pInfo->is_current_workload_done=1;
+                result=1;
+            }
+            break;
+        }
+        default:
+            break;
+        }
+
+    }
+
+    return result;
+
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////
+// 1) Update old slice structure for frame boundary detection
+//////////////////////////////////////////////////////////////
+/**
+ * h264_update_old_slice - snapshot the current slice header into old_slice
+ * (for the new-picture checks of spec clause 7.4.1.2.4), then promote the
+ * freshly parsed next_SliceHeader to be the current slice header.
+ *
+ * Only fields that participate in the boundary comparison are copied into
+ * old_slice; conditional fields (bottom_field_flag, idr_pic_id, POC deltas)
+ * are copied only when their presence conditions held for this slice.
+ */
+void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader)
+{
+    pInfo->old_slice.pic_parameter_id = pInfo->SliceHeader.pic_parameter_id;
+
+    pInfo->old_slice.frame_num = pInfo->SliceHeader.frame_num;
+
+    pInfo->old_slice.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
+
+    if (pInfo->SliceHeader.field_pic_flag)
+    {
+        pInfo->old_slice.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
+    }
+
+    pInfo->old_slice.nal_ref_idc   = pInfo->SliceHeader.nal_ref_idc;
+
+    pInfo->old_slice.structure = pInfo->SliceHeader.structure;
+
+    pInfo->old_slice.idr_flag = pInfo->SliceHeader.idr_flag;
+    if (pInfo->SliceHeader.idr_flag)
+    {
+        pInfo->old_slice.idr_pic_id = pInfo->SliceHeader.idr_pic_id;
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 0)
+    {
+        pInfo->old_slice.pic_order_cnt_lsb          = pInfo->SliceHeader.pic_order_cnt_lsb;
+        pInfo->old_slice.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
+    }
+
+    if (pInfo->active_SPS.pic_order_cnt_type == 1)
+    {
+        pInfo->old_slice.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
+        pInfo->old_slice.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
+    }
+
+    ////////////////////////////// Next to current
+    memcpy(&pInfo->SliceHeader, &next_SliceHeader, sizeof(h264_Slice_Header_t));
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// Initialization for new picture
+//////////////////////////////////////////////////////////////////////////////
+/**
+ * h264_update_img_info - latch the per-picture parameters for a new picture.
+ *
+ * Copies the slice-header and active-SPS fields that the decode loop reads
+ * into pInfo->img, updates the SEI recovery-point target frame, and resets
+ * the soft DPB when the coded resolution changed.
+ */
+void h264_update_img_info(h264_Info * pInfo )
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    pInfo->img.frame_num = pInfo->SliceHeader.frame_num;
+    pInfo->img.structure = pInfo->SliceHeader.structure;
+
+    pInfo->img.field_pic_flag = pInfo->SliceHeader.field_pic_flag;
+    pInfo->img.bottom_field_flag = pInfo->SliceHeader.bottom_field_flag;
+
+    /* MBAFF applies only to frame pictures of an MBAFF-coded sequence. */
+    pInfo->img.MbaffFrameFlag  = pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag));
+    pInfo->img.pic_order_cnt_type = pInfo->active_SPS.pic_order_cnt_type;
+
+    if (pInfo->img.pic_order_cnt_type == 1) {
+        pInfo->img.num_ref_frames_in_pic_order_cnt_cycle = pInfo->active_SPS.num_ref_frames_in_pic_order_cnt_cycle;
+        pInfo->img.delta_pic_order_always_zero_flag = pInfo->active_SPS.delta_pic_order_always_zero_flag;
+        pInfo->img.offset_for_non_ref_pic = pInfo->active_SPS.offset_for_non_ref_pic;
+        pInfo->img.offset_for_top_to_bottom_field = pInfo->active_SPS.offset_for_top_to_bottom_field;
+    }
+
+    pInfo->img.pic_order_cnt_lsb = pInfo->SliceHeader.pic_order_cnt_lsb;
+    //pInfo->img.pic_order_cnt_msb = pInfo->SliceHeader.pic_order_cnt_msb;
+    pInfo->img.delta_pic_order_cnt_bottom = pInfo->SliceHeader.delta_pic_order_cnt_bottom;
+    pInfo->img.delta_pic_order_cnt[0] = pInfo->SliceHeader.delta_pic_order_cnt[0];
+    pInfo->img.delta_pic_order_cnt[1] = pInfo->SliceHeader.delta_pic_order_cnt[1];
+
+
+    pInfo->img.PreviousFrameNum = pInfo->old_slice.frame_num;
+
+    pInfo->img.no_output_of_prior_pics_flag = pInfo->SliceHeader.sh_dec_refpic.no_output_of_prior_pics_flag;
+
+    ////////////////////////////////////////////////// Check SEI recovery point
+    if (pInfo->sei_information.recovery_point) {
+        int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+        pInfo->sei_information.recovery_frame_num = (pInfo->img.frame_num + pInfo->sei_information.recovery_frame_cnt) % MaxFrameNum;
+    }
+
+    /* An IDR is always a recovery point by itself. */
+    if (pInfo->SliceHeader.idr_flag)
+        pInfo->sei_information.recovery_frame_num = pInfo->img.frame_num;
+
+
+
+    /////////////////////////////////////////////////Resolution Change
+    pInfo->img.curr_has_mmco_5 = 0;
+
+    if ( (pInfo->img.PicWidthInMbs != p_dpb->PicWidthInMbs)||
+            (pInfo->img.FrameHeightInMbs != p_dpb->FrameHeightInMbs) )
+    {
+        int32_t no_output_old_pics = (pInfo->SliceHeader.idr_flag)? pInfo->img.no_output_of_prior_pics_flag : 0;
+
+        // If resolution changed, reset the soft DPB here
+        h264_dpb_reset_dpb(pInfo, pInfo->img.PicWidthInMbs, pInfo->img.FrameHeightInMbs, 1, no_output_old_pics);
+    }
+
+    return;
+
+} ///// End of init new frame
+
+
+/**
+ * h264_update_frame_type - record the coding type of the current picture.
+ *
+ * Packs the coding type (IDR/I/P/B) of the picture being decoded into the
+ * pic_type bitfield of its DPB frame store: frame pictures write the FRAME
+ * slot (with the structure bit set), fields update only their own TOP or
+ * BOTTOM slot and preserve the other field's bits.  A picture already
+ * marked B is never downgraded to P, and I is only recorded over an
+ * invalid slot.  last_I_frame_idc tracks the most recent I/IDR store for
+ * error concealment; fields require a previously received SEI recovery
+ * point to count.
+ *
+ * Cleanup: doubled ";;" statements, an "#if 1" wrapper and stale
+ * commented-out code were removed; behavior is unchanged.
+ */
+void h264_update_frame_type(h264_Info * pInfo )
+{
+/* Shorthand (lvalue) for the pic_type bitfield of the current frame store. */
+#define CUR_PIC_TYPE (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)
+
+    if (pInfo->img.structure == FRAME)
+    {
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            /* NOTE(review): only this branch bounds-checks fs_dec_idc; the
+             * field branches below assume it is already valid -- confirm. */
+            if (pInfo->dpb.fs_dec_idc < NUM_DPB_FRAME_STORES)
+            {
+                CUR_PIC_TYPE = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET) | (FRAME_TYPE_IDR << FRAME_TYPE_FRAME_OFFSET);
+            }
+        }
+        else
+        {
+            switch (pInfo->SliceHeader.slice_type)
+            {
+            case h264_PtypeB:
+                CUR_PIC_TYPE = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET) | (FRAME_TYPE_B << FRAME_TYPE_FRAME_OFFSET);
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                /* Never downgrade a frame already marked B. */
+                if (((CUR_PIC_TYPE & (0x7 << FRAME_TYPE_FRAME_OFFSET)) >> FRAME_TYPE_FRAME_OFFSET) != FRAME_TYPE_B)
+                    CUR_PIC_TYPE = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET) | (FRAME_TYPE_P << FRAME_TYPE_FRAME_OFFSET);
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if (((CUR_PIC_TYPE & (0x7 << FRAME_TYPE_FRAME_OFFSET)) >> FRAME_TYPE_FRAME_OFFSET) == FRAME_TYPE_INVALID)
+                {
+                    CUR_PIC_TYPE = (0x1 << FRAME_TYPE_STRUCTRUE_OFFSET) | (FRAME_TYPE_I << FRAME_TYPE_FRAME_OFFSET);
+                }
+                pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc;
+                break;
+            default:
+                break;
+            }
+        }
+    }
+    else if (pInfo->img.structure == TOP_FIELD)
+    {
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            CUR_PIC_TYPE = (FRAME_TYPE_IDR << FRAME_TYPE_TOP_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+        }
+        else
+        {
+            switch (pInfo->SliceHeader.slice_type)
+            {
+            case h264_PtypeB:
+                CUR_PIC_TYPE = (FRAME_TYPE_B << FRAME_TYPE_TOP_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                if (((CUR_PIC_TYPE & (0x7 << FRAME_TYPE_TOP_OFFSET)) >> FRAME_TYPE_TOP_OFFSET) != FRAME_TYPE_B)
+                    CUR_PIC_TYPE = (FRAME_TYPE_P << FRAME_TYPE_TOP_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if (((CUR_PIC_TYPE & (0x7 << FRAME_TYPE_TOP_OFFSET)) >> FRAME_TYPE_TOP_OFFSET) == FRAME_TYPE_INVALID)
+                {
+                    CUR_PIC_TYPE = (FRAME_TYPE_I << FRAME_TYPE_TOP_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_BOTTOM_OFFSET));
+                }
+                /* Fields only count as a recovery I picture after an SEI
+                 * recovery point has been seen. */
+                if (pInfo->sei_rp_received)
+                    pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc;
+                else
+                    pInfo->last_I_frame_idc = 255;
+                break;
+            default:
+                break;
+            }
+        }
+    }
+    else if (pInfo->img.structure == BOTTOM_FIELD)
+    {
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            CUR_PIC_TYPE = (FRAME_TYPE_IDR << FRAME_TYPE_BOTTOM_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_TOP_OFFSET));
+        }
+        else
+        {
+            switch (pInfo->SliceHeader.slice_type)
+            {
+            case h264_PtypeB:
+                CUR_PIC_TYPE = (FRAME_TYPE_B << FRAME_TYPE_BOTTOM_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                break;
+            case h264_PtypeSP:
+            case h264_PtypeP:
+                if (((CUR_PIC_TYPE & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)) >> FRAME_TYPE_BOTTOM_OFFSET) != FRAME_TYPE_B)
+                    CUR_PIC_TYPE = (FRAME_TYPE_P << FRAME_TYPE_BOTTOM_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                break;
+            case h264_PtypeI:
+            case h264_PtypeSI:
+                if (((CUR_PIC_TYPE & (0x7 << FRAME_TYPE_BOTTOM_OFFSET)) >> FRAME_TYPE_BOTTOM_OFFSET) == FRAME_TYPE_INVALID)
+                {
+                    CUR_PIC_TYPE = (FRAME_TYPE_I << FRAME_TYPE_BOTTOM_OFFSET) | (CUR_PIC_TYPE & (0x7 << FRAME_TYPE_TOP_OFFSET));
+                }
+                /* Bottom field: tag the store index with the field bit. */
+                if (pInfo->sei_rp_received)
+                    pInfo->last_I_frame_idc = pInfo->dpb.fs_dec_idc + PUT_LIST_INDEX_FIELD_BIT(1);
+                else
+                    pInfo->last_I_frame_idc = 255;
+                break;
+            default:
+                break;
+            }
+        }
+    }
+
+#undef CUR_PIC_TYPE
+}
+
+
+//////#endif ///////////// IFDEF H264_PARSE_C///////////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_bsd.c b/mixvbp/vbp_plugin/h264/h264parse_bsd.c
new file mode 100755
index 0000000..40c7559
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_bsd.c
@@ -0,0 +1,228 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    h264 bistream decoding
+//
+///////////////////////////////////////////////////////////////////////*/
+
+
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_parser_ops.h"
+
+
+
+
+
+/**
+   get_codeNum     :Get codenum based on sec 9.1 of H264 spec.
+   @param      cxt : Buffer adress & size are part inputs, the cxt is updated
+                     with codeNum & sign on sucess.
+                     Assumption: codeNum is a max of 32 bits
+
+   @retval       1 : Sucessfuly found a code num, cxt is updated with codeNum, sign, and size of code.
+   @retval       0 : Couldn't find a code in the current buffer.
+   be freed.
+*/
+
// Scans the bitstream for one Exp-Golomb codeword (H.264 spec sec 9.1):
// counts leading zero bits up to the first 1 bit, then reads that many
// more bits to form codeNum.  Works a byte at a time through the
// viddec_pm peek/get cursor.  Returns MAX_INT32_VALUE if the underlying
// peek fails (end of buffer).
uint32_t h264_get_codeNum(void *parent, h264_Info* pInfo)
{
    int32_t    leadingZeroBits= 0;
    uint32_t    temp = 0, match = 0, noOfBits = 0, count = 0;
    uint32_t   codeNum =0;
    uint32_t   bits_offset =0, byte_offset =0;
    uint8_t    is_emul =0;
    uint8_t    is_first_byte = 1;
    uint32_t   length =0;
    uint32_t   bits_need_add_in_first_byte =0;
    int32_t    bits_operation_result=0;

    //remove warning (parameter is unused in this implementation)
    pInfo = pInfo;

    ////// Step 1: parse through zero bits until we find a bit with value 1.
    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);


    while (!match)
    {
        if ((bits_offset != 0) && ( is_first_byte == 1))
        {
            //we handle byte at a time, if we have offset then for first
            //   byte handle only 8 - offset bits
            noOfBits = (uint8_t)(8 - bits_offset);
            bits_operation_result = viddec_pm_peek_bits(parent, &temp, noOfBits);


            // Left-align the partial byte so the MSB test below works the
            // same way as for a full byte.
            temp = (temp << bits_offset);
            if (temp!=0)
            {
                // The leading 1 lives in the partial first byte; remember the
                // offset so the bit-position arithmetic in step 2 stays right.
                bits_need_add_in_first_byte = bits_offset;
            }
            is_first_byte =0;
        }
        else
        {
            noOfBits = 8;/* always 8 bits as we read a byte at a time */
            bits_operation_result = viddec_pm_peek_bits(parent, &temp, 8);

        }

        // viddec_pm_peek_bits signals failure with -1 (no more data).
        if (-1==bits_operation_result)
        {
            return MAX_INT32_VALUE;
        }

        if (temp != 0)
        {
            // if byte!=0 we have at least one bit with value 1.
            count=1;
            while (((temp & 0x80) != 0x80) && (count <= noOfBits))
            {
                count++;
                temp = temp <<1;
            }
            //At this point we get the bit position of 1 in current byte(count).

            match = 1;
            leadingZeroBits += count;
        }
        else
        {
            // we don't have a 1 in current byte
            leadingZeroBits += noOfBits;
        }

        if (!match)
        {
            //actually move the bitoff by viddec_pm_get_bits
            viddec_pm_get_bits(parent, &temp, noOfBits);
        }
        else
        {
            //actually move the bitoff by viddec_pm_get_bits
            viddec_pm_get_bits(parent, &temp, count);
        }

    }
    ////// step 2: Now read the next (leadingZeroBits-1) bits to get the encoded value.


    if (match)
    {

        viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
        /* bit position in current byte */
        //count = (uint8_t)((leadingZeroBits + bits_offset)& 0x7);
        count = ((count + bits_need_add_in_first_byte)& 0x7);

        // The stop bit itself is not part of the suffix; read exactly
        // (leadingZeroBits - 1) more bits, a byte-chunk at a time.
        leadingZeroBits --;
        length =  leadingZeroBits;
        codeNum = 0;
        noOfBits = 8 - count;


        while (leadingZeroBits > 0)
        {
            if (noOfBits < (uint32_t)leadingZeroBits)
            {
                viddec_pm_get_bits(parent, &temp, noOfBits);


                codeNum = (codeNum << noOfBits) | temp;
                leadingZeroBits -= noOfBits;
            }
            else
            {
                viddec_pm_get_bits(parent, &temp, leadingZeroBits);

                codeNum = (codeNum << leadingZeroBits) | temp;
                leadingZeroBits = 0;
            }


            noOfBits = 8;
        }
        // update codeNum = 2 ** (leadingZeroBits) -1 + read_bits(leadingZeroBits).
        codeNum = codeNum + (1 << length) -1;

    }

    // NOTE(review): this trailing peek appears to touch the remaining bits of
    // the current byte so the parser-side emulation-prevention state stays in
    // sync -- TODO confirm against viddec_pm_peek_bits semantics.
    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
    if (bits_offset!=0)
    {
        viddec_pm_peek_bits(parent, &temp, 8-bits_offset);
    }

    return codeNum;
}
+
+
+/*---------------------------------------*/
+/*---------------------------------------*/
+int32_t h264_GetVLCElement(void *parent, h264_Info* pInfo, uint8_t bIsSigned)
+{
+    int32_t sval = 0;
+    signed char sign;
+
+    sval = h264_get_codeNum(parent , pInfo);
+
+    if (bIsSigned) //get signed integer golomb code else the value is unsigned
+    {
+        sign = (sval & 0x1)?1:-1;
+        sval = (sval +1) >> 1;
+        sval = sval * sign;
+    }
+
+    return sval;
+} // Ipp32s H264Bitstream::GetVLCElement(bool bIsSigned)
+
+///
+/// Check whether more RBSP data left in current NAL
+///
+uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo)
+{
+    uint8_t cnt = 0;
+
+    uint8_t  is_emul =0;
+    uint8_t 	cur_byte = 0;
+    int32_t  shift_bits =0;
+    uint32_t ctr_bit = 0;
+    uint32_t bits_offset =0, byte_offset =0;
+
+    //remove warning
+    pInfo = pInfo;
+
+    if (!viddec_pm_is_nomoredata(parent))
+        return 1;
+
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+    shift_bits = 7-bits_offset;
+
+    // read one byte
+    viddec_pm_get_cur_byte(parent, &cur_byte);
+
+    ctr_bit = ((cur_byte)>> (shift_bits--)) & 0x01;
+
+    // a stop bit has to be one
+    if (ctr_bit==0)
+        return 1;
+
+    while (shift_bits>=0 && !cnt)
+    {
+        cnt |= (((cur_byte)>> (shift_bits--)) & 0x01);   // set up control bit
+    }
+
+    return (cnt);
+}
+
+
+
+///////////// EOF/////////////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_dpb.c b/mixvbp/vbp_plugin/h264/h264parse_dpb.c
new file mode 100755
index 0000000..13adb1b
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_dpb.c
@@ -0,0 +1,4222 @@
+/*!
+ ***********************************************************************
+ *  \file: h264_dpb_ctl.c
+ *
+ ***********************************************************************
+ */
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+
+//#include <limits.h>
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+//#include "h264_debug.h"
+
+#ifndef NULL
+#define NULL 0
+#endif
+//#ifndef USER_MODE
+//#define NULL 0
+//#endif
+
+///////////////////////// DPB init //////////////////////////////////////////
+//////////////////////////////////////////////////////////////////////////////
+// Init DPB
+// Description: init dpb, which should be called while open
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb)
+{
+    int32_t i;
+
+    //// Init DPB to zero
+    //h264_memset(p_dpb, 0x0, sizeof(h264_DecodedPictureBuffer) );
+
+
+    for (i=0; i<NUM_DPB_FRAME_STORES; i++)
+    {
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+
+///////////////////////// Reference list management //////////////////////////
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
//////////////////////////////////////////////////////////////////////////////
// h264_dpb_add_ref_list ()
//
// Adds an idc to the short term reference list (fs_ref_idc).
// NOTE(review): no bounds check on ref_frames_in_buffer -- assumes the
// caller never adds more entries than the list can hold.
//////////////////////////////////////////////////////////////////////////////
void h264_dpb_add_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
{
    p_dpb->fs_ref_idc[p_dpb->ref_frames_in_buffer] = ref_idc;
    p_dpb->ref_frames_in_buffer++;
}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_add_ltref_list ()
+//
+// Adds an idc to the long term reference list
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_add_ltref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    p_dpb->fs_ltref_idc[p_dpb->ltref_frames_in_buffer] = ref_idc;
+    p_dpb->ltref_frames_in_buffer++;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_all_ref_lists (h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting)
+//
+// Decide whether the current picture needs to be added to the reference lists
+// active_fs should be set-up prior to calling this function
+//
+// Check if we need to search the lists here
+// or can we go straight to adding to ref lists..
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb, int32_t NonExisting)
+{
+    if (NonExisting)
+        h264_dpb_set_active_fs(p_dpb,p_dpb->fs_non_exist_idc);
+    else
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+    //if(active_fs->is_reference)
+    if (p_dpb->active_fs->frame.used_for_reference)
+    {
+        if (viddec_h264_get_is_long_term(p_dpb->active_fs))
+        {
+            if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME)
+                h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            else
+            {
+                uint32_t found_in_list = 0, i = 0;
+                for (i = 0; (i < p_dpb->ltref_frames_in_buffer) && (found_in_list == 0); i++) {
+                    if (p_dpb->fs_ltref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1;
+                }
+
+                if (found_in_list == 0) h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            }
+        }
+        else
+        {
+            if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME) {
+                h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            } else
+            {
+                uint32_t found_in_list = 0, i = 0;
+
+                for (i = 0; (i < p_dpb->ref_frames_in_buffer) && (found_in_list == 0); i++)
+                {
+                    if (p_dpb->fs_ref_idc[i] == p_dpb->active_fs->fs_idc) found_in_list = 1;
+                }
+
+                if (found_in_list == 0) h264_dpb_add_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+            }
+        }
+    }
+
+    return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Set active fs
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb, int32_t index)
+{
+    p_dpb->active_fs = &p_dpb->fs[index];
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Sort reference list
+//////////////////////////////////////////////////////////////////////////////
+
// Exchange-sorts sort_indices (ascending when desc == 0, descending
// otherwise) and applies the same permutation to list, keeping the
// idc/key pairs aligned.
//
// Fix: the original guard used bitwise ops on comparison results,
//   (desc & (a < b)) | (~desc & (a > b))
// which is only correct because callers pass desc as exactly 0 or 1
// (~desc relies on two's-complement masking).  Use a logical selection
// instead; any non-zero desc now means "descending", which is
// backward-compatible with the existing 0/1 callers.
void h264_list_sort(uint8_t *list, int32_t *sort_indices, int32_t size, int32_t desc)
{
    int32_t j, k;
    int32_t key_tmp;
    uint8_t idc_tmp;
    int32_t out_of_order;

    for (j = 0; j < size - 1; j++)
    {
        for (k = j + 1; k < size; k++)
        {
            out_of_order = desc ? (sort_indices[j] < sort_indices[k])
                                : (sort_indices[j] > sort_indices[k]);
            if (out_of_order)
            {
                key_tmp         = sort_indices[k];
                sort_indices[k] = sort_indices[j];
                sort_indices[j] = key_tmp;

                idc_tmp = list[k];
                list[k] = list[j];
                list[j] = idc_tmp;
            }
        }
    }
}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_bottom_field_ref ()
+//
+// Used to sort a list based on a corresponding sort indices
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_bottom_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term)
+{
+    int32_t temp;
+    if (long_term) temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && (p_dpb->active_fs->bottom_field.is_long_term))  ? 1 : 0;
+    else          temp = ((p_dpb->active_fs->bottom_field.used_for_reference) && !(p_dpb->active_fs->bottom_field.is_long_term)) ? 1 : 0;
+
+    return temp;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_pic_is_top_field_ref ()
+//
+// Used to sort a list based on a corresponding sort indices
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_pic_is_top_field_ref(h264_DecodedPictureBuffer * p_dpb, int32_t long_term)
+{
+    int32_t temp;
+    if (long_term)
+        temp = ((p_dpb->active_fs->top_field.used_for_reference) && (p_dpb->active_fs->top_field.is_long_term))  ? 1 : 0;
+    else
+        temp = ((p_dpb->active_fs->top_field.used_for_reference) && !(p_dpb->active_fs->top_field.is_long_term)) ? 1 : 0;
+
+    return temp;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gen_pic_list_from_frame_list ()
+//
+// Used to sort a list based on a corresponding sort indices
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_gen_pic_list_from_frame_list(h264_DecodedPictureBuffer *p_dpb, uint8_t *pic_list, uint8_t *frame_list, int32_t currPicStructure, int32_t list_size, int32_t long_term)
+{
+    int32_t top_idx, bot_idx, got_pic, list_idx;
+    int32_t lterm;
+
+    list_idx = 0;
+    lterm = (long_term)? 1:0;
+
+    if (list_size) {
+
+
+        top_idx = 0;
+        bot_idx = 0;
+
+        if (currPicStructure == TOP_FIELD) {
+            while ((top_idx < list_size)||(bot_idx < list_size))
+            {
+                /////////////////////////////////////////// ref Top Field
+                got_pic = 0;
+                while ((top_idx < list_size) & ~got_pic)
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1)
+                    {
+                        if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term))
+                        {
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0);  // top_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    top_idx++;
+                }
+
+                /////////////////////////////////////////// ref Bottom Field
+                got_pic = 0;
+                while ((bot_idx < list_size) & ~got_pic)
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2)
+                    {
+                        if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term))
+                        {
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1);  // bottom_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    bot_idx++;
+                }
+            }
+        }
+
+        /////////////////////////////////////////////// current Bottom Field
+        if (currPicStructure == BOTTOM_FIELD)	{
+            while ((top_idx < list_size)||(bot_idx < list_size))
+            {
+                /////////////////////////////////////////// ref Top Field
+                got_pic = 0;
+                while ((bot_idx < list_size) && (!(got_pic)))
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[bot_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x2) {
+                        if (h264_dpb_pic_is_bottom_field_ref(p_dpb, long_term)) {
+                            // short term ref pic
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[bot_idx] + PUT_LIST_INDEX_FIELD_BIT(1);  // bottom_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    bot_idx++;
+                }
+
+                /////////////////////////////////////////// ref Bottom Field
+                got_pic = 0;
+                while ((top_idx < list_size) && (!(got_pic)))
+                {
+                    h264_dpb_set_active_fs(p_dpb, frame_list[top_idx]);
+                    if ((viddec_h264_get_is_used(p_dpb->active_fs))&0x1) {
+                        if (h264_dpb_pic_is_top_field_ref(p_dpb, long_term)) {
+                            // short term ref pic
+                            pic_list[list_idx] = PUT_LIST_LONG_TERM_BITS(lterm) + frame_list[top_idx] + PUT_LIST_INDEX_FIELD_BIT(0);  // top_field
+                            list_idx++;
+                            got_pic = 1;
+                        }
+                    }
+                    top_idx++;
+                }
+            }
+        }
+    }
+
+    return list_idx;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ref_list ()
+//
+// Removes an idc from the refernce list and updates list after
+//
+
+void h264_dpb_remove_ref_list(h264_DecodedPictureBuffer * p_dpb, int32_t ref_idc)
+{
+    uint8_t idx   = 0;
+    int32_t Found = 0;
+
+    while ((idx < p_dpb->ref_frames_in_buffer) && (!(Found)))
+    {
+        if (p_dpb->fs_ref_idc[idx] == ref_idc)
+            Found = 1;
+        else
+            idx++;
+    }
+
+    if (Found)
+    {
+        // Move the remainder of the list up one
+        while (idx < p_dpb->ref_frames_in_buffer - 1) {
+            p_dpb->fs_ref_idc[idx] = p_dpb->fs_ref_idc[idx + 1];
+            idx ++;
+        }
+
+        p_dpb->fs_ref_idc[idx] = MPD_DPB_FS_NULL_IDC; // Clear the last one
+        p_dpb->ref_frames_in_buffer--;
+    }
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_ltref_list ()
+//
+// Removes an idc from the long term reference list and updates list after
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_ltref_list(h264_DecodedPictureBuffer * p_dpb,int32_t ref_idc)
+{
+    uint8_t idx   = 0;
+    int32_t Found = 0;
+
+    while ((idx < p_dpb->ltref_frames_in_buffer) && (!(Found)))
+    {
+        if (p_dpb->fs_ltref_idc[idx] == ref_idc) Found = 1;
+        else idx++;
+    }
+
+    if (Found)
+    {
+        // Move the remainder of the list up one
+        while (idx <(uint8_t)(p_dpb->ltref_frames_in_buffer - 1))
+        {
+            p_dpb->fs_ltref_idc[idx] = p_dpb->fs_ltref_idc[idx + 1];
+            idx ++;
+        }
+        p_dpb->fs_ltref_idc[idx] = MPD_DPB_FS_NULL_IDC;		// Clear the last one
+
+        p_dpb->ltref_frames_in_buffer--;
+    }
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_init_lists ()
+//
+// Used to initialise the reference lists
+// Also assigns picture numbers and long term picture numbers if P OR B slice
+//////////////////////////////////////////////////////////////////////////////
+void h264_dpb_update_ref_lists(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer * p_dpb = &pInfo->dpb;
+
+    int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+    uint8_t list0idx, list0idx_1, listltidx;
+    uint8_t idx;
+
+    uint8_t add_top, add_bottom, diff;
+    uint8_t list_idc;
+    uint8_t check_non_existing, skip_picture;
+
+
+    uint8_t gen_pic_fs_list0[16];
+    uint8_t gen_pic_fs_list1[16];
+    uint8_t gen_pic_fs_listlt[16];
+    uint8_t gen_pic_pic_list[32];  // check out these sizes...
+
+    uint8_t sort_fs_idc[16];
+    int32_t list_sort_number[16];
+
+#ifdef DUMP_HEADER_INFO
+    static int cc1 = 0;
+    //OS_INFO("-------------cc1= %d\n",cc1);    /////// DEBUG info
+    if (cc1 == 255)
+        idx = 0;
+#endif
+
+    list0idx = list0idx_1 = listltidx = 0;
+
+    if (pInfo->SliceHeader.structure == FRAME)
+    {
+        ////////////////////////////////////////////////// short term handling
+        for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+            if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3)&&(p_dpb->active_fs->frame.used_for_reference == 3))
+            {
+                if (p_dpb->active_fs->frame_num > pInfo->img.frame_num)
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum;
+                else
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num;
+
+                p_dpb->active_fs->frame.pic_num     = p_dpb->active_fs->frame_num_wrap;
+
+                // Use this opportunity to sort list for a p-frame
+                if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+                {
+                    sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                    list_sort_number[list0idx] = p_dpb->active_fs->frame.pic_num;
+                    list0idx++;
+                }
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx++)
+                p_dpb->listX_0[idx] = (sort_fs_idc[idx]);  // frame
+
+            p_dpb->listXsize[0] = list0idx;
+        }
+
+        ////////////////////////////////////////////////// long term handling
+        for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+            if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3) && (p_dpb->active_fs->frame.used_for_reference == 3))
+            {
+                p_dpb->active_fs->frame.long_term_pic_num = p_dpb->active_fs->frame.long_term_frame_idx;
+
+                if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+                {
+                    sort_fs_idc[list0idx-p_dpb->listXsize[0]]       = p_dpb->fs_ltref_idc[idx];
+                    list_sort_number[list0idx-p_dpb->listXsize[0]]  = p_dpb->active_fs->frame.long_term_pic_num;
+                    list0idx++;
+                }
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx-p_dpb->listXsize[0], 0);
+            for (idx = p_dpb->listXsize[0]; idx < list0idx; idx++) {
+                p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+            }
+            p_dpb->listXsize[0] = list0idx;
+        }
+    }
+    else   /// Field base
+    {
+        if (pInfo->SliceHeader.structure == TOP_FIELD)
+        {
+            add_top    = 1;
+            add_bottom = 0;
+        }
+        else
+        {
+            add_top    = 0;
+            add_bottom = 1;
+        }
+
+        ////////////////////////////////////////////P0: Short term handling
+        for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+            if (p_dpb->active_fs->frame.used_for_reference)
+            {
+                if (p_dpb->active_fs->frame_num > pInfo->SliceHeader.frame_num) {
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num - MaxFrameNum;
+                } else {
+                    p_dpb->active_fs->frame_num_wrap = p_dpb->active_fs->frame_num;
+                }
+
+                if ((p_dpb->active_fs->frame.used_for_reference)&0x1) {
+                    p_dpb->active_fs->top_field.pic_num    = (p_dpb->active_fs->frame_num_wrap << 1) + add_top;
+                }
+
+                if ((p_dpb->active_fs->frame.used_for_reference)&0x2) {
+                    p_dpb->active_fs->bottom_field.pic_num = (p_dpb->active_fs->frame_num_wrap << 1) + add_bottom;
+                }
+
+                if (pInfo->SliceHeader.slice_type == h264_PtypeP) {
+                    sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                    list_sort_number[list0idx] = p_dpb->active_fs->frame_num_wrap;
+                    list0idx++;
+                }
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx++) {
+                gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+            }
+
+            p_dpb->listXsize[0] = 0;
+            p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+            for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+            {
+                p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+            }
+        }
+
+        ////////////////////////////////////////////P0: long term handling
+        for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+            if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) {
+                p_dpb->active_fs->top_field.long_term_pic_num    = (p_dpb->active_fs->top_field.long_term_frame_idx << 1) + add_top;
+            }
+
+            if (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) {
+                p_dpb->active_fs->bottom_field.long_term_pic_num = (p_dpb->active_fs->bottom_field.long_term_frame_idx << 1) + add_bottom;
+            }
+
+            if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+            {
+                sort_fs_idc[listltidx]      = p_dpb->fs_ltref_idc[idx];
+                list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx;
+                listltidx++;
+            }
+        }
+
+        if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+        {
+            h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+            for (idx = 0; idx < listltidx; idx++) {
+                gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+            }
+            list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+            for (idx = 0; idx < list0idx_1; idx++) {
+                p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+            }
+            p_dpb->listXsize[0] += list0idx_1;
+        }
+    }
+
+
+    if (pInfo->SliceHeader.slice_type == h264_PtypeI)
+    {
+        p_dpb->listXsize[0] = 0;
+        p_dpb->listXsize[1] = 0;
+        return;
+    }
+
+    if (pInfo->SliceHeader.slice_type == h264_PtypeP)
+    {
+        //// Forward done above
+        p_dpb->listXsize[1] = 0;
+    }
+
+
+    // B-Slice
+    // Do not include non-existing frames for B-pictures when cnt_type is zero
+
+    if (pInfo->SliceHeader.slice_type == h264_PtypeB)
+    {
+        list0idx = list0idx_1 = listltidx = 0;
+        skip_picture = 0;
+
+        if (pInfo->active_SPS.pic_order_cnt_type == 0)
+            check_non_existing = 1;
+        else
+            check_non_existing = 0;
+
+        if (pInfo->SliceHeader.structure == FRAME)
+        {
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+                if (viddec_h264_get_is_used(p_dpb->active_fs) == 3)
+                {
+                    if (check_non_existing)
+                    {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs)) skip_picture = 1;
+                        else                           skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0)
+                    {
+                        if ((p_dpb->active_fs->frame.used_for_reference==3) && (!(p_dpb->active_fs->frame.is_long_term)))
+                        {
+                            if (pInfo->img.framepoc >= p_dpb->active_fs->frame.poc)
+                            {
+                                sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                                list_sort_number[list0idx] = p_dpb->active_fs->frame.poc;
+                                list0idx++;
+                            }
+                        }
+                    }
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx++) {
+                p_dpb->listX_0[idx] = sort_fs_idc[idx];
+            }
+
+            list0idx_1 = list0idx;
+
+            /////////////////////////////////////////B0:  Short term handling
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+                if (viddec_h264_get_is_used(p_dpb->active_fs) == 3)
+                {
+                    if (check_non_existing)
+                    {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs))	skip_picture = 1;
+                        else							skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0)
+                    {
+                        if ((p_dpb->active_fs->frame.used_for_reference) && (!(p_dpb->active_fs->frame.is_long_term)))
+                        {
+                            if (pInfo->img.framepoc < p_dpb->active_fs->frame.poc)
+                            {
+                                sort_fs_idc[list0idx-list0idx_1]      = p_dpb->fs_ref_idc[idx];
+                                list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc;
+                                list0idx++;
+                            }
+                        }
+                    }
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+            for (idx = list0idx_1; idx < list0idx; idx++) {
+                p_dpb->listX_0[idx] = sort_fs_idc[idx-list0idx_1];
+            }
+
+            for (idx = 0; idx < list0idx_1; idx++) {
+                p_dpb->listX_1[list0idx-list0idx_1+idx] = p_dpb->listX_0[idx];
+            }
+
+            for (idx = list0idx_1; idx < list0idx; idx++) {
+                p_dpb->listX_1[idx-list0idx_1] = p_dpb->listX_0[idx];
+            }
+
+            p_dpb->listXsize[0] = list0idx;
+            p_dpb->listXsize[1] = list0idx;
+
+            /////////////////////////////////////////B0:  long term handling
+            list0idx = 0;
+
+            // Can non-existent pics be set as long term??
+            for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+                if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3))
+                {
+                    // if we have two fields, both must be long-term
+                    sort_fs_idc[list0idx]      = p_dpb->fs_ltref_idc[idx];
+                    list_sort_number[list0idx] = p_dpb->active_fs->frame.long_term_pic_num;
+                    list0idx++;
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 0);
+            for (idx = p_dpb->listXsize[0]; idx < (p_dpb->listXsize[0]+list0idx); idx = idx + 1)
+            {
+                p_dpb->listX_0[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+                p_dpb->listX_1[idx] = (1<<6) + sort_fs_idc[idx-p_dpb->listXsize[0]];
+            }
+
+            p_dpb->listXsize[0] += list0idx;
+            p_dpb->listXsize[1] += list0idx;
+        }
+        else  // Field
+        {
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+                if (viddec_h264_get_is_used(p_dpb->active_fs))	{
+                    if (check_non_existing) {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs))
+                            skip_picture = 1;
+                        else
+                            skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0)  {
+                        if (pInfo->img.ThisPOC >= p_dpb->active_fs->frame.poc) {
+                            sort_fs_idc[list0idx]      = p_dpb->fs_ref_idc[idx];
+                            list_sort_number[list0idx] = p_dpb->active_fs->frame.poc;
+                            list0idx++;
+                        }
+                    }
+                }
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx, 1);
+            for (idx = 0; idx < list0idx; idx = idx + 1) {
+                gen_pic_fs_list0[idx] = sort_fs_idc[idx];
+            }
+
+            list0idx_1 = list0idx;
+
+            ///////////////////////////////////////////// B1: Short term handling
+            for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+                if (viddec_h264_get_is_used(p_dpb->active_fs))
+                {
+                    if (check_non_existing) {
+                        if (viddec_h264_get_is_non_existent(p_dpb->active_fs))
+                            skip_picture = 1;
+                        else
+                            skip_picture = 0;
+                    }
+
+                    if (skip_picture == 0) {
+                        if (pInfo->img.ThisPOC < p_dpb->active_fs->frame.poc) {
+                            sort_fs_idc[list0idx-list0idx_1]      = p_dpb->fs_ref_idc[idx];
+                            list_sort_number[list0idx-list0idx_1] = p_dpb->active_fs->frame.poc;
+                            list0idx++;
+                        }
+                    }
+                }
+            }
+
+            ///// Generate frame list from sorted fs
+            /////
+            h264_list_sort(sort_fs_idc, list_sort_number, list0idx-list0idx_1, 0);
+            for (idx = list0idx_1; idx < list0idx; idx++)
+                gen_pic_fs_list0[idx] = sort_fs_idc[idx-list0idx_1];
+
+            for (idx = 0; idx < list0idx_1; idx++)
+                gen_pic_fs_list1[list0idx-list0idx_1+idx] = gen_pic_fs_list0[idx];
+
+            for (idx = list0idx_1; idx < list0idx; idx++)
+                gen_pic_fs_list1[idx-list0idx_1] = gen_pic_fs_list0[idx];
+
+            ///// Generate List_X0
+            /////
+            p_dpb->listXsize[0] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list0, pInfo->img.structure, list0idx, 0);
+
+            for (idx = 0; idx < p_dpb->listXsize[0]; idx++)
+                p_dpb->listX_0[idx] = gen_pic_pic_list[idx];
+
+            //// Generate List X1
+            ////
+            p_dpb->listXsize[1] = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_list1, pInfo->img.structure, list0idx, 0);
+
+            for (idx = 0; idx < p_dpb->listXsize[1]; idx++)
+                p_dpb->listX_1[idx] = gen_pic_pic_list[idx];
+
+            ///////////////////////////////////////////// B1: long term handling
+            for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+                sort_fs_idc[listltidx]      = p_dpb->fs_ltref_idc[idx];
+                list_sort_number[listltidx] = p_dpb->active_fs->long_term_frame_idx;
+                listltidx++;
+            }
+
+            h264_list_sort(sort_fs_idc, list_sort_number, listltidx, 0);
+            for (idx = 0; idx < listltidx; idx++)
+                gen_pic_fs_listlt[idx] = sort_fs_idc[idx];
+
+            list0idx_1 = h264_dpb_gen_pic_list_from_frame_list(p_dpb, gen_pic_pic_list, gen_pic_fs_listlt, pInfo->img.structure, listltidx, 1);
+
+            for (idx = 0; idx < list0idx_1; idx++)
+            {
+                p_dpb->listX_0[p_dpb->listXsize[0]+idx] = gen_pic_pic_list[idx];
+                p_dpb->listX_1[p_dpb->listXsize[1]+idx] = gen_pic_pic_list[idx];
+            }
+
+            p_dpb->listXsize[0] += list0idx_1;
+            p_dpb->listXsize[1] += list0idx_1;
+        }
+    }
+
+    // Setup initial list sizes at this point
+    p_dpb->nInitListSize[0] = p_dpb->listXsize[0];
+    p_dpb->nInitListSize[1] = p_dpb->listXsize[1];
+    if (pInfo->SliceHeader.slice_type != h264_PtypeI)
+    {
+        if ((p_dpb->listXsize[0]==p_dpb->listXsize[1]) && (p_dpb->listXsize[0] > 1))
+        {
+            // check if lists are identical, if yes swap first two elements of listX[1]
+            diff = 0;
+            for (idx = 0; idx < p_dpb->listXsize[0]; idx = idx + 1)
+            {
+                if (p_dpb->listX_0[idx] != p_dpb->listX_1[idx]) diff = 1;
+            }
+
+
+            if (!(diff))
+            {
+                list_idc       = p_dpb->listX_1[0];
+                p_dpb->listX_1[0] = p_dpb->listX_1[1];
+                p_dpb->listX_1[1] = list_idc;
+            }
+        }
+
+        // set max size
+        if (p_dpb->listXsize[0] > pInfo->SliceHeader.num_ref_idx_l0_active)
+        {
+            p_dpb->listXsize[0] = pInfo->SliceHeader.num_ref_idx_l0_active;
+        }
+
+
+        if (p_dpb->listXsize[1] > pInfo->SliceHeader.num_ref_idx_l1_active)
+        {
+            p_dpb->listXsize[1] = pInfo->SliceHeader.num_ref_idx_l1_active;
+        }
+
+
+
+    }
+
+
+
+    /// DPB reorder list
+    h264_dpb_reorder_lists(pInfo);
+
+    return;
+}   //// End of init_dpb_list
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_short_term_pic ()
+//
+// Returns the frame store holding the short-term reference picture with the
+// given picNum, or NULL if no such picture is available. For field pictures,
+// *bottom_field_bit is set when the match is the bottom field of the store.
+//
+static frame_param_ptr h264_dpb_get_short_term_pic(h264_Info * pInfo,int32_t pic_num, int32_t *bottom_field_bit)
+{
+    register uint32_t idx;
+    register frame_param_ptr temp_fs;
+
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    // Default: a frame (or top-field) match carries no bottom-field marker.
+    *bottom_field_bit = 0;
+    // Scan every frame store currently on the short-term reference list.
+    for (idx = 0; idx < p_dpb->ref_frames_in_buffer; idx++)
+    {
+        temp_fs = &p_dpb->fs[p_dpb->fs_ref_idc[idx]];
+        if (pInfo->SliceHeader.structure == FRAME)
+        {
+            // Frame coding: both fields must be references (used_for_reference == 3)
+            // and the frame must not be marked long-term.
+            if (temp_fs->frame.used_for_reference == 3)
+                if (!(temp_fs->frame.is_long_term))
+                    if (temp_fs->frame.pic_num == pic_num) return temp_fs;
+        }
+        else // current picture is a field
+        {
+            // Field coding: test each parity separately.
+            // used_for_reference bit0 = top field is a reference, bit1 = bottom field is.
+            if (temp_fs->frame.used_for_reference&0x1)
+                if (!(temp_fs->top_field.is_long_term))
+                    if (temp_fs->top_field.pic_num == pic_num)
+                    {
+                        return temp_fs;
+                    }
+
+            if (temp_fs->frame.used_for_reference&0x2)
+                if (!(temp_fs->bottom_field.is_long_term))
+                    if (temp_fs->bottom_field.pic_num == pic_num)
+                    {
+                        // Tell the caller the match was the bottom field of this store.
+                        *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+                        return temp_fs;
+                    }
+        }
+    }
+    return NULL;  // no short-term reference with this pic_num
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_long_term_pic ()
+//
+// Returns the frame store holding the long-term reference picture with the
+// given long_term_pic_num, or NULL if none; *bottom_field_bit flags a
+// bottom-field match, as in h264_dpb_get_short_term_pic().
+//
+
+static frame_param_ptr h264_dpb_get_long_term_pic(h264_Info * pInfo,int32_t long_term_pic_num, int32_t *bottom_field_bit)
+{
+    register uint32_t idx;
+    register frame_param_ptr temp_fs;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    // Default: a frame (or top-field) match carries no bottom-field marker.
+    *bottom_field_bit = 0;
+    // Scan every frame store currently on the long-term reference list.
+    for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+    {
+        temp_fs = &p_dpb->fs[p_dpb->fs_ltref_idc[idx]];
+        if (pInfo->SliceHeader.structure == FRAME)
+        {
+            // Frame coding: both fields must be references and marked long-term.
+            if (temp_fs->frame.used_for_reference == 3)
+                if (temp_fs->frame.is_long_term)
+                    if (temp_fs->frame.long_term_pic_num == long_term_pic_num)
+                        return temp_fs;
+        }
+        else
+        {
+            // Field coding: test each parity separately (bit0 = top, bit1 = bottom).
+            if (temp_fs->frame.used_for_reference&0x1)
+                if (temp_fs->top_field.is_long_term)
+                    if (temp_fs->top_field.long_term_pic_num == long_term_pic_num)
+                        return temp_fs;
+
+            if (temp_fs->frame.used_for_reference&0x2)
+                if (temp_fs->bottom_field.is_long_term)
+                    if (temp_fs->bottom_field.long_term_pic_num == long_term_pic_num)
+                    {
+                        // Tell the caller the match was the bottom field of this store.
+                        *bottom_field_bit = PUT_LIST_INDEX_FIELD_BIT(1);
+                        return temp_fs;
+                    }
+        }
+    }
+    return NULL;  // no long-term reference with this long_term_pic_num
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_ref_pic_list ()
+//
+// Linked-list machinery used to apply the slice header's reference picture
+// list modification (reordering) commands to listX_0 / listX_1 in place.
+//
+
+// Singly-linked list over a fixed 32-node pool (no dynamic allocation),
+// used to perform the reference-list reordering moves efficiently.
+struct list_value_t
+{
+    int32_t value;                    // packed list entry (fs_idc plus field/long-term bits)
+    struct list_value_t *next;        // next node; NULL terminates the list
+};
+
+struct linked_list_t
+{
+    struct list_value_t *begin;       // head of the chain
+    struct list_value_t *end;         // tail of the chain
+    struct list_value_t *entry;       // current insertion point for the next reorder command
+    struct list_value_t *prev_entry;  // node just before 'entry' (NULL when entry is the head)
+    struct list_value_t list[32];     // fixed node storage backing the chain
+};
+
+// Builds a chain of 'size' nodes over the fixed pool, copying the initial
+// list values from vp in order; 'entry' starts at the head.
+static void linked_list_initialize (struct linked_list_t *lp, uint8_t *vp, int32_t size)
+{
+    struct list_value_t *lvp;
+
+    lvp            = lp->list;
+    lp->begin      = lvp;
+    lp->entry      = lvp;
+    lp->end        = lvp + (size-1);  // NOTE(review): size == 0 would point 'end' before the pool and make the final write below go out of bounds -- callers appear to pass num_ref_idx_active >= 1, confirm
+    lp->prev_entry = NULL;
+
+    while (lvp <= lp->end)
+    {
+        lvp->value = *(vp++);
+        lvp->next  = lvp + 1;
+        lvp++;
+    }
+    lp->end->next = NULL;  // terminate the chain
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Applies one reordering command: places 'list_value' at the current entry
+// position, removing any later duplicate of it from the chain, then advances
+// the entry point. Statement order here is load-bearing -- the begin/end/entry
+// pointers are patched in a specific sequence for each topology case.
+static void linked_list_reorder (struct linked_list_t *lp, int32_t list_value)
+{
+    register struct list_value_t *lvp = lp->entry;
+    register struct list_value_t *lvp_prev;
+
+    if (lvp == NULL) {
+        lp->end->value = list_value;  // replace the end entry
+    } else if ((lp->begin==lp->end)||(lvp==lp->end))  // replace the begin/end entry and set the entry to NULL
+    {
+        lp->entry->value = list_value;
+        lp->prev_entry   = lp->entry;
+        lp->entry        = NULL;
+    }
+    else if (lvp->value==list_value)  // the entry point matches
+    {
+        // Value already in place; just advance the insertion point.
+        lp->prev_entry = lvp;
+        lp->entry      = lvp->next;
+    }
+    else if (lvp->next == lp->end) // the entry is just before the end
+    {
+        // replace the end and swap the end and entry points
+        //                  lvp
+        //  prev_entry  => entry                    => old_end
+        //                 old_end & new_prev_entry => new_end & entry
+        lp->end->value = list_value;
+
+        if (lp->prev_entry)
+            lp->prev_entry->next = lp->end;
+        else
+            lp->begin            = lp->end;
+
+        lp->prev_entry = lp->end;
+        lp->end->next  = lvp;
+        lp->end        = lvp;
+        lvp->next      = NULL;
+    }
+    else
+    {
+        // General case: search for the value from 'entry' onward.
+        lvp_prev = NULL;
+        while (lvp->next) // do not check the end but we'll be in the loop at least once
+        {
+            if (lvp->value == list_value) break;
+            lvp_prev = lvp;
+            lvp = lvp->next;
+        }
+        lvp->value = list_value;   // force end matches
+        if (lvp_prev != NULL)
+        {
+            // remove lvp from the list
+            lvp_prev->next = lvp->next;
+        }
+        if (lvp==lp->end) lp->end = lvp_prev;
+
+        // insert lvp in front of lp->entry
+        if (lp->entry==lp->begin)
+        {
+            lvp->next = lp->begin;
+            lp->begin = lvp;
+        }
+        else
+        {
+            lvp->next = lp->entry;
+            lp->prev_entry->next = lvp;
+        }
+        lp->prev_entry = lvp;
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Flattens the (possibly reordered) chain back into the flat array vp,
+// walking from 'begin' to the NULL terminator.
+static void linked_list_output (struct linked_list_t *lp, int32_t *vp)
+{
+    register int32_t *ip1;
+    register struct list_value_t *lvp;
+
+    lvp  = lp->begin;
+    ip1  = vp;
+    while (lvp)
+    {
+        *(ip1++) = lvp->value;
+        lvp = lvp->next;
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Applies the slice header's ref_pic_list_reordering commands to reference
+// list 'list_num' (0 or 1), writing the reordered result into
+// pInfo->slice_ref_list0/1. The initialised list in p_dpb->listX_0/1 is left
+// untouched so it can be reused by later slices. Returns num_ref_idx_active.
+int32_t h264_dpb_reorder_ref_pic_list(h264_Info * pInfo,int32_t list_num, int32_t num_ref_idx_active)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t                   *remapping_of_pic_nums_idc;
+    list_reordering_num_t		*list_reordering_num;
+    int32_t                    bottom_field_bit;
+
+    int32_t  maxPicNum, currPicNum, picNumLXNoWrap, picNumLXPred, pic_num;
+    int32_t  refIdxLX;
+    int32_t  i;
+
+    int32_t    PicList[32] = {0};
+    struct linked_list_t ll;
+    struct linked_list_t *lp = &ll;     // should consider use the scratch space
+
+    // declare these below as registers gave me 23 cy/MB for the worst frames in Allegro_Combined_CABAC_07_HD, YHu
+    register frame_param_ptr temp_fs;
+    register int32_t temp;
+    register uint8_t  *ip1;
+
+    // MaxPicNum = MaxFrameNum for frame coding (doubled below for fields).
+    maxPicNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+
+    if (list_num == 0) // i.e list 0
+    {
+        ip1 = p_dpb->listX_0;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l0.reordering_of_pic_nums_idc;
+        list_reordering_num       = pInfo->SliceHeader.sh_refpic_l0.list_reordering_num;
+    }
+    else
+    {
+        ip1 = p_dpb->listX_1;
+        remapping_of_pic_nums_idc = pInfo->SliceHeader.sh_refpic_l1.reordering_of_pic_nums_idc;
+        list_reordering_num       = pInfo->SliceHeader.sh_refpic_l1.list_reordering_num;
+    }
+
+
+    // Seed the working linked list with the initialised reference list.
+    linked_list_initialize (lp, ip1, num_ref_idx_active);
+
+    currPicNum = pInfo->SliceHeader.frame_num;
+    if (pInfo->SliceHeader.structure != FRAME)
+    {
+
+        /* The reason it is + 1 I think, is because the list is based on polarity
+           expand later...
+        */
+        maxPicNum  <<= 1;
+        currPicNum <<= 1;
+        currPicNum++;
+    }
+
+    picNumLXPred = currPicNum;
+    refIdxLX = 0;
+
+    // Process reordering commands until the end-of-list idc (3).
+    // NOTE(review): remapping_of_pic_nums_idc[i] is read in the loop condition
+    // before the bound check below, and the check uses '>' rather than '>='
+    // against MAX_NUM_REF_FRAMES -- verify the array has headroom for this.
+    for (i = 0; remapping_of_pic_nums_idc[i] != 3; i++)
+    {
+        if (i > MAX_NUM_REF_FRAMES)
+        {
+            break;
+        }
+
+        if (remapping_of_pic_nums_idc[i] < 2) // - short-term re-ordering
+        {
+            // idc 0: subtract the offset from the prediction; idc 1: add it.
+            // Both wrap modulo maxPicNum (H.264 picNumLXNoWrap derivation).
+            temp = (list_reordering_num[i].abs_diff_pic_num_minus1 + 1);
+            if (remapping_of_pic_nums_idc[i] == 0)
+            {
+                temp = picNumLXPred - temp;
+                if (temp < 0 ) picNumLXNoWrap = temp + maxPicNum;
+                else           picNumLXNoWrap = temp;
+            }
+            else // (remapping_of_pic_nums_idc[i] == 1)
+            {
+                temp += picNumLXPred;
+                if (temp  >=  maxPicNum) picNumLXNoWrap = temp - maxPicNum;
+                else                     picNumLXNoWrap = temp;
+            }
+
+            // Updates for next iteration of the loop
+            picNumLXPred = picNumLXNoWrap;
+
+            if (picNumLXNoWrap > currPicNum ) pic_num = picNumLXNoWrap - maxPicNum;
+            else                              pic_num = picNumLXNoWrap;
+
+            temp_fs = h264_dpb_get_short_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                // Pack fs_idc plus the bottom-field flag into a list entry.
+                temp = bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+        else //(remapping_of_pic_nums_idc[i] == 2) long-term re-ordering
+        {
+            pic_num = list_reordering_num[i].long_term_pic_num;
+
+            temp_fs = h264_dpb_get_long_term_pic(pInfo, pic_num, &bottom_field_bit);
+            if (temp_fs)
+            {
+                // Long-term entries additionally carry the long-term marker bit.
+                temp = PUT_LIST_LONG_TERM_BITS(1) + bottom_field_bit + PUT_FS_IDC_BITS(temp_fs->fs_idc);
+                linked_list_reorder (lp, temp);
+            }
+        }
+    }
+
+    // Flatten the reordered chain and publish it to the per-slice list.
+    linked_list_output (lp, PicList);
+
+    if (0 == list_num )
+    {
+        for (i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list0[i]=(uint8_t)PicList[i];
+        }
+    }
+    else
+    {
+        for (i=0; i<num_ref_idx_active; i++)
+        {
+            pInfo->slice_ref_list1[i]=(uint8_t)PicList[i];
+        }
+    }
+
+
+    // Instead of updating the now reordered list here, just write it down...
+    // This way, we can continue to hold the initialised list in p_dpb->listX_0
+    // and therefore not need to update it every slice
+
+    //h264_dpb_write_list(list_num, PicList, num_ref_idx_active);
+
+    return num_ref_idx_active;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+// Recovery-point (RP) handling for the reference list: flags B slices from
+// the GOP preceding the recovery point as non-decodable, and for the first
+// P slices after an RP SEI repairs list 0 to reference the last I frame.
+void h264_dpb_RP_check_list (h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint8_t    *p_list = pInfo->slice_ref_list0;
+
+    //
+    // If decoding started from an RP without an exact entry point, all B frames
+    // belonging to the previous GOP must be thrown away.
+    //
+
+    if ((pInfo->SliceHeader.slice_type == h264_PtypeB)&&(pInfo->sei_b_state_ready ==0) && pInfo->sei_rp_received) {
+        pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+        pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+    }
+
+
+    //
+    // Repair the ref list if it was damaged -- RP recovery only.
+    //
+    if ((pInfo->SliceHeader.slice_type == h264_PtypeP) && pInfo->sei_rp_received)
+    {
+
+        int32_t idx, rp_found = 0;
+
+        // Only repair the simple single-reference case (one frame ref, or the
+        // two field refs of one frame when field-coded).
+        if ( ((pInfo->SliceHeader.num_ref_idx_l0_active == 1)&&(pInfo->SliceHeader.structure == FRAME)) ||
+                ((pInfo->SliceHeader.num_ref_idx_l0_active == 2)&&(pInfo->SliceHeader.structure != FRAME)) )
+        {
+            // Pick whichever list the decoder will actually use: the reordered
+            // per-slice list, or the initialised DPB list.
+            if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list0;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_0;
+                //pInfo->sei_rp_received = 0;
+                //return;
+            }
+
+
+            // Is the last I frame still present in the DPB?
+            for (idx = 0; idx < p_dpb->used_size; idx++) {
+                if (p_dpb->fs_dpb_idc[idx] == pInfo->last_I_frame_idc) {
+                    rp_found = 1;
+                    break;
+                }
+            }
+            if (rp_found) {
+#if 0
+                int32_t poc;
+
+                ///// Clear long-term ref list
+                for (idx = 0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+                {
+                    h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+                    h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+                }
+
+                ///// Clear short-term ref list
+                //while(p_dpb->used_size>1)
+                for (idx = 0; idx < p_dpb->used_size; idx++)
+                {
+                    int32_t idx_pos;
+                    //// find smallest non-output POC
+                    h264_dpb_get_smallest_poc(p_dpb, &poc, &idx_pos);
+
+                    //// Remove all frames in previous GOP
+                    if ((idx_pos != MPD_DPB_FS_NULL_IDC) && (p_dpb->fs_dpb_idc[idx_pos] != pInfo->last_I_frame_idc))
+                    {
+                        // Remove from ref-list
+                        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+                        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx_pos]);
+
+                        // Output from DPB
+                        //h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                        //if((active_fs->is_output == 0) && (active_fs->is_non_existent == 0))
+                        {
+                            //int32_t existing;
+                            //h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[idx], 0, &existing);
+                            //p_dpb->last_output_poc = poc;
+                        }
+                        //h264_dpb_remove_frame_from_dpb(p_dpb, idx);		// Remove dpb.fs_dpb_idc[pos]
+
+                    }
+                }
+#endif
+
+                ///// Set the reference to last I frame
+                // 255 is the "no last I frame" sentinel; skip if already pointing at it.
+                if ( (pInfo->last_I_frame_idc!=255)&&(pInfo->last_I_frame_idc!=p_list[0]))
+                {
+                    /// Repair the reference list now
+                    h264_dpb_unmark_for_reference(p_dpb, p_list[0]);
+                    h264_dpb_remove_ref_list(p_dpb, p_list[0]);
+                    p_list[0] = pInfo->last_I_frame_idc;
+                    if (pInfo->SliceHeader.structure != FRAME)
+                        p_list[1] = (pInfo->last_I_frame_idc ^ 0x20);  // NOTE(review): ^0x20 presumably flips the field-parity bit to reference the opposite field of the same store -- confirm against the list-entry bit layout
+                }
+            }
+        }
+
+        // RP handling is done for this recovery point; B slices are decodable again.
+        pInfo->sei_rp_received = 0;
+        pInfo->sei_b_state_ready = 1;
+
+    }
+
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reorder_lists ()
+//
+// Applies the per-slice reference picture list reordering to list 0 (P and B
+// slices) and list 1 (B slices), then runs the recovery-point list check.
+//
+
+// Entry point for per-slice reference list reordering: dispatches on slice
+// type, truncates the list sizes to num_ref_idx_lX_active, and (for first
+// fields / frames) runs the recovery-point repair pass.
+void h264_dpb_reorder_lists(h264_Info * pInfo)
+{
+    int32_t currSliceType = pInfo->SliceHeader.slice_type;
+
+    if (currSliceType == h264_PtypeP )
+    {
+        /////////////////////////////////////////////// Reordering reference list for P slice
+        /// Forward reordering
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+        else
+        {
+            // no reordering commands in the slice header; keep the initialised list
+        }
+        pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active;
+    } else if (currSliceType == h264_PtypeB)
+    {
+        /////////////////////////////////////////////// Reordering reference list for B slice
+        /// Forward reordering
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 0, pInfo->SliceHeader.num_ref_idx_l0_active);
+        else
+        {
+            // no reordering commands in the slice header; keep the initialised list
+        }
+        pInfo->dpb.listXsize[0]=pInfo->SliceHeader.num_ref_idx_l0_active;
+
+        /// Backward reordering
+        if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+            h264_dpb_reorder_ref_pic_list(pInfo, 1, pInfo->SliceHeader.num_ref_idx_l1_active);
+        else
+        {
+            // no reordering commands in the slice header; keep the initialised list
+        }
+        pInfo->dpb.listXsize[1]=pInfo->SliceHeader.num_ref_idx_l1_active;
+    }
+
+    //// Check if need recover reference list with previous recovery point
+    if (!pInfo->img.second_field)
+    {
+        h264_dpb_RP_check_list(pInfo);
+    }
+
+
+    return;
+}
+
+////////////////////////////////////////// DPB management //////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_dpb_get_non_output_frame_number ()
+//
+// get total non output frame number in the DPB.
+//
+// Counts the frame stores in the DPB that have not yet been output.
+// Note: updates p_dpb->active_fs as a side effect of the scan.
+static int32_t avc_dpb_get_non_output_frame_number(h264_Info * pInfo)
+{
+    int32_t idx;
+    int32_t number=0;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    for (idx = 0; idx < p_dpb->used_size; idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+        if (viddec_h264_get_is_output(p_dpb->active_fs) == 0)
+        {
+            (number)++;
+        }
+    }
+
+    return number;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//// Store previous picture in DPB, and then update DPB queue, remove unused frames from DPB
+
+// Stores the just-decoded (or "non-existing" gap) picture into the DPB and runs
+// the per-picture bookkeeping that follows it: reference marking (IDR reset,
+// adaptive MMCO, or sliding window), insertion into the DPB queue, bumping
+// pictures out when occupancy exceeds BumpLevel, output-control update, and
+// rebuilding of the reference lists.
+//   NonExisting != 0 : processing a frame_num-gap picture (no coded data).
+//   use_old     != 0 : read slice-header fields from pInfo->old_slice instead
+//                      of pInfo->SliceHeader.
+void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,int32_t NonExisting, int32_t use_old)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    int32_t used_for_reference;
+    int32_t is_direct_output;
+    int32_t second_field_stored = 0;
+    int32_t poc;
+    int32_t pos;
+    int32_t flag;
+    int32_t first_field_non_ref = 0;
+    int32_t idr_flag;
+
+    // Point active_fs at the frame store we are about to finalize; bail out if
+    // no frame store was ever allocated for it.
+    if (NonExisting) {
+        if (p_dpb->fs_non_exist_idc == MPD_DPB_FS_NULL_IDC)
+            return;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+    } else {
+        if (p_dpb->fs_dec_idc == MPD_DPB_FS_NULL_IDC)
+            return;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+    }
+
+    if (NonExisting == 0)
+    {
+        //active_fs->sps_disp_index = (next_sps_disp_entry == 0)? 7 : next_sps_disp_entry - 1;
+        pInfo->img.last_has_mmco_5       = 0;
+        pInfo->img.last_pic_bottom_field = pInfo->img.bottom_field_flag;
+
+        // A picture is a reference picture iff its NAL ref idc is non-zero.
+        //used_for_reference = (use_old) ? !(old_pInfo->img.old_disposable_flag) : !(pInfo->img.disposable_flag);
+        used_for_reference = (use_old) ? !(pInfo->old_slice.nal_ref_idc==0) : !(pInfo->SliceHeader.nal_ref_idc==0);
+
+        // Record the reference flag in the field/frame part that was just decoded.
+        switch (viddec_h264_get_dec_structure(p_dpb->active_fs))
+        {
+        case(TOP_FIELD)   : {
+            p_dpb->active_fs->top_field.used_for_reference = used_for_reference;
+            viddec_h264_set_is_top_used(p_dpb->active_fs, 1);
+            //p_dpb->active_fs->crc_field_coded     = 1;
+        }
+        break;
+        case(BOTTOM_FIELD): {
+            p_dpb->active_fs->bottom_field.used_for_reference = used_for_reference << 1;
+            viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1);
+            //p_dpb->active_fs->crc_field_coded     = 1;
+        }
+        break;
+        default: {
+            p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0;
+            viddec_h264_set_is_frame_used(p_dpb->active_fs, 3);
+            //if(pInfo->img.MbaffFrameFlag) p_dpb->active_fs->crc_field_coded  = 1;
+
+        }
+        break;
+        }
+
+        //freeze_assert = use_old ? old_pInfo->img.sei_freeze_this_image : pInfo->img.sei_freeze_this_image;
+        //if (freeze_assert)  sei_information.disp_frozen = 1;
+
+        // IDR pictures reset the whole DPB; otherwise run adaptive MMCO if the
+        // slice header signalled it.
+        idr_flag = use_old ? pInfo->old_slice.idr_flag : pInfo->SliceHeader.idr_flag;
+        if (idr_flag) {
+            h264_dpb_idr_memory_management (pInfo, &pInfo->active_SPS, pInfo->img.no_output_of_prior_pics_flag);
+        } else {
+            // adaptive memory management
+            // NOTE(review): bitwise '&' between two 0/1 flags - equivalent to
+            // '&&' only because both operands are normalized flags.
+            if (used_for_reference & pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag) {
+                h264_dpb_adaptive_memory_management(pInfo);
+            }
+        }
+        // Reset the active frame store - could have changed in mem management ftns
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+        if ((viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)||(viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD))
+        {
+            // check for frame store with same pic_number -- always true in my case, YH
+            // when we allocate frame store for the second field, we make sure the frame store for the second
+            // field is the one that contains the first field of the frame- see h264_dpb_init_frame_store()
+            // This is different from JM model.
+            // In this way we don't need to move image data around and can reduce memory bandwidth.
+            // simply check if the check if the other field has been decoded or not
+
+            if (viddec_h264_get_is_used(p_dpb->active_fs) != 0)
+            {
+                if (pInfo->img.second_field)
+                {
+                    // Second field completes the pair already in the DPB - no
+                    // new queue entry is added (add2dpb == 0).
+                    h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 0, NonExisting, use_old);
+                    second_field_stored = 1;
+                }
+            }
+        }
+    }
+    else
+    { // Set up locals for non-existing frames
+        used_for_reference = 1;
+
+        p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0;
+        viddec_h264_set_is_frame_used(p_dpb->active_fs, 3);
+        viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME);
+        pInfo->img.structure = FRAME;
+    }
+
+    // When the DPB is already full, a complete non-reference picture whose POC
+    // is smaller than everything pending may bypass the DPB ("direct output").
+    is_direct_output = 0;
+    if (NonExisting == 0)
+    {
+        if (p_dpb->used_size >= p_dpb->BumpLevel)
+        {
+            // non-reference frames may be output directly
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+            if ((used_for_reference == 0) && (viddec_h264_get_is_used(p_dpb->active_fs) == 3))
+            {
+                h264_dpb_get_smallest_poc (p_dpb, &poc, &pos);
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+                if ((pos == MPD_DPB_FS_NULL_IDC) || (pInfo->img.ThisPOC < poc))
+                {
+                    is_direct_output = 1;
+                }
+            }
+        }
+    }
+
+    // Sliding-window reference marking applies to non-IDR reference pictures
+    // that did not carry adaptive MMCO commands (and to gap frames).
+    if (NonExisting) {
+        h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames);
+    } else if (pInfo->SliceHeader.idr_flag == 0) {
+        if (used_for_reference) {
+            if (pInfo->img.second_field == 0) {
+                if (pInfo->SliceHeader.sh_dec_refpic.adaptive_ref_pic_marking_mode_flag == 0) {
+                    h264_dpb_sliding_window_memory_management(p_dpb, NonExisting, pInfo->active_SPS.num_ref_frames);
+                }
+            }
+        }
+    }
+
+    h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+    //if (is_direct_output == 0)
+    {
+        if ((pInfo->img.second_field == 0) || (NonExisting))
+        {
+            // First field or full frame: append to the DPB queue (add2dpb == 1).
+            h264_dpb_insert_picture_in_dpb(pInfo, used_for_reference, 1, NonExisting, use_old);
+        }
+
+        // In an errored stream we saw a condition where
+        // p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer > p_dpb->BumpLevel,
+        // which in itself is an error, but this means first_field_non_ref will
+        // not get set and causes problems for h264_dpb_queue_update()
+        if ((pInfo->img.structure != FRAME) && (pInfo->img.second_field == 0)) {
+            if (used_for_reference ==	0)
+                if (p_dpb->ref_frames_in_buffer + p_dpb->ltref_frames_in_buffer == p_dpb->BumpLevel)
+                    first_field_non_ref = 1;
+        }
+
+    }
+
+    if (NonExisting)
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+    else
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+    // Picture is complete (second field or a frame): possibly output one frame
+    // and refresh the output-control flag.
+    if (NonExisting == 0)
+    {
+        if ((pInfo->img.second_field == 1) || (pInfo->img.structure == FRAME))
+        {
+            //h264_send_new_decoded_frame();
+            if ((p_dpb->OutputCtrl) && (is_direct_output == 0))
+                h264_dpb_output_one_frame_from_dpb(pInfo, 0, 0,pInfo->active_SPS.num_ref_frames);
+
+            // Pictures inserted by this point - check if we have reached the specified output
+            // level (if one has been specified) so we can begin on next call
+
+            /*
+            Fixed HSD 212625---------------should compare OutputLevel with non-output frame number in dpb, not the used number in dpb
+            if((p_dpb->OutputLevelValid)&&(p_dpb->OutputCtrl == 0))
+            {
+            	if(p_dpb->used_size == p_dpb->OutputLevel)
+            	p_dpb->OutputCtrl = 1;
+            }
+            */
+
+            if (p_dpb->OutputLevelValid)
+            {
+                int32_t non_output_frame_number=0;
+                non_output_frame_number = avc_dpb_get_non_output_frame_number(pInfo);
+
+                if (non_output_frame_number == p_dpb->OutputLevel)
+                    p_dpb->OutputCtrl = 1;
+                else
+                    p_dpb->OutputCtrl = 0;
+            }
+            else {
+                p_dpb->OutputCtrl = 0;
+            }
+        }
+    }
+
+    // Bump (flush) frames until occupancy is back at BumpLevel, allowing one
+    // extra slot for a first field that is not a reference (see note above).
+    while (p_dpb->used_size > (p_dpb->BumpLevel + first_field_non_ref))
+        //while(p_dpb->used_size > p_dpb->BumpLevel)
+    {
+        h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+        //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+    }
+
+    //
+    // Do not output "direct output" pictures until the sempahore has been set that the pic is
+    // decoded!!
+    //
+    if (is_direct_output) {
+        h264_dpb_queue_update(pInfo, 1, 1, 0,pInfo->active_SPS.num_ref_frames);
+        //h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+    }
+
+    //
+    // Add reference pictures into Reference list
+    //
+    if (used_for_reference) {
+        h264_dpb_insert_ref_lists(&pInfo->dpb, NonExisting);
+    }
+
+    h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+
+    return;
+} ////////////// End of DPB store pic
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_insert_picture_in_dpb ()
+//
+// Insert the decoded picture into the DPB. A free DPB position is necessary
+// for frames, .
+// This ftn tends to fill out the framestore's top level parameters from the
+// storable picture's parameters within it. It is called from  h264_dpb_store_picture_in_dpb()
+//
+// This function finishes by updating the reference lists - this means it must be called after
+// h264_dpb_sliding_window_memory_management()
+//
+// In the case of a frame it will call h264_dpb_split_field()
+// In the case of the second field of a complementary field pair it calls h264_dpb_combine_field()
+//
+
+// Inserts the picture in the active frame store into the DPB. Fills out the
+// frame store's reference / long-term flags from the just-decoded picture,
+// optionally appends the store to the DPB queue (add2dpb), and builds the
+// complementary view: a frame is split into two fields, and a completed field
+// pair is combined into a frame.
+//   used_for_reference : mark the picture as a reference picture.
+//   add2dpb            : append active_fs to fs_dpb_idc[] / used_size.
+//   NonExisting        : picture is a frame_num-gap placeholder.
+//   use_old            : read frame_num from pInfo->old_slice.
+void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,int32_t used_for_reference, int32_t add2dpb, int32_t NonExisting, int32_t use_old)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    if (NonExisting == 0) {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        p_dpb->active_fs->frame_num = (use_old) ? pInfo->old_slice.frame_num : pInfo->SliceHeader.frame_num;
+    }
+    else {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_non_exist_idc);
+        p_dpb->active_fs->frame_num = p_dpb->active_fs->frame.pic_num;
+    }
+
+    if (add2dpb) {
+        p_dpb->fs_dpb_idc[p_dpb->used_size] = p_dpb->active_fs->fs_idc;
+        p_dpb->used_size++;
+    }
+
+
+    // NOTE(review): no default case; dec_structure is assumed to be one of
+    // FRAME / TOP_FIELD / BOTTOM_FIELD at this point - confirm.
+    switch (viddec_h264_get_dec_structure(p_dpb->active_fs))
+    {
+    case FRAME : {
+        viddec_h264_set_is_frame_used(p_dpb->active_fs, 3);
+        p_dpb->active_fs->frame.used_for_reference = used_for_reference?3:0;
+        if (used_for_reference)
+        {
+            p_dpb->active_fs->frame.used_for_reference = 3;
+            if (p_dpb->active_fs->frame.is_long_term)
+                viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 3);
+        }
+        // Split frame to 2 fields for prediction
+        h264_dpb_split_field(p_dpb, pInfo);
+
+    }
+    break;
+    case TOP_FIELD : {
+        viddec_h264_set_is_top_used(p_dpb->active_fs, 1);
+
+        p_dpb->active_fs->top_field.used_for_reference = used_for_reference;
+        if (used_for_reference)
+        {
+            // Bit 0 of the frame-level mask tracks the top field.
+            p_dpb->active_fs->frame.used_for_reference |= 0x1;
+            if (p_dpb->active_fs->top_field.is_long_term)
+            {
+                viddec_h264_set_is_top_long_term(p_dpb->active_fs, 1);
+                p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->top_field.long_term_frame_idx;
+            }
+        }
+        // Both fields present (is_used == 3): form the complementary pair.
+        if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) {
+            h264_dpb_combine_field(p_dpb, use_old); // generate frame view
+        }
+        else
+        {
+            p_dpb->active_fs->frame.poc      = p_dpb->active_fs->top_field.poc;
+        }
+
+    }
+    break;
+    case BOTTOM_FIELD : {
+        viddec_h264_set_is_bottom_used(p_dpb->active_fs, 1);
+
+        p_dpb->active_fs->bottom_field.used_for_reference = (used_for_reference<<1);
+        if (used_for_reference)
+        {
+            // Bit 1 of the frame-level mask tracks the bottom field.
+            p_dpb->active_fs->frame.used_for_reference |= 0x2;
+            if (p_dpb->active_fs->bottom_field.is_long_term)
+            {
+                viddec_h264_set_is_bottom_long_term(p_dpb->active_fs, 1);
+                p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->bottom_field.long_term_frame_idx;
+            }
+        }
+        if (viddec_h264_get_is_used(p_dpb->active_fs) == 3) {
+            h264_dpb_combine_field(p_dpb, use_old); // generate frame view
+        }
+        else
+        {
+            p_dpb->active_fs->frame.poc = p_dpb->active_fs->bottom_field.poc;
+        }
+
+    }
+    break;
+    }
+    /*
+    	if ( gRestartMode.LastRestartType  == RESTART_SEI )
+    	{
+    		if ( p_dpb->active_fs->open_gop_entry ) dpb.WaitSeiRecovery = 1;
+    	}
+
+    	gRestartMode.LastRestartType = 0xFFFF;
+    */
+
+    return;
+} ////// End of insert picture in DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_short_term_for_reference ()
+//
+// Adaptive Memory Management: Mark short term picture unused
+//
+
+// MMCO 1: mark the short-term picture whose pic number equals
+// CurrPicNum - (difference_of_pic_nums_minus1 + 1) as unused for reference.
+// Frame pictures unmark the whole frame; field pictures unmark only the
+// matching field, and drop the frame store from the short-term list once
+// neither field is still a reference.
+void h264_dpb_mm_unmark_short_term_for_reference(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1)
+{
+    int32_t picNumX;
+    int32_t currPicNum;
+    uint32_t idx;
+    int32_t unmark_done;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    // Field pictures use 2*frame_num + 1 as the current picture number.
+    if (pInfo->img.structure == FRAME)
+        currPicNum = pInfo->img.frame_num;
+    else
+        currPicNum = (pInfo->img.frame_num << 1) + 1;
+
+    picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);
+
+    unmark_done = 0;
+
+    for (idx =0; (idx < p_dpb->ref_frames_in_buffer) && (!(unmark_done)); idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+        if (pInfo->img.structure == FRAME)
+        {
+            /* If all pic numbers in the list are different (and they should be)
+               we should terminate the for loop the moment we match pic numbers,
+               no need to continue to check - hence set unmark_done
+            */
+
+            if ((p_dpb->active_fs->frame.used_for_reference == 3) && (viddec_h264_get_is_long_term(p_dpb->active_fs) == 0) &&
+                    (p_dpb->active_fs->frame.pic_num == picNumX))
+            {
+                h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc);
+                h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+                unmark_done = 1;
+            }
+        }
+        else
+        {
+            /*
+               If we wish to unmark a short-term picture by picture number when the current picture
+               is a field, we have to unmark the corresponding field as unused for reference,
+               and also if it was part of a frame or complementary reference field pair, the
+               frame is to be marked as unused. However the opposite field may still be used as a
+               reference for future fields
+
+               How will this affect the reference list update ftn coming after??
+
+            */
+            // Top field: clear bit 0 of the frame-level reference mask.
+            if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&&
+                    (p_dpb->active_fs->top_field.pic_num == picNumX) )
+            {
+                p_dpb->active_fs->top_field.used_for_reference = 0;
+                p_dpb->active_fs->frame.used_for_reference &= 2;
+
+                unmark_done = 1;
+
+                //Check if other field is used for short-term reference, if not remove from list...
+                if (p_dpb->active_fs->bottom_field.used_for_reference == 0)
+                    h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+            }
+            // Bottom field: clear bit 1 of the frame-level reference mask.
+            if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) &&
+                    (p_dpb->active_fs->bottom_field.pic_num == picNumX) )
+            {
+                p_dpb->active_fs->bottom_field.used_for_reference = 0;
+                p_dpb->active_fs->frame.used_for_reference &= 1;
+
+                unmark_done = 1;
+
+                //Check if other field is used for reference, if not remove from list...
+                if (p_dpb->active_fs->top_field.used_for_reference == 0)
+                    h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+            }
+        }
+    }
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+////////////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_long_term_for_reference ()
+//
+// Adaptive Memory Management: Mark long term picture unused
+//
+// In a frame situation the long_term_pic_num will refer to another frame.
+// Thus we can call h264_dpb_unmark_for_long_term_reference() and then remove the picture
+// from the list
+//
+// If the current picture is a field, long_term_pic_num will refer to another field
+// It is also the case that each individual field should have a unique picture number
+// 8.2.5.4.2 suggests that when curr pic is a field, an mmco == 2 operation
+// should be accompanied by a second op to unmark the other field as being unused
+///////////////////////////////////////////////////////////////////////////////////
+
+// MMCO 2: mark the long-term picture identified by long_term_pic_num as
+// unused for reference. Frames are unmarked whole; for fields only the
+// matching polarity is unmarked, and the store leaves the long-term list
+// once the opposite field is no longer a long-term reference.
+void h264_dpb_mm_unmark_long_term_for_reference (h264_Info * pInfo, int32_t long_term_pic_num)
+{
+    uint32_t idx;
+    int32_t unmark_done;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    unmark_done = 0;
+    for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (!(unmark_done)); idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+        if (pInfo->img.structure == FRAME)
+        {
+            if ((p_dpb->active_fs->frame.used_for_reference==3) && (viddec_h264_get_is_long_term(p_dpb->active_fs)==3) &&
+                    (p_dpb->active_fs->frame.long_term_pic_num == long_term_pic_num))
+            {
+                h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
+                h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+                unmark_done = 1;
+            }
+        }
+        else
+        {
+            /// Check top field
+            if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x1) &&
+                    (p_dpb->active_fs->top_field.long_term_pic_num == long_term_pic_num) )
+            {
+                p_dpb->active_fs->top_field.used_for_reference = 0;
+                p_dpb->active_fs->top_field.is_long_term = 0;
+                p_dpb->active_fs->frame.used_for_reference &= 2;
+                // Keep only the bottom field's long-term bit at frame level.
+                viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 2);
+
+                unmark_done = 1;
+
+                //Check if other field is used for long term reference, if not remove from list...
+                if ((p_dpb->active_fs->bottom_field.used_for_reference == 0) || (p_dpb->active_fs->bottom_field.is_long_term == 0))
+                    h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+            }
+
+            /// Check Bottom field
+            if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2) &&
+                    (p_dpb->active_fs->bottom_field.long_term_pic_num == long_term_pic_num) )
+            {
+                p_dpb->active_fs->bottom_field.used_for_reference = 0;
+                p_dpb->active_fs->bottom_field.is_long_term = 0;
+                p_dpb->active_fs->frame.used_for_reference &= 1;
+                // Keep only the top field's long-term bit at frame level.
+                viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 1);
+
+                unmark_done = 1;
+                //Check if other field is used for long term reference, if not remove from list...
+                if ((p_dpb->active_fs->top_field.used_for_reference == 0) || (p_dpb->active_fs->top_field.is_long_term == 0))
+                {
+                    h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+                }
+            }
+        } // field structure
+    } //for(idx)
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_pic_struct_by_pic_num
+//
+// Searches the fields appearing in short term reference list
+// Returns the polarity of the field with pic_num = picNumX
+//////////////////////////////////////////////////////////////////////////////
+
+// Searches the short-term reference list for a field whose pic_num equals
+// picNumX and returns its polarity (TOP_FIELD / BOTTOM_FIELD), or INVALID if
+// no such field exists. Leaves p_dpb->active_fs pointing at the last frame
+// store examined.
+int32_t h264_dpb_get_pic_struct_by_pic_num(h264_DecodedPictureBuffer *p_dpb, int32_t picNumX)
+{
+    int32_t result = INVALID;
+    uint32_t i = 0;
+
+    // Walk the list until either end-of-list or a polarity has been found.
+    while ((i < p_dpb->ref_frames_in_buffer) && (result == INVALID))
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[i]);
+
+        // Short-term top field with the requested pic_num?
+        if ((p_dpb->active_fs->frame.used_for_reference&0x1) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x01))&&
+                (p_dpb->active_fs->top_field.pic_num == picNumX) )
+        {
+            result = TOP_FIELD;
+        }
+        // Short-term bottom field with the requested pic_num? (Checked even if
+        // the top field matched, mirroring the original precedence.)
+        if ((p_dpb->active_fs->frame.used_for_reference&0x2) && (!(viddec_h264_get_is_long_term(p_dpb->active_fs)&0x2)) &&
+                (p_dpb->active_fs->bottom_field.pic_num == picNumX) )
+        {
+            result = BOTTOM_FIELD;
+        }
+
+        i++;
+    }
+
+    return result;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_assign_long_term_frame_idx ()
+//
+// Assign a long term frame index to a short term picture
+// Both lists must be updated as part of this process...
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mm_assign_long_term_frame_idx(h264_Info * pInfo, int32_t difference_of_pic_nums_minus1, int32_t long_term_frame_idx)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t picNumX;
+    int32_t currPicNum;
+    int32_t polarity = 0;
+
+    // Field pictures use 2*frame_num + 1 as the current picture number.
+    if (pInfo->img.structure == FRAME) {
+        currPicNum = pInfo->img.frame_num;
+    } else {
+        currPicNum = (pInfo->img.frame_num << 1) + 1;
+    }
+
+    picNumX = currPicNum - (difference_of_pic_nums_minus1 + 1);
+
+    // remove frames / fields with same long_term_frame_idx
+    if (pInfo->img.structure == FRAME) {
+        h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
+    } else {
+        polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);
+
+        // NOTE(review): relies on h264_dpb_get_pic_struct_by_pic_num() leaving
+        // p_dpb->active_fs pointed at the matching frame store so fs_idc below
+        // identifies that store - confirm this invariant.
+        if (polarity != INVALID)
+            h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->active_fs->fs_idc, polarity);
+    }
+
+    // Finally mark the target short-term picture as long-term with this index.
+    h264_dpb_mark_pic_long_term(pInfo, long_term_frame_idx, picNumX);
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_update_max_long_term_frame_idx ()
+//
+// Set new max long_term_frame_idx
+//
+
+// MMCO 4: set the new maximum long-term frame index and unmark every
+// long-term reference picture whose long_term_frame_idx now exceeds it.
+//   max_long_term_frame_idx_plus1 == 0 disables long-term references
+//   (max_long_term_pic_idx becomes -1, so every entry is removed).
+void h264_dpb_mm_update_max_long_term_frame_idx(h264_DecodedPictureBuffer *p_dpb,int32_t max_long_term_frame_idx_plus1)
+{
+    //h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t idx;
+    int32_t temp;
+    int32_t removed_count;
+    int32_t idx2 = 0;
+
+    p_dpb->max_long_term_pic_idx = max_long_term_frame_idx_plus1 - 1;
+
+    temp = p_dpb->ltref_frames_in_buffer;
+    removed_count = 0;
+
+    // check for invalid frames
+    for (idx = 0; idx < temp; idx++)
+    {
+        // Entries removed so far have shifted the list down; idx2 is the
+        // position of the next unexamined entry.
+        idx2 = idx - removed_count;
+        // Bounds guard on fs_ltref_idc[]. idx2 == 0 is a valid entry and must
+        // be examined too (the previous "> 0" test wrongly skipped the first
+        // long-term reference in the list).
+        if (idx2 < 16 && idx2 >= 0)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+
+            if (p_dpb->active_fs->long_term_frame_idx > p_dpb->max_long_term_pic_idx)
+            {
+                removed_count++;
+                h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+                h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx2]);
+            }
+        }
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_unmark_all_short_term_for_reference ()
+//
+// Unmark all short term refernce pictures
+//
+
+// MMCO 5 helper: unmark every short-term reference picture and empty the
+// short-term reference list.
+void h264_dpb_mm_unmark_all_short_term_for_reference (h264_DecodedPictureBuffer *p_dpb)
+{
+    int32_t remaining = p_dpb->ref_frames_in_buffer;
+
+    // Removing entry 0 shifts the list down, so the head is drained repeatedly
+    // until the snapshot count is exhausted.
+    while (remaining-- > 0)
+    {
+        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+    }
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mm_mark_current_picture_long_term ()
+//
+// Marks the current picture as long term after unmarking any long term picture
+// already assigned with the same long term frame index
+//
+
+// MMCO 6: mark the *current* picture (the one in fs_dec_idc) as a long-term
+// reference with the given index, after unmarking any long-term picture that
+// already carries the same long_term_frame_idx.
+// (The legacy code also computed a local picNumX in both field branches but
+// never read it; that dead computation has been removed.)
+void h264_dpb_mm_mark_current_picture_long_term(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
+{
+    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+    if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME)
+    {
+        h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(p_dpb, long_term_frame_idx);
+        // Re-activate the current frame store - the unmark call above moves
+        // active_fs while walking the long-term list.
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        p_dpb->active_fs->frame.is_long_term        = 1;
+        p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+        p_dpb->active_fs->frame.long_term_pic_num   = long_term_frame_idx;
+    }
+    else
+    {
+        if (viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)
+        {
+            p_dpb->active_fs->top_field.is_long_term        = 1;
+            p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+
+            // Assign long-term pic num (field numbering: 2*idx + 1)
+            p_dpb->active_fs->top_field.long_term_pic_num   = (long_term_frame_idx << 1) + 1;
+        }
+        else
+        {
+            p_dpb->active_fs->bottom_field.is_long_term        = 1;
+            p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+
+            // Assign long-term pic num
+            p_dpb->active_fs->bottom_field.long_term_pic_num   = (long_term_frame_idx << 1) + 1;
+
+        }
+        h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(p_dpb, long_term_frame_idx, p_dpb->fs_dec_idc, viddec_h264_get_dec_structure(p_dpb->active_fs));
+    }
+    // Add to long term list
+    //h264_dpb_add_ltref_list(p_dpb->fs_dec_idc);
+
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference frame or complementary field pair unused for referemce
+// NOTE: Obviously this ftn cannot be used to unmark individual fields...
+//////////////////////////////////////////////////////////////////////////////
+
+// Unmarks every long-term reference frame (or complementary field pair) whose
+// long_term_frame_idx matches, and removes it from the long-term list.
+void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx)
+{
+    uint32_t idx;
+    for (idx =0; idx < p_dpb->ltref_frames_in_buffer; idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+
+        if (p_dpb->active_fs->long_term_frame_idx == long_term_frame_idx)
+        {
+            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx]);
+            // NOTE(review): removal shifts the list down while idx keeps
+            // advancing, so the entry that slides into this slot is skipped -
+            // harmless only if at most one entry carries the index; confirm.
+            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx]);
+        }
+    }
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_long_term_field_for_reference_by_frame_idx ()
+//
+// Mark a long-term reference field unused for reference. However if it is the
+// complementary field (opposite polarity) of the picture stored in fs_idc,
+// we do not unmark it
+//////////////////////////////////////////////////////////////////////////////
+
+// Unmarks the long-term reference field carrying long_term_frame_idx, unless
+// the match is the complementary (opposite-polarity) field of the picture in
+// fs_idc, in which case it is left marked.
+//   fs_idc   : frame store of the picture whose complement must be preserved.
+//   polarity : field polarity (TOP_FIELD / BOTTOM_FIELD) of that picture.
+void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity)
+{
+    uint32_t idx;
+    int32_t found = 0;
+    int32_t is_complement = 0;
+
+    for (idx = 0; (idx < p_dpb->ltref_frames_in_buffer) && (found == 0); idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ltref_idc[idx]);
+        if (p_dpb->active_fs->long_term_frame_idx == long_term_frame_idx)
+        {
+            if (p_dpb->active_fs->fs_idc == fs_idc)
+            {
+                // Again these seem like redundant checks but for safety while until JM is updated
+                // The opposite field being long-term means the match is the
+                // complement of the caller's picture.
+                if (polarity == TOP_FIELD)
+                    is_complement = (p_dpb->active_fs->bottom_field.is_long_term)? 1:0;
+                else if (polarity == BOTTOM_FIELD)
+                    is_complement = (p_dpb->active_fs->top_field.is_long_term)   ? 1:0;
+            }
+            found = 1;
+        }
+    }
+
+    // The for loop increments idx once more after found is set, so the
+    // matching entry sits at idx-1 here.
+    if (found) {
+        if (is_complement == 0)
+        {
+            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[idx-1]);
+        }
+    }
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_pic_long_term ()
+//
+// This is used on a picture already in the dpb - i.e. not for the current picture
+// dpb_split / dpb_combine field will perform ftnality in that case
+//
+// Marks a picture as used for long-term reference. Adds it to the long-term
+// reference list. Also removes it from the short term reference list if required
+//
+// Note: if the current picture is a frame, the picture to be marked will be a
+// short-term reference frame or short-term complemenetary reference field pair
+// We use the pic_num assigned to the frame part of the structure to locate it
+// Both its fields will have their long_term_frame_idx and long_term_pic_num
+// assigned to be equal to long_term_frame_idx
+//
+// If the current picture is a field, the picture to be marked will be a
+// short-term reference field. We use the pic_nums assigned to the field parts of
+// the structure to identify the appropriate field. We assign the long_term_frame_idx
+// of the field equal to long_term_frame_idx.
+//
+// We also check to see if this marking has resulted in both fields of the frame
+// becoming long_term. If it has, we update the frame part of the structure by
+// setting its long_term_frame_idx
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint32_t idx;
+    int32_t mark_done;   // set once the picture identified by picNumX has been marked
+    int32_t polarity = 0;
+
+    mark_done = 0;
+
+    if (pInfo->img.structure == FRAME)
+    {
+        // Frame case: search the short-term reference list for the frame whose
+        // pic_num equals picNumX and promote the whole frame (both fields) to
+        // long-term, then move it from the short-term to the long-term list.
+        for (idx = 0; (idx < p_dpb->ref_frames_in_buffer) && (!(mark_done)); idx++)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_ref_idc[idx]);
+
+            // used_for_reference == 3 means both fields are short-term references
+            if (p_dpb->active_fs->frame.used_for_reference == 3)
+            {
+                if ((!(p_dpb->active_fs->frame.is_long_term))&&(p_dpb->active_fs->frame.pic_num == picNumX))
+                {
+                    // Assign the same long_term_frame_idx to the frame and both fields
+                    p_dpb->active_fs->long_term_frame_idx = long_term_frame_idx;
+                    p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+                    p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+                    p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+
+                    p_dpb->active_fs->frame.is_long_term = 1;
+                    p_dpb->active_fs->top_field.is_long_term = 1;
+                    p_dpb->active_fs->bottom_field.is_long_term = 1;
+
+                    viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 3);
+                    mark_done = 1;
+
+                    // Assign long-term pic num (frame case: equal to long_term_frame_idx)
+                    p_dpb->active_fs->frame.long_term_pic_num   = long_term_frame_idx;
+                    p_dpb->active_fs->top_field.long_term_pic_num    = long_term_frame_idx;
+                    p_dpb->active_fs->bottom_field.long_term_pic_num = long_term_frame_idx;
+                    // Add to long term list
+                    h264_dpb_add_ltref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+                    // Remove from short-term list
+                    h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[idx]);
+                }
+            }
+        }
+    }
+    else
+    {
+        // Field case: locate the field whose pic_num equals picNumX; this call
+        // is also expected to leave active_fs pointing at the containing store.
+        polarity = h264_dpb_get_pic_struct_by_pic_num(p_dpb, picNumX);
+        // NOTE(review): the legacy code carried a "/////BUG" marker on this line —
+        // long_term_frame_idx is written even when polarity matches neither
+        // branch below; behavior intentionally kept as-is, confirm before fixing.
+        p_dpb->active_fs->long_term_frame_idx = long_term_frame_idx;
+
+        if (polarity == TOP_FIELD)
+        {
+            p_dpb->active_fs->top_field.long_term_frame_idx = long_term_frame_idx;
+            p_dpb->active_fs->top_field.is_long_term        = 1;
+            viddec_h264_set_is_top_long_term(p_dpb->active_fs, 1);
+
+            // Assign long-term pic num: 2*idx, plus 1 when the marked field has
+            // the same parity as the current picture
+            p_dpb->active_fs->top_field.long_term_pic_num   = (long_term_frame_idx << 1) + ((pInfo->img.structure == TOP_FIELD) ? 1 : 0);
+
+        }
+        else if (polarity == BOTTOM_FIELD)
+        {
+            p_dpb->active_fs->bottom_field.long_term_frame_idx = long_term_frame_idx;
+            p_dpb->active_fs->bottom_field.is_long_term        = 1;
+            viddec_h264_set_is_bottom_long_term(p_dpb->active_fs, 1);
+
+            // Assign long-term pic num (same parity rule as the top-field branch)
+            p_dpb->active_fs->bottom_field.long_term_pic_num   = (long_term_frame_idx << 1) + ((pInfo->img.structure == BOTTOM_FIELD) ? 1 : 0);
+        }
+
+        if (viddec_h264_get_is_long_term(p_dpb->active_fs) == 3)
+        {
+            // Both fields are now long-term: promote the frame part as well and
+            // drop the store from the short-term reference list.
+            p_dpb->active_fs->frame.is_long_term = 1;
+            p_dpb->active_fs->frame.long_term_frame_idx = long_term_frame_idx;
+            h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+        }
+        else
+        {
+            // We need to add this idc to the long term ref list...
+            h264_dpb_add_ltref_list(p_dpb, p_dpb->active_fs->fs_idc);
+
+            // If the opposite field is not a short term reference, remove it from the
+            // short term list. Since we know top field is a reference but both are not long term
+            // we can simply check that both fields are not references...
+            if (p_dpb->active_fs->frame.used_for_reference != 3)
+                h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+        }
+    }
+    return;
+} ///// End of mark pic long term
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_adaptive_memory_management ()
+//
+// Perform Adaptive memory control decoded reference picture marking process
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_adaptive_memory_management (h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t idx;
+
+    idx = 0;
+
+    // Apply each memory_management_control_operation (MMCO) command parsed
+    // from the slice header's dec_ref_pic_marking() syntax, in order.
+    while (idx < pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count)
+    {
+        switch (pInfo->SliceHeader.sh_dec_refpic.memory_management_control_operation[idx])
+        {
+        case   1: {	// MMCO 1: mark a short-term reference picture as "unused for reference"
+            h264_dpb_mm_unmark_short_term_for_reference(pInfo,
+                    pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx]);
+        }
+        break;
+        case   2: {	// MMCO 2: mark a long-term reference picture as "unused for reference"
+            h264_dpb_mm_unmark_long_term_for_reference(pInfo,
+                    pInfo->SliceHeader.sh_dec_refpic.long_term_pic_num[idx]);
+        }
+        break;
+        case  3: {		// MMCO 3: mark a short-term reference picture as "used for long-term reference" and assign a long-term frame index to it
+            h264_dpb_mm_assign_long_term_frame_idx(pInfo,
+                                                   pInfo->SliceHeader.sh_dec_refpic.difference_of_pic_num_minus1[idx],
+                                                   pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+        }
+        break;
+        case  4: {	// MMCO 4: specify the maximum long-term frame index and
+            // mark all long-term reference pictures having long-term frame indices greater than
+            // the maximum value as "unused for reference"
+            h264_dpb_mm_update_max_long_term_frame_idx (&pInfo->dpb,
+                    pInfo->SliceHeader.sh_dec_refpic.max_long_term_frame_idx_plus1[idx]);
+        }
+        break;
+        case  5: {		// MMCO 5: mark all reference pictures as "unused for reference" and set the MaxLongTermFrameIdx variable to
+            // "no long-term frame indices"
+            h264_dpb_mm_unmark_all_short_term_for_reference(&pInfo->dpb);
+            h264_dpb_mm_update_max_long_term_frame_idx(&pInfo->dpb, 0);
+            pInfo->img.last_has_mmco_5 = 1;
+        }
+        break;
+        case   6: {	// MMCO 6: mark the current picture as "used for long-term reference" and assign a long-term frame index to it
+            h264_dpb_mm_mark_current_picture_long_term(&pInfo->dpb,
+                    pInfo->SliceHeader.sh_dec_refpic.long_term_frame_idx[idx]);
+        }
+        break;
+        }
+        idx++;
+    }
+
+
+    if (pInfo->img.last_has_mmco_5)
+    {
+        // MMCO 5 resets frame numbering and POC so the current picture behaves
+        // like the start of a new sequence.
+        pInfo->img.frame_num = 0;
+        pInfo->SliceHeader.frame_num=0;
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+
+        if (viddec_h264_get_dec_structure(p_dpb->active_fs) == FRAME)
+        {
+            // Rebase both field POCs relative to the current frame's POC
+            pInfo->img.bottompoc -= p_dpb->active_fs->frame.poc;
+            pInfo->img.toppoc    -= p_dpb->active_fs->frame.poc;
+
+
+            p_dpb->active_fs->frame.poc = 0;
+            p_dpb->active_fs->frame.pic_num = 0;
+            p_dpb->active_fs->frame_num = 0;
+        }
+
+        else if (viddec_h264_get_dec_structure(p_dpb->active_fs) == TOP_FIELD)
+        {
+            p_dpb->active_fs->top_field.poc = p_dpb->active_fs->top_field.pic_num = 0;
+            pInfo->img.toppoc = p_dpb->active_fs->top_field.poc;
+        }
+        else if (viddec_h264_get_dec_structure(p_dpb->active_fs) == BOTTOM_FIELD)
+        {
+            p_dpb->active_fs->bottom_field.poc = p_dpb->active_fs->bottom_field.pic_num = 0;
+            pInfo->img.bottompoc = 0;
+        }
+
+        // All references were unmarked above, so flush the DPB (with output)
+        h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field,pInfo->active_SPS.num_ref_frames);
+    }
+    // Reset the marking count operations for the current picture...
+    pInfo->SliceHeader.sh_dec_refpic.dec_ref_pic_marking_count = 0;
+
+    return;
+} ////// End of adaptive memory management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_gaps_in_frame_num_mem_management ()
+//
+// Produces a set of frame_nums pertaining to "non-existing" pictures
+// Calls h264_dpb_store_picture_in_dpb
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo)
+{
+    int32_t		temp_frame_num = 0;
+    int32_t		idx, prev_idc;
+    int32_t 	prev_frame_num_plus1_wrap;
+    uint32_t	temp = 0;
+    // MaxFrameNum = 2^(log2_max_frame_num_minus4 + 4), from the active SPS
+    int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+    seq_param_set_used_ptr  active_sps = &pInfo->active_SPS;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    pInfo->img.gaps_in_frame_num = 0;
+
+    // pInfo->img.last_has_mmco_5 set thru store_picture_in_dpb
+    if (pInfo->img.last_has_mmco_5)
+    {
+        // If the previous picture was an unpaired field, mark it as a dangler
+        if (p_dpb->used_size)
+        {
+            idx = p_dpb->used_size-1;
+            prev_idc = p_dpb->fs_dpb_idc[idx];
+            if (prev_idc != MPD_DPB_FS_NULL_IDC)
+            {
+                h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                p_dpb->active_fs->frame_num =0;
+            }
+        }
+        // MMCO 5 resets frame numbering for the next picture
+        pInfo->img.PreviousFrameNumOffset = 0;
+        //CONFORMANCE_ISSUE
+        pInfo->img.PreviousFrameNum = 0;
+
+    }
+
+    // Check for gaps in frame_num
+    if (pInfo->SliceHeader.idr_flag) {
+        // IDR restarts numbering, so no gap is possible by definition
+        pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+    }
+    // Have we re-started following a recovery point message?
+    /*
+    	else if(got_sei_recovery || aud_got_restart){
+    		pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+    		//got_sei_recovery = 0;
+    		//aud_got_restart  = 0;
+    	}
+    */
+    else if (pInfo->img.frame_num != pInfo->img.PreviousFrameNum)
+    {
+        // Compute (PreviousFrameNum + 1) mod MaxFrameNum; the MaxFrameNum == 0
+        // guard protects against a malformed SPS (division by zero)
+        if (MaxFrameNum) {
+            ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp);
+        } else {
+            temp = (uint32_t)pInfo->img.PreviousFrameNum + 1;
+        }
+        prev_frame_num_plus1_wrap = temp;
+        if (pInfo->img.frame_num != prev_frame_num_plus1_wrap)
+        {
+            // Gap size accounts for wrap-around when frame_num went backwards
+            pInfo->img.gaps_in_frame_num = (pInfo->img.frame_num < pInfo->img.PreviousFrameNum)? ((MaxFrameNum + pInfo->img.frame_num -1) - pInfo->img.PreviousFrameNum): (pInfo->img.frame_num - pInfo->img.PreviousFrameNum - 1);
+            // We should test for an error here - should infer an unintentional loss of pictures
+        }
+    }
+
+
+    //if(active_sps->gaps_in_frame_num_value_allowed_flag == 0) {
+    if (pInfo->img.gaps_in_frame_num && (active_sps->gaps_in_frame_num_value_allowed_flag == 0)) {
+        // infer an unintentional loss of pictures
+        // only invoke following process for a conforming bitstream
+        // when gaps_in_frame_num_value_allowed_flag is equal to 1
+        pInfo->img.gaps_in_frame_num = 0;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        pInfo->sw_bail = 1;
+#endif
+#endif
+        //mfd_printf("ERROR STREAM??\n");
+        ////// Error handling here----
+    }
+
+    /////// Removed following OLO source (Sodaville H.D)
+    //else if (pInfo->img.gaps_in_frame_num  > active_sps->num_ref_frames) {
+    //	// No need to produce any more non-existent frames than the amount required to flush the dpb
+    //	pInfo->img.gaps_in_frame_num = active_sps->num_ref_frames;
+    //mfd_printf("gaps in frame: %d\n", gaps_in_frame_num);
+    //}
+
+    // If the previous picture was an unpaired field, mark it as a dangler
+    if (p_dpb->used_size)
+    {
+        idx = p_dpb->used_size-1;
+        prev_idc = p_dpb->fs_dpb_idc[idx];
+        if (prev_idc != MPD_DPB_FS_NULL_IDC)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+            if (viddec_h264_get_is_used(p_dpb->active_fs) != 3) {
+                h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+            }
+        }
+    }
+
+    // Insert one "non-existing" frame per missing frame_num value
+    while (temp_frame_num < pInfo->img.gaps_in_frame_num)
+    {
+        h264_dpb_assign_frame_store(pInfo, 1);
+
+        // Set up initial markings - not sure if all are needed
+        viddec_h264_set_dec_structure(p_dpb->active_fs, FRAME);
+
+        if (MaxFrameNum)
+            ldiv_mod_u((uint32_t)(pInfo->img.PreviousFrameNum + 1), (uint32_t)MaxFrameNum, &temp);
+
+        p_dpb->active_fs->frame.pic_num = temp;
+        p_dpb->active_fs->long_term_frame_idx        = 0;
+        p_dpb->active_fs->frame.long_term_pic_num    = 0;
+        viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
+
+        // Note the call below will overwrite some aspects of the img structure with info relating to the
+        // non-existent picture
+        // However, since this is called before h264_hdr_decoding_poc() for the current existing picture
+        // it should be o.k.
+        if (pInfo->img.pic_order_cnt_type)
+            h264_hdr_decoding_poc(pInfo, 1, temp);
+
+        pInfo->img.structure = FRAME;
+        p_dpb->active_fs->frame.poc = pInfo->img.framepoc;
+
+        // call store_picture_in_dpb
+
+        h264_dpb_store_previous_picture_in_dpb(pInfo, 1, 0);
+
+        h264_hdr_post_poc(pInfo, 1, temp, 0);
+
+        temp_frame_num++;
+    }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_reference ()
+//
+// Mark FrameStore unused for reference. Removes it from the short term reference list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    int32_t used_bits;
+
+    // Point active_fs at the frame store being unmarked.
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+    used_bits = viddec_h264_get_is_used(p_dpb->active_fs);
+
+    // Clear the reference flag on whichever parts of the store are in use.
+    if (used_bits & 0x1)
+        p_dpb->active_fs->top_field.used_for_reference = 0;
+    if (used_bits & 0x2)
+        p_dpb->active_fs->bottom_field.used_for_reference = 0;
+    if (used_bits == 3)
+        p_dpb->active_fs->frame.used_for_reference = 0;
+
+    // The frame-level flag is cleared unconditionally, as in the legacy code.
+    p_dpb->active_fs->frame.used_for_reference = 0;
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_unmark_for_long_term_reference ()
+//
+// mark FrameStore unused for reference and reset long term flags
+// This function does not remove it form the long term list
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    int32_t used_bits;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+    used_bits = viddec_h264_get_is_used(p_dpb->active_fs);
+
+    // Drop both the reference flag and the long-term flag on every part of
+    // the store that is in use (top field, bottom field, whole frame).
+    if (used_bits & 0x1)
+    {
+        p_dpb->active_fs->top_field.used_for_reference = 0;
+        p_dpb->active_fs->top_field.is_long_term = 0;
+    }
+    if (used_bits & 0x2)
+    {
+        p_dpb->active_fs->bottom_field.used_for_reference = 0;
+        p_dpb->active_fs->bottom_field.is_long_term = 0;
+    }
+    if (used_bits == 3)
+    {
+        p_dpb->active_fs->frame.used_for_reference = 0;
+        p_dpb->active_fs->frame.is_long_term = 0;
+    }
+
+    // Frame-level flags are always cleared regardless of field usage.
+    // Note: the store is NOT removed from the long-term list here.
+    p_dpb->active_fs->frame.used_for_reference = 0;
+    viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
+
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_mark_dangling_field
+//
+// Tells HW previous field was dangling
+// Marks it in SW as so
+// Takes appropriate actions. - sys_data needs thought through...
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc)
+{
+    int32_t dec_structure;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    /*
+    Only mark a store once. This covers the situation of a dangling field
+    followed by a frame which is direct output (i.e. never entered into the
+    dpb). In that case we could otherwise attempt to mark the previous
+    unpaired field as a dangler twice, which would upset the HW dpb_disp_q
+    count.
+    */
+    if (viddec_h264_get_is_dangling(p_dpb->active_fs) != 0)
+        return;
+
+    dec_structure = viddec_h264_get_dec_structure(p_dpb->active_fs);
+
+    // Only an unpaired TOP or BOTTOM field can dangle; any other structure
+    // was the (silent) fatal-error case in the legacy switch and is ignored.
+    if ((dec_structure == TOP_FIELD) || (dec_structure == BOTTOM_FIELD))
+    {
+        viddec_h264_set_is_dangling(p_dpb->active_fs, 1);
+    }
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_is_used_for_reference ()
+//
+// Check if one of the frames/fields in active_fs is used for reference
+//
+void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t * flag)
+{
+    int32_t usage = viddec_h264_get_is_used(p_dpb->active_fs);
+
+    *flag = 0;
+
+    if (p_dpb->active_fs->frame.used_for_reference)
+    {
+        // Frame-level flag set: the store is a reference.
+        *flag = 1;
+    }
+    else if (usage == 3)
+    {
+        // Complete frame in the store: frame-level flag decides
+        // (necessarily 0 on this path — kept for parity with the original).
+        *flag = p_dpb->active_fs->frame.used_for_reference;
+    }
+    else
+    {
+        // Field store: *flag is the OR over whichever fields are present.
+        if (usage & 0x1) // top field
+            *flag = p_dpb->active_fs->top_field.used_for_reference;
+        if (usage & 0x2) // bottom field
+            *flag = *flag || p_dpb->active_fs->bottom_field.used_for_reference;
+    }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_idr_memory_management ()
+//
+// Perform Memory management for idr pictures
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_idr_memory_management (h264_Info * pInfo,seq_param_set_used_ptr active_sps, int32_t no_output_of_prior_pics_flag)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    uint32_t	idx;
+    uint32_t	i;
+    int32_t		DPB_size;
+    int32_t		FrameSizeInBytes, FrameSizeInMbs;
+    uint32_t	data;          // remainder output of ldiv_mod_u (unused result)
+    int32_t		num_ref_frames = active_sps->num_ref_frames;
+    int32_t		level_idc = active_sps->level_idc;
+    uint32_t    temp_bump_level=0;
+
+
+    /// H.D-----
+    /// There are 2 kinds of dpb flush defined, one is with display, the other is without display
+    /// The function name dpb_flush actually is just the first, and the 2nd one is for error case or no_prior_output
+    /// We will rewrite the code below to make it clean and clear
+    ///
+    if (no_output_of_prior_pics_flag)
+    {
+
+        // free all stored pictures
+        for (idx = 0; idx < p_dpb->used_size; idx = idx + 1)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+            //mfd_printf(" directly freeing fs_idc = %d DSN = 0x%x \n",p_dpb->active_fs->fs_idc, p_dpb->active_fs->first_dsn);
+            viddec_h264_set_is_frame_used(p_dpb->active_fs, 0);
+            //if( (p_dpb->active_fs->frame_sent == 0x01) && (p_dpb->active_fs->is_output == 0x0))
+            {
+                //DECODED_FRAME sent but not DISPLAY_FRAME
+                h264_dpb_unmark_for_reference(p_dpb, p_dpb->active_fs->fs_idc);
+                h264_dpb_remove_ref_list(p_dpb, p_dpb->active_fs->fs_idc);
+                //h264_send_new_display_frame(0x01); //send ignore_frame signal to Host
+#ifndef USE_AVC_SHORT_FORMAT
+                ///  Add into drop-out list for all frms in dpb without display
+                if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs)))   {
+                    if ( viddec_h264_get_is_output(&(p_dpb->fs[p_dpb->fs_dpb_idc[idx]])) ) {			//// This frame has been displayed but not released
+                        p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs_dpb_idc[idx];
+                        p_dpb->frame_numbers_need_to_be_removed ++;
+                    } else {																		//// This frame will be removed without display
+                        p_dpb->frame_id_need_to_be_dropped[p_dpb->frame_numbers_need_to_be_dropped] = p_dpb->fs_dpb_idc[idx];
+                        p_dpb->frame_numbers_need_to_be_dropped ++;
+                    }
+                }
+#endif
+            }
+
+        }
+
+        ////////////////////////////////////////// Reset Reference list
+        for (i = 0; i < p_dpb->ref_frames_in_buffer; i++)
+            p_dpb->fs_ref_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+        for (i = 0; i < p_dpb->ltref_frames_in_buffer; i++)
+            p_dpb->fs_ltref_idc[i] = MPD_DPB_FS_NULL_IDC;
+
+        ////////////////////////////////////////// Reset DPB and dpb list
+        for (i = 0; i < p_dpb->used_size; i++) {
+            p_dpb->fs[p_dpb->fs_dpb_idc[i]].fs_idc = MPD_DPB_FS_NULL_IDC;
+            p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+        }
+
+        p_dpb->used_size = 0;
+        p_dpb->ref_frames_in_buffer   = 0;
+        p_dpb->ltref_frames_in_buffer = 0;
+
+        // Sentinel: most negative int32, so any real POC compares greater
+        p_dpb->last_output_poc = 0x80000000;
+    }
+    else {
+        // Prior pictures must be output: flush with display
+        h264_dpb_flush_dpb(pInfo, 1, pInfo->img.second_field, num_ref_frames);
+    }
+
+    if (p_dpb->fs_dec_idc != MPD_DPB_FS_NULL_IDC) // added condition for use of DPB initialization
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        if (pInfo->img.long_term_reference_flag)
+        {
+            p_dpb->max_long_term_pic_idx      = 0;
+            switch (viddec_h264_get_dec_structure(p_dpb->active_fs))
+            {
+            case FRAME        :
+                p_dpb->active_fs->frame.is_long_term = 1;
+                /* fallthrough: a long-term frame marks both of its fields too */
+            case TOP_FIELD    :
+                p_dpb->active_fs->top_field.is_long_term = 1;
+                /* fallthrough: NOTE(review) TOP_FIELD also marks the bottom
+                   field long-term via this fallthrough — looks deliberate in
+                   the legacy code, but confirm before relying on it */
+            case BOTTOM_FIELD :
+                p_dpb->active_fs->bottom_field.is_long_term = 1;
+            }
+            p_dpb->active_fs->long_term_frame_idx = 0;
+        }
+        else
+        {
+            p_dpb->max_long_term_pic_idx = MPD_DPB_FS_NULL_IDC;
+            viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
+        }
+    }
+
+    p_dpb->OutputLevel      = 0;
+    p_dpb->OutputLevelValid = 0;
+    p_dpb->OutputCtrl = 0;
+
+
+    // Set up bumping level - do this every time a parameters set is activated...
+    if (active_sps->sps_disp.vui_parameters_present_flag)
+    {
+        if (active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
+        {
+            //p_dpb->OutputLevel      = active_sps->sps_disp.vui_seq_parameters.num_reorder_frames;
+            //p_dpb->OutputLevelValid = 1;
+        }
+    }
+
+    // Set up bumping level - do this every time a parameters set is activated...
+    // DPB_size values below appear to be MaxDPB limits per level in units of
+    // 1024 bytes (the << 10 further down converts to bytes) — see H.264
+    // Annex A Table A-1; confirm against the spec revision this targets.
+    switch (level_idc)
+    {
+    case h264_Level1b:
+    case h264_Level1:
+    {
+        if ((active_sps->profile_idc < 100) && ((active_sps->constraint_set_flags & 0x1) == 0)) {
+            DPB_size =	 338;
+        }
+        else {
+            DPB_size =	 149;
+        }
+
+        break;
+    }
+    case h264_Level11:
+    {
+        DPB_size = 338;
+        break;
+    }
+    case h264_Level12:
+    case h264_Level13:
+    case h264_Level2:
+    {
+        DPB_size = 891;
+        break;
+    }
+    case h264_Level21:
+    {
+        DPB_size = 1782;
+        break;
+    }
+    case h264_Level22:
+    case h264_Level3:
+    {
+        DPB_size = 3038;
+        break;
+    }
+    case h264_Level31:
+    {
+        DPB_size = 6750;
+        break;
+    }
+    case h264_Level32:
+    {
+        DPB_size = 7680;
+        break;
+    }
+    case h264_Level4:
+    case h264_Level41:
+    {
+        DPB_size = 12288;
+        break;
+    }
+    case h264_Level42:
+    {
+        DPB_size = 13056;
+        break;
+    }
+    case h264_Level5:
+    {
+        DPB_size = 41400;
+        break;
+    }
+    case h264_Level51:
+    {
+        DPB_size = 69120;
+        break;
+    }
+    default  :
+        DPB_size =   69120;
+        break;
+    }
+
+    // Frame size in bytes for 4:2:0: 256 luma + 128 chroma bytes per macroblock
+    FrameSizeInMbs = pInfo->img.PicWidthInMbs * pInfo->img.FrameHeightInMbs;
+    FrameSizeInBytes = (FrameSizeInMbs << 8) + (FrameSizeInMbs << 7);
+
+    if (FrameSizeInBytes)
+    {
+
+        // BumpLevel = how many whole frames fit in the level's DPB budget
+        temp_bump_level = ldiv_mod_u((DPB_size << 10), FrameSizeInBytes, &data);
+
+        if (temp_bump_level > 255)
+        {
+            p_dpb->BumpLevel = 255;
+        }
+        else
+        {
+            p_dpb->BumpLevel = (uint8_t)temp_bump_level;
+        }
+    }
+
+    if (p_dpb->BumpLevel == 0)
+        p_dpb->BumpLevel = active_sps->num_ref_frames + 1;
+
+    // Clamp to the architectural DPB maximum of 16 frames
+    if (p_dpb->BumpLevel > 16)
+        p_dpb->BumpLevel = 16;
+
+
+    if (active_sps->sps_disp.vui_parameters_present_flag && active_sps->sps_disp.vui_seq_parameters.bitstream_restriction_flag) {
+
+        if (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > p_dpb->BumpLevel) {
+            //MFD_PARSER_DEBUG(ERROR_H264_DPB);
+            //// err handling here
+
+            //// For some ilegal clips, the max dpb length described in vui might exceed the sps's value
+            //// To guarantee normal playback, just select the vui value to override
+            p_dpb->BumpLevel = active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering;
+        }
+        else {
+            p_dpb->BumpLevel = (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering > 1) ?
+                               (active_sps->sps_disp.vui_seq_parameters.max_dec_frame_buffering) : 1;
+        }
+    }
+
+
+    // A new sequence means automatic frame release
+    //sei_information.disp_frozen = 0;
+
+    return;
+} //// End --- dpb_idr_memory_management
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_frame_from_dpb ()
+//
+// remove one frame from DPB
+// The parameter index, is the location of the frame to be removed in the
+// fs_dpb_idc list. The used size is decremented by one
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx)
+{
+    int32_t removed_idc;
+    uint32_t pos;
+
+    // Resolve the frame store at dpb-list position idx and mark it unused.
+    removed_idc = p_dpb->fs_dpb_idc[idx];
+
+    h264_dpb_set_active_fs(p_dpb, removed_idc);
+    viddec_h264_set_is_frame_used(p_dpb->active_fs, 0);
+
+#ifndef USE_AVC_SHORT_FORMAT
+    //add to support frame relocation interface to host
+    if (!(viddec_h264_get_is_non_existent(p_dpb->active_fs)))
+    {
+        p_dpb->frame_id_need_to_be_removed[p_dpb->frame_numbers_need_to_be_removed] = p_dpb->fs[removed_idc].fs_idc;
+        p_dpb->frame_numbers_need_to_be_removed ++;
+    }
+#endif
+    ///////////////////////////////////////// Reset FS
+    p_dpb->fs[removed_idc].fs_idc = MPD_DPB_FS_NULL_IDC;
+
+    // Close the gap: shift every later entry down one slot, then null the tail.
+    for (pos = idx; (pos + 1) < p_dpb->used_size; pos++)
+    {
+        p_dpb->fs_dpb_idc[pos] = p_dpb->fs_dpb_idc[pos + 1];
+    }
+    p_dpb->fs_dpb_idc[pos] = MPD_DPB_FS_NULL_IDC;
+
+    // One fewer frame store in use.
+    p_dpb->used_size--;
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_remove_unused_frame_from_dpb ()
+//
+// Remove a picture from DPB which is no longer needed.
+// Search for a frame which is not used for reference and has previously been placed
+// in the output queue - if find one call h264_dpb_remove_frame_from_dpb() and
+// set flag 1
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag)
+{
+    // Remove at most one picture from the DPB that is no longer needed:
+    // it must already have been output and must not be used for reference.
+    // *flag is set to 1 if such a picture was found and removed, else 0.
+    //
+    // Fix vs. legacy code: the locals first_non_exist_valid / non_exist_idx
+    // were only referenced from commented-out ("Removed OLO source") code and
+    // are deleted along with that dead code.
+    uint32_t idx;
+    int32_t used_for_reference = 0;
+
+    *flag = 0;
+
+    for (idx = 0; (idx < p_dpb->used_size) && (*flag == 0); idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+        h264_dpb_is_used_for_reference(p_dpb, &used_for_reference);
+
+        // Removable only once displayed (is_output) and no longer a reference
+        if (viddec_h264_get_is_output(p_dpb->active_fs) && (used_for_reference == 0))
+        {
+            h264_dpb_remove_frame_from_dpb(p_dpb, idx);
+            *flag = 1;
+        }
+    }
+    return;
+}	//// End of h264_dpb_remove_unused_frame_from_dpb
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_get_smallest_poc ()
+//
+// find smallest POC in the DPB which has not as yet been output
+// This function only checks for frames and dangling fields...
+// unless the dpb used size is one, in which case it will accept an unpaired field
+//////////////////////////////////////////////////////////////////////////////
+// Find the smallest POC in the DPB which has not yet been output.
+// Only frames and dangling fields qualify, unless the DPB holds a single
+// entry, in which case an unpaired field is accepted too.
+//
+// Outputs: *pos = DPB index of the candidate (MPD_DPB_FS_NULL_IDC if none),
+//          *poc = its POC (0 if the DPB is empty).
+void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos)
+{
+    int32_t poc_int = 0;
+    uint32_t idx;
+    int32_t first_non_output = 1;
+
+    *pos = MPD_DPB_FS_NULL_IDC;
+
+    // Robustness fix: the legacy code unconditionally read fs_dpb_idc[0]
+    // and dereferenced the resulting active_fs before checking used_size;
+    // with an empty DPB that reads a stale/garbage frame-store index.
+    if (p_dpb->used_size == 0)
+    {
+        *poc = poc_int;
+        return;
+    }
+
+    h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[0]);
+    poc_int = p_dpb->active_fs->frame.poc;
+
+    for (idx = 0; idx < p_dpb->used_size; idx++)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+        // Only consider entries not yet sent to the output queue.
+        if (viddec_h264_get_is_output(p_dpb->active_fs) == 0)
+        {
+            // A complete frame (is_used == 3) or a dangling field competes
+            // on POC; track the minimum seen so far.
+            if ((viddec_h264_get_is_used(p_dpb->active_fs) == 3) || (viddec_h264_get_is_dangling(p_dpb->active_fs)))
+            {
+                if (first_non_output)
+                {
+                    *pos = idx;
+                    first_non_output = 0;
+                    poc_int = p_dpb->active_fs->frame.poc;
+                }
+                else if (poc_int > p_dpb->active_fs->frame.poc)
+                {
+                    poc_int = p_dpb->active_fs->frame.poc;
+                    *pos = idx;
+                }
+            }
+            else if (p_dpb->used_size == 1)
+            {
+                // Single-entry DPB: accept an unpaired field.
+                poc_int = p_dpb->active_fs->frame.poc;
+                *pos = idx;
+            }
+        }
+    }
+
+    *poc = poc_int;
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_split_field ()
+//
+// Extract field information from a frame
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_split_field (h264_DecodedPictureBuffer *p_dpb, h264_Info * pInfo)
+{
+    // Propagate frame-level POC/reference/long-term state of the active
+    // frame store down to its two individual fields.
+
+    // Top field: POC comes from the parser; the reference flag is bit 0 of
+    // the frame-level two-bit mask.
+    p_dpb->active_fs->top_field.poc                 = pInfo->img.toppoc;
+    p_dpb->active_fs->top_field.used_for_reference  = p_dpb->active_fs->frame.used_for_reference & 1;
+    p_dpb->active_fs->top_field.is_long_term        = p_dpb->active_fs->frame.is_long_term;
+    p_dpb->active_fs->top_field.long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx;
+
+    // Bottom field: the reference flag is bit 1 of the frame-level mask.
+    p_dpb->active_fs->bottom_field.poc                 = pInfo->img.bottompoc;
+    p_dpb->active_fs->bottom_field.used_for_reference  = p_dpb->active_fs->frame.used_for_reference >> 1;
+    p_dpb->active_fs->bottom_field.is_long_term        = p_dpb->active_fs->frame.is_long_term;
+    p_dpb->active_fs->bottom_field.long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx;
+
+    // Frame-store level long-term index mirrors the frame's.
+    p_dpb->active_fs->long_term_frame_idx = p_dpb->active_fs->frame.long_term_frame_idx;
+
+    // Field MVs are generated from the frame MVs by the hardware through
+    // memory mapping; nothing further to do here.
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_combine_field (int32_t use_old)
+//
+// Generate a frame from top and bottom fields
+//////////////////////////////////////////////////////////////////////////////
+
+// Generate frame-level state from the top and bottom fields of the active
+// frame store (the inverse of h264_dpb_split_field).
+// 'use_old' is unused; it is kept only for interface compatibility.
+void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old)
+{
+
+    //remove warning
+    use_old = use_old;
+
+    // The frame POC is the smaller of the two field POCs (earlier in
+    // display order).
+    p_dpb->active_fs->frame.poc = (p_dpb->active_fs->top_field.poc < p_dpb->active_fs->bottom_field.poc)?
+                           p_dpb->active_fs->top_field.poc: p_dpb->active_fs->bottom_field.poc;
+
+    //p_dpb->active_fs->frame.poc = p_dpb->active_fs->poc;
+
+
+    // NOTE(review): this OR does not shift the bottom-field flag into bit 1,
+    // while is_long_term below uses (bottom << 1), and h264_dpb_split_field
+    // treats frame.used_for_reference as a two-bit mask (&1 / >>1). The
+    // asymmetry looks suspicious but may be intentional if the flag is only
+    // ever tested as a boolean here -- confirm against callers before changing.
+    p_dpb->active_fs->frame.used_for_reference = p_dpb->active_fs->top_field.used_for_reference |(p_dpb->active_fs->bottom_field.used_for_reference);
+
+    // is_long_term is a two-bit mask: bit 0 = top field, bit 1 = bottom field.
+    p_dpb->active_fs->frame.is_long_term = p_dpb->active_fs->top_field.is_long_term |(p_dpb->active_fs->bottom_field.is_long_term <<1);
+
+    if (p_dpb->active_fs->frame.is_long_term)
+        p_dpb->active_fs->frame.long_term_frame_idx = p_dpb->active_fs->long_term_frame_idx;
+
+    return;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_sliding_window_memory_management ()
+//
+// Perform Sliding window decoded reference picture marking process
+// It must be the reference frame, complementary reference field pair
+// or non-paired reference field that has the smallest value of
+// FrameNumWrap which is marked as unused for reference. Note : We CANNOT
+// simply use frame_num!!!!
+//
+// Although we hold frame_num_wrap in SW, currently, this is not
+// being updated for every picture (the b-picture parameter non-update
+// phenomenon of the reference software)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb, int32_t NonExisting, int32_t num_ref_frames)
+{
+    // Sliding-window marking: the short-term list must be keyed on
+    // FrameNumWrap (not frame_num); once the number of short-term
+    // references reaches num_ref_frames minus the long-term count, the
+    // oldest short-term entry (fs_ref_idc[0]) is unmarked. Note that
+    // adaptive marking can occur per slice, so a picture marked long-term
+    // there may later fall back to sliding-window management.
+    if (p_dpb->ref_frames_in_buffer < (num_ref_frames - p_dpb->ltref_frames_in_buffer))
+        return;
+
+    h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+    h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+
+    // For a real (existing) current picture, also clear its long-term flag.
+    if (NonExisting == 0)
+    {
+        h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dec_idc);
+        viddec_h264_set_is_frame_long_term(p_dpb->active_fs, 0);
+    }
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_store_picture_in_dpb ()
+//
+// First we run the marking procedure.
+// Then, before we add the current frame_store to the list of reference stores we run some checks
+// These include checking the number of existing reference frames
+// in DPB and if necessary, flushing frames.
+//
+// \param NonExisting
+//    If non-zero this is called to store a non-existing frame resulting from gaps_in_frame_num
+//////////////////////////////////////////////////////////////////////////////
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_frame_output ()
+//
+// If direct == 1, Directly output a frame without storing it in the p_dpb->
+// Therefore we must set is_used to 0, which I guess means it will not appear
+// in the fs_dpb_idc list and is_output to 1 which means it should be in the
+// fs_output_idc list.
+//
+// If it is a non-existing picture we do not actually place it in the output queue
+//////////////////////////////////////////////////////////////////////////////
+
+// Place the frame store 'fs_idc' on the output queue, applying SEI display
+// freeze, broken-link, recovery-point and open-GOP skip rules on the way.
+// If direct != 0 the frame bypasses DPB storage: is_used is cleared so it
+// never appears on the fs_dpb_idc list, while is_output is set.
+// *existing is set to 1 for a real picture, 0 for a non-existing one
+// (gaps_in_frame_num fill), which is never actually queued for display.
+void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    h264_dpb_set_active_fs(p_dpb, fs_idc);
+
+    //h264_dpb_push_output_queue();
+    // SEI display-freeze window: skip display of any field whose POC lies
+    // in [freeze_POC, release_POC); reaching release_POC ends the freeze.
+    if (pInfo->sei_information.disp_frozen)
+    {
+        // check pocs
+        if (p_dpb->active_fs->top_field.poc >= pInfo->sei_information.freeze_POC)
+        {
+            if (p_dpb->active_fs->top_field.poc <  pInfo->sei_information.release_POC)
+            {
+                viddec_h264_set_is_top_skipped(p_dpb->active_fs, 1);
+            }
+            else
+            {
+                pInfo->sei_information.disp_frozen = 0;
+            }
+        }
+
+        if (p_dpb->active_fs->bottom_field.poc >=  pInfo->sei_information.freeze_POC)
+        {
+            if (p_dpb->active_fs->bottom_field.poc <  pInfo->sei_information.release_POC)
+            {
+                viddec_h264_set_is_bottom_skipped(p_dpb->active_fs, 1);
+            }
+            else
+            {
+                pInfo->sei_information.disp_frozen = 0;
+            }
+        }
+    }
+
+    if ( viddec_h264_get_broken_link_picture(p_dpb->active_fs) )
+        pInfo->sei_information.broken_link = 1;
+
+    if ( pInfo->sei_information.broken_link)
+    {
+        // Check if this was the recovery point picture - going to have recovery point on
+        // a frame basis
+        if (viddec_h264_get_recovery_pt_picture(p_dpb->active_fs))
+        {
+            pInfo->sei_information.broken_link = 0;
+            // Also reset wait on sei recovery point picture
+            p_dpb->WaitSeiRecovery         = 0;
+        }
+        else
+        {
+            // Skip both fields until the recovery point is reached.
+            viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
+        }
+    }
+    else
+    {
+        // Even if this is not a broken link, we need to follow SEI recovery
+        // point rules. Did we use an SEI recovery point for the last restart?
+        if ( p_dpb->WaitSeiRecovery )
+        {
+            if ( viddec_h264_get_recovery_pt_picture(p_dpb->active_fs) ) {
+                p_dpb->WaitSeiRecovery         = 0;
+            } else {
+                viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
+            }
+        }
+    }
+
+    // Open-GOP entry suspension: skip output until the open-GOP entry
+    // picture itself comes through.
+    if ( p_dpb->SuspendOutput )
+    {
+        if ( viddec_h264_get_open_gop_entry(p_dpb->active_fs) ) {
+            p_dpb->SuspendOutput      = 0;
+        } else {
+            viddec_h264_set_is_frame_skipped(p_dpb->active_fs, 3);
+        }
+    }
+
+    //h264_send_new_display_frame(0x0);
+    viddec_h264_set_is_output(p_dpb->active_fs, 1);
+
+    if (viddec_h264_get_is_non_existent(p_dpb->active_fs) == 0)
+    {
+        *existing = 1;
+#ifndef USE_AVC_SHORT_FORMAT
+        // Queue the frame id for display hand-off to the host.
+        p_dpb->frame_id_need_to_be_displayed[p_dpb->frame_numbers_need_to_be_displayed]=p_dpb->active_fs->fs_idc;
+        p_dpb->frame_numbers_need_to_be_displayed++;
+#endif
+        //if(direct)
+        //h264_dpb_remove_frame_from_dpb(p_dpb, p_dpb->active_fs->fs_idc);		// Remove dpb.fs_dpb_idc[pos]
+    }
+    else
+    {
+        *existing = 0;
+    }
+
+    // Direct output: detach the frame store from the DPB entirely by
+    // clearing usage/reference state and invalidating its idc.
+    if (direct) {
+        viddec_h264_set_is_frame_used(p_dpb->active_fs, 0);
+        p_dpb->active_fs->frame.used_for_reference = 0;
+        p_dpb->active_fs->top_field.used_for_reference = 0;
+        p_dpb->active_fs->bottom_field.used_for_reference = 0;
+        p_dpb->active_fs->fs_idc = MPD_DPB_FS_NULL_IDC;
+    }
+    return;
+} ///////// End of dpb frame output
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_output_one_frame_from_dpb ()
+//
+// Output one frame stored in the DPB. Basically this results in its placement
+// in the fs_output_idc list.
+// Placement in the output queue should cause an automatic removal from the dpb
+// if the frame store is not being used as a reference
+// This may need another param for a frame request so that it definitely outputs one non-existing frame
+//////////////////////////////////////////////////////////////////////////////
+// Output one frame from the DPB (directly if 'direct' != 0, otherwise the
+// smallest-POC not-yet-output frame). Returns 1 when a displayable frame was
+// pushed to the output queue, 0 otherwise. 'request' is currently unused.
+int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,int32_t direct, int32_t request, int32_t num_ref_frames)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    int32_t poc;
+    int32_t pos;
+    int32_t used_for_reference;
+
+    int32_t existing = 0;
+    int32_t is_refused = 0;
+    int32_t is_pushed = 0;
+
+    //remove warning
+    request = request;
+
+    if (direct)
+    {
+        // Bypass the DPB: output the currently decoded frame store.
+        h264_dpb_frame_output(pInfo, p_dpb->fs_dec_idc, 1, &existing);
+    }
+    else
+    {
+        if (p_dpb->used_size != 0)
+        {
+            // Should this be dpb.not_as_yet_output_num > 0 ??
+            // There should maybe be a is_refused == 0 condition instead...
+            // Loop until a real (existing) frame has been output.
+            while ((p_dpb->used_size > 0) && (existing == 0) && (is_refused == 0))
+            {
+                // find smallest non-output POC
+                h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+                if (pos != MPD_DPB_FS_NULL_IDC)
+                {
+                    // put it into the output queue
+                    h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+
+                    p_dpb->last_output_poc = poc;
+                    if (existing) is_pushed = 1;
+                    // If non-reference, free frame store and move empty store to end of buffer
+
+                    h264_dpb_is_used_for_reference(p_dpb, &used_for_reference);
+                    if (!(used_for_reference))
+                        h264_dpb_remove_frame_from_dpb(p_dpb, pos);		// Remove dpb.fs_dpb_idc[pos]
+                }
+                else
+                {
+                    int32_t flag;
+                    uint32_t idx;
+
+                    // This is basically an error condition caused by too many reference frames in the DPB.
+                    // It should only happen in errored streams, and can happen if this picture had an MMCO,
+                    // thus disabling h264_dpb_sliding_window_memory_management(), which would normally have
+                    // unmarked the oldest reference frame.
+                    h264_dpb_sliding_window_memory_management(p_dpb, 0,num_ref_frames);
+                    h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+
+                    // Still nothing removable: forcibly unmark the first
+                    // reference frame found and retry the removal once.
+                    if (flag == 0) {
+                        for (idx = 0; idx < p_dpb->used_size; idx++)
+                        {
+                            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                            h264_dpb_is_used_for_reference(p_dpb, &used_for_reference);
+
+                            if (used_for_reference) {
+                                break;
+                            }
+                        }
+
+                        if (idx < p_dpb->used_size) {
+                            // Short term
+                            h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                            h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+                            // Long term
+                            h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_dpb_idc[idx]);
+                            h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+                            // Remove from DPB
+                            h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+                        }
+                    }
+                    // NOTE(review): returns 1 ("pushed") on this error path
+                    // even though no frame reached the output queue --
+                    // presumably to keep the caller's bumping loop alive.
+                    return 1;
+                }
+            }
+        }
+    }
+
+    return is_pushed;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_queue_update
+//
+// This should be called anytime the output queue might be changed
+//////////////////////////////////////////////////////////////////////////////
+
+int32_t h264_dpb_queue_update(h264_Info* pInfo,int32_t push, int32_t direct, int32_t frame_request, int32_t num_ref_frames)
+{
+    // A push bumps one frame out of the DPB (optionally bypassing it when
+    // 'direct' is set); otherwise an explicit frame request outputs one
+    // frame on demand. Returns whatever the output routine reports.
+    if (push)
+        return h264_dpb_output_one_frame_from_dpb(pInfo, direct, 0, num_ref_frames);
+
+    if (frame_request)
+        return h264_dpb_output_one_frame_from_dpb(pInfo, 0, 1, num_ref_frames);
+
+    return 0;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_flush_dpb ()
+//
+// Unmarks all reference pictures in the short-term and long term lists and
+// in doing so resets the lists.
+//
+// Flushing the dpb, adds all the current frames in the dpb, not already on the output list
+// to the output list and removes them from the dpb (they will all be marked as unused for
+// reference first)
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_dpb_flush_dpb (h264_Info* pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    int32_t idx, flag;
+    int32_t count;
+
+    // Unmark every short-term reference. The list shifts down as entries
+    // are removed, so always operate on the head (index 0) and iterate a
+    // snapshot of the original count.
+    count = p_dpb->ref_frames_in_buffer;
+    for (idx = 0; idx < count; idx++) {
+        h264_dpb_unmark_for_reference(p_dpb, p_dpb->fs_ref_idc[0]);
+        h264_dpb_remove_ref_list(p_dpb, p_dpb->fs_ref_idc[0]);
+    }
+
+    // Same for the long-term reference list.
+    count = p_dpb->ltref_frames_in_buffer;
+    for (idx = 0; idx < count; idx++)
+    {
+        h264_dpb_unmark_for_long_term_reference(p_dpb, p_dpb->fs_ltref_idc[0]);
+        h264_dpb_remove_ltref_list(p_dpb, p_dpb->fs_ltref_idc[0]);
+    }
+
+    // Bump the remaining frames out in POC order, optionally leaving
+    // 'keep_complement' entries behind (e.g. a pending field pair).
+    if (output_all) {
+        while ((p_dpb->used_size > 0) && (p_dpb->used_size != (uint32_t)keep_complement)) {
+            h264_dpb_queue_update(pInfo, 1, 0, 0,num_ref_frames);
+        }
+    }
+
+    // Finally drop every frame that is output and no longer referenced.
+    do {
+        h264_dpb_remove_unused_frame_from_dpb(p_dpb, &flag);
+    } while (flag);
+
+    return;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_reset_dpb ()
+//
+// Used to reset the contents of dpb
+// Must calculate memory (aligned) pointers for each of the possible frame stores
+//
+// Also want to calculate possible max dpb size in terms of frames
+// We should have an active SPS when we call this ftn to calc bumping level
+//////////////////////////////////////////////////////////////////////////////
+// Reset the contents of the DPB, marking a dangling field if a frame was in
+// progress, running IDR memory management, and (on a size change) updating
+// the picture dimensions and flushing all stored frames.
+void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs, int32_t SizeChange, int32_t no_output_of_prior_pics_flag)
+{
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+    int32_t num_ref_frames = pInfo->active_SPS.num_ref_frames;
+
+
+    // If half way through a frame then Frame in progress will still be high,
+    // so mark the previous field as a dangling field. This is also needed to
+    // keep cs7050_sif_dpb_disp_numb_ptr correct. Better to reset instead?
+    if (p_dpb->used_size)
+    {
+        int32_t idx;
+        idx = p_dpb->used_size-1;
+        if (p_dpb->fs_dpb_idc[idx] != MPD_DPB_FS_NULL_IDC)
+        {
+            h264_dpb_set_active_fs(p_dpb, p_dpb->fs_dpb_idc[idx]);
+
+            // is_used != 3 means only one field of the pair is present.
+            if (viddec_h264_get_is_used(p_dpb->active_fs) != 3)
+                h264_dpb_mark_dangling_field(p_dpb, p_dpb->active_fs->fs_idc);       //, DANGLING_TYPE_DPB_RESET
+        }
+    }
+
+    // initialize software DPB
+    if (p_dpb->active_fs) {
+        viddec_h264_set_dec_structure(p_dpb->active_fs, INVALID);
+    }
+    h264_dpb_idr_memory_management(pInfo, &pInfo->active_SPS, no_output_of_prior_pics_flag);  // implied no_output_of_prior_pics_flag==1
+
+
+    // May always be a size change which calls this function now...
+    // could eliminate below branch
+    if (SizeChange)
+    {
+
+        /***
+        Note : 21/03/2005 14:16
+        Danger asociated with resetting curr_alloc_mem as it would allow the FW top reallocate
+        frame stores from 0 -> NUM_FRAME_STORES again - could lead to queue overflow and corruption
+
+        Placed in size change condition in the hope that this will only ensure dpb is empty
+        and thus this behaviour is valid before continuing again
+        ***/
+
+
+        p_dpb->PicWidthInMbs      = PicWidthInMbs;
+        p_dpb->FrameHeightInMbs   = FrameHeightInMbs;
+
+        p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+        //Flush the current DPB (output everything, keep nothing).
+        h264_dpb_flush_dpb(pInfo, 1,0,num_ref_frames);
+    }
+
+    return;
+} ///// End of reset DPB
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// ---------------------------------------------------------------------------
+// Note that if an 'missing_pip_fb' condition exists, the message will
+// sent to the host each time setup_free_fb is called. However, since this
+// condition is not expected to happen if pre-defined steps are followed, we let
+// it be for now and will change it if required. Basically, as long as host
+// enables PiP after adding PiP buffers and disables PiP before removing buffers
+// and matches PiP fb_id's with normal decode fb_id's this condition should
+// not occur.
+// ---------------------------------------------------------------------------
+// Find a free frame-store slot and claim it. Returns 0 on success with
+// *fb_id set to the claimed index, 1 if every slot is already in use
+// (*fb_id is left untouched). 'pip_setting' is unused.
+int32_t dpb_setup_free_fb( h264_DecodedPictureBuffer *p_dpb, uint8_t* fb_id, pip_setting_t* pip_setting )
+{
+    uint8_t  idx;
+
+    //remove warning
+    pip_setting = pip_setting;
+
+    // Scan for the first unassigned frame store.
+    idx = 0;
+    while (idx < NUM_DPB_FRAME_STORES)
+    {
+        if (p_dpb->fs[idx].fs_idc == MPD_DPB_FS_NULL_IDC)
+            break;
+        idx++;
+    }
+
+    if (idx == NUM_DPB_FRAME_STORES)
+        return 1;
+
+    // Claim the slot and hand its index back to the caller.
+    *fb_id = idx;
+    p_dpb->fs[idx].fs_idc = idx;
+
+    return 0;
+
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_assign_frame_store ()
+//
+// may need a non-existing option parameter
+//
+
+// Assign a free frame store for the picture about to be decoded (or for a
+// non-existing frame when NonExisting != 0), evicting the smallest-POC frame
+// if the DPB is full. Returns 1 unconditionally.
+int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting)
+{
+    uint8_t idc = MPD_DPB_FS_NULL_IDC;
+    pip_setting_t pip_setting;
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+
+
+    while ( dpb_setup_free_fb(p_dpb, &idc, &pip_setting)  != 0 ) {
+        ///
+        /// Generally this is triggered a error case, no more frame buffer avaliable for next
+        /// What we do here is just remove one with min-POC before get more info
+        ///
+
+        int32_t pos = 0, poc = 0, existing = 1;
+
+        // find smallest non-output POC
+        h264_dpb_get_smallest_poc(p_dpb, &poc, &pos);
+        if (pos != MPD_DPB_FS_NULL_IDC)
+        {
+            // put it into the output queue
+            h264_dpb_frame_output(pInfo, p_dpb->fs_dpb_idc[pos], 0, &existing);
+            p_dpb->last_output_poc = poc;
+            h264_dpb_remove_frame_from_dpb(p_dpb, pos);	 // Remove dpb.fs_dpb_idc[pos]
+        }
+    }
+
+
+    // Record the claimed store under the appropriate idc slot.
+    if (NonExisting) {
+        p_dpb->fs_non_exist_idc = idc;
+    } else {
+        p_dpb->fs_dec_idc = idc;
+    }
+
+    //add to support frame relocation interface to host
+    if (!NonExisting)
+    {
+        p_dpb->frame_numbers_need_to_be_allocated = 1;
+        p_dpb->frame_id_need_to_be_allocated = p_dpb->fs_dec_idc;
+    }
+
+
+    // Reset the fresh frame store's state.
+    ///////////////////////////////h264_dpb_reset_fs();
+    h264_dpb_set_active_fs(p_dpb, idc);
+    p_dpb->active_fs->fs_flag_1 = 0;
+    p_dpb->active_fs->fs_flag_2 = 0;
+    viddec_h264_set_is_non_existent(p_dpb->active_fs, NonExisting);
+    // Non-existing frames are treated as already output so they never hit
+    // the display queue.
+    viddec_h264_set_is_output(p_dpb->active_fs, (NonExisting?1:0));
+
+    p_dpb->active_fs->pic_type = ((FRAME_TYPE_INVALID<<FRAME_TYPE_TOP_OFFSET)|(FRAME_TYPE_INVALID<<FRAME_TYPE_BOTTOM_OFFSET));			//----
+
+    // Only put members in here which will not be reset somewhere else
+    // and which could be used before they are overwritten again with
+    // new valid values
+    // eg ->is_used is reset on removal from dpb, no need for it here
+    //    ->poc would only be changed when we overwrite on insert_Picture_in_dpb()
+    //    but would be used by get_smallest_poc()
+    //    ->top.poc would also not be overwritten until a new valid value comes along,
+    //    but I don't think it is used before then so no need to reset
+    //p_dpb->active_fs->is_long_term    = 0;
+    p_dpb->active_fs->frame.used_for_reference    = 0;
+    p_dpb->active_fs->frame.poc			= 0;
+
+    return 1;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_update_queue_dangling_field (h264_Info * pInfo)
+//
+// Update DPB for Dangling field special case
+//
+void h264_dpb_update_queue_dangling_field(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+    int32_t prev_pic_unpaired_field = 0;
+    int32_t flush_needed = 0;
+
+    if (dpb_ptr->used_size > dpb_ptr->BumpLevel)
+    {
+        // Is the most recent DPB entry an as-yet unpaired field?
+        if (dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1] != MPD_DPB_FS_NULL_IDC)
+        {
+            h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+            if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3)
+                prev_pic_unpaired_field = 1;
+        }
+
+        if (prev_pic_unpaired_field)
+        {
+            if (pInfo->img.structure != FRAME)
+            {
+                // Field picture: if it is not the complement of the pending
+                // unpaired field, that field was dangling.
+                if (pInfo->img.second_field == 0)
+                    flush_needed = 1;
+            }
+            else
+            {
+                // Frame picture after an unpaired field: always dangling.
+                flush_needed = 1;
+            }
+        }
+
+        // Bump frames until the DPB is back at its bumping level.
+        if (flush_needed)
+        {
+            while (dpb_ptr->used_size > dpb_ptr->BumpLevel)
+                h264_dpb_queue_update(pInfo, 1, 0, 0,pInfo->active_SPS.num_ref_frames); // flush a frame
+        }
+    }
+
+
+    return;
+}	///// End of h264_dpb_update_queue_dangling_field
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_dpb_init_frame_store (h264_Info * pInfo)
+//
+// Set the frame store to be used in decoding the picture
+//
+
+void h264_dpb_init_frame_store(h264_Info * pInfo)
+{
+    h264_DecodedPictureBuffer *dpb_ptr = &pInfo->dpb;
+
+    int32_t free_fs_found;
+    int32_t idx = 0;
+    int32_t prev_pic_unpaired_field = 0;
+    int32_t prev_idc = MPD_DPB_FS_NULL_IDC;
+    int32_t structure = pInfo->img.structure;
+
+    if (dpb_ptr->used_size)
+    {
+        idx = dpb_ptr->used_size-1;
+        prev_idc = dpb_ptr->fs_dpb_idc[idx];
+    }
+
+    if (prev_idc != MPD_DPB_FS_NULL_IDC)
+    {
+        h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dpb_idc[dpb_ptr->used_size-1]);
+        if (viddec_h264_get_is_used(dpb_ptr->active_fs) != 3)
+        {
+            //PRINTF(MFD_NONE, " FN: %d p_dpb->active_fs->is_used = %d \n", (h264_frame_number+1), p_dpb->active_fs->is_used);
+            prev_pic_unpaired_field = 1;
+        }
+    }
+
+    //if ((pInfo->img.curr_has_mmco_5) || (pInfo->img.idr_flag))   curr_fld_not_prev_comp = 1;
+
+    if (structure != FRAME)
+    {
+
+        // To prove this is the second field,
+        // 1) The previous picture is an (as yet) unpaired field
+        if (prev_pic_unpaired_field)
+        {
+            // If we establish the previous pic was an unpaired field and this picture is not
+            // its complement, the previous picture was a dangling field
+            if (pInfo->img.second_field == 0)
+                h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc);  //, DANGLING_TYPE_FIELD
+        }
+    }
+    else if (prev_pic_unpaired_field) {
+        h264_dpb_mark_dangling_field(dpb_ptr, dpb_ptr->active_fs->fs_idc);		//, DANGLING_TYPE_FRAME
+    }
+
+    free_fs_found = 0;
+
+    // If this is not a second field, we must find a free space for the current picture
+    if (!(pInfo->img.second_field))
+    {
+        dpb_ptr->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+        free_fs_found = h264_dpb_assign_frame_store(pInfo, 0);
+        //h264_frame_number++;
+        //PRINTF(MFD_NONE, " FN: %d (inc) fs_idc =  %d \n", (h264_frame_number+1), dpb.fs_dec_idc);
+    }
+
+    h264_dpb_set_active_fs(dpb_ptr, dpb_ptr->fs_dec_idc);
+
+    ////////////// TODO: THe following init
+#if 1
+    if ( pInfo->img.second_field) {
+        //p_dpb->active_fs->second_dsn = pInfo->img.dsn;
+        //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn;
+        if (dpb_ptr->active_fs->pic_type == FRAME_TYPE_IDR ||
+                dpb_ptr->active_fs->pic_type == FRAME_TYPE_I) {
+
+            viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 1);
+        } else {
+            viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 0);
+        }
+
+    }
+    else {
+        //p_dpb->active_fs->first_dsn = pInfo->img.dsn;
+        //p_dpb->active_fs->prev_dsn = pInfo->img.prev_dsn;
+        viddec_h264_set_first_field_intra(dpb_ptr->active_fs, 0);
+    }
+
+    if (pInfo->img.structure == FRAME) {
+        //dpb_ptr->active_fs->second_dsn = 0x0;
+    }
+
+    if ( pInfo->sei_information.broken_link_pic )
+    {
+        viddec_h264_set_broken_link_picture(dpb_ptr->active_fs, 1);
+        pInfo->sei_information.broken_link_pic = 0;
+    }
+
+    if ((pInfo->img.frame_num == pInfo->sei_information.recovery_frame_num)&&(pInfo->SliceHeader.nal_ref_idc != 0))
+        viddec_h264_set_recovery_pt_picture(dpb_ptr->active_fs, 1);
+
+    //if ((( gRestartMode.aud ) || ( gRestartMode.sei )) && ( !gRestartMode.idr))
+    if (pInfo->img.recovery_point_found == 6)
+    {
+        viddec_h264_set_open_gop_entry(dpb_ptr->active_fs, 1);
+        pInfo->dpb.SuspendOutput         = 1;
+    }
+#endif
+
+    if ((pInfo->img.second_field) || (free_fs_found))
+    {
+        viddec_h264_set_dec_structure(dpb_ptr->active_fs, pInfo->img.structure);
+        viddec_h264_set_is_output(dpb_ptr->active_fs, 0);
+
+        switch (pInfo->img.structure)
+        {
+        case (FRAME)     : {
+            dpb_ptr->active_fs->frame.pic_num = pInfo->img.frame_num;
+            dpb_ptr->active_fs->frame.long_term_frame_idx = 0;
+            dpb_ptr->active_fs->frame.long_term_pic_num = 0;
+            dpb_ptr->active_fs->frame.used_for_reference = 0;
+            dpb_ptr->active_fs->frame.is_long_term = 0;
+            //dpb_ptr->active_fs->frame.structure = pInfo->img.structure;
+            dpb_ptr->active_fs->frame.poc = pInfo->img.framepoc;
+        }
+        break;
+        case (TOP_FIELD) : {
+            dpb_ptr->active_fs->top_field.pic_num = pInfo->img.frame_num;
+            dpb_ptr->active_fs->top_field.long_term_frame_idx = 0;
+            dpb_ptr->active_fs->top_field.long_term_pic_num = 0;
+            dpb_ptr->active_fs->top_field.used_for_reference = 0;
+            dpb_ptr->active_fs->top_field.is_long_term = 0;
+            //dpb_ptr->active_fs->top_field.structure = pInfo->img.structure;
+            dpb_ptr->active_fs->top_field.poc = pInfo->img.toppoc;
+        }
+        break;
+        case(BOTTOM_FIELD) : {
+            dpb_ptr->active_fs->bottom_field.pic_num = pInfo->img.frame_num;
+            dpb_ptr->active_fs->bottom_field.long_term_frame_idx = 0;
+            dpb_ptr->active_fs->bottom_field.long_term_pic_num = 0;
+            dpb_ptr->active_fs->bottom_field.used_for_reference = 0;
+            dpb_ptr->active_fs->bottom_field.is_long_term = 0;
+            //dpb_ptr->active_fs->bottom_field.structure = pInfo->img.structure;
+            dpb_ptr->active_fs->bottom_field.poc = pInfo->img.bottompoc;
+        }
+        break;
+        }
+    }
+    else
+    {
+        // Need to drop a frame or something here
+    }
+
+    return;
+}	///// End of init Frame Store
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// Decoding POC for current Picture
+// 1) pic_order_cnt_type (0, 1, 2)
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num)
+{
+    // Decode the picture order count (POC) for the current picture per
+    // H.264 spec clause 8.2.1, handling pic_order_cnt_type 0, 1 and 2.
+    // NonExisting != 0 means this call is for a "non-existing" frame
+    // generated by a gap in frame_num; in that case the frame_num argument
+    // is used instead of pInfo->img.frame_num.  Results are written into
+    // pInfo->img (toppoc, bottompoc, framepoc, ThisPOC) together with the
+    // Prev*/FrameNumOffset state consumed by subsequent invocations.
+    int32_t MaxPicOrderCntLsb = (1<<(pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4));
+    int32_t delta_pic_order_count[2];
+    int32_t MaxFrameNum = 1 << (pInfo->active_SPS.log2_max_frame_num_minus4 + 4);
+
+    int32_t AbsFrameNum =0;
+    int32_t ExpectedDeltaPerPicOrderCntCycle =0;
+    int32_t PicOrderCntCycleCnt = 0;
+    int32_t FrameNumInPicOrderCntCycle =0;
+    int32_t ExpectedPicOrderCnt =0;
+
+    int32_t actual_frame_num =0;
+
+
+
+    if (NonExisting)    actual_frame_num = frame_num;
+    else               actual_frame_num = pInfo->img.frame_num;
+
+    switch (pInfo->active_SPS.pic_order_cnt_type)
+    {
+    case 0:
+        // Spec 8.2.1.1: POC from pic_order_cnt_lsb plus a tracked MSB part.
+        // Non-existing frames carry no pic_order_cnt_lsb, so nothing to do.
+        if (NonExisting != 0) break;
+
+        if (pInfo->SliceHeader.idr_flag)
+        {
+            pInfo->img.PicOrderCntMsb = 0;
+            pInfo->img.PrevPicOrderCntLsb = 0;
+        }
+        else if (pInfo->img.last_has_mmco_5)
+        {
+            // memory_management_control_operation 5 resets POC state; the
+            // previous top-field POC survives only for a frame/top-field pic.
+            if (pInfo->img.last_pic_bottom_field)
+            {
+                pInfo->img.PicOrderCntMsb     = 0;
+                pInfo->img.PrevPicOrderCntLsb = 0;
+            }
+            else
+            {
+                pInfo->img.PicOrderCntMsb     = 0;
+                pInfo->img.PrevPicOrderCntLsb = pInfo->img.toppoc;
+            }
+        }
+
+        // Calculate the MSBs of current picture
+        if ((pInfo->img.pic_order_cnt_lsb < pInfo->img.PrevPicOrderCntLsb)  &&
+                ((pInfo->img.PrevPicOrderCntLsb - pInfo->img.pic_order_cnt_lsb )>=(MaxPicOrderCntLsb>>1)) )
+        {
+            pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb + MaxPicOrderCntLsb;
+        } else if ((pInfo->img.pic_order_cnt_lsb  >  pInfo->img.PrevPicOrderCntLsb)  &&
+                   ((pInfo->img.pic_order_cnt_lsb - pInfo->img.PrevPicOrderCntLsb ) > (MaxPicOrderCntLsb>>1)) )
+        {
+            pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb - MaxPicOrderCntLsb;
+        } else
+        {
+            pInfo->img.CurrPicOrderCntMsb = pInfo->img.PicOrderCntMsb;
+        }
+
+        // 2nd
+
+        if (pInfo->img.field_pic_flag==0)
+        {
+            //frame pix
+            pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
+            pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.delta_pic_order_cnt_bottom;
+            pInfo->img.ThisPOC = pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc; // POC200301
+        }
+        else if (pInfo->img.bottom_field_flag==0)
+        {  //top field
+            pInfo->img.ThisPOC= pInfo->img.toppoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
+        }
+        else
+        {  //bottom field
+            pInfo->img.ThisPOC= pInfo->img.bottompoc = pInfo->img.CurrPicOrderCntMsb + pInfo->img.pic_order_cnt_lsb;
+        }
+        pInfo->img.framepoc=pInfo->img.ThisPOC;
+
+        if ( pInfo->img.frame_num != pInfo->old_slice.frame_num)
+            pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+
+        // Only reference pictures update the Prev* POC state (spec 8.2.1.1).
+        if (pInfo->SliceHeader.nal_ref_idc)
+        {
+            pInfo->img.PrevPicOrderCntLsb = pInfo->img.pic_order_cnt_lsb;
+            pInfo->img.PicOrderCntMsb = pInfo->img.CurrPicOrderCntMsb;
+        }
+
+        break;
+    case 1: {
+        // Spec 8.2.1.2: POC derived from frame_num and the SPS-signalled
+        // expected-delta-per-cycle table.
+        if (NonExisting)
+        {
+            delta_pic_order_count[0] = 0;
+            delta_pic_order_count[1] = 0;
+        }
+        else
+        {
+            delta_pic_order_count[0] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 : pInfo->img.delta_pic_order_cnt[0];
+            delta_pic_order_count[1] = ( pInfo->img.delta_pic_order_always_zero_flag ) ? 0 :
+                                       ( (!pInfo->active_PPS.pic_order_present_flag)  && (!(pInfo->img.field_pic_flag))) ? 0 :
+                                       pInfo->img.delta_pic_order_cnt[1];
+        }
+
+        // this if branch should not be taken during processing of a gap_in_frame_num pic since
+        // an IDR picture cannot produce non-existent frames...
+        if (pInfo->SliceHeader.idr_flag)
+        {
+            pInfo->img.FrameNumOffset         = 0;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            // An IDR picture must have frame_num == 0; bail for concealment.
+            if (pInfo->img.frame_num)
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+        else
+        {
+
+            if (actual_frame_num < pInfo->img.PreviousFrameNum)
+            {
+                // frame_num wrapped around MaxFrameNum.
+                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
+            }
+            else
+            {
+                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;
+            }
+        }
+
+        // pInfo->img.num_ref_frames_in_pic_order_cnt_cycle set from SPS
+        // so constant between existent and non-existent frames
+        if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
+            AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
+        else
+            AbsFrameNum = 0;
+
+        // pInfo->img.disposable_flag should never be true for a non-existent frame since these are always
+        // references...
+        if ((pInfo->SliceHeader.nal_ref_idc == 0) && (AbsFrameNum > 0)) AbsFrameNum = AbsFrameNum - 1;
+
+        // 3rd
+        ExpectedDeltaPerPicOrderCntCycle = pInfo->active_SPS.expectedDeltaPerPOCCycle;
+
+        if (AbsFrameNum)
+        {
+            // Rem: pInfo->img.num_ref_frames_in_pic_order_cnt_cycle takes max value of 255 (8 bit)
+            // Frame NUm may be 2^16 (17 bits)
+            // I guess we really have to treat AbsFrameNum as a 32 bit number
+            uint32_t temp = 0;
+            int32_t i=0;
+            int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE];
+
+            if (pInfo->img.num_ref_frames_in_pic_order_cnt_cycle)
+                PicOrderCntCycleCnt = ldiv_mod_u((uint32_t)(AbsFrameNum-1), (uint32_t)pInfo->img.num_ref_frames_in_pic_order_cnt_cycle, &temp);
+
+            ExpectedPicOrderCnt = mult_u((uint32_t)PicOrderCntCycleCnt, (uint32_t)ExpectedDeltaPerPicOrderCntCycle);
+
+            FrameNumInPicOrderCntCycle = temp;
+
+            //ExpectedPicOrderCnt +=pInfo->active_SPS.expectedDeltaPerPOCCycle;
+#ifndef USER_MODE
+            h264_Parse_Copy_Offset_Ref_Frames_From_DDR(pInfo, offset_for_ref_frame, pInfo->active_SPS.seq_parameter_set_id);
+            for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
+                ExpectedPicOrderCnt += offset_for_ref_frame[i];
+#else
+            for (i = 0; i <= FrameNumInPicOrderCntCycle; i++)
+                ExpectedPicOrderCnt += pInfo->active_SPS.offset_for_ref_frame[i];
+#endif
+        }
+        else {
+            ExpectedPicOrderCnt = 0;
+        }
+
+        if (pInfo->SliceHeader.nal_ref_idc == 0)
+            ExpectedPicOrderCnt += pInfo->img.offset_for_non_ref_pic;
+
+        if (!(pInfo->img.field_pic_flag))
+        {
+            pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
+            pInfo->img.bottompoc = pInfo->img.toppoc + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[1];
+            pInfo->img.framepoc = (pInfo->img.toppoc < pInfo->img.bottompoc)? pInfo->img.toppoc : pInfo->img.bottompoc;
+            pInfo->img.ThisPOC = pInfo->img.framepoc;
+        }
+        else if (!(pInfo->img.bottom_field_flag))
+        {
+            //top field
+            pInfo->img.toppoc = ExpectedPicOrderCnt + delta_pic_order_count[0];
+            pInfo->img.ThisPOC = pInfo->img.toppoc;
+            pInfo->img.bottompoc = 0;
+        }
+        else
+        {
+            //bottom field
+            pInfo->img.toppoc = 0;
+            pInfo->img.bottompoc = ExpectedPicOrderCnt + pInfo->img.offset_for_top_to_bottom_field + delta_pic_order_count[0];
+            pInfo->img.ThisPOC = pInfo->img.bottompoc;
+        }
+
+        //CONFORMANCE_ISSUE
+        pInfo->img.framepoc=pInfo->img.ThisPOC;
+
+        //CONFORMANCE_ISSUE
+        pInfo->img.PreviousFrameNum=pInfo->img.frame_num;
+        pInfo->img.PreviousFrameNumOffset=pInfo->img.FrameNumOffset;
+
+    }
+    break;
+    case 2: {     // POC MODE 2
+        // Spec 8.2.1.3: POC equals 2*frame_num (minus one for non-reference
+        // pictures), so display order follows decoding order.
+        if (pInfo->SliceHeader.idr_flag)
+        {
+            pInfo->img.FrameNumOffset = 0;
+            pInfo->img.framepoc = 0;
+            pInfo->img.toppoc = 0;
+            pInfo->img.bottompoc = 0;
+            pInfo->img.ThisPOC = 0;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            // An IDR picture must have frame_num == 0; bail for concealment.
+            if (pInfo->img.frame_num)
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+        else
+        {
+            if (pInfo->img.last_has_mmco_5)
+            {
+                pInfo->img.PreviousFrameNum = 0;
+                pInfo->img.PreviousFrameNumOffset = 0;
+            }
+            if (actual_frame_num < pInfo->img.PreviousFrameNum)
+                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset + MaxFrameNum;
+            else
+                pInfo->img.FrameNumOffset = pInfo->img.PreviousFrameNumOffset;
+
+            AbsFrameNum = pInfo->img.FrameNumOffset + actual_frame_num;
+            if (pInfo->SliceHeader.nal_ref_idc == 0) pInfo->img.ThisPOC = (AbsFrameNum<<1) - 1;
+            else                     pInfo->img.ThisPOC = (AbsFrameNum<<1);
+
+            if (!(pInfo->img.field_pic_flag))
+            {
+                pInfo->img.toppoc    = pInfo->img.ThisPOC;
+                pInfo->img.bottompoc = pInfo->img.ThisPOC;
+                pInfo->img.framepoc  = pInfo->img.ThisPOC;
+            }
+            else if (!(pInfo->img.bottom_field_flag))
+            {
+                pInfo->img.toppoc   = pInfo->img.ThisPOC;
+                pInfo->img.framepoc = pInfo->img.ThisPOC;
+            }
+            else
+            {
+                pInfo->img.bottompoc = pInfo->img.ThisPOC;
+                pInfo->img.framepoc  = pInfo->img.ThisPOC;
+            }
+        }
+
+        //CONFORMANCE_ISSUE
+        pInfo->img.PreviousFrameNum = pInfo->img.frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+    }
+    break;
+    default:
+        break;
+    }
+
+    return;
+}  //// End of decoding_POC
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+//////////////////////////////////////////////////////////////////////////////
+// h264_hdr_post_poc ()
+//
+//////////////////////////////////////////////////////////////////////////////
+
+void h264_hdr_post_poc(h264_Info* pInfo, int32_t NonExisting, int32_t frame_num, int32_t use_old)
+{
+    // Commit the per-picture POC bookkeeping (Prev* / FrameNumOffset state)
+    // after a picture has been processed, so the next call to
+    // h264_hdr_decoding_poc() sees the correct predecessors.
+    // NonExisting selects the caller-supplied frame_num; use_old selects the
+    // saved old_slice values instead of the current slice header.
+    // NOTE(review): this switches on pInfo->img.pic_order_cnt_type while
+    // h264_hdr_decoding_poc() switches on pInfo->active_SPS.pic_order_cnt_type
+    // -- presumably the two always hold the same value; confirm.
+    int32_t actual_frame_num = (NonExisting)? frame_num :
+                               (use_old)?	pInfo->old_slice.frame_num :
+                               pInfo->img.frame_num;
+
+    int32_t disposable_flag = (use_old)?(pInfo->old_slice.nal_ref_idc == 0) :
+                              (pInfo->SliceHeader.nal_ref_idc == 0);
+
+    switch (pInfo->img.pic_order_cnt_type)
+    {
+    case  0: {
+        pInfo->img.PreviousFrameNum   = actual_frame_num;
+        // Only existing reference pictures update the lsb/msb predictors.
+        if ((disposable_flag == 0) && (NonExisting == 0))
+        {
+            pInfo->img.PrevPicOrderCntLsb = (use_old)? pInfo->old_slice.pic_order_cnt_lsb :
+                                            pInfo->SliceHeader.pic_order_cnt_lsb;
+            pInfo->img.PicOrderCntMsb     = pInfo->img.CurrPicOrderCntMsb;
+        }
+    }
+    break;
+    case  1: {
+        pInfo->img.PreviousFrameNum       = actual_frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+    }
+    break;
+    case  2: {
+        pInfo->img.PreviousFrameNum       = actual_frame_num;
+        pInfo->img.PreviousFrameNumOffset = pInfo->img.FrameNumOffset;
+
+    }
+    break;
+
+    default: {
+    } break;
+    }
+
+    return;
+} ///// End of h264_hdr_post_poc
+
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_math.c b/mixvbp/vbp_plugin/h264/h264parse_math.c
new file mode 100755
index 0000000..ec48cc8
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_math.c
@@ -0,0 +1,84 @@
+//#include "math.h"
+// Arithmetic functions using add & subtract
+
+#include "h264parse.h"
+
+uint32_t mult_u(register uint32_t var1, register uint32_t var2)
+{
+    // Unsigned multiply implemented by shift-and-add (no hardware multiply
+    // assumed).  Returns var1 * var2 modulo 2^32; bits shifted out of var1
+    // are silently discarded, matching ordinary 32-bit wrap-around semantics.
+
+    register unsigned long var_out = 0;
+
+    while (var2 > 0)
+    {
+
+        if (var2 & 0x01)
+        {
+            var_out += var1;
+        }
+        var2 >>= 1;
+        var1 <<= 1;
+    }
+    return var_out;
+
+}// mult_u
+
+uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod)
+{
+    // Unsigned 32-bit division by restoring shift-subtract.
+    // Returns a / b and stores a % b into *mod.
+    // Division by zero returns 0xffffffff with *mod set to 0.
+    register unsigned long div = b;
+    register unsigned long res = 0;
+    register unsigned long bit = 0x1;
+
+    if (!div)
+    {
+        *mod = 0;
+        return 0xffffffff ; // Div by 0
+    }
+
+    if (a < b)
+    {
+        *mod = a;
+        return 0; // It won't even go once
+    }
+
+    // Normalise: align the divisor's top bit with bit 31; 'bit' tracks the
+    // quotient bit that corresponds to the current shifted divisor.
+    while (!(div & 0x80000000))
+    {
+        div <<= 1;
+        bit <<= 1;
+    }
+
+    while (bit)
+    {
+        if (div <= a)
+        {
+            res |= bit;
+            a -= div;
+        }
+        div >>= 1;
+        bit >>= 1;
+    }
+    *mod = a;
+    return res;
+}// ldiv_mod_u
+
+
+unsigned ldiv_u(register unsigned a, register unsigned  b)
+{
+    // Unsigned division by restoring shift-subtract, 16-bit quotient range
+    // (divisors b<<15 .. b are tried, so the result saturates at 0xFFFF).
+    // Fix: the comparison must be 'div <= a', as in ldiv_mod_u() above;
+    // with the previous 'div < a' an exact division (e.g. 10/5) missed the
+    // quotient bit at the step where the remainder equalled the shifted
+    // divisor and returned quotient-1.
+    register unsigned div = b << 16;
+    register unsigned res = 0;
+    register unsigned bit = 0x10000;
+
+    while (bit)
+    {
+        div >>= 1;
+        bit >>= 1;
+        if (div <= a)
+        {
+            res |= bit;
+            a -= div;
+        }
+    }
+
+    return res;
+}
+
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_mem.c b/mixvbp/vbp_plugin/h264/h264parse_mem.c
new file mode 100755
index 0000000..b5a0145
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_mem.c
@@ -0,0 +1,198 @@
+
+/*!
+ ***********************************************************************
+ *  \file: h264parse_mem.c
+ *
+ ***********************************************************************
+ */
+
+//#include <limits.h>
+
+#include "h264parse.h"
+
+
+// ---------------------------------------------------------------------------
+// IMPORTANT: note that in this implementation int c is an int not a char
+// ---------------------------------------------------------------------------
+void* h264_memset( void* buf, uint32_t c, uint32_t num )
+{
+    // Fill 'buf' with the full 32-bit pattern 'c', one word at a time
+    // (unlike standard memset, c is NOT replicated per byte -- see the
+    // banner comment above).  Only num/4 whole words are written; the
+    // trailing num%4 bytes are left untouched.
+    // NOTE(review): callers presumably always pass 4-byte-aligned buffers
+    // and sizes that are multiples of 4 -- confirm at call sites.
+    uint32_t* buf32 = (uint32_t*)buf;
+    uint32_t  size32 = ( num >> 2 );
+    uint32_t  i;
+
+    for ( i = 0; i < size32; i++ )
+    {
+        *buf32++ = c;
+    }
+
+    return buf;
+}
+
+
+void* h264_memcpy( void* dest, void* src, uint32_t num )
+{
+    // Copy num/4 whole 32-bit words from src to dest; the trailing num%4
+    // bytes are NOT copied.  Regions must not overlap.
+    // NOTE(review): word-wide accesses assume both pointers are 4-byte
+    // aligned -- misaligned input would be undefined behavior on strict
+    // targets; confirm at call sites.
+    int32_t*	dest32 = (int32_t*)dest;
+    int32_t*    src32 = (int32_t*)src;
+    uint32_t	size32 = ( num >> 2 );
+    uint32_t	i;
+
+    for ( i = 0; i < size32; i++ )
+    {
+        *dest32++ = *src32++;
+    }
+
+    return dest;
+}
+
+
+#ifndef USER_MODE
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem
+void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+    // DMA the local PPS structure out to its slot (indexed by nPPSId) in the
+    // DDR-resident PPS table; out-of-range ids are silently ignored.
+    uint32_t  copy_size = sizeof(pic_param_set);
+    uint32_t  pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+    if (nPPSId < MAX_NUM_PPS)
+    {
+        cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 1, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Pps_To_DDR
+
+
+// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS
+// h264_Parse_Copy_Pps_From_DDR copy a pps with nPPSId from ddr mem to local PPS
+void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId)
+{
+    // DMA the PPS with id nPPSId from the DDR table into the local
+    // structure; out-of-range ids leave *PPS unchanged.
+
+    uint32_t copy_size= sizeof(pic_param_set);
+    uint32_t pps_entry_ptr = pInfo->PPS_PADDR_GL+nPPSId*copy_size;
+
+    if ( nPPSId < MAX_NUM_PPS)
+    {
+        cp_using_dma(pps_entry_ptr, (uint32_t)PPS, copy_size, 0, 0);
+    }
+
+    return;
+}
+//end of h264_Parse_Copy_Pps_From_DDR
+
+
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId
+//h264_Parse_Copy_Sps_To_DDR () copy local sps to ddr mem with nSPSId
+void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+    // DMA the "used" portion of the local SPS out to its DDR slot.  Note the
+    // DDR table stride is sizeof(seq_param_set_all) even though only
+    // sizeof(seq_param_set_used) bytes are transferred.
+    uint32_t  copy_size = sizeof(seq_param_set_used);
+    uint32_t  sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 1, 0);
+    }
+
+    //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+    return;
+}
+
+//end of h264_Parse_Copy_Sps_To_DDR
+
+
+// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS
+// h264_Parse_Copy_Sps_From_DDR copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId)
+{
+    // DMA the "used" portion of SPS nSPSId back from the DDR table (stride
+    // sizeof(seq_param_set_all)) into the local structure.
+    uint32_t copy_size= sizeof(seq_param_set_used);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*sizeof(seq_param_set_all);
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)SPS, copy_size, 0, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Sps_From_DDR
+
+//h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy local offset_ref_frames to ddr mem with nSPSId
+//h264_Parse_Copy_Offset_Ref_Frames_To_DDR () copy local offset_ref_frames to ddr mem with nSPSId
+void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+    // Store the per-SPS offset_for_ref_frame[] table into DDR.  Uses the
+    // word-copy helper rather than DMA (the DMA call is kept commented out).
+    uint32_t  copy_size = sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+    uint32_t  offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 1, 0);
+        h264_memcpy((int32_t *)offset_ref_frames_entry_ptr,pOffset_ref_frames, copy_size);
+    }
+
+    return;
+}
+
+//end of h264_Parse_Copy_Offset_Ref_Frames_To_DDR
+
+
+// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy a offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames
+// h264_Parse_Copy_Offset_Ref_Frames_From_DDR copy a offset_ref_frames with nSPSId from ddr mem to local offset_ref_frames
+void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId)
+{
+    // Load the per-SPS offset_for_ref_frame[] table from DDR into the
+    // caller's buffer (used by POC type 1 decoding).
+    uint32_t copy_size= sizeof(int32_t)*MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+    uint32_t offset_ref_frames_entry_ptr = pInfo->OFFSET_REF_FRAME_PADDR_GL+nSPSId*copy_size;
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        //cp_using_dma(offset_ref_frames_entry_ptr, (uint32_t)pOffset_ref_frames, copy_size, 0, 0);
+        h264_memcpy(pOffset_ref_frames, (int32_t *)offset_ref_frames_entry_ptr, copy_size);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Copy_Offset_Ref_Frames_From_DDR
+
+
+//h264_Parse_Check_Sps_Updated_Flag () copy local sps to ddr mem with nSPSId
+//h264_Parse_Check_Sps_Updated_Flag () copy local sps to ddr mem with nSPSId
+uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+    // Read back a per-SPS "updated" flag word from DDR; returns 0 for
+    // out-of-range ids.
+    // NOTE(review): the entry stride here is sizeof(uint32_t), whereas the
+    // SPS copy functions above stride by sizeof(seq_param_set_all) from the
+    // same SPS_PADDR_GL base -- confirm the intended DDR layout.
+    uint32_t  is_updated=0;
+    uint32_t  copy_size = sizeof(uint32_t);
+    uint32_t  sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 1, 0);
+    }
+
+    //OS_INFO("SPS->seq_parameter_set_id = %d\n", SPS->seq_parameter_set_id);
+
+
+    return is_updated;
+}
+
+//end of h264_Parse_Check_Sps_Updated_Flag
+
+
+// h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS
+// h264_Parse_Clear_Sps_Updated_Flag copy a sps with nSPSId from ddr mem to local SPS
+void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId)
+{
+    // Clear the per-SPS "updated" flag word in DDR by writing zero.
+    // NOTE(review): shares the sizeof(uint32_t) stride of
+    // h264_Parse_Check_Sps_Updated_Flag above -- confirm the DDR layout.
+    uint32_t  is_updated=0;
+    uint32_t copy_size= sizeof(uint32_t);
+    uint32_t sps_entry_ptr = pInfo->SPS_PADDR_GL+nSPSId*copy_size;
+
+    if (nSPSId < MAX_NUM_SPS)
+    {
+        cp_using_dma(sps_entry_ptr, (uint32_t)(&is_updated), copy_size, 0, 0);
+    }
+
+    return;
+
+}
+//end of h264_Parse_Clear_Sps_Updated_Flag
+
+
+#endif
+
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_pps.c b/mixvbp/vbp_plugin/h264/h264parse_pps.c
new file mode 100755
index 0000000..17f0930
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_pps.c
@@ -0,0 +1,194 @@
+
+
+#include "h264.h"
+#include "h264parse.h"
+
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+/*---------------------------------------------*/
+h264_Status h264_Parse_PicParameterSet(void *parent,h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet)
+{
+    // Parse a Picture Parameter Set RBSP (H.264 spec clause 7.3.2.2) from the
+    // bitstream into *PictureParameterSet.  Returns H264_STATUS_OK on
+    // success, H264_PPS_ERROR if a syntax element is out of range or the
+    // bitstream ends prematurely.  Under VBP/SW_ERROR_CONCEALEMNT, suspect
+    // values additionally set pInfo->sw_bail for software error concealment.
+    h264_Status ret = H264_PPS_ERROR;
+
+    //h264_PicParameterSet_t* PictureParameterSet = &pInfo->PictureParameterSet;
+    uint32_t code=0, i = 0;
+
+    do {
+        ///// PPS par1: pic_parameter_set_id & seq_parameter_set_id
+        code = h264_GetVLCElement(parent, pInfo, false);
+        if (code > MAX_PIC_PARAMS) {
+            break;
+        }
+        PictureParameterSet->pic_parameter_set_id = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code > 255)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+
+        code = h264_GetVLCElement(parent, pInfo, false);
+        if (code > MAX_NUM_SPS-1) {
+            break;
+        }
+        PictureParameterSet->seq_parameter_set_id = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code > 31)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        ///// entropy_coding_mode_flag
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->entropy_coding_mode_flag = (uint8_t)code;
+        ///// pic_order_present_flag
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->pic_order_present_flag = (uint8_t)code;
+
+        PictureParameterSet->num_slice_groups_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (PictureParameterSet->num_slice_groups_minus1 > 8)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        //
+        // In main profile, FMO is excluded and num_slice_groups_minus1 should be 0
+        //
+        if (PictureParameterSet->num_slice_groups_minus1 > 0) //MAX_NUM_SLICE_GRPS)
+            break;
+
+        // Stored as active counts (syntax element + 1).
+        PictureParameterSet->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false)+1;
+        PictureParameterSet->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1;
+
+        //// PPS->num_ref_idx_l0_active --- [0,32]
+        if (((PictureParameterSet->num_ref_idx_l0_active) > MAX_NUM_REF_FRAMES) || ((PictureParameterSet->num_ref_idx_l1_active) > MAX_NUM_REF_FRAMES))
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+
+        //// weighting prediction
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->weighted_pred_flag = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code > 2)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        viddec_pm_get_bits(parent, &code, 2);
+        PictureParameterSet->weighted_bipred_idc = (uint8_t)code;
+
+        //// QP
+        PictureParameterSet->pic_init_qp_minus26 = h264_GetVLCElement(parent, pInfo, true);
+        PictureParameterSet->pic_init_qs_minus26 = h264_GetVLCElement(parent, pInfo, true);
+        if (((PictureParameterSet->pic_init_qp_minus26+26) > MAX_QP) || ((PictureParameterSet->pic_init_qs_minus26+26) > MAX_QP))
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        PictureParameterSet->chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        // Spec-legal range for chroma_qp_index_offset is [-12, 12].
+        if ((12 < PictureParameterSet->chroma_qp_index_offset) || (-12 > PictureParameterSet->chroma_qp_index_offset) )
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        //// Deblocking ctl parameters
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->deblocking_filter_control_present_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        PictureParameterSet->constrained_intra_pred_flag = (uint8_t)code;
+
+        if ( viddec_pm_get_bits(parent, &code, 1) == -1)
+            break;
+        PictureParameterSet->redundant_pic_cnt_present_flag = (uint8_t)code;
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if (code && (pInfo->active_SPS.profile_idc != h264_ProfileBaseline))
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        //// Check if have more RBSP Data for additional parameters
+        // Optional High-profile extension fields (transform_8x8, scaling
+        // matrices, second_chroma_qp_index_offset) are present only when
+        // more RBSP data remains before the trailing bits.
+        if (h264_More_RBSP_Data(parent, pInfo))
+        {
+            viddec_pm_get_bits(parent,  &code, 1);
+            PictureParameterSet->transform_8x8_mode_flag = (uint8_t)code;
+
+            if ( viddec_pm_get_bits(parent, &code, 1) == -1)
+                break;
+            PictureParameterSet->pic_scaling_matrix_present_flag = (uint8_t)code;
+
+            if (PictureParameterSet->pic_scaling_matrix_present_flag)
+            {
+                // 6 lists of 4x4, plus 2 lists of 8x8 when 8x8 transform on.
+                uint32_t n_ScalingList = 6 + (PictureParameterSet->transform_8x8_mode_flag << 1);
+                for (i=0; i<n_ScalingList; i++)
+                {
+                    if ( viddec_pm_get_bits(parent, &code, 1) == -1)
+                        break;
+                    PictureParameterSet->pic_scaling_list_present_flag[i] = (uint8_t)code;
+
+                    if (PictureParameterSet->pic_scaling_list_present_flag[i])
+                    {
+                        if (i<6)
+                            h264_Scaling_List(parent, PictureParameterSet->ScalingList4x4[i], 16, &PictureParameterSet->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                        else
+                            h264_Scaling_List(parent, PictureParameterSet->ScalingList8x8[i-6], 64, &PictureParameterSet->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+                    }
+                }
+            }
+
+            PictureParameterSet->second_chroma_qp_index_offset = h264_GetVLCElement(parent, pInfo, true); //fix
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if((PictureParameterSet->second_chroma_qp_index_offset>12) || (PictureParameterSet->second_chroma_qp_index_offset < -12))
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+        else
+        {
+            // No extension data: apply the spec-mandated defaults.
+            PictureParameterSet->transform_8x8_mode_flag = 0;
+            PictureParameterSet->pic_scaling_matrix_present_flag = 0;
+            PictureParameterSet->second_chroma_qp_index_offset = PictureParameterSet->chroma_qp_index_offset;
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+    return ret;
+}
+
+////////// EOF///////////////
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_sei.c b/mixvbp/vbp_plugin/h264/h264parse_sei.c
new file mode 100755
index 0000000..f70e64c
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_sei.c
@@ -0,0 +1,1138 @@
+#define H264_PARSE_SEI_C
+
+#ifdef H264_PARSE_SEI_C
+
+#include "h264.h"
+#include "h264parse.h"
+#include "h264parse_dpb.h"
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_item_types.h"
+#include "viddec_fw_workload.h"
+
+//////////////////////////////////////////////////////////////////////////////
+// avc_sei_stream_initialise ()
+//
+//
+
+/* Reset all stream-level SEI bookkeeping to its power-on defaults.
+ * Every counter starts at zero; recovery_fn is seeded with 0xFFFFFFFF,
+ * which this parser uses as the "no recovery-point SEI seen yet" sentinel. */
+void h264_sei_stream_initialise (h264_Info* pInfo)
+{
+    pInfo->sei_information.scan_format     = 0;
+    pInfo->sei_information.broken_link_pic = 0;
+    pInfo->sei_information.disp_frozen     = 0;
+    pInfo->sei_information.capture_fn      = 0;
+    pInfo->sei_information.capture_POC     = 0;
+    pInfo->sei_information.release_POC     = 0;
+    pInfo->sei_information.recovery_fn     = 0xFFFFFFFF;
+    return;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a buffering_period SEI message (H.264 Annex D.1.1 / D.2.1).
+// Reads seq_param_set_id and, for each HRD (NAL and/or VCL) advertised by
+// the active SPS, one (initial_cpb_removal_delay, offset) pair per CPB.
+// Returns H264_STATUS_OK on success, H264_STATUS_SEI_ERROR when the SPS id
+// or a cpb_cnt value is out of range.
+h264_Status h264_sei_buffering_period(void *parent,h264_Info* pInfo)
+{
+    h264_Status ret = H264_STATUS_SEI_ERROR;
+
+    h264_SEI_buffering_period_t* sei_msg_ptr;
+    h264_SEI_buffering_period_t  sei_buffering_period;
+    int32_t SchedSelIdx;
+    int num_bits = 0;
+
+    sei_msg_ptr = (h264_SEI_buffering_period_t *)(&sei_buffering_period);
+
+    do {
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+        }
+        else if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+        {
+            // NOTE(review): the VCL-only branch reuses the NAL HRD length field.
+            // Confirm the SPS parser mirrors the VCL length into this field;
+            // otherwise this should read a vcl_hrd_* length instead.
+            num_bits = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 + 1;
+        }
+
+        sei_msg_ptr->seq_param_set_id = h264_GetVLCElement(parent, pInfo, false);
+        if (sei_msg_ptr->seq_param_set_id >= NUM_SPS)
+            break;
+
+        //check if this id is same as the id of the current SPS  //fix
+
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+                break;
+
+            for (SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; SchedSelIdx++)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_nal, num_bits);
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_nal, num_bits);
+            }
+        }
+
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+        {
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+                break;
+
+            for (SchedSelIdx = 0; SchedSelIdx <= pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; SchedSelIdx++)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_vcl, num_bits);
+                viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->initial_cpb_removal_delay_offset_vcl, num_bits);
+            }
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    // Fix: propagate the parse status.  The early 'break's above leave ret at
+    // H264_STATUS_SEI_ERROR, which was previously masked by returning
+    // H264_STATUS_OK unconditionally.
+    return ret;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a pic_timing SEI message (H.264 Annex D.1.2).
+// Reads cpb_removal_delay / dpb_output_delay using the bit lengths advertised
+// by the active SPS HRD parameters, then, when pic_struct_present_flag is set,
+// records the picture scan format and consumes (without storing) the clock
+// timestamp fields for each clock TS.  Always returns H264_STATUS_OK.
+h264_Status h264_sei_pic_timing(void *parent,h264_Info* pInfo)
+{
+    int32_t CpbDpbDelaysPresentFlag = 0;
+    h264_SEI_pic_timing_t* sei_msg_ptr;
+    h264_SEI_pic_timing_t  sei_pic_timing;
+    int32_t num_bits_cpb = 0, num_bits_dpb = 0, time_offset_length = 0;
+    uint32_t code;
+    uint32_t clock_timestamp_flag = 0;
+    uint32_t full_timestamp_flag = 0;
+    uint32_t seconds_flag = 0;
+    uint32_t minutes_flag = 0;
+    uint32_t hours_flag = 0;
+    uint32_t time_offset = 0;
+
+
+
+
+    sei_msg_ptr = (h264_SEI_pic_timing_t *)(&sei_pic_timing);
+
+    // Delay field widths come from whichever HRD parameter set is present.
+    if (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag)
+    {
+        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 +1;
+        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 + 1;
+        time_offset_length = pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_time_offset_length;
+    }
+    else if (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+    {
+        // NOTE(review): time_offset_length is left at 0 in the VCL-only case,
+        // so the time_offset field below is never read then — confirm intended.
+        num_bits_cpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 +1;
+        num_bits_dpb = pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 + 1;
+    }
+
+
+    CpbDpbDelaysPresentFlag = 1;		// as per amphion code
+    if (CpbDpbDelaysPresentFlag)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->cpb_removal_delay, num_bits_cpb);
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->dpb_output_delay, num_bits_dpb);
+    }
+
+    if (pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag)
+    {
+        int32_t i = 0, NumClockTS = 0;
+
+        viddec_workload_item_t     wi;
+
+        wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
+        viddec_pm_get_bits(parent, &code , 4);
+        sei_msg_ptr->pic_struct = (uint8_t)code;
+
+
+        // pic_struct 0 (frame), 7 (frame doubling), 8 (frame tripling) are
+        // progressive; everything else implies field-based (interlaced) display.
+        if ((sei_msg_ptr->pic_struct == 0) || (sei_msg_ptr->pic_struct == 7) || (sei_msg_ptr->pic_struct == 8)) {
+            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_PROGRESSIVE;
+        } else {
+            pInfo->sei_information.scan_format = SEI_SCAN_FORMAT_INTERLACED;
+        }
+
+        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PIC_TIMING;
+        wi.h264_sei_pic_timing.pic_struct = sei_msg_ptr->pic_struct;
+
+#ifndef VBP
+        //Push to current if we are in first frame, or we do not detect previous frame end
+        viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done)));
+#endif
+
+        // NumClockTS per Table D-1: 1 for frame/field, 2 for field pairs and
+        // frame doubling, 3 for the three-field cases and frame tripling.
+        if (sei_msg_ptr->pic_struct < 3) {
+            NumClockTS = 1;
+        } else if ((sei_msg_ptr->pic_struct < 5) || (sei_msg_ptr->pic_struct == 7)) {
+            NumClockTS = 2;
+        } else {
+            NumClockTS = 3;
+        }
+
+        // The clock timestamp syntax below is parsed only to keep the bitstream
+        // position in sync; the individual values are read and discarded.
+        for (i = 0; i < NumClockTS; i++)
+        {
+            viddec_pm_get_bits(parent, &code , 1);
+            clock_timestamp_flag = code;
+            //sei_msg_ptr->clock_timestamp_flag[i] = (uint8_t)code;
+
+            if (clock_timestamp_flag)
+            {
+                viddec_pm_get_bits(parent, &code , 2);
+                //sei_msg_ptr->ct_type[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->nuit_field_based_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 5);
+                //sei_msg_ptr->counting_type[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->full_timestamp_flag[i] = (uint8_t)code;
+                full_timestamp_flag = code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->discontinuity_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 1);
+                //sei_msg_ptr->cnt_dropped_flag[i] = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code , 8);
+                //sei_msg_ptr->n_frames[i] = (uint8_t)code;
+
+
+                if (full_timestamp_flag)
+                {
+                    // seconds(6) / minutes(6) / hours(5) are all present.
+                    viddec_pm_get_bits(parent, &code , 6);
+                    //sei_msg_ptr->seconds_value[i] = (uint8_t)code;
+
+                    viddec_pm_get_bits(parent, &code , 6);
+                    //sei_msg_ptr->minutes_value[i] = (uint8_t)code;
+
+                    viddec_pm_get_bits(parent, &code , 5);
+                    //sei_msg_ptr->hours_value[i] = (uint8_t)code;
+                }
+                else
+                {
+                    // Nested optional fields: each *_flag gates the next value.
+                    viddec_pm_get_bits(parent, &code , 1);
+                    //sei_msg_ptr->seconds_flag[i] = (uint8_t)code;
+                    seconds_flag = code;
+
+                    if (seconds_flag)
+                    {
+                        viddec_pm_get_bits(parent, &code , 6);
+                        //sei_msg_ptr->seconds_value[i] = (uint8_t)code;
+
+                        viddec_pm_get_bits(parent, &code , 1);
+                        //sei_msg_ptr->minutes_flag[i] = (uint8_t)code;
+                        minutes_flag = code;
+
+                        if (minutes_flag)
+                        {
+                            viddec_pm_get_bits(parent, &code , 6);
+                            //sei_msg_ptr->minutes_value[i] = (uint8_t)code;
+
+                            viddec_pm_get_bits(parent, &code , 1);
+                            //sei_msg_ptr->hours_flag[i] = (uint8_t)code;
+                            hours_flag = code;
+
+                            if (hours_flag) {
+                                viddec_pm_get_bits(parent, &code , 6);
+                                //sei_msg_ptr->hours_value[i] = (uint8_t)code;
+                            }
+                        }
+                    }
+                }
+
+                if (time_offset_length > 0)
+                {
+                    viddec_pm_get_bits(parent, (uint32_t *)&time_offset, time_offset_length);
+                }
+            }
+        }
+    }
+
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a pan_scan_rect SEI message (H.264 Annex D.1.3) into two workload
+// items: a VIDDEC_WORKLOAD_H264_PAN_SCAN summary item and, unless the
+// message cancels the rectangle, one VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT item
+// per rectangle.  Under VBP the workitem appends are compiled out and the
+// message is parsed only to advance the bitstream.
+h264_Status h264_sei_pan_scan(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_pan_scan_rectangle_t* sei_msg_ptr;
+    h264_SEI_pan_scan_rectangle_t  sei_pan_scan;
+    uint32_t code;
+
+    viddec_workload_item_t     wi;
+
+    h264_memset( &(sei_pan_scan), 0x0, sizeof(h264_SEI_pan_scan_rectangle_t) );
+
+    viddec_fw_reset_workload_item(&wi);
+    wi.vwi_type = VIDDEC_WORKLOAD_H264_PAN_SCAN;
+
+    sei_msg_ptr = (h264_SEI_pan_scan_rectangle_t *)(&sei_pan_scan);
+
+    sei_msg_ptr->pan_scan_rect_id = h264_GetVLCElement(parent, pInfo, false);
+
+    wi.h264_sei_pan_scan.pan_scan_rect_id = sei_msg_ptr->pan_scan_rect_id;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->pan_scan_rect_cancel_flag = (uint8_t)code;
+    viddec_fw_h264_sei_pan_scan_set_cancel_flag(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_rect_cancel_flag);
+
+    if (!sei_msg_ptr->pan_scan_rect_cancel_flag)
+    {
+        int32_t i;
+        sei_msg_ptr->pan_scan_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+        viddec_fw_h264_sei_pan_scan_set_cnt_minus1(&(wi.h264_sei_pan_scan), sei_msg_ptr->pan_scan_cnt_minus1);
+        // Bound check before filling the fixed-size offset arrays below.
+        if (sei_msg_ptr->pan_scan_cnt_minus1 > MAX_PAN_SCAN_CNT -1)
+        {
+            return H264_STATUS_SEI_ERROR;
+        }
+        for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++)
+        {
+            // All four offsets are signed Exp-Golomb values (se(v)).
+            sei_msg_ptr->pan_scan_rect_left_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+            sei_msg_ptr->pan_scan_rect_right_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+            sei_msg_ptr->pan_scan_rect_top_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+            sei_msg_ptr->pan_scan_rect_bottom_offset[i] = h264_GetVLCElement(parent, pInfo, true);
+        }
+        sei_msg_ptr->pan_scan_rect_repetition_period = h264_GetVLCElement(parent, pInfo, false);
+        wi.h264_sei_pan_scan.pan_scan_rect_repetition_period = sei_msg_ptr->pan_scan_rect_repetition_period;
+    }
+#ifndef VBP
+    //cur is first frame
+    viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done)));
+#endif
+
+    if (!sei_msg_ptr->pan_scan_rect_cancel_flag)
+    {
+        int32_t i;
+
+        viddec_fw_reset_workload_item(&wi);
+        wi.vwi_type = VIDDEC_WORKLOAD_SEI_PAN_SCAN_RECT;
+
+        // One workitem per rectangle; wi is re-filled (and, when workitems are
+        // enabled, appended) on every iteration.
+        for (i=0; i<= sei_msg_ptr->pan_scan_cnt_minus1; i++)
+        {
+            viddec_fw_h264_pan_scan_set_left(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_left_offset[i]);
+            viddec_fw_h264_pan_scan_set_right(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_right_offset[i]);
+            viddec_fw_h264_pan_scan_set_top(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_top_offset[i]);
+            viddec_fw_h264_pan_scan_set_bottom(&(wi.h264_pan_scan_rect), sei_msg_ptr->pan_scan_rect_bottom_offset[i]);
+#ifndef VBP
+            //cur is first frame
+            viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream);
+#endif
+        }
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Consume a filler_payload SEI: payload_size bytes of filler (0xFF) that
+// carry no information.  Each byte is read only to keep the bitstream
+// position in sync and is then discarded.
+h264_Status h264_sei_filler_payload(void *parent,h264_Info* pInfo, uint32_t payload_size)
+{
+    h264_SEI_filler_payload_t  sei_filler_payload;
+    h264_SEI_filler_payload_t* sei_msg_ptr = &sei_filler_payload;
+    uint32_t byte_idx;
+    uint32_t code;
+
+    //remove warning
+    pInfo = pInfo;
+
+    for (byte_idx = 0; byte_idx < payload_size; byte_idx++)
+    {
+        viddec_pm_get_bits(parent, &code, 8);
+        sei_msg_ptr->ff_byte = (uint8_t)code;
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a user_data_registered_itu_t_t35 SEI message (Annex D.1.5).
+// Reads the country code (plus extension byte when the code is 0xff), then
+// streams the remaining payload bytes into 11-byte user-data workload chunks.
+// Under VBP the workitem appends are compiled out; the payload is still
+// consumed so the bitstream position stays correct.
+h264_Status h264_sei_userdata_reg(void *parent,h264_Info* pInfo, uint32_t payload_size)
+{
+
+    h264_SEI_userdata_registered_t* sei_msg_ptr;
+    h264_SEI_userdata_registered_t  sei_userdata_registered;
+    uint32_t i;
+    int32_t byte = 0;
+    uint32_t code = 0;
+    viddec_workload_item_t     wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_REGISTERED;
+    wi.vwi_payload[0] = wi.vwi_payload[1] = wi.vwi_payload[2] = 0;
+    //remove warning
+    pInfo = pInfo;
+
+    sei_msg_ptr = (h264_SEI_userdata_registered_t *)(&sei_userdata_registered);
+
+    viddec_pm_get_bits(parent, &code , 8);
+    sei_msg_ptr->itu_t_t35_country_code = (uint8_t)code;
+
+    // i counts the payload bytes consumed so far (1 or 2 header bytes).
+    if (sei_msg_ptr->itu_t_t35_country_code != 0xff)	{
+        i = 1;
+    } else {
+        viddec_pm_get_bits(parent, &code , 8);
+        sei_msg_ptr->itu_t_t35_country_code_extension_byte = (uint8_t)code;
+        i = 2;
+    }
+
+
+    wi.user_data.size =0;
+    // NOTE(review): do-while consumes at least one byte even when
+    // i >= payload_size on entry (e.g. payload_size==1 with a 0xff country
+    // code) — confirm such degenerate payloads cannot reach here.
+    do
+    {
+
+        viddec_pm_get_bits(parent, (uint32_t *)&byte, 8);
+        if (wi.user_data.size < 11)
+        {
+            wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte;
+        }
+        wi.user_data.size++;
+
+        // Flush a full 11-byte chunk into the workload stream.
+        if (11 == wi.user_data.size)
+        {
+            viddec_pm_setup_userdata(&wi);
+#ifndef VBP
+            //cur is first frame
+            viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done)));
+#endif
+            wi.user_data.size =0;
+        }
+
+        i++;
+    } while (i < payload_size);
+
+    // Flush the final partial chunk, if any.
+    if (0!=wi.user_data.size)
+    {
+        viddec_pm_setup_userdata(&wi);
+#ifndef VBP
+        //cur is first frame
+        viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream);
+#endif
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a user_data_unregistered SEI message (Annex D.1.6): a 16-byte UUID
+// followed by (payload_size - 16) data bytes, streamed into 11-byte
+// user-data workload chunks.  Under VBP the workitem appends are compiled
+// out; the payload is still consumed to keep the bitstream in sync.
+h264_Status h264_sei_userdata_unreg(void *parent, h264_Info* pInfo, uint32_t payload_size)
+{
+
+    h264_SEI_userdata_unregistered_t* sei_msg_ptr;
+    h264_SEI_userdata_unregistered_t  sei_userdata_unregistered;
+    uint32_t i;
+    int32_t byte = 0;
+    uint32_t code;
+
+    viddec_workload_item_t     wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_SEI_USER_DATA_UNREGISTERED;
+
+    //remove warning
+    pInfo = pInfo;
+
+    sei_msg_ptr = (h264_SEI_userdata_unregistered_t *)(&sei_userdata_unregistered);
+
+    // Consume the 128-bit UUID as four 32-bit reads.
+    // NOTE(review): each 32-bit value is narrowed to uint8_t on store, so only
+    // the low byte of each word survives; the UUID is discarded anyway (local
+    // struct), but confirm before anyone starts consuming uuid_iso_iec_11578.
+    for (i = 0; i < 4; i++)
+    {
+        viddec_pm_get_bits(parent, &code , 32);
+        sei_msg_ptr->uuid_iso_iec_11578[i] = (uint8_t)code;
+    }
+
+    wi.user_data.size =0;
+    // i starts at 16: the UUID bytes already consumed count against payload_size.
+    for (i = 16; i < payload_size; i++)
+    {
+
+        viddec_pm_get_bits(parent, (uint32_t *)&byte, 8);
+        if (wi.user_data.size < 11)
+        {
+            wi.user_data.data_payload[wi.user_data.size]=(uint8_t)byte;
+        }
+        wi.user_data.size++;
+
+        // Flush a full 11-byte chunk into the workload stream.
+        if (11 == wi.user_data.size)
+        {
+            viddec_pm_setup_userdata(&wi);
+#ifndef VBP
+            //cur is first frame
+            viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done)));
+#endif
+            wi.user_data.size =0;
+        }
+    }
+
+    // Flush the final partial chunk, if any.
+    if (0!=wi.user_data.size)
+    {
+        viddec_pm_setup_userdata(&wi);
+#ifndef VBP
+        //cur is first frame
+        viddec_pm_append_workitem( parent, &wi , !pInfo->Is_first_frame_in_stream);
+#endif
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a recovery_point SEI message (Annex D.1.7) and record its fields in
+// pInfo->sei_information so the decoder can resume cleanly after a random
+// access.  Also emits a VIDDEC_WORKLOAD_SEI_RECOVERY_POINT item (compiled
+// out under VBP).  Always returns H264_STATUS_OK.
+h264_Status h264_sei_recovery_point(void *parent, h264_Info* pInfo)
+{
+
+    h264_SEI_recovery_point_t* sei_msg_ptr;
+    h264_SEI_recovery_point_t  sei_recovery_point;
+    uint32_t code;
+    viddec_workload_item_t     wi;
+
+
+    sei_msg_ptr = (h264_SEI_recovery_point_t *)(&sei_recovery_point);
+
+    sei_msg_ptr->recovery_frame_cnt = h264_GetVLCElement(parent, pInfo, false);
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->exact_match_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->broken_link_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 2);
+    sei_msg_ptr->changing_slice_group_idc = (uint8_t)code;
+
+    // Latch the recovery info into the decoder-wide SEI state.
+    pInfo->sei_information.recovery_point = 1;
+    pInfo->sei_information.recovery_frame_cnt = (int32_t) sei_msg_ptr->recovery_frame_cnt;
+    pInfo->sei_information.capture_fn         = 1;
+    pInfo->sei_information.broken_link_pic    = sei_msg_ptr->broken_link_flag;
+
+    if (pInfo->got_start)	{
+        // Bit 1 of recovery_point_found marks "recovery SEI seen"
+        // (bit 0 marks "IDR seen").
+        pInfo->img.recovery_point_found |= 2;
+
+        //// Enable the RP recovery if no IDR ---Cisco
+        if ((pInfo->img.recovery_point_found & 1)==0)
+            pInfo->sei_rp_received = 1;
+    }
+
+    //
+    /// Append workload for SEI
+    //
+    viddec_fw_reset_workload_item(&wi);
+    wi.vwi_type = VIDDEC_WORKLOAD_SEI_RECOVERY_POINT;
+    wi.h264_sei_recovery_point.recovery_frame_cnt = sei_msg_ptr->recovery_frame_cnt;
+    viddec_fw_h264_h264_sei_recovery_set_exact_match_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->exact_match_flag);
+    viddec_fw_h264_h264_sei_recovery_set_broken_link_flag(&(wi.h264_sei_recovery_point), sei_msg_ptr->broken_link_flag);
+    wi.h264_sei_recovery_point.changing_slice_group_idc = sei_msg_ptr->changing_slice_group_idc;
+#ifndef VBP
+    //cur is first frame
+    viddec_pm_append_workitem( parent, &wi , !(pInfo->Is_first_frame_in_stream ||(!pInfo->is_current_workload_done)));
+#endif
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a dec_ref_pic_marking_repetition SEI (Annex D.1.8): the header
+// flags, then the repeated marking syntax, which is re-parsed with the
+// regular slice-header routine into pInfo->SliceHeader.
+h264_Status h264_sei_dec_ref_pic_marking_rep(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_decoded_ref_pic_marking_repetition_t  sei_ref_pic;
+    h264_SEI_decoded_ref_pic_marking_repetition_t* sei_msg_ptr = &sei_ref_pic;
+    uint32_t bits;
+
+    viddec_pm_get_bits(parent, &bits, 1);
+    sei_msg_ptr->original_idr_flag = (uint8_t)bits;
+
+    sei_msg_ptr->original_frame_num = h264_GetVLCElement(parent, pInfo, false);
+
+    // Field flags are only coded when the stream permits field pictures.
+    if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+    {
+        viddec_pm_get_bits(parent, &bits, 1);
+        sei_msg_ptr->orignal_field_pic_flag = (uint8_t)bits;
+
+        if (sei_msg_ptr->orignal_field_pic_flag)
+        {
+            viddec_pm_get_bits(parent, &bits, 1);
+            sei_msg_ptr->original_bottom_field_pic_flag = (uint8_t)bits;
+        }
+    }
+
+    // The repeated marking syntax is identical to the slice-header version.
+    h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, &pInfo->SliceHeader);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Spare-picture SEI is not supported: the payload is ignored and the
+// message is reported as handled so parsing continues.
+h264_Status h264_sei_spare_pic(void *parent,h264_Info* pInfo)
+{
+    // Silence unused-parameter warnings.
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a scene_info SEI message into a discarded local: the fields are
+// consumed only to keep the bitstream position correct.
+h264_Status h264_sei_scene_info(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_scene_info_t  sei_scene_info;
+    h264_SEI_scene_info_t* sei_msg_ptr = &sei_scene_info;
+    uint32_t bits;
+
+    viddec_pm_get_bits(parent, &bits, 1);
+    sei_msg_ptr->scene_info_present_flag = (uint8_t)bits;
+
+    if (sei_msg_ptr->scene_info_present_flag)
+    {
+        sei_msg_ptr->scene_id = h264_GetVLCElement(parent, pInfo, false);
+        sei_msg_ptr->scene_transitioning_type = h264_GetVLCElement(parent, pInfo, false);
+        // Transition types > 3 carry a second scene id.
+        if (sei_msg_ptr->scene_transitioning_type > 3)
+        {
+            sei_msg_ptr->second_scene_id = h264_GetVLCElement(parent, pInfo, false);
+        }
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a sub_seq_info SEI message into a discarded local: two ue(v)
+// identifiers, four single-bit flags, and an optional frame number.
+h264_Status h264_sei_sub_seq_info(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_sub_sequence_info_t  sei_sub_sequence_info;
+    h264_SEI_sub_sequence_info_t* sei_msg_ptr = &sei_sub_sequence_info;
+    uint32_t bits;
+
+    sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false);
+    sei_msg_ptr->sub_seq_id = h264_GetVLCElement(parent, pInfo, false);
+
+    viddec_pm_get_bits(parent, &bits, 1);
+    sei_msg_ptr->first_ref_pic_flag = (uint8_t)bits;
+
+    viddec_pm_get_bits(parent, &bits, 1);
+    sei_msg_ptr->leading_non_ref_pic_flag = (uint8_t)bits;
+
+    viddec_pm_get_bits(parent, &bits, 1);
+    sei_msg_ptr->last_pic_flag = (uint8_t)bits;
+
+    viddec_pm_get_bits(parent, &bits, 1);
+    sei_msg_ptr->sub_seq_frame_num_flag = (uint8_t)bits;
+
+    // The frame number is only coded when its flag is set.
+    if (sei_msg_ptr->sub_seq_frame_num_flag)
+    {
+        sei_msg_ptr->sub_seq_frame_num = h264_GetVLCElement(parent, pInfo, false);
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a sub_seq_layer_characteristics SEI message: per-layer statistics
+// flag plus 16-bit average bit/frame rates, read into a discarded local.
+// Rejects layer counts that exceed the fixed-size per-layer arrays.
+h264_Status h264_sei_sub_seq_layer(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_sub_sequence_layer_t  sei_sub_sequence_layer;
+    h264_SEI_sub_sequence_layer_t* sei_msg_ptr = &sei_sub_sequence_layer;
+    int32_t layer;
+    uint32_t bits;
+
+    sei_msg_ptr->num_sub_seq_layers_minus1 = h264_GetVLCElement(parent, pInfo, false);
+    if (sei_msg_ptr->num_sub_seq_layers_minus1 >= MAX_SUB_SEQ_LAYERS)
+        return H264_STATUS_SEI_ERROR;
+
+    for (layer = 0; layer <= sei_msg_ptr->num_sub_seq_layers_minus1; layer++)
+    {
+        viddec_pm_get_bits(parent, &bits, 1);
+        sei_msg_ptr->accurate_statistics_flag[layer] = (uint8_t)bits;
+
+        viddec_pm_get_bits(parent, &bits, 16);
+        sei_msg_ptr->average_bit_rate[layer] = (uint16_t)bits;
+
+        viddec_pm_get_bits(parent, &bits, 16);
+        sei_msg_ptr->average_frame_rate[layer] = (uint16_t)bits;
+    }
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a sub_seq_characteristics SEI message (Annex D.1.11) into a
+// discarded local: identifiers, optional 32-bit duration, optional average
+// bit/frame rates, then the list of referenced sub-sequences.  Returns
+// H264_STATUS_SEI_ERROR when num_referenced_subseqs is out of range.
+h264_Status h264_sei_sub_seq(void *parent,h264_Info* pInfo)
+{
+    int32_t n;
+    uint32_t code;
+
+    h264_SEI_sub_sequence_t* sei_msg_ptr;
+    h264_SEI_sub_sequence_t  sei_sub_sequence;
+
+    sei_msg_ptr = (h264_SEI_sub_sequence_t *)(&sei_sub_sequence);
+
+    sei_msg_ptr->sub_seq_layer_num = h264_GetVLCElement(parent, pInfo, false);
+    sei_msg_ptr->sub_seq_id= h264_GetVLCElement(parent, pInfo, false);
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->duration_flag = (uint8_t)code;
+
+    if (sei_msg_ptr->duration_flag)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&sei_msg_ptr->sub_seq_duration, 32);
+    }
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->average_rate_flag = (uint8_t)code;
+
+    if (sei_msg_ptr->average_rate_flag)
+    {
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->average_statistics_flag = (uint8_t)code;
+
+        // Fix: these are 16-bit fields; cast to uint16_t (as in
+        // h264_sei_sub_seq_layer) instead of uint8_t, which truncated the
+        // upper byte of the parsed value.
+        viddec_pm_get_bits(parent, &code , 16);
+        sei_msg_ptr->average_bit_rate = (uint16_t)code;
+
+        viddec_pm_get_bits(parent, &code , 16);
+        sei_msg_ptr->average_frame_rate = (uint16_t)code;
+
+    }
+    sei_msg_ptr->num_referenced_subseqs = h264_GetVLCElement(parent, pInfo, false);
+    if (sei_msg_ptr->num_referenced_subseqs >= MAX_NUM_REF_SUBSEQS)
+    {
+        return H264_STATUS_SEI_ERROR;
+    }
+
+    // Each referenced sub-sequence: layer num, id, and a direction bit.
+    for (n = 0; n < sei_msg_ptr->num_referenced_subseqs; n++)
+    {
+        sei_msg_ptr->ref_sub_seq_layer_num= h264_GetVLCElement(parent, pInfo, false);
+        sei_msg_ptr->ref_sub_seq_id= h264_GetVLCElement(parent, pInfo, false);
+
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->ref_sub_seq_direction = (uint8_t)code;
+    }
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a full_frame_freeze SEI: read the repetition period and latch the
+// freeze state into the decoder-wide SEI information.
+h264_Status h264_sei_full_frame_freeze(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_full_frame_freeze_t  sei_full_frame_freeze;
+    h264_SEI_full_frame_freeze_t* sei_msg_ptr = &sei_full_frame_freeze;
+
+    sei_msg_ptr->full_frame_freeze_repetition_period = h264_GetVLCElement(parent, pInfo, false);
+
+    pInfo->sei_information.capture_POC        = 1;
+    pInfo->sei_information.freeze_rep_period  = sei_msg_ptr->full_frame_freeze_repetition_period;
+    //pInfo->img.sei_freeze_this_image          = 1;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Full-frame freeze-release SEI carries no payload fields this parser
+// consumes; the message is acknowledged and ignored.
+h264_Status h264_sei_full_frame_freeze_release(void *parent,h264_Info* pInfo)
+{
+    // Silence unused-parameter warnings.
+    parent = parent;
+    pInfo = pInfo;
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a full_frame_snapshot SEI: consume the snapshot_id (ue(v)) to keep
+// the bitstream position in sync; the value itself is discarded.
+h264_Status h264_sei_full_frame_snapshot(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_full_frame_snapshot_t  sei_full_frame_snapshot;
+    h264_SEI_full_frame_snapshot_t* sei_msg_ptr = &sei_full_frame_snapshot;
+
+    sei_msg_ptr->snapshot_id = h264_GetVLCElement(parent, pInfo, false);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a progressive_refinement_segment_start SEI: consume the two ue(v)
+// fields (refinement id, step count) to keep the bitstream in sync.
+h264_Status h264_sei_progressive_segement_start(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_progressive_segment_start_t  sei_progressive_segment_start;
+    h264_SEI_progressive_segment_start_t* sei_msg_ptr = &sei_progressive_segment_start;
+
+    sei_msg_ptr->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false);
+    sei_msg_ptr->num_refinement_steps_minus1 = h264_GetVLCElement(parent, pInfo, false);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a progressive_refinement_segment_end SEI: consume the refinement id
+// (ue(v)) to keep the bitstream in sync; the value is discarded.
+h264_Status h264_sei_progressive_segment_end(void *parent,h264_Info* pInfo)
+{
+    h264_SEI_progressive_segment_end_t  sei_progressive_segment_end;
+    h264_SEI_progressive_segment_end_t* sei_msg_ptr = &sei_progressive_segment_end;
+
+    sei_msg_ptr->progressive_refinement_id = h264_GetVLCElement(parent, pInfo, false);
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+// Parse a motion_constrained_slice_group_set SEI message (Annex D.1.19)
+// into a discarded local.  Rejects slice-group counts that exceed the
+// fixed-size slice_group_id array.
+h264_Status h264_sei_motion_constrained_slice_grp_set(void *parent, h264_Info* pInfo)
+{
+    int32_t i;
+    uint32_t code;
+    h264_SEI_motion_constrained_slice_group_t* sei_msg_ptr;
+    h264_SEI_motion_constrained_slice_group_t  sei_motion_constrained_slice_group;
+
+    sei_msg_ptr = (h264_SEI_motion_constrained_slice_group_t *)(&sei_motion_constrained_slice_group);
+
+    sei_msg_ptr->num_slice_groups_in_set_minus1= h264_GetVLCElement(parent, pInfo, false);
+    if (sei_msg_ptr->num_slice_groups_in_set_minus1 >= MAX_NUM_SLICE_GRPS)
+    {
+        return H264_STATUS_SEI_ERROR;
+    }
+
+    // NOTE(review): slice_group_id is read here as a fixed single bit; the
+    // spec codes it as ue(v) — confirm whether this was an intentional
+    // simplification before consuming these values.
+    for (i=0; i<= sei_msg_ptr->num_slice_groups_in_set_minus1; i++)
+    {
+        viddec_pm_get_bits(parent, &code , 1);
+        sei_msg_ptr->slice_group_id[i] = (uint8_t)code;
+    }
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->exact_sample_value_match_flag = (uint8_t)code;
+
+    viddec_pm_get_bits(parent, &code , 1);
+    sei_msg_ptr->pan_scan_rect_flag = (uint8_t)code;
+
+
+    // The pan-scan rectangle id is only coded when its flag is set.
+    if (sei_msg_ptr->pan_scan_rect_flag)
+    {
+        sei_msg_ptr->pan_scan_rect_id= h264_GetVLCElement(parent, pInfo, false);
+    }
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Stub: film grain characteristics SEI is not supported. The payload is not
+ * consumed here; the caller's byte-alignment / skip logic moves past it.
+ * Always returns H264_STATUS_OK. */
+h264_Status h264_sei_film_grain_characteristics(void *parent,h264_Info* pInfo)
+{
+    //OS_INFO("Not supported SEI\n");
+
+    //remove warning (self-assignment silences unused-parameter diagnostics)
+    parent = parent;
+    pInfo = pInfo;
+
+
+
+
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Stub: deblocking filter display preference SEI is not supported.
+ * Consumes nothing and always returns H264_STATUS_OK. */
+h264_Status h264_sei_deblocking_filter_display_preferences(void *parent,h264_Info* pInfo)
+{
+
+    //h264_SEI_deblocking_filter_display_pref_t* sei_msg_ptr;
+
+    //remove warning (self-assignment silences unused-parameter diagnostics)
+    parent = parent;
+    pInfo = pInfo;
+
+    //sei_msg_ptr = (h264_SEI_deblocking_filter_display_pref_t *)(&user_data->user_data[0]);
+
+    //OS_INFO("Not supported SEI\n");
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Stub: stereo video information SEI is not supported.
+ * Consumes nothing and always returns H264_STATUS_OK. */
+h264_Status h264_sei_stereo_video_info(void *parent,h264_Info* pInfo)
+{
+
+    //h264_SEI_stereo_video_info_t* sei_msg_ptr;
+
+    //remove warning (self-assignment silences unused-parameter diagnostics)
+    parent = parent;
+    pInfo = pInfo;
+
+
+    //sei_msg_ptr = (h264_SEI_stereo_video_info_t *)(&user_data->user_data[0]);
+
+    //OS_INFO("Not supported SEI\n");
+    return H264_STATUS_OK;
+}
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Skip a reserved/unsupported SEI payload of payload_size bytes, packing the
+ * bytes little-endian into 32-bit words as it goes. The packed word
+ * (user_data) is assembled but never consumed — only the bitstream advance
+ * matters. Returns the residual byte count modulo MAX_USER_DATA_SIZE.
+ * NOTE(review): the caller in h264_SEI_payload() casts this return value to
+ * h264_Status, so a nonzero residue reads as an error code — confirm intended. */
+uint32_t h264_sei_reserved_sei_message(void *parent, h264_Info* pInfo, uint32_t payload_size)
+{
+    int32_t k, byte_index, user_data_byte_index;
+    uint32_t i;
+    int32_t word, bits;
+    uint32_t user_data;
+    //h264_SEI_reserved_t* sei_msg_ptr;
+    //h264_SEI_reserved_t  sei_reserved;
+
+    //remove warning
+    pInfo = pInfo;
+
+    //sei_msg_ptr = (h264_SEI_reserved_t *)(&sei_reserved);
+
+    byte_index = 0;
+    word = 0;
+    user_data_byte_index = 0x0;
+
+    /* Read one byte per iteration; byte_index (0..3) selects its position
+     * within the current 32-bit word. */
+    for (i = 0, k = 0; i < payload_size; i++)
+    {
+        if (byte_index == 0) word = 0;
+        viddec_pm_get_bits(parent, (uint32_t *)&bits, 8);
+
+        switch (byte_index)
+        {
+        case 1:
+            word = (bits << 8) | word;
+            break;
+        case 2:
+            word = (bits << 16) | word;
+            break;
+        case 3:
+            word = (bits << 24) | word;
+            break;
+        default :
+            word = bits;
+            break;
+        }
+
+        if (byte_index == 3)
+        {
+            /* Completed a word: latch it and start the next one. */
+            byte_index = 0;
+            user_data = word;
+            k++;
+        }
+        else
+        {
+            byte_index++;
+        }
+
+        user_data_byte_index++;
+        /* Reset counters when the (historical) user-data buffer would be full. */
+        if ( user_data_byte_index == MAX_USER_DATA_SIZE)
+        {
+            //user_data->user_data_size = user_data_byte_index;
+            //sei_msg_ptr = (h264_SEI_reserved_t *)(&user_data->user_data[0]);
+            byte_index = 0;
+            word = 0;
+            user_data_byte_index = 0x0;
+        }
+    }
+
+    /* Flush a partially-filled trailing word (value is unused — see above). */
+    if (byte_index)
+        user_data = word;
+
+    //user_data->user_data_size = user_data_byte_index;
+
+    return user_data_byte_index;
+
+//	return H264_STATUS_OK;
+}
+
+////// TODO
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Dispatch a single SEI payload to its type-specific parser.
+ * payloadSize is forwarded only to the handlers that need it (filler,
+ * user data, reserved); others parse their own fields from the bitstream.
+ * NOTE(review): SEI_RECOVERY_POINT and SEI_FULL_FRAME_FREEZE_RELEASE discard
+ * the callee's status, so their errors never propagate — confirm intended. */
+h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize)
+{
+    //int32_t bit_equal_to_zero;
+    h264_Status status = H264_STATUS_OK;
+
+    //removing warning
+    payloadSize = payloadSize;
+
+    switch (payloadType)
+    {
+    case SEI_BUF_PERIOD:
+        status = h264_sei_buffering_period(parent, pInfo);
+        break;
+    case SEI_PIC_TIMING:
+        status = h264_sei_pic_timing(parent, pInfo);
+        break;
+    case SEI_PAN_SCAN:
+        status = h264_sei_pan_scan(parent, pInfo);
+        break;
+    case SEI_FILLER_PAYLOAD:
+        status = h264_sei_filler_payload(parent, pInfo, payloadSize);
+        break;
+    case SEI_REG_USERDATA:
+        status = h264_sei_userdata_reg(parent, pInfo, payloadSize);
+        break;
+    case SEI_UNREG_USERDATA:
+        status = h264_sei_userdata_unreg(parent, pInfo, payloadSize);
+        break;
+    case SEI_RECOVERY_POINT:
+        h264_sei_recovery_point(parent, pInfo);
+        break;
+    case SEI_DEC_REF_PIC_MARKING_REP:
+        status = h264_sei_dec_ref_pic_marking_rep(parent, pInfo);
+        break;
+    case SEI_SPARE_PIC:
+        status = h264_sei_spare_pic(parent, pInfo);
+        break;
+    case SEI_SCENE_INFO:
+        status = h264_sei_scene_info(parent, pInfo);
+        break;
+    case SEI_SUB_SEQ_INFO:
+        status = h264_sei_sub_seq_info(parent, pInfo);
+        break;
+    case SEI_SUB_SEQ_LAYER:
+        status = h264_sei_sub_seq_layer(parent, pInfo);
+        break;
+    case SEI_SUB_SEQ:
+        status = h264_sei_sub_seq(parent, pInfo);
+        break;
+    case SEI_FULL_FRAME_FREEZE:
+        status = h264_sei_full_frame_freeze(parent, pInfo);
+        break;
+    case SEI_FULL_FRAME_FREEZE_RELEASE:
+        h264_sei_full_frame_freeze_release(parent, pInfo);
+        break;
+    case SEI_FULL_FRAME_SNAPSHOT:
+        status = h264_sei_full_frame_snapshot(parent, pInfo);
+        break;
+    case SEI_PROGRESSIVE_SEGMENT_START:
+        status = h264_sei_progressive_segement_start(parent, pInfo);
+        break;
+    case SEI_PROGRESSIVE_SEGMENT_END:
+        status = h264_sei_progressive_segment_end(parent, pInfo);
+        break;
+    case SEI_MOTION_CONSTRAINED_SLICE_GRP_SET:
+        status = h264_sei_motion_constrained_slice_grp_set(parent, pInfo);
+        break;
+    case SEI_FILM_GRAIN_CHARACTERISTICS:
+        status = h264_sei_film_grain_characteristics(parent, pInfo);
+        break;
+    case SEI_DEBLK_FILTER_DISPLAY_PREFERENCE:
+        status = h264_sei_deblocking_filter_display_preferences(parent, pInfo);
+        break;
+    case SEI_STEREO_VIDEO_INFO:
+        status = h264_sei_stereo_video_info(parent, pInfo);
+        break;
+    default:
+        /* Unknown type: skip payloadSize bytes. NOTE(review): the skipper
+         * returns a byte count, cast here to h264_Status — confirm mapping. */
+        status = (h264_Status)h264_sei_reserved_sei_message(parent, pInfo, payloadSize);
+        break;
+    }
+
+    /*
+    	viddec_pm_get_bits(parent, (uint32_t *)&tmp, 1);
+
+    	if(tmp == 0x1)		// if byte is not aligned
+    	{
+    		while(pInfo->bitoff != 0)
+    		{
+    			viddec_pm_get_bits(parent, (uint32_t *)&bit_equal_to_zero, 1);
+    		}
+    	}
+    */
+    return status;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* Parse an SEI NAL unit: iterate sei_message() structures until the RBSP
+ * trailing byte (0x80) is the next byte. For each message, payload type and
+ * size use the spec's 0xFF-extension coding (each 0xFF byte adds 255, the
+ * first non-0xFF byte terminates), then the payload is dispatched to
+ * h264_SEI_payload() and the stream is re-aligned to a byte boundary.
+ * Returns H264_STATUS_SEI_ERROR on a failed bit read.
+ * NOTE(review): the initial 8-bit reads for payload type/size (before the
+ * 0xFF loops) do not check the return of viddec_pm_get_bits — confirm. */
+h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent, h264_Info* pInfo)
+{
+    h264_Status status = H264_STATUS_OK;
+    int32_t  payload_type, payload_size;
+    uint32_t next_8_bits = 0,bits_offset=0,byte_offset = 0;
+    uint8_t  is_emul = 0;
+    int32_t  bits_operation_result = 0;
+
+    do {
+        //// payload_type
+        payload_type = 0;
+        viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+        while (next_8_bits == 0xFF)
+        {
+            bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+            if (-1 == bits_operation_result)
+            {
+                status = H264_STATUS_SEI_ERROR;
+                return status;
+            }
+            payload_type += 255;
+
+        }
+        //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+        payload_type += next_8_bits;
+
+        //// payload_size
+        payload_size = 0;
+        viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+        while (next_8_bits == 0xFF)
+        {
+            payload_size += 255;
+            bits_operation_result = viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+            if (-1 == bits_operation_result)
+            {
+                status = H264_STATUS_SEI_ERROR;
+                return status;
+            }
+        }
+        //viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8);
+        payload_size += next_8_bits;
+
+        //PRINTF(MFD_NONE, " SEI: payload type = %d, payload size = %d \n", payload_type, payload_size);
+
+
+        /////////////////////////////////
+        // Parse SEI payloads
+        /////////////////////////////////
+        status = h264_SEI_payload(parent, pInfo, (h264_sei_payloadtype)payload_type, payload_size);
+        if (status != H264_STATUS_OK)
+            break;
+
+        viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+        // OS_INFO("SEI byte_offset 3= %d, bits_offset=%d\n", byte_offset, bits_offset);
+
+        /* Skip remaining bits of the current byte to re-align the stream. */
+        if (bits_offset!=0)
+        {
+            viddec_pm_get_bits(parent, (uint32_t *)&next_8_bits, 8-bits_offset);
+        }
+
+        /* Peek (not consume) the next byte: 0x80 is rbsp_trailing_bits. */
+        bits_operation_result = viddec_pm_peek_bits(parent, (uint32_t *)&next_8_bits, 8);
+        if (-1 == bits_operation_result)
+        {
+            status = H264_STATUS_SEI_ERROR;
+            return status;
+        }
+
+        // OS_INFO("next_8_bits = %08x\n", next_8_bits);
+
+    } while (next_8_bits != 0x80);
+
+    //} while (h264_More_RBSP_Data(parent, pInfo) && status == H264_STATUS_OK);
+
+    return status;
+}
+
+#endif
+
diff --git a/mixvbp/vbp_plugin/h264/h264parse_sh.c b/mixvbp/vbp_plugin/h264/h264parse_sh.c
new file mode 100755
index 0000000..9db8cee
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_sh.c
@@ -0,0 +1,837 @@
+//#define H264_PARSE_SLICE_HDR
+//#ifdef H264_PARSE_SLICE_HDR
+
+#include "h264.h"
+#include "h264parse.h"
+
+extern int32_t viddec_pm_get_au_pos(void *parent, uint32_t *bit, uint32_t *byte, unsigned char *is_emul);
+
+
+/*-----------------------------------------------------------------------------------------*/
+// Slice header 1----
+// 1) first_mb_in_slice, slice_type, pic_parameter_id
+/*-----------------------------------------------------------------------------------------*/
+/* Parse the first part of a slice header: first_mb_in_slice, slice_type and
+ * pic_parameter_set_id (all ue(v)). slice_type is folded modulo 5 per the
+ * spec; only I/P/B (<= h264_PtypeI after the fold) are supported, others
+ * return H264_STATUS_NOTSUPPORT. Returns H264_PPS_INVALID_PIC_ID for an
+ * out-of-range PPS id, H264_STATUS_OK on success. */
+h264_Status h264_Parse_Slice_Header_1(void *parent,h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status ret = H264_STATUS_ERROR;
+
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    int32_t slice_type =0;
+    uint32_t data =0;
+
+    do {
+        ///// first_mb_in_slice
+        SliceHeader->first_mb_in_slice = h264_GetVLCElement(parent, pInfo, false);
+
+        ///// slice_type
+        slice_type = h264_GetVLCElement(parent, pInfo, false);
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        /* slice_type legal range is 0..9; flag corrupted streams for bail-out. */
+        if (slice_type > 9)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        SliceHeader->slice_type = (slice_type%5);
+
+        if (SliceHeader->slice_type > h264_PtypeI)	{
+            ret = H264_STATUS_NOTSUPPORT;
+            break;
+        }
+
+
+        ////// pic_parameter_id
+        /* NOTE(review): if MAX_PIC_PARAMS is the PPS table size, a value equal
+         * to MAX_PIC_PARAMS would index one past the end; ">=" may be intended. */
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if (data > MAX_PIC_PARAMS) {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            ret = H264_PPS_INVALID_PIC_ID;
+            break;
+        }
+        SliceHeader->pic_parameter_id  = (uint8_t)data;
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 2
+// 	frame_num
+// 	field_pic_flag, structure
+// 	idr_pic_id
+// 	pic_order_cnt_lsb, delta_pic_order_cnt_bottom
+/*-----------------------------------------------------------------------------------------*/
+
+/* Parse the second part of a slice header: frame_num, field/bottom flags and
+ * picture structure, idr_pic_id (IDR only), POC fields per pic_order_cnt_type,
+ * and redundant_pic_cnt. Also validates first_mb_in_slice against the picture
+ * size derived from the active SPS. Returns H264_STATUS_OK on success,
+ * H264_SliceHeader_ERROR otherwise. */
+h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status ret = H264_SliceHeader_ERROR;
+
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    uint32_t code;
+    int32_t max_mb_num=0;
+
+    do {
+        //////////////////////////////////// Slice header part 2//////////////////
+
+        /// Frame_num: u(v), v = log2_max_frame_num_minus4 + 4 bits
+        viddec_pm_get_bits(parent, &code, pInfo->active_SPS.log2_max_frame_num_minus4+4);
+        SliceHeader->frame_num = (int32_t)code;
+
+        /// Picture structure: default to FRAME, refined below for field coding
+        SliceHeader->structure = FRAME;
+        SliceHeader->field_pic_flag = 0;
+        SliceHeader->bottom_field_flag = 0;
+
+        if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+        {
+            /// field_pic_flag
+            viddec_pm_get_bits(parent, &code, 1);
+            SliceHeader->field_pic_flag = (uint8_t)code;
+
+            if (SliceHeader->field_pic_flag)
+            {
+                viddec_pm_get_bits(parent, &code, 1);
+                SliceHeader->bottom_field_flag = (uint8_t)code;
+
+                SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD;
+            }
+        }
+
+        ////// Check valid or not of first_mb_in_slice
+        /* A field picture covers half the frame's macroblocks. */
+        if (SliceHeader->structure == FRAME) {
+            max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs;
+        } else {
+            max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2;
+        }
+
+
+        ///if(pInfo->img.MbaffFrameFlag)
+        /* NOTE(review): bitwise '&' where logical '&&' is presumably meant, and
+         * this reads pInfo->SliceHeader.field_pic_flag rather than the
+         * SliceHeader parameter being filled in — confirm both are intended. */
+        if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) {
+            SliceHeader->first_mb_in_slice <<=1;
+        }
+
+        if (SliceHeader->first_mb_in_slice >= max_mb_num)
+            break;
+
+
+        if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+        {
+            SliceHeader->idr_pic_id = h264_GetVLCElement(parent, pInfo, false);
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            /* idr_pic_id legal range is 0..65535; flag corruption for bail-out. */
+            if (SliceHeader->idr_pic_id > 65535)
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+        }
+
+        if (pInfo->active_SPS.pic_order_cnt_type == 0)
+        {
+            viddec_pm_get_bits(parent, &code , pInfo->active_SPS.log2_max_pic_order_cnt_lsb_minus4+4);
+            SliceHeader->pic_order_cnt_lsb = (uint32_t)code;
+
+
+            if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+            {
+                SliceHeader->delta_pic_order_cnt_bottom = h264_GetVLCElement(parent, pInfo, true);
+            }
+            else
+            {
+                SliceHeader->delta_pic_order_cnt_bottom = 0;
+            }
+        }
+
+        if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag))
+        {
+            SliceHeader->delta_pic_order_cnt[0] = h264_GetVLCElement(parent, pInfo, true);
+            if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+            {
+                SliceHeader->delta_pic_order_cnt[1] = h264_GetVLCElement(parent, pInfo, true);
+            }
+        }
+
+        if (pInfo->active_PPS.redundant_pic_cnt_present_flag)
+        {
+            SliceHeader->redundant_pic_cnt = h264_GetVLCElement(parent, pInfo, false);
+            if (SliceHeader->redundant_pic_cnt > 127)
+                break;
+        } else {
+            SliceHeader->redundant_pic_cnt = 0;
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    //////////// FMO is not supported curently, so comment out the following code
+    //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
+    //{
+    //	SliceHeader->slice_group_change_cycle = 0;				//one of the variables is not known in the high profile
+    //}
+
+    return ret;
+}
+
+/*-----------------------------------------------------------------------------------------*/
+// slice header 3
+// (direct_spatial_mv_pred_flag, num_ref_idx, pic_list_reorder, PWT,  ref_pic_remark, alpha, beta, etc)
+/*-----------------------------------------------------------------------------------------*/
+
+/* Parse the third part of a slice header: direct_spatial_mv_pred_flag,
+ * active reference counts (with PPS defaults and optional override),
+ * reference picture list reordering, prediction weight table (position
+ * recorded in pInfo for HW re-parse), decoded reference picture marking,
+ * cabac_init_idc, QP/QS deltas and deblocking filter offsets — with range
+ * validation (and sw_bail flagging under SW_ERROR_CONCEALEMNT) throughout.
+ * Returns H264_STATUS_OK on success, H264_SliceHeader_ERROR otherwise. */
+h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status ret = H264_SliceHeader_ERROR;
+
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    int32_t  slice_alpha_c0_offset, slice_beta_offset;
+    uint32_t code;
+    uint32_t bits_offset =0, byte_offset =0;
+    uint8_t  is_emul =0;
+
+    do {
+        /// direct_spatial_mv_pred_flag (B slices only)
+        if (SliceHeader->slice_type == h264_PtypeB)
+        {
+            viddec_pm_get_bits(parent, &code , 1);
+            SliceHeader->direct_spatial_mv_pred_flag = (uint8_t)code;
+        }
+        else
+        {
+            SliceHeader->direct_spatial_mv_pred_flag = 0;
+        }
+
+        //
+        // Reset ref_idx and Overide it if exist
+        //
+        SliceHeader->num_ref_idx_l0_active = pInfo->active_PPS.num_ref_idx_l0_active;
+        SliceHeader->num_ref_idx_l1_active = pInfo->active_PPS.num_ref_idx_l1_active;
+
+        if ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP) || (SliceHeader->slice_type == h264_PtypeB))
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            SliceHeader->num_ref_idx_active_override_flag  = (uint8_t)code;
+
+            if (SliceHeader->num_ref_idx_active_override_flag)
+            {
+                SliceHeader->num_ref_idx_l0_active = h264_GetVLCElement(parent, pInfo, false) + 1;
+                if (SliceHeader->slice_type == h264_PtypeB)
+                {
+                    SliceHeader->num_ref_idx_l1_active = h264_GetVLCElement(parent, pInfo, false)+1;
+                }
+            }
+        }
+
+        /* List 1 exists only for B slices. */
+        if (SliceHeader->slice_type != h264_PtypeB) {
+            SliceHeader->num_ref_idx_l1_active = 0;
+        }
+
+        if ((SliceHeader->num_ref_idx_l0_active > MAX_NUM_REF_FRAMES) || (SliceHeader->num_ref_idx_l1_active > MAX_NUM_REF_FRAMES))
+        {
+            break;
+        }
+
+        if (h264_Parse_Ref_Pic_List_Reordering(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+        {
+            break;
+        }
+
+
+        ////
+        //// Parse Pred_weight_table but not store it becasue it will be reparsed in HW
+        ////
+        if (((pInfo->active_PPS.weighted_pred_flag) && ((SliceHeader->slice_type == h264_PtypeP) || (SliceHeader->slice_type == h264_PtypeSP))) || ((pInfo->active_PPS.weighted_bipred_idc == 1) && (SliceHeader->slice_type == h264_PtypeB)))
+        {
+
+            /* Record start position so HW can re-parse the weight table. */
+            viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+            pInfo->h264_pwt_enabled = 1;
+            pInfo->h264_pwt_start_byte_offset = byte_offset;
+            pInfo->h264_pwt_start_bit_offset  = bits_offset;
+
+            if (h264_Parse_Pred_Weight_Table(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+            {
+                break;
+            }
+
+            viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+            /* Normalize an exactly byte-aligned end to (prev byte, bit 8). */
+            if (0 == bits_offset)
+            {
+                pInfo->h264_pwt_end_byte_offset = byte_offset-1;
+                pInfo->h264_pwt_end_bit_offset  = 8;
+            }
+            else
+            {
+                pInfo->h264_pwt_end_byte_offset = byte_offset;
+                pInfo->h264_pwt_end_bit_offset  = bits_offset;
+            }
+
+        }
+
+
+
+        ////
+        //// Parse Ref_pic marking if there
+        ////
+        if (SliceHeader->nal_ref_idc != 0)
+        {
+            if (h264_Parse_Dec_Ref_Pic_Marking(parent, pInfo, SliceHeader) != H264_STATUS_OK)
+            {
+                break;
+            }
+        }
+
+        /* cabac_init_idc present only for CABAC and non-I/SI slices. */
+        if ((pInfo->active_PPS.entropy_coding_mode_flag) && (SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI))
+        {
+            SliceHeader->cabac_init_idc = h264_GetVLCElement(parent, pInfo, false);
+        }
+        else
+        {
+            SliceHeader->cabac_init_idc = 0;
+        }
+
+        if (SliceHeader->cabac_init_idc > 2)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+
+        /* slice_qp_delta: resulting QP must stay in [0, 51]. */
+        SliceHeader->slice_qp_delta = h264_GetVLCElement(parent, pInfo, true);
+        if ( (SliceHeader->slice_qp_delta > (25-pInfo->active_PPS.pic_init_qp_minus26)) || (SliceHeader->slice_qp_delta < -(26+pInfo->active_PPS.pic_init_qp_minus26)))
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+
+        if ((SliceHeader->slice_type == h264_PtypeSP)|| (SliceHeader->slice_type == h264_PtypeSI) )
+        {
+            if (SliceHeader->slice_type == h264_PtypeSP)
+            {
+                viddec_pm_get_bits(parent, &code, 1);
+                SliceHeader->sp_for_switch_flag  = (uint8_t)code;
+
+            }
+            SliceHeader->slice_qs_delta = h264_GetVLCElement(parent, pInfo, true);
+
+            if ( (SliceHeader->slice_qs_delta > (25-pInfo->active_PPS.pic_init_qs_minus26)) || (SliceHeader->slice_qs_delta < -(26+pInfo->active_PPS.pic_init_qs_minus26)) )
+            {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+                pInfo->sw_bail = 1;
+#endif
+#endif
+                break;
+            }
+        }
+        if (pInfo->active_PPS.deblocking_filter_control_present_flag)
+        {
+            SliceHeader->disable_deblocking_filter_idc = h264_GetVLCElement(parent, pInfo, false);
+            if (SliceHeader->disable_deblocking_filter_idc != 1)
+            {
+                /* Offsets are signaled as div2; doubled values must be in [-12, 12]. */
+                SliceHeader->slice_alpha_c0_offset_div2 = h264_GetVLCElement(parent, pInfo, true);
+                slice_alpha_c0_offset = SliceHeader->slice_alpha_c0_offset_div2 << 1;
+                if (slice_alpha_c0_offset < -12 || slice_alpha_c0_offset > 12)
+                {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+                    pInfo->sw_bail = 1;
+#endif
+#endif
+                    break;
+                }
+
+                SliceHeader->slice_beta_offset_div2 = h264_GetVLCElement(parent, pInfo, true);
+                slice_beta_offset = SliceHeader->slice_beta_offset_div2 << 1;
+                if (slice_beta_offset < -12 || slice_beta_offset > 12)
+                {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+                    pInfo->sw_bail = 1;
+#endif
+#endif
+                    break;
+                }
+            }
+            else
+            {
+                SliceHeader->slice_alpha_c0_offset_div2 = 0;
+                SliceHeader->slice_beta_offset_div2 = 0;
+            }
+        }
+
+        ret = H264_STATUS_OK;
+    } while (0);
+
+    //////////// FMO is not supported curently, so comment out the following code
+    //if((pInfo->active_PPS.num_slice_groups_minus1 > 0) && (pInfo->active_PPS.slice_group_map_type >= 3) && (pInfo->active_PPS.slice_group_map_type <= 5) )
+    //{
+    //	SliceHeader->slice_group_change_cycle = 0;				//one of the variables is not known in the high profile
+    //}
+
+    return ret;
+}
+
+
+/*--------------------------------------------------------------------------------------------------*/
+//
+// The syntax elements reordering_of_pic_nums_idc, abs_diff_pic_num_minus1, and long_term_pic_num
+// specify the change from the initial reference picture lists to the reference picture lists to be used
+// for decoding the slice
+
+// reordering_of_pic_nums_idc:
+//		0:	abs_diff_pic_num_minus1 is present and corresponds to a difference to subtract from a picture number prediction value
+//		1:	abs_diff_pic_num_minus1 is present and corresponds to a difference to add to a picture number prediction value
+//		2:	long_term_pic_num is present and specifies the long-term picture number for a reference picture
+//		3:	End loop for reordering of the initial reference picture list
+//
+/*--------------------------------------------------------------------------------------------------*/
+
+/* Parse ref_pic_list_reordering() for list 0 (non-I/SI slices) and list 1
+ * (B slices). Each list is a sequence of reordering_of_pic_nums_idc commands
+ * terminated by idc == 3; idc 0/1 carry abs_diff_pic_num_minus1, idc 2 carries
+ * long_term_pic_num. Returns H264_SliceHeader_ERROR if a list exceeds
+ * MAX_NUM_REF_FRAMES commands, H264_STATUS_OK otherwise. */
+h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    int32_t reorder= -1;
+    uint32_t code;
+
+
+    if ((SliceHeader->slice_type != h264_PtypeI) && (SliceHeader->slice_type != h264_PtypeSI))
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag = (uint8_t)code;
+
+        if (SliceHeader->sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+
+            reorder= -1;
+            do
+            {
+                reorder++;
+
+                /* Guard the fixed-size command arrays against corrupt streams. */
+                if (reorder > MAX_NUM_REF_FRAMES)
+                {
+                    return H264_SliceHeader_ERROR;
+                }
+
+                SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false);
+                if ((SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 1))
+                {
+                    SliceHeader->sh_refpic_l0.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false);
+                }
+                else if (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] == 2)
+                {
+                    SliceHeader->sh_refpic_l0.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false);
+                }
+
+            } while (SliceHeader->sh_refpic_l0.reordering_of_pic_nums_idc[reorder] != 3);
+        }
+    }
+
+    if (SliceHeader->slice_type == h264_PtypeB)
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag = (uint8_t)code;
+
+        if (SliceHeader->sh_refpic_l1.ref_pic_list_reordering_flag)
+        {
+
+            reorder = -1;
+            do
+            {
+                reorder++;
+                if (reorder > MAX_NUM_REF_FRAMES)
+                {
+                    return H264_SliceHeader_ERROR;
+                }
+                SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] = h264_GetVLCElement(parent, pInfo, false);
+                if ((SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 0) || (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 1))
+                {
+                    SliceHeader->sh_refpic_l1.list_reordering_num[reorder].abs_diff_pic_num_minus1 = h264_GetVLCElement(parent, pInfo, false);
+                }
+                else if (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] == 2)
+                {
+                    SliceHeader->sh_refpic_l1.list_reordering_num[reorder].long_term_pic_num = h264_GetVLCElement(parent, pInfo, false);
+                }
+            } while (SliceHeader->sh_refpic_l1.reordering_of_pic_nums_idc[reorder] != 3);
+        }
+    }
+
+    //currently just two reference frames but in case mroe than two, then should use an array for the above structures that is why reorder
+    return H264_STATUS_OK;
+
+}
+
+#ifdef VBP
+/* Parse pred_weight_table(): luma/chroma log2 weight denominators, then per
+ * reference index (list 0, and list 1 for B slices) an explicit weight/offset
+ * pair or the spec default (weight = 1 << denom, offset = 0). Values are
+ * stored in SliceHeader->sh_predwttbl (this VBP variant keeps them; the HW
+ * path re-parses). Always returns H264_STATUS_OK.
+ * NOTE(review): the "(-128 > flag) || (127 < flag)" bail-out checks compare an
+ * unsigned 1-bit value: -128 converts to a huge unsigned and the first clause
+ * is always false, and flag is only ever 0 or 1 — the checks are dead code. */
+h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+    uint32_t i =0, j=0;
+    uint32_t flag;
+
+    SliceHeader->sh_predwttbl.luma_log2_weight_denom = h264_GetVLCElement(parent, pInfo, false);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+    /* Denominator legal range is 0..7. */
+    if (SliceHeader->sh_predwttbl.luma_log2_weight_denom > 7)
+    {
+        pInfo->sw_bail = 1;
+    }
+#endif
+#endif
+    if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+    {
+        SliceHeader->sh_predwttbl.chroma_log2_weight_denom = h264_GetVLCElement(parent,pInfo, false);
+    }
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+    /* NOTE(review): checked even when chroma_format_idc == 0 left the field
+     * unparsed above — may read a stale value; confirm. */
+    if (SliceHeader->sh_predwttbl.chroma_log2_weight_denom > 7)
+    {
+        pInfo->sw_bail = 1;
+    }
+#endif
+#endif
+    for (i=0; i< SliceHeader->num_ref_idx_l0_active; i++)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+        SliceHeader->sh_predwttbl.luma_weight_l0_flag = flag;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if ((-128 > flag) || (127 < flag))
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+        if (SliceHeader->sh_predwttbl.luma_weight_l0_flag)
+        {
+            SliceHeader->sh_predwttbl.luma_weight_l0[i] = h264_GetVLCElement(parent, pInfo, true);
+            SliceHeader->sh_predwttbl.luma_offset_l0[i] = h264_GetVLCElement(parent, pInfo, true);
+        }
+        else
+        {
+            /* Spec defaults when the explicit weight is absent. */
+            SliceHeader->sh_predwttbl.luma_weight_l0[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+            SliceHeader->sh_predwttbl.luma_offset_l0[i] = 0;
+        }
+
+        if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+        {
+            viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+            SliceHeader->sh_predwttbl.chroma_weight_l0_flag = flag;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if ((-128 > flag) || (127 < flag))
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+            if (SliceHeader->sh_predwttbl.chroma_weight_l0_flag)
+            {
+                /* j indexes the two chroma components (Cb, Cr). */
+                for (j=0; j <2; j++)
+                {
+                    SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = h264_GetVLCElement(parent, pInfo, true);
+                    SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = h264_GetVLCElement(parent, pInfo, true);
+                }
+            }
+            else
+            {
+                for (j=0; j <2; j++)
+                {
+                    SliceHeader->sh_predwttbl.chroma_weight_l0[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+                    SliceHeader->sh_predwttbl.chroma_offset_l0[i][j] = 0;
+                }
+            }
+        }
+
+    }
+
+    if (SliceHeader->slice_type == h264_PtypeB)
+    {
+        /* Same structure as list 0, applied to reference list 1. */
+        for (i=0; i< SliceHeader->num_ref_idx_l1_active; i++)
+        {
+            viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+            SliceHeader->sh_predwttbl.luma_weight_l1_flag = flag;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            if ((-128 > flag) || (127 < flag))
+            {
+                pInfo->sw_bail = 1;
+            }
+#endif
+#endif
+            if (SliceHeader->sh_predwttbl.luma_weight_l1_flag)
+            {
+                SliceHeader->sh_predwttbl.luma_weight_l1[i] = h264_GetVLCElement(parent, pInfo, true);
+                SliceHeader->sh_predwttbl.luma_offset_l1[i] = h264_GetVLCElement(parent, pInfo, true);
+            }
+            else
+            {
+                SliceHeader->sh_predwttbl.luma_weight_l1[i] = (1 << SliceHeader->sh_predwttbl.luma_log2_weight_denom);
+                SliceHeader->sh_predwttbl.luma_offset_l1[i] = 0;
+            }
+
+            if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);
+                SliceHeader->sh_predwttbl.chroma_weight_l1_flag = flag;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+                if ((-128 > flag) || (127 < flag))
+                {
+                    pInfo->sw_bail = 1;
+                }
+#endif
+#endif
+                if (SliceHeader->sh_predwttbl.chroma_weight_l1_flag)
+                {
+                    for (j=0; j <2; j++)
+                    {
+                        SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = h264_GetVLCElement(parent, pInfo, true);
+                        SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = h264_GetVLCElement(parent, pInfo, true);
+                    }
+                }
+                else
+                {
+                    for (j=0; j <2; j++)
+                    {
+                        SliceHeader->sh_predwttbl.chroma_weight_l1[i][j] = (1 << SliceHeader->sh_predwttbl.chroma_log2_weight_denom);
+                        SliceHeader->sh_predwttbl.chroma_offset_l1[i][j] = 0;
+                    }
+                }
+            }
+
+        }
+    }
+
+    return H264_STATUS_OK;
+} ///// End of h264_Parse_Pred_Weight_Table
+
+#else
+
+/*--------------------------------------------------------------------------------------------------*/
+//
+// Parse Prediction weight table
+// Note: This table will be reparsed in HW Accelerator, so needn't keep it in parser
+//
+/*--------------------------------------------------------------------------------------------------*/
+
+
+h264_Status h264_Parse_Pred_Weight_Table(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+    // Skip over pred_weight_table() (H.264 spec 7.3.3.2).  In this build the
+    // table is re-parsed by the HW accelerator, so nothing is stored in the
+    // parser state: every syntax element is read only to advance the
+    // bitstream position, and its value is discarded.
+    //
+    // Returns H264_STATUS_OK after consuming the table.
+    uint32_t i = 0, j = 0;
+    // Initialized so 'flag' is never read indeterminate should
+    // viddec_pm_get_bits fail without writing its output (its return value
+    // is not checked here).
+    uint32_t flag = 0;
+    uint32_t val = 0;
+
+    // luma_log2_weight_denom
+    val = h264_GetVLCElement(parent, pInfo, false);
+
+    if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+    {
+        // chroma_log2_weight_denom
+        val = h264_GetVLCElement(parent, pInfo, false);
+    }
+
+    // Reference list 0 weights.
+    for (i = 0; i < SliceHeader->num_ref_idx_l0_active; i++)
+    {
+        viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);       // luma_weight_l0_flag
+        if (flag)
+        {
+            val = h264_GetVLCElement(parent, pInfo, true);      // luma_weight_l0[i]
+            val = h264_GetVLCElement(parent, pInfo, true);      // luma_offset_l0[i]
+        }
+
+        if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+        {
+            viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);   // chroma_weight_l0_flag
+            if (flag)
+            {
+                for (j = 0; j < 2; j++)
+                {
+                    val = h264_GetVLCElement(parent, pInfo, true);  // chroma_weight_l0[i][j]
+                    val = h264_GetVLCElement(parent, pInfo, true);  // chroma_offset_l0[i][j]
+                }
+            }
+        }
+    }
+
+    // Reference list 1 weights are present for B slices only.
+    if (SliceHeader->slice_type == h264_PtypeB)
+    {
+        for (i = 0; i < SliceHeader->num_ref_idx_l1_active; i++)
+        {
+            viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);       // luma_weight_l1_flag
+            if (flag)
+            {
+                val = h264_GetVLCElement(parent, pInfo, true);      // luma_weight_l1[i]
+                val = h264_GetVLCElement(parent, pInfo, true);      // luma_offset_l1[i]
+            }
+
+            if (pInfo->active_SPS.sps_disp.chroma_format_idc != 0)
+            {
+                viddec_pm_get_bits(parent, (uint32_t *)&flag, 1);   // chroma_weight_l1_flag
+                if (flag)
+                {
+                    for (j = 0; j < 2; j++)
+                    {
+                        val = h264_GetVLCElement(parent, pInfo, true);  // chroma_weight_l1[i][j]
+                        val = h264_GetVLCElement(parent, pInfo, true);  // chroma_offset_l1[i][j]
+                    }
+                }
+            }
+        }
+    }
+
+    (void)val;  // values are intentionally parsed-and-discarded (see header note)
+
+    return H264_STATUS_OK;
+} ///// End of h264_Parse_Pred_Weight_Table
+
+#endif
+
+/*--------------------------------------------------------------------------------------------------*/
+// The syntax elements specify marking of the reference pictures.
+//			1)IDR:		no_output_of_prior_pics_flag,
+//						long_term_reference_flag,
+//			2)NonIDR:	adaptive_ref_pic_marking_mode_flag,
+//						memory_management_control_operation,
+//						difference_of_pic_nums_minus1,
+//						long_term_frame_idx,
+//						long_term_pic_num, and
+//						max_long_term_frame_idx_plus1
+//
+//The marking of a reference picture can be "unused for reference", "used for short-term reference", or "used for longterm
+// reference", but only one among these three.
+/*--------------------------------------------------------------------------------------------------*/
+
+
+// Parse dec_ref_pic_marking() (H.264 spec 7.3.3.3) into
+// SliceHeader->sh_dec_refpic.  IDR slices carry two flags; non-IDR slices
+// optionally carry a zero-terminated list of MMCO operations.  Returns
+// H264_STATUS_ERROR if more than NUM_MMCO_OPERATIONS operations are
+// signalled, H264_STATUS_OK otherwise.
+h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent, h264_Info* pInfo,h264_Slice_Header_t *SliceHeader)
+{
+    //h264_Slice_Header_t* SliceHeader = &pInfo->SliceHeader;
+    uint8_t i = 0;
+    uint32_t code = 0;
+
+    if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)code;
+
+        // long_term_reference_flag is mirrored into the picture-level state.
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)code;
+        pInfo->img.long_term_reference_flag = (uint8_t)code;
+    }
+    else
+    {
+        viddec_pm_get_bits(parent, &code, 1);
+        SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = (uint8_t)code;
+
+        ///////////////////////////////////////////////////////////////////////////////////////
+        // adaptive_ref_pic_marking_mode_flag    Reference picture marking mode specified
+        //   0    Sliding window reference picture marking mode: a marking mode
+        //        providing a first-in first-out mechanism for short-term reference pictures.
+        //   1    Adaptive reference picture marking mode: a reference picture
+        //        marking mode providing syntax elements to specify marking of
+        //        reference pictures as "unused for reference" and to assign
+        //        long-term frame indices.
+        ///////////////////////////////////////////////////////////////////////////////////////
+
+        if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
+        {
+            // Operations are read until a terminating mmco value of 0.  The
+            // i < NUM_MMCO_OPERATIONS guard (and the error return below, which
+            // fires before the while condition would index out of bounds)
+            // keeps all array accesses within the fixed-size storage.
+            do
+            {
+                if (i < NUM_MMCO_OPERATIONS)
+                {
+                    SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = h264_GetVLCElement(parent, pInfo, false);
+                    // mmco 1 and 3 carry difference_of_pic_num_minus1.
+                    if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3))
+                    {
+                        SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    // mmco 2 carries long_term_pic_num.
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
+                    {
+                        SliceHeader->sh_dec_refpic.long_term_pic_num[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    // mmco 3 and 6 carry long_term_frame_idx.
+                    if ((SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) || (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6))
+                    {
+                        SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    // mmco 4 carries max_long_term_frame_idx_plus1.
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
+                    {
+                        SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = h264_GetVLCElement(parent, pInfo, false);
+                    }
+
+                    // mmco 5 resets the picture state; remember it happened.
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
+                    {
+                        pInfo->img.curr_has_mmco_5 = 1;
+                    }
+                }
+
+                // Too many operations for our storage: bail out before the
+                // while condition below would read past the array.
+                if (i >= NUM_MMCO_OPERATIONS) {
+                    return H264_STATUS_ERROR;
+                }
+
+            } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
+        }
+    }
+
+
+
+
+    // Count includes the terminating mmco-0 entry (post-increment above);
+    // stays 0 for the IDR path and for sliding-window marking.
+    SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
+
+    return H264_STATUS_OK;
+}
+
+
+
+//#endif
diff --git a/mixvbp/vbp_plugin/h264/h264parse_sps.c b/mixvbp/vbp_plugin/h264/h264parse_sps.c
new file mode 100755
index 0000000..431892b
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/h264parse_sps.c
@@ -0,0 +1,691 @@
+//#define H264_PARSE_SPS_C
+//#ifdef H264_PARSE_SPS_C
+
+#include "h264.h"
+#include "h264parse.h"
+#ifdef VBP
+#include<math.h>
+#endif
+
+
+/// SPS extension unit (unit_type = 13)
+///
+/// NOTE: compiled out via #if 0 and the body itself is fully commented --
+/// kept only as a reference for the sps_extension_rbsp() syntax.  If ever
+/// enabled, the function currently just returns H264_STATUS_OK without
+/// consuming any bits.
+#if 0
+h264_Status h264_Parse_SeqParameterSet_Extension(void *parent,h264_Info * pInfo)
+{
+    /*h264_SPS_Extension_RBSP_t* SPS_ext = pInfo->p_active_SPS_ext;
+
+    SPS_ext->seq_parameter_set_id = h264_GetVLCElement(pInfo, false);
+    if(SPS_ext->seq_parameter_set_id > MAX_SEQ_PARAMS-1)
+    {
+    	return H264_SPS_ERROR;
+    }
+    SPS_ext->aux_format_idc = h264_GetVLCElement(pInfo, false);
+    if(SPS_ext->aux_format_idc  > 3)
+    {
+    	return H264_SPS_ERROR;
+    }
+    if(SPS_ext->aux_format_idc != 0)
+    {
+    	SPS_ext->bit_depth_aux_minus8 = h264_GetVLCElement(pInfo, false);
+    	if(SPS_ext->bit_depth_aux_minus8 + 8 > 12)
+    	{
+    		return H264_SPS_ERROR;
+    	}
+
+    	SPS_ext->alpha_incr_flag = h264_GetBits(pInfo, 1, "alpha_incr_flag");
+    	if(SPS_ext->alpha_incr_flag > 1)
+    	{
+    		return H264_SPS_ERROR;
+    	}
+
+    	SPS_ext->alpha_opaque_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_opaque_value");		//+8 to get the bit_depth value
+    	SPS_ext->alpha_transparent_value = h264_GetBits(pInfo,(SPS_ext->bit_depth_aux_minus8+8+1), "alpha_transparent_value");		//+8 to get the bit_depth value
+    }
+    SPS_ext->additional_extension_flag = h264_GetBits(pInfo, 1, "additional_extension_flag");
+    */
+    return H264_STATUS_OK;
+}
+#endif
+
+
+/// Parse hrd_parameters() (H.264 spec Annex E.1.2).
+/// When nal_hrd is non-zero the NAL HRD fields are filled in, otherwise the
+/// VCL HRD fields.  Display/timing-relevant fields go into SPS->sps_disp;
+/// the remaining elements are parsed into pVUI_Seq_Not_Used only to keep the
+/// bitstream position correct.
+///
+/// Returns H264_SPS_ERROR when cpb_cnt_minus1 is out of range
+/// (>= MAX_CPB_CNT) or when the trailing 20-bit group of length fields
+/// cannot be read; H264_STATUS_OK otherwise.
+h264_Status h264_Parse_HRD_Parameters(void *parent, h264_Info* pInfo, int nal_hrd,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used)
+{
+    //seq_param_set_ptr SPS = pInfo->p_active_SPS;
+    int32_t i = 0;
+    uint32_t code;
+
+
+    if (nal_hrd)
+    {
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+        if (SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+        {
+            return H264_SPS_ERROR;
+        }
+
+        // bit_rate_scale (high 4 bits) and cpb_size_scale (low 4 bits).
+        viddec_pm_get_bits(parent, &code, 8);
+        pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale = (uint8_t)(code>>4);
+        pVUI_Seq_Not_Used->nal_hrd_cpb_size_scale = (uint8_t)(code & 0xf);
+
+        // Per-CPB bit-rate/size values plus cbr_flag.
+        for (i=0; i<=SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1; i++)
+        {
+            pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->nal_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+
+            viddec_pm_get_bits(parent, &code, 1);
+            pVUI_Seq_Not_Used->nal_hrd_parameters.cbr_flag[i] = (uint8_t)code;
+        }
+
+        // Four 5-bit length fields fetched as one 20-bit read; this is the
+        // only bit read in this function whose failure is checked.
+        if ( viddec_pm_get_bits(parent, &code, 20) == -1)
+            return H264_SPS_ERROR;
+
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f);
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_time_offset_length = (uint8_t)(code&0x1f);
+
+    }
+    else
+    {
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 = h264_GetVLCElement(parent, pInfo, false);
+
+        if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1 >= MAX_CPB_CNT)
+        {
+            return H264_SPS_ERROR;
+        }
+
+        // bit_rate_scale (high 4 bits) and cpb_size_scale (low 4 bits).
+        viddec_pm_get_bits(parent, &code, 8);
+        pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale = (uint8_t)(code>>4);
+        pVUI_Seq_Not_Used->vcl_hrd_cpb_size_scale = (uint8_t)(code&0xf);
+
+        for (i=0; i<=SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1; i++)
+        {
+            pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->vcl_hrd_parameters.cpb_size_value_minus1[i] = h264_GetVLCElement(parent, pInfo, false);
+            viddec_pm_get_bits(parent, &code, 1);
+            pVUI_Seq_Not_Used->vcl_hrd_parameters.cbr_flag[i] = (uint8_t)code;
+        }
+
+        if ( viddec_pm_get_bits(parent, &code, 20) == -1)
+            return H264_SPS_ERROR;
+
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_initial_cpb_removal_delay_length_minus1 = (uint8_t)((code>>15)&0x1f);
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_removal_delay_length_minus1 = (uint8_t)((code>>10)&0x1f);
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_dpb_output_delay_length_minus1 = (uint8_t)((code>>5)&0x1f);
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_time_offset_length = (uint8_t)(code&0x1f);
+    }
+
+    return H264_STATUS_OK;
+}
+
+
+
+// Parse vui_parameters() (H.264 spec Annex E.1.1).
+// Display-relevant fields are stored in SPS->sps_disp.vui_seq_parameters;
+// fields the decoder does not need are parsed into pVUI_Seq_Not_Used purely
+// to keep the bitstream position correct.  Every read below advances the
+// bit position, so the statement order must match the syntax order exactly.
+// NOTE(review): only the bitstream_restriction_flag read checks the return
+// value of viddec_pm_get_bits; earlier reads are unchecked -- presumably the
+// extractor leaves 'code' untouched on failure.  Confirm before relying on
+// error propagation from this function.
+h264_Status h264_Parse_Vui_Parameters(void *parent, h264_Info* pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used)
+{
+    h264_Status ret = H264_STATUS_OK;
+    //seq_param_set_ptr SPS = pInfo->p_active_SPS;
+    int32_t nal_hrd = 0;
+    uint32_t code;
+
+    // do/while(0) so error paths can 'break' out past the remaining parsing.
+    do {
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag = (uint8_t)code;
+
+
+        if (SPS->sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+        {
+            viddec_pm_get_bits(parent, &code, 8);
+            SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc = (uint8_t)code;
+
+            // Extended_SAR carries an explicit 16x16-bit sample aspect ratio.
+            if (SPS->sps_disp.vui_seq_parameters.aspect_ratio_idc == h264_AR_Extended_SAR)
+            {
+                viddec_pm_get_bits(parent, &code, 16);
+                SPS->sps_disp.vui_seq_parameters.sar_width = (uint16_t)code;
+
+                viddec_pm_get_bits(parent, &code, 16);
+                SPS->sps_disp.vui_seq_parameters.sar_height = (uint16_t)code;
+
+            }
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        pVUI_Seq_Not_Used->overscan_info_present_flag = (uint8_t)code;
+
+        if (pVUI_Seq_Not_Used->overscan_info_present_flag)
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            pVUI_Seq_Not_Used->overscan_appropriate_flag = (uint8_t)code;
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+        {
+            viddec_pm_get_bits(parent, &code, 3);
+            SPS->sps_disp.vui_seq_parameters.video_format = (uint8_t)code;
+
+            // VBP builds keep a copy of video_full_range_flag in sps_disp
+            // because the "not used" struct is discarded by callers.
+            viddec_pm_get_bits(parent, &code, 1);
+            pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code;
+#ifdef VBP
+            SPS->sps_disp.vui_seq_parameters.video_full_range_flag = (uint8_t)code;
+#endif
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code;
+
+            if (SPS->sps_disp.vui_seq_parameters.colour_description_present_flag)
+            {
+                viddec_pm_get_bits(parent, &code, 8);
+                SPS->sps_disp.vui_seq_parameters.colour_primaries = (uint8_t)code;
+
+                viddec_pm_get_bits(parent, &code, 8);
+                SPS->sps_disp.vui_seq_parameters.transfer_characteristics = (uint8_t)code;
+
+                // Same duplication pattern as video_full_range_flag above.
+                viddec_pm_get_bits(parent, &code, 8);
+                pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code;
+#ifdef VBP
+                SPS->sps_disp.vui_seq_parameters.matrix_coefficients = (uint8_t)code;
+#endif
+            }
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        pVUI_Seq_Not_Used->chroma_location_info_present_flag = (uint8_t)code;
+
+        if (pVUI_Seq_Not_Used->chroma_location_info_present_flag)
+        {
+            pVUI_Seq_Not_Used->chroma_sample_loc_type_top_field = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->chroma_sample_loc_type_bottom_field = h264_GetVLCElement(parent, pInfo, false);
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.timing_info_present_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+        {
+            viddec_pm_get_bits(parent, &code, 32);
+            SPS->sps_disp.vui_seq_parameters.num_units_in_tick = (uint32_t)code;
+
+            viddec_pm_get_bits(parent, &code, 32);
+            SPS->sps_disp.vui_seq_parameters.time_scale = (uint32_t)code;
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->sps_disp.vui_seq_parameters.fixed_frame_rate_flag = (uint8_t)code;
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+        {
+            nal_hrd = 1;
+            ret = h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1)
+        {
+            nal_hrd = 0;
+            ret = (h264_Status)h264_Parse_HRD_Parameters(parent,pInfo, nal_hrd,SPS, pVUI_Seq_Not_Used);
+        }
+
+        if ((SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1) || (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->sps_disp.vui_seq_parameters.low_delay_hrd_flag = (uint8_t)code;
+        }
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.vui_seq_parameters.pic_struct_present_flag = (uint8_t)code;
+
+        // The only bit read in this function whose failure is detected.
+        if (viddec_pm_get_bits(parent, &code, 1) == -1) {
+            ret = H264_STATUS_ERROR;
+            break;
+        }
+        SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.vui_seq_parameters.bitstream_restriction_flag)
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            pVUI_Seq_Not_Used->motion_vectors_over_pic_boundaries_flag = (uint8_t)code;
+
+            pVUI_Seq_Not_Used->max_bytes_per_pic_denom = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->max_bits_per_mb_denom = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->log2_max_mv_length_horizontal = h264_GetVLCElement(parent, pInfo, false);
+            pVUI_Seq_Not_Used->log2_max_mv_length_vertical = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.vui_seq_parameters.num_reorder_frames = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering = h264_GetVLCElement(parent, pInfo, false);
+
+            // MAX_INT32_VALUE is the VLC reader's "invalid" sentinel here.
+            if (SPS->sps_disp.vui_seq_parameters.max_dec_frame_buffering == MAX_INT32_VALUE)
+                ret = H264_STATUS_ERROR;
+        }
+    } while (0);
+
+    return ret;
+}
+
+
+h264_Status h264_Parse_SeqParameterSet(void *parent,h264_Info * pInfo, seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame)
+{
+    h264_Status ret = H264_SPS_ERROR;
+
+    int32_t i = 0, tmp = 0;
+    int32_t PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
+    uint32_t code = 0;
+    uint32_t data = 0;
+
+    //SPS->profile_idc = h264_GetBits(pInfo, 8, "Profile");
+    viddec_pm_get_bits(parent, &code, 8);
+    SPS->profile_idc = (uint8_t)code;
+
+    switch (SPS->profile_idc)
+    {
+    case h264_ProfileBaseline:
+    case h264_ProfileMain:
+    case h264_ProfileExtended:
+    case h264_ProfileHigh10:
+    case h264_ProfileHigh422:
+    case h264_ProfileHigh444:
+    case h264_ProfileHigh:
+        break;
+    default:
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        pInfo->sw_bail = 1;
+#endif
+#endif
+        return H264_SPS_INVALID_PROFILE;
+        break;
+    }
+
+    //SPS->constraint_set0_flag = h264_GetBits(pInfo, 1, "constraint_set0_flag");
+    //SPS->constraint_set1_flag = h264_GetBits(pInfo, 1, "constraint_set1_flag");		//should be 1
+    //SPS->constraint_set2_flag = h264_GetBits(pInfo, 1, "constraint_set2_flag");
+    //SPS->constraint_set3_flag = h264_GetBits(pInfo, 1, "constraint_set3_flag");
+
+#ifdef VBP
+    viddec_pm_get_bits(parent, &code, 5);	 //constraint flag set0...set4 (h.264 Spec v2009)
+    SPS->constraint_set_flags = (uint8_t)code;
+
+    //// reserved_zero_3bits
+    viddec_pm_get_bits(parent, (uint32_t *)&code, 3); //3bits zero reserved (h.264 Spec v2009)
+#else
+
+    viddec_pm_get_bits(parent, &code, 4);
+    SPS->constraint_set_flags = (uint8_t)code;
+
+    //// reserved_zero_4bits
+    viddec_pm_get_bits(parent, (uint32_t *)&code, 4);
+#endif
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+    if (code != 0)
+    {
+        pInfo->sw_bail = 1;
+    }
+#endif
+#endif
+    viddec_pm_get_bits(parent, &code, 8);
+    SPS->level_idc = (uint8_t)code;
+
+    switch (SPS->level_idc)
+    {
+    case h264_Level1b:
+    case h264_Level1:
+    case h264_Level11:
+    case h264_Level12:
+    case h264_Level13:
+    case h264_Level2:
+    case h264_Level21:
+    case h264_Level22:
+    case h264_Level3:
+    case h264_Level31:
+    case h264_Level32:
+    case h264_Level4:
+    case h264_Level41:
+    case h264_Level42:
+    case h264_Level5:
+    case h264_Level51:
+        break;
+    default:
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        pInfo->sw_bail = 1;
+#endif
+#endif
+        return H264_SPS_INVALID_LEVEL;
+    }
+
+    do {
+        SPS->seq_parameter_set_id = h264_GetVLCElement(parent, pInfo, false);
+
+        //// seq_parameter_set_id ---[0,31]
+        if (SPS->seq_parameter_set_id > MAX_NUM_SPS -1)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+#ifdef VBP
+        SPS->sps_disp.separate_colour_plane_flag = 0;
+#endif
+
+        if ((SPS->profile_idc == h264_ProfileHigh) || (SPS->profile_idc == h264_ProfileHigh10) ||
+                (SPS->profile_idc == h264_ProfileHigh422) || (SPS->profile_idc == h264_ProfileHigh444)   )
+        {
+            //// chroma_format_idc ---[0,3], currently we don't support 444, so [0,2]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data > H264_CHROMA_422)
+                break;
+            SPS->sps_disp.chroma_format_idc = (uint8_t)data;
+            //if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {}
+
+#ifdef VBP
+            if(SPS->sps_disp.chroma_format_idc == H264_CHROMA_444) {
+                viddec_pm_get_bits(parent, &code, 1);
+                SPS->sps_disp.separate_colour_plane_flag = (uint8_t)data;
+            }
+#endif
+            //// bit_depth_luma_minus8 ---[0,4], -----only support 8-bit pixel
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data)
+                break;
+            SPS->bit_depth_luma_minus8 = (uint8_t)data;
+
+            //// bit_depth_chroma_minus8 ---[0,4]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data )
+                break;
+            SPS->bit_depth_chroma_minus8 = (uint8_t)data;
+
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->lossless_qpprime_y_zero_flag = (uint8_t)code;
+
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->seq_scaling_matrix_present_flag = (uint8_t)code;
+
+            if (SPS->seq_scaling_matrix_present_flag == 1)
+            {
+                //int n_ScalingList = (SPS->sps_disp.chroma_format_idc != H264_CHROMA_444) ? 8 : 12;
+                int n_ScalingList = 8;				/// We do not support 444 currrently
+
+                for (i=0; i<n_ScalingList; i++)
+                {
+                    viddec_pm_get_bits(parent, &code, 1);
+                    SPS->seq_scaling_list_present_flag[i] = (uint8_t)code;
+
+                    if (SPS->seq_scaling_list_present_flag[i])
+                    {
+                        if (i<6)
+                            h264_Scaling_List(parent, SPS->ScalingList4x4[i], 16, &SPS->UseDefaultScalingMatrix4x4Flag[i], pInfo);
+                        else
+                            h264_Scaling_List(parent, SPS->ScalingList8x8[i-6], 64, &SPS->UseDefaultScalingMatrix8x8Flag[i-6], pInfo);
+                    }
+                }
+            }
+        }
+        else
+        {
+            SPS->sps_disp.chroma_format_idc = 1;
+            SPS->seq_scaling_matrix_present_flag = 0;
+
+            SPS->bit_depth_luma_minus8 = 0;
+            SPS->bit_depth_chroma_minus8 = 0;
+            //h264_SetDefaultScalingLists(pInfo);
+        }
+
+        //// log2_max_frame_num_minus4 ---[0,12]
+        data = (h264_GetVLCElement(parent, pInfo, false));
+        if ( data > 12)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        SPS->log2_max_frame_num_minus4 = (uint8_t)data;
+
+        //// pic_order_cnt_type ---- [0,2]
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if ( data > 2)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        SPS->pic_order_cnt_type = (uint8_t)data;
+
+
+        SPS->expectedDeltaPerPOCCycle = 0;
+        if (SPS->pic_order_cnt_type == 0)	{
+            SPS->log2_max_pic_order_cnt_lsb_minus4 = h264_GetVLCElement(parent, pInfo, false);
+        } else if (SPS->pic_order_cnt_type == 1) {
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->delta_pic_order_always_zero_flag = (uint8_t)code;
+
+            SPS->offset_for_non_ref_pic = h264_GetVLCElement(parent, pInfo, true);
+            SPS->offset_for_top_to_bottom_field = h264_GetVLCElement(parent, pInfo, true);
+
+            //// num_ref_frames_in_pic_order_cnt_cycle ---- [0,255]
+            data = h264_GetVLCElement(parent, pInfo, false);
+            if ( data > 255)
+            {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+                pInfo->sw_bail = 1;
+#endif
+#endif
+                break;
+            }
+            SPS->num_ref_frames_in_pic_order_cnt_cycle = (uint8_t)data;
+
+
+            //Alloc memory for frame offset -- FIXME
+            for (i=0; i< SPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
+            {
+                /////SPS->offset_for_ref_frame[i] could be removed from SPS
+#ifndef USER_MODE
+                tmp = h264_GetVLCElement(parent, pInfo, true);
+                pOffset_ref_frame[i]=tmp;
+                SPS->expectedDeltaPerPOCCycle += tmp;
+#else
+                tmp = h264_GetVLCElement(parent, pInfo, true);
+                SPS->offset_for_ref_frame[i]=tmp;
+                SPS->expectedDeltaPerPOCCycle += tmp;
+#endif
+            }
+        }
+
+        //// num_ref_frames ---[0,16]
+        data = h264_GetVLCElement(parent, pInfo, false);
+        if ( data > 16)
+        {
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+            pInfo->sw_bail = 1;
+#endif
+#endif
+            break;
+        }
+        SPS->num_ref_frames = (uint8_t)data;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->gaps_in_frame_num_value_allowed_flag = (uint8_t)code;
+
+
+        SPS->sps_disp.pic_width_in_mbs_minus1 = h264_GetVLCElement(parent, pInfo, false);
+        SPS->sps_disp.pic_height_in_map_units_minus1 = h264_GetVLCElement(parent, pInfo, false);
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.frame_mbs_only_flag = (uint8_t)code;
+
+        /// err check for size
+        PicWidthInMbs       = (SPS->sps_disp.pic_width_in_mbs_minus1 + 1);
+        PicHeightInMapUnits = (SPS->sps_disp.pic_height_in_map_units_minus1 + 1);
+        FrameHeightInMbs    = SPS->sps_disp.frame_mbs_only_flag? PicHeightInMapUnits: (PicHeightInMapUnits<<1);
+        if ((PicWidthInMbs < 2) || (PicWidthInMbs > 128) || (FrameHeightInMbs < 2) || (FrameHeightInMbs>128))
+            break;
+
+        if (!SPS->sps_disp.frame_mbs_only_flag)
+        {
+            viddec_pm_get_bits(parent, &code, 1);
+            SPS->sps_disp.mb_adaptive_frame_field_flag = (uint8_t)code;
+        }
+
+        //SPS->frame_height_in_mbs = (2-SPS->sps_disp.frame_mbs_only_flag)*(SPS->sps_disp.pic_height_in_map_units_minus1+1);
+        //SPS->pic_size_in_map_units = (SPS->sps_disp.pic_width_in_mbs_minus1+1)*SPS->sps_disp.frame_height_in_mbs;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.direct_8x8_inference_flag = (uint8_t)code;
+
+        viddec_pm_get_bits(parent, &code, 1);
+        SPS->sps_disp.frame_cropping_flag = (uint8_t)code;
+
+        if (SPS->sps_disp.frame_cropping_flag)
+        {
+            SPS->sps_disp.frame_crop_rect_left_offset = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.frame_crop_rect_right_offset = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.frame_crop_rect_top_offset = h264_GetVLCElement(parent, pInfo, false);
+            SPS->sps_disp.frame_crop_rect_bottom_offset = h264_GetVLCElement(parent, pInfo, false);
+        }
+
+        //// when frame_mbs_only_flag is equal to 0, direct_8x8_inference_flag shall be equal to 1
+        if (SPS->sps_disp.frame_mbs_only_flag == 0 && SPS->sps_disp.direct_8x8_inference_flag == 0) {
+            break;
+        }
+
+        ////// vui_parameters
+        if (viddec_pm_get_bits(parent, &code, 1) == -1)
+            break;
+        SPS->sps_disp.vui_parameters_present_flag = (uint8_t)code;
+        ret = H264_STATUS_OK;
+
+        if (SPS->sps_disp.vui_parameters_present_flag)
+        {
+#ifndef VBP
+            ret = h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used);
+#else
+            // Ignore VUI parsing result
+            h264_Parse_Vui_Parameters(parent, pInfo, SPS, pVUI_Seq_Not_Used);
+            if (SPS->sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag)
+            {
+                i = SPS->sps_disp.vui_seq_parameters.nal_hrd_cpb_cnt_minus1;
+                uint32_t bit_rate_value = 0;
+                bit_rate_value = pVUI_Seq_Not_Used->nal_hrd_parameters.bit_rate_value_minus1[i] + 1;
+                bit_rate_value *= pow(2, 6 + pVUI_Seq_Not_Used->nal_hrd_bit_rate_scale);
+                SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value;
+            }
+            /*
+            else if (SPS->sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag)
+            {
+                i = SPS->sps_disp.vui_seq_parameters.vcl_hrd_cpb_cnt_minus1;
+                uint32_t bit_rate_value = 0;
+                bit_rate_value = pVUI_Seq_Not_Used->vcl_hrd_parameters.bit_rate_value_minus1[i] + 1;
+                bit_rate_value *= pow(2, 6 + pVUI_Seq_Not_Used->vcl_hrd_bit_rate_scale);
+                SPS->sps_disp.vui_seq_parameters.bit_rate_value = bit_rate_value;
+             }*/
+
+#endif
+        }
+    } while (0);
+#ifdef VBP
+    if (SPS->sps_disp.vui_seq_parameters.bit_rate_value == 0)
+    {
+        int maxBR = 0;
+        switch(SPS->level_idc)
+        {
+        case h264_Level1:
+            maxBR = 64;
+            break;
+
+        case h264_Level1b:
+            maxBR = 128;
+            break;
+
+        case h264_Level11:
+            maxBR = 192;
+            break;
+
+        case h264_Level12:
+            maxBR = 384;
+            break;
+
+        case h264_Level13:
+            maxBR = 768;
+            break;
+
+        case h264_Level2:
+            maxBR = 2000;
+            break;
+
+        case h264_Level21:
+        case h264_Level22:
+            maxBR = 4000;
+            break;
+
+        case h264_Level3:
+            maxBR = 10000;
+            break;
+
+        case h264_Level31:
+            maxBR = 14000;
+            break;
+
+        case h264_Level32:
+        case h264_Level4:
+            maxBR = 20000;
+            break;
+
+        case h264_Level41:
+        case h264_Level42:
+            maxBR = 50000;
+            break;
+
+        case h264_Level5:
+            maxBR = 135000;
+            break;
+
+        case h264_Level51:
+            maxBR = 240000;
+            break;
+        }
+
+        uint32_t cpbBrVclFactor = 1200;
+        if (SPS->profile_idc == 100)
+        {
+            cpbBrVclFactor = 1500; // HIGH
+        }
+        else if (SPS->profile_idc == 110)
+        {
+            cpbBrVclFactor = 3600; // HIGH 10
+        }
+        else if (SPS->profile_idc == 122 ||
+                 SPS->profile_idc == 144)
+        {
+            cpbBrVclFactor = 4800; // HIGH 4:2:2 and HIGH 4:4:4
+        }
+
+        SPS->sps_disp.vui_seq_parameters.bit_rate_value = maxBR *  cpbBrVclFactor;
+    }
+#endif
+
+    //h264_Parse_rbsp_trailing_bits(pInfo);
+
+    return ret;
+}
+
+//#endif
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264.h b/mixvbp/vbp_plugin/h264/include/h264.h
new file mode 100755
index 0000000..eac5541
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264.h
@@ -0,0 +1,1118 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    MPEG-4 header.
+//
+*/
+
+
+#ifndef _H264_H_
+#define _H264_H_
+
+#ifdef HOST_ONLY
+#include <stdio.h>
+#include <stdlib.h>
+#include <memory.h>
+#endif
+
+#include "stdint.h"
+#include "viddec_debug.h"
+
+#include "viddec_fw_workload.h"
+#include "h264parse_sei.h"
+
+#ifdef VBP
+//#define SW_ERROR_CONCEALEMNT
+#endif
+
+#ifdef WIN32
+#define mfd_printf OS_INFO
+#endif
+
+#ifdef H264_VERBOSE
+#define PRINTF(format, args...) OS_INFO("%s:  %s[%d]:: " format, __FILE__, __FUNCTION__ , __LINE__ ,  ## args )
+#else
+//#define PRINTF(args...)
+#endif
+
+//#pragma warning(disable : 4710) // function not inlined
+//#pragma warning(disable : 4514) // unreferenced inline function has been removed CL
+//#pragma warning(disable : 4100) // unreferenced formal parameter CL
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// Parser-wide H.264 syntax limits, DPB sizing constants and bitfield helper macros.
+#define MAX_INT32_VALUE 	0x7fffffff
+
+#define MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE 256
+#define MAX_CPB_CNT	32
+#define MAX_NUM_SLICE_GRPS 	1				//As per Annex A for high profile, the num_slice_groups_minus1 is 0
+#define MAX_PIC_LIST_NUM	8
+
+//#define MAX_PIC_SIZE_IN_MAP_UNITS	1024 //0 ???????? Henry
+#define MAX_NUM_REF_IDX_L0_ACTIVE	32
+//#define STARTCODE_BUF_SIZE 			2048+1024
+
+#define NUM_MMCO_OPERATIONS         17
+
+// Used to check whether the SEI RP is the only way for recovery (cisco contents)
+// This threshold will decide the interval of recovery even no error detected if no IDR during this time
+#define SEI_REC_CHECK_TH				8
+
+//SPS
+#define MAX_NUM_SPS			32
+#define SCL_DEFAULT 		1
+
+//PPS
+#define MAX_PIC_PARAMS		255
+#define MAX_NUM_REF_FRAMES	32
+#define MAX_QP				51
+#define MAX_NUM_PPS			256
+
+// Pack a reference-list entry byte: bits 0-4 fs_idc, bit 5 field/list-id, bit 6 long-term.
+#define PUT_FS_IDC_BITS(w)                                (w&0x1F)
+#define PUT_LIST_INDEX_FIELD_BIT(w)                       ((w&0x1)<<5)
+#define PUT_LIST_LONG_TERM_BITS(w)                        ((w&0x1)<<6)
+#define PUT_LIST_PTR_LIST_ID_BIT(id)                      (id<<5)
+
+
+// DPB
+#define FRAME_FLAG_DANGLING_TOP_FIELD        ( 0x1 << 3  )
+#define FRAME_FLAG_DANGLING_BOTTOM_FIELD     ( 0x1 << 4  )
+
+#define MPD_DPB_FS_NULL_IDC			31            // May need to be changed if we alter gaps_in_frame_num to use 
+
+#define MFD_H264_MAX_FRAME_BUFFERS  17
+#define NUM_DPB_FRAME_STORES        (MFD_H264_MAX_FRAME_BUFFERS + 1)  // 1 extra for storing non-existent pictures.
+
+//Scaling Matrix Type
+#define PPS_QM                  0
+#define SPS_QM                  1
+#define FB_QM                   2
+#define DEFAULT_QM              3
+
+//Frame Type
+#define FRAME_TYPE_IDR        0x00
+#define FRAME_TYPE_I          0x01
+#define FRAME_TYPE_P          0x02
+#define FRAME_TYPE_B          0x03
+#define FRAME_TYPE_INVALID    0x04
+
+
+// Bit offsets of the per-picture frame-type fields packed into frame_store.pic_type.
+#define FRAME_TYPE_FRAME_OFFSET     3
+#define FRAME_TYPE_TOP_OFFSET       3
+#define FRAME_TYPE_BOTTOM_OFFSET    0
+#define FRAME_TYPE_STRUCTRUE_OFFSET 6
+
+//// Error handling
+#define FIELD_ERR_OFFSET		17			//offset for Field error flag ----refer to the structure definition viddec_fw_workload_error_codes in viddec_fw_common_defs.h
+
+////Bits Handling
+// Extract / insert a bitfield of width 'mask' starting at bit 'start' of a 32-bit word.
+#define h264_bitfields_extract(x_32, start, mask)     (((x_32) >> (start)) & (mask) )
+#define h264_bitfields_insert(x_32, val_32, start, mask) ((x_32) = (((x_32) & ~( (mask) << (start))) | (((val_32) & (mask)) << (start))))
+
+
+//// PIP
+    // Picture-in-picture output scaler setting (down-scale factor selection).
+    typedef enum _pip_setting_t
+    {
+        PIP_SCALER_DISABLED,
+        PIP_SCALE_FACTOR_1_BY_4,
+        PIP_SCALE_FACTOR_1_BY_2,
+        PIP_SCALER_INVALID,
+
+    } pip_setting_t;
+
+
+#ifdef VERBOSE
+#define DEBUGGETBITS(args...)  OS_INFO( args )
+#else
+//#define DEBUGGETBITS(args...)
+#endif
+
+    /* status codes */
+    // Return codes shared by all h264 parsing routines; H264_STATUS_OK (0) means success,
+    // the named *_ERROR values identify which syntax element failed to parse.
+    typedef enum _h264_Status
+    {
+        H264_STATUS_EOF          =  1,   // end of file
+        H264_STATUS_OK           =  0,   // no error
+        H264_STATUS_NO_MEM       =  2,   // out of memory
+        H264_STATUS_FILE_ERROR   =  3,   // file error
+        H264_STATUS_NOTSUPPORT   =  4,   // not supported mode
+        H264_STATUS_PARSE_ERROR  =  5,   // fail in parse MPEG-4 stream
+        H264_STATUS_ERROR        =  6,   // unknown/unspecified error
+        H264_NAL_ERROR,
+        H264_SPS_INVALID_PROFILE,
+        H264_SPS_INVALID_LEVEL,
+        H264_SPS_INVALID_SEQ_PARAM_ID,
+        H264_SPS_ERROR,
+        H264_PPS_INVALID_PIC_ID,
+        H264_PPS_INVALID_SEQ_ID,
+        H264_PPS_ERROR,
+        H264_SliceHeader_INVALID_MB,
+        H264_SliceHeader_ERROR,
+        H264_FRAME_DONE,
+        H264_SLICE_DONE,
+        H264_STATUS_POLL_ONCE_ERROR,
+        H264_STATUS_DEC_MEMINIT_ERROR,
+        H264_STATUS_NAL_UNIT_TYPE_ERROR,
+        H264_STATUS_SEI_ERROR,
+        H264_STATUS_SEI_DONE,
+    } h264_Status;
+
+
+
+    // Coded picture structure: a single field (top/bottom) or a complete frame.
+    typedef enum _picture_structure_t
+    {
+        TOP_FIELD		= 1,
+        BOTTOM_FIELD		= 2,
+        FRAME			= 3,
+        INVALID			= 4
+    } picture_structure_t;
+
+///// Chroma format
+
+    // chroma_format_idc values (0..3) as signalled in the SPS.
+    typedef enum _h264_chroma_format_t
+    {
+        H264_CHROMA_MONOCHROME,
+        H264_CHROMA_420,
+        H264_CHROMA_422,
+        H264_CHROMA_444,
+    } h264_chroma_format_t;
+
+    /* H264 start code values */
+    // nal_unit_type field of the NAL header (values per the H.264 spec, Table 7-1).
+    typedef enum _h264_nal_unit_type
+    {
+        h264_NAL_UNIT_TYPE_unspecified = 0,
+        h264_NAL_UNIT_TYPE_SLICE,
+        h264_NAL_UNIT_TYPE_DPA,
+        h264_NAL_UNIT_TYPE_DPB,
+        h264_NAL_UNIT_TYPE_DPC,
+        h264_NAL_UNIT_TYPE_IDR,
+        h264_NAL_UNIT_TYPE_SEI,
+        h264_NAL_UNIT_TYPE_SPS,
+        h264_NAL_UNIT_TYPE_PPS,
+        h264_NAL_UNIT_TYPE_Acc_unit_delimiter,
+        h264_NAL_UNIT_TYPE_EOSeq,
+        h264_NAL_UNIT_TYPE_EOstream,
+        h264_NAL_UNIT_TYPE_filler_data,
+        h264_NAL_UNIT_TYPE_SPS_extension,
+        h264_NAL_UNIT_TYPE_Reserved1			=14,		/*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved2			=15,		/*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved3			=16,		/*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved4			=17,		/*14-18*/
+        h264_NAL_UNIT_TYPE_Reserved5			=18,		/*14-18*/
+        h264_NAL_UNIT_TYPE_ACP				=19,
+        h264_NAL_UNIT_TYPE_Reserved6			=20,		/*20-23*/
+        h264_NAL_UNIT_TYPE_unspecified2		=24,		/*24-31*/
+    } h264_nal_unit_type;
+
+// nal_ref_idc importance levels (note: PRIRITY spelling kept as-is for source compatibility).
+#define h264_NAL_PRIORITY_HIGHEST     3
+#define h264_NAL_PRIORITY_HIGH        2
+#define h264_NAL_PRIRITY_LOW          1
+#define h264_NAL_PRIORITY_DISPOSABLE  0
+
+
+    // profile_idc values from the SPS (H.264 spec, Annex A).
+    typedef enum _h264_Profile
+    {
+        h264_ProfileBaseline = 66,  	/** Baseline profile */
+        h264_ProfileMain = 77,        	/** Main profile */
+        h264_ProfileExtended = 88,    	/** Extended profile */
+        h264_ProfileHigh = 100 ,     		/** High profile */
+        h264_ProfileHigh10 = 110,			/** High 10 profile */
+        h264_ProfileHigh422 = 122,		/** High profile 4:2:2 */
+        h264_ProfileHigh444 = 144,		/** High profile 4:4:4 */
+    } h264_Profile;
+
+
+    // level_idc values from the SPS (level number * 10; 9 is the special Level 1b).
+    typedef enum _h264_Level
+    {
+        h264_Level1b	= 9,		    /** Level 1b */
+        h264_Level1		= 10,			/** Level 1 */
+        h264_Level11	= 11, 		    /** Level 1.1 */
+        h264_Level12	= 12, 		    /** Level 1.2 */
+        h264_Level13	= 13, 		    /** Level 1.3 */
+        h264_Level2		= 20,			/** Level 2 */
+        h264_Level21 	= 21, 		    /** Level 2.1 */
+        h264_Level22	= 22, 		    /** Level 2.2 */
+        h264_Level3		= 30, 		    /** Level 3 */
+        h264_Level31	= 31, 		    /** Level 3.1 */
+        h264_Level32	= 32, 		    /** Level 3.2 */
+        h264_Level4		= 40, 		    /** Level 4 */
+        h264_Level41	= 41, 		    /** Level 4.1 */
+        h264_Level42	= 42, 		    /** Level 4.2 */
+        h264_Level5		= 50, 		    /** Level 5 */
+        h264_Level51	= 51, 		    /** Level 5.1 */
+        h264_LevelReserved = 255  /** Unknown profile */
+    } h264_Level;
+
+
+    // VUI video_format field (source video standard hint, H.264 spec Annex E).
+    typedef enum _h264_video_format
+    {
+        h264_Component	=0,
+        h264_PAL,
+        h264_NTSC,
+        h264_SECAM,
+        h264_MAC,
+        h264_unspecified,
+        h264_Reserved6,
+        h264_Reserved7
+    } h264_video_format;
+
+
+    // Frame coding mode classification of a decoded picture.
+    typedef enum _h264_fcm
+    {
+        h264_ProgressiveFrame = 0,
+        h264_InterlacedFrame  = 1,
+        h264_InterlacedField  = 3,
+        h264_PictureFormatNone
+    } h264_fcm;
+
+
+///// Define the picture types []
+    // Slice types as signalled in the slice header (slice_type % 5).
+    typedef enum _h264_ptype_t
+    {
+        h264_PtypeP = 0,
+        h264_PtypeB = 1,
+        h264_PtypeI = 2,
+        h264_PtypeSP = 3,
+        h264_PtypeSI = 4,
+        h264_Ptype_unspecified,
+    } h264_ptype_t;
+
+
+///// Aspect ratio
+    // VUI aspect_ratio_idc values (H.264 spec Table E-1); 255 means explicit SAR follows.
+    typedef enum _h264_aspect_ratio
+    {
+        h264_AR_Unspecified = 0,
+        h264_AR_1_1 = 1,
+        h264_AR_12_11 = 2,
+        h264_AR_10_11 = 3,
+        h264_AR_16_11 = 4,
+        h264_AR_40_33 = 5,
+        h264_AR_24_11 = 6,
+        h264_AR_20_11 = 7,
+        h264_AR_32_11 = 8,
+        h264_AR_80_33 = 9,
+        h264_AR_18_11 = 10,
+        h264_AR_15_11 = 11,
+        h264_AR_64_33 = 12,
+        h264_AR_160_99 = 13,
+        h264_AR_4_3 = 14,
+        h264_AR_3_2 = 15,
+        h264_AR_2_1 = 16,
+        h264_AR_RESERVED = 17,
+        h264_AR_Extended_SAR = 255,
+    } h264_aspect_ratio;
+
+
+//////////////////////////////////////////////
+
+//////////////////////////////////////////////
+// storable_picture
+
+    /* Structure details
+       If all members remain ints
+       Size = 11 ints, i.e. 44 bytes
+    */
+
+    // One decoded picture (a frame or one field) as tracked for reference management.
+    typedef struct
+    {
+        int32_t	poc;
+        int32_t	pic_num;
+
+        int32_t	long_term_pic_num;
+
+        uint8_t	long_term_frame_idx;
+        uint8_t	is_long_term;
+        uint8_t	used_for_reference;
+        uint8_t	pad_flag;  		// Used to indicate the status
+
+    } storable_picture, *storable_picture_ptr;
+
+//////////////////////////////////////////////
+// frame store
+
+    /* Structure details
+       If all members remain ints
+       Size = 46 ints, i.e. 184 bytes
+    */
+
+    // One DPB slot: a frame plus its two fields, with packed state flags.
+    // The viddec_h264_* accessor macros below read/write bitfields inside
+    // fs_flag_1 (structure, used, skipped, long-term) and fs_flag_2
+    // (non-existent, output, dangling, recovery-point, broken-link, open-GOP, first-field-intra).
+    typedef struct _frame_store
+    {
+        storable_picture frame;
+        storable_picture top_field;
+        storable_picture bottom_field;
+
+        int32_t	frame_num;
+
+        int32_t	frame_num_wrap;
+
+
+        uint8_t	fs_idc;
+        uint8_t	pic_type;            //bit7 structure: 1 frame , 0 field;
+        //bit4,5,6 top field (frame) pic type,  00 IDR 01 I 10 P 11 B 100 INVALID
+        //bit1,2,3 bottom pic type,  00 IDR 01 I 10 P 11 B 100 INVALID
+        uint8_t	long_term_frame_idx; // No two frame stores may have the same long-term frame index
+
+#define viddec_h264_get_dec_structure(x)         h264_bitfields_extract( (x)->fs_flag_1, 0, 0x03)
+#define viddec_h264_set_dec_structure(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 0, 0x03)
+#define viddec_h264_get_is_used(x)         h264_bitfields_extract( (x)->fs_flag_1, 2, 0x03)
+#define viddec_h264_set_is_frame_used(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x03)
+#define viddec_h264_set_is_top_used(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 2, 0x01)
+#define viddec_h264_set_is_bottom_used(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 3, 0x01)
+#define viddec_h264_get_is_skipped(x)         h264_bitfields_extract( (x)->fs_flag_1, 4, 0x03)
+#define viddec_h264_set_is_frame_skipped(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x03)
+#define viddec_h264_set_is_top_skipped(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 4, 0x01)
+#define viddec_h264_set_is_bottom_skipped(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 5, 0x01)
+#define viddec_h264_get_is_long_term(x)         h264_bitfields_extract( (x)->fs_flag_1, 6, 0x03)
+#define viddec_h264_set_is_frame_long_term(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x03)
+#define viddec_h264_set_is_top_long_term(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 6, 0x01)
+#define viddec_h264_set_is_bottom_long_term(x, val)    h264_bitfields_insert ( (x)->fs_flag_1, (val), 7, 0x01)
+        uint8_t  fs_flag_1;
+
+
+#define viddec_h264_get_is_non_existent(x)            h264_bitfields_extract( (x)->fs_flag_2, 0, 0x01)
+#define viddec_h264_set_is_non_existent(x, val)       h264_bitfields_insert ( (x)->fs_flag_2, (val), 0, 0x01)
+#define viddec_h264_get_is_output(x)                  h264_bitfields_extract( (x)->fs_flag_2, 1, 0x01)
+#define viddec_h264_set_is_output(x, val)             h264_bitfields_insert ( (x)->fs_flag_2, (val), 1, 0x01)
+#define viddec_h264_get_is_dangling(x)                h264_bitfields_extract( (x)->fs_flag_2, 2, 0x01)
+#define viddec_h264_set_is_dangling(x, val)           h264_bitfields_insert ( (x)->fs_flag_2, (val), 2, 0x01)
+#define viddec_h264_get_recovery_pt_picture(x)        h264_bitfields_extract( (x)->fs_flag_2, 3, 0x01)
+#define viddec_h264_set_recovery_pt_picture(x, val)   h264_bitfields_insert ( (x)->fs_flag_2, (val), 3, 0x01)
+#define viddec_h264_get_broken_link_picture(x)        h264_bitfields_extract( (x)->fs_flag_2, 4, 0x01)
+#define viddec_h264_set_broken_link_picture(x, val)   h264_bitfields_insert ( (x)->fs_flag_2, (val), 4, 0x01)
+#define viddec_h264_get_open_gop_entry(x)             h264_bitfields_extract( (x)->fs_flag_2, 5, 0x01)
+#define viddec_h264_set_open_gop_entry(x, val)        h264_bitfields_insert ( (x)->fs_flag_2, (val), 5, 0x01)
+#define viddec_h264_get_first_field_intra(x)          h264_bitfields_extract( (x)->fs_flag_2, 6, 0x01)
+#define viddec_h264_set_first_field_intra(x, val)     h264_bitfields_insert ( (x)->fs_flag_2, (val), 6, 0x01)
+        uint8_t  fs_flag_2;
+
+        uint8_t  fs_flag_reserve_1;
+        uint8_t  fs_flag_reserve_2;
+        uint8_t  fs_flag_reserve_3;
+
+        // If non-reference, may have skipped pixel decode
+        //uint8_t	non_ref_skipped;
+    } frame_store, *frame_param_ptr;
+
+//! Decoded Picture Buffer
+    // DPB bookkeeping: the frame-store array, reference lists and the
+    // to-be-allocated / removed / dropped / displayed frame queues consumed by the client.
+    typedef struct _h264_decoded_picture_buffer
+    {
+        ///
+        int32_t     last_output_poc;
+        int32_t     max_long_term_pic_idx;
+
+        //// Resolutions
+        int32_t		PicWidthInMbs;
+        int32_t		FrameHeightInMbs;
+
+        frame_store	fs[NUM_DPB_FRAME_STORES];
+        frame_store*    active_fs;           // frame store currently being decoded into
+
+        uint8_t		fs_ref_idc[16];
+        uint8_t		fs_ltref_idc[16];
+
+        uint8_t		fs_dpb_idc[NUM_DPB_FRAME_STORES+2];
+
+        uint8_t		listX_0[33+3];  // [bit5}:field_flag:0 for top, 1 for bottom, [bit4~0]:fs_idc
+        uint8_t		listX_1[33+3];
+
+        uint8_t		listXsize[2]; // 1 to 32
+        uint8_t		nInitListSize[2];
+
+        //uint32_t	size;
+        uint8_t		fs_dec_idc;
+        uint8_t		fs_non_exist_idc;
+        uint8_t		BumpLevel;
+        uint8_t		used_size;
+
+        uint8_t		OutputLevel;
+        uint8_t		OutputLevelValid;
+        uint8_t		OutputCtrl;
+        uint8_t     num_ref_frames;
+
+        uint8_t		ref_frames_in_buffer;
+        uint8_t		ltref_frames_in_buffer;
+        uint8_t		SuspendOutput;
+        uint8_t		WaitSeiRecovery;
+
+
+        uint8_t		frame_numbers_need_to_be_allocated;
+        uint8_t		frame_id_need_to_be_allocated;
+
+        //// frame list to release from dpb, need be displayed
+        uint8_t		frame_numbers_need_to_be_removed;
+        uint8_t		frame_id_need_to_be_removed[17];
+
+        //// frame list to removed from dpb but not display
+        uint8_t		frame_numbers_need_to_be_dropped;
+        uint8_t		frame_id_need_to_be_dropped[17];
+
+        //// frame list to display (in display order)
+        uint8_t		frame_numbers_need_to_be_displayed;
+        uint8_t		frame_id_need_to_be_displayed[17];
+
+
+    } h264_DecodedPictureBuffer;
+
+
+//////////////////////////////////////////////
+// qm_matrix_set
+    // Quantization (scaling) matrices in raster order; see the SPS_QM/PPS_QM type constants above.
+    typedef struct _qm_matrix_set
+    {
+// uint8_t scaling_default_vector;
+        uint8_t scaling_list[56];            // 0 to 23 for qm 0 to 5 (4x4), 24 to 55 for qm 6 & 7 (8x8)
+
+    } qm_matrix_set, *qm_matrix_set_ptr;
+
+    /*
+    ///////// Currently not enabled in parser fw///////////////////
+    typedef struct _h264_SPS_Extension_RBSP {
+    	int32_t 			seq_parameter_set_id;					//UE
+    	int32_t				aux_format_idc;							//UE
+    	int32_t				bit_depth_aux_minus8;					//UE
+    	int32_t				alpha_incr_flag;
+    	int32_t				alpha_opaque_value;
+    	int32_t				alpha_transparent_value;
+    	int32_t				additional_extension_flag;
+    //	h264_rbsp_trail_set* rbsp_trail_ptr;
+    }h264_SPS_Extension_RBSP_t;
+    */
+
+    // HRD (hypothetical reference decoder) parameters, one entry per CPB (VUI, Annex E).
+    typedef struct _h264_hrd_param_set {
+        int32_t				bit_rate_value_minus1[MAX_CPB_CNT];			// ue(v), 0 to (2^32)-2
+        int32_t				cpb_size_value_minus1[MAX_CPB_CNT];			// ue(v), 0 to (2^32)-2
+
+        uint8_t				cbr_flag[MAX_CPB_CNT];							// u(1) * 32
+
+    } h264_hrd_param_set, *h264_hrd_param_set_ptr;
+
+    // VUI fields the decoder actually consumes; kept inside the SPS display section.
+    typedef struct _vui_seq_parameters_t_used
+    {
+        uint32_t	num_units_in_tick;                             // u(32)
+        uint32_t	time_scale;                                    // u(32)
+
+        int32_t  num_reorder_frames;                               // ue(v), 0 to max_dec_frame_buffering
+        int32_t	max_dec_frame_buffering;                          // ue(v), 0 to MaxDpbSize, specified in subclause A.3
+
+        uint16_t	 sar_width;                                       // u(16)
+        uint16_t	 sar_height;                                      // u(16)
+
+        uint8_t   aspect_ratio_info_present_flag;                  // u(1)
+        uint8_t   aspect_ratio_idc;                                // u(8)
+        uint8_t   video_signal_type_present_flag;                  // u(1)
+        uint8_t   video_format;                                    // u(3)
+#ifdef VBP
+        uint8_t   video_full_range_flag;                           // u(1)
+        uint8_t   matrix_coefficients;                              // u(8)
+        uint32_t  bit_rate_value;                                   // derived bit rate (bits/s); see SPS parser
+#endif
+
+        uint8_t   colour_description_present_flag;                 // u(1)
+        uint8_t   colour_primaries;                                // u(8)
+        uint8_t   transfer_characteristics;                        // u(8)
+        uint8_t   timing_info_present_flag;                        // u(1)
+
+        uint8_t   fixed_frame_rate_flag;                           // u(1)
+        uint8_t   low_delay_hrd_flag;                              // u(1)
+        uint8_t   bitstream_restriction_flag;                      // u(1)
+        uint8_t   pic_struct_present_flag;
+
+        uint8_t   nal_hrd_parameters_present_flag;                 // u(1)
+        uint8_t 	 nal_hrd_cpb_removal_delay_length_minus1;				// u(5)
+        uint8_t   nal_hrd_dpb_output_delay_length_minus1;				// u(5)
+        uint8_t   nal_hrd_time_offset_length;								// u(5)
+
+        uint8_t   nal_hrd_cpb_cnt_minus1;									// ue(v), 0 to 31
+        uint8_t   nal_hrd_initial_cpb_removal_delay_length_minus1;	// u(5)
+        uint8_t   vcl_hrd_parameters_present_flag;                 // u(1)
+        uint8_t 	 vcl_hrd_cpb_removal_delay_length_minus1;				// u(5)
+
+        uint8_t   vcl_hrd_dpb_output_delay_length_minus1;				// u(5)
+        uint8_t   vcl_hrd_time_offset_length;								// u(5)
+        uint8_t   vcl_hrd_cpb_cnt_minus1;									// ue(v), 0 to 31
+        uint8_t   vcl_hrd_initial_cpb_removal_delay_length_minus1;	// u(5)
+
+        /////// Here should be kept as 32-bits aligned for next structures
+        /// 2 structures for NAL&VCL HRD
+
+
+    } vui_seq_parameters_t_used;
+
+
+    // VUI fields parsed but not required after parsing; lives in scratch storage,
+    // not in the persistent SPS (see pVUI_Seq_Not_Used usage in the SPS parser).
+    typedef struct _vui_seq_parameters_t_not_used
+    {
+        int16_t  chroma_sample_loc_type_top_field;                // ue(v)
+        int16_t  chroma_sample_loc_type_bottom_field;             // ue(v)
+
+        uint8_t   overscan_info_present_flag;                      // u(1)
+        uint8_t   overscan_appropriate_flag;                       // u(1)
+
+        uint8_t   video_full_range_flag;                           // u(1)
+        uint8_t   matrix_coefficients;                             // u(8)
+
+        uint8_t   chroma_location_info_present_flag;               // u(1)
+        uint8_t   max_bytes_per_pic_denom;                          // ue(v), 0 to 16
+        uint8_t   max_bits_per_mb_denom;                            // ue(v), 0 to 16
+        uint8_t   log2_max_mv_length_vertical;                      // ue(v), 0 to 16, default to 16
+        uint8_t   log2_max_mv_length_horizontal;                    // ue(v), 0 to 16, default to 16
+
+        uint8_t   motion_vectors_over_pic_boundaries_flag;          // u(1)
+
+        uint8_t   nal_hrd_bit_rate_scale;									// u(4)
+        uint8_t   nal_hrd_cpb_size_scale;									// u(4)
+
+        uint8_t   vcl_hrd_bit_rate_scale;									// u(4)
+        uint8_t   vcl_hrd_cpb_size_scale;									// u(4)
+
+        h264_hrd_param_set nal_hrd_parameters;
+        h264_hrd_param_set vcl_hrd_parameters;
+
+
+    } vui_seq_parameters_t_not_used, *vui_seq_parameters_t_not_used_ptr;
+
+
+//////////////////////////////////////////////
+// picture parameter set
+
+    // Picture parameter set (PPS) as parsed from the bitstream, plus the derived
+    // scaling lists used when pic_scaling_matrix_present_flag is set.
+    typedef struct _PPS_PAR
+    {
+        //int32_t DOUBLE_ALIGN valid;                          // indicates the parameter set is valid
+
+        int32_t pic_init_qp_minus26;                             // se(v), -26 to +25
+        int32_t pic_init_qs_minus26;                             // se(v), -26 to +25
+        int32_t chroma_qp_index_offset;                          // se(v), -12 to +12
+        int32_t second_chroma_qp_index_offset;
+
+        uint8_t pic_parameter_set_id;                            // ue(v), 0 to 255, restricted to 0 to 127 by MPD_CTRL_MAXPPS = 128
+        uint8_t seq_parameter_set_id;                            // ue(v), 0 to 31
+        uint8_t entropy_coding_mode_flag;                        // u(1)
+        uint8_t pic_order_present_flag;                          // u(1)
+
+        uint8_t num_slice_groups_minus1;                         // ue(v), shall be 0 for MP
+        // Below are not relevant for main profile...
+        uint8_t slice_group_map_type;                            // ue(v), 0 to 6
+        uint8_t num_ref_idx_l0_active;							// ue(v), 0 to 31
+        uint8_t num_ref_idx_l1_active;							// ue(v), 0 to 31
+
+        uint8_t weighted_pred_flag;                              // u(1)
+        uint8_t weighted_bipred_idc;                             // u(2)
+        uint8_t deblocking_filter_control_present_flag;          // u(1)
+        uint8_t constrained_intra_pred_flag;                     // u(1)
+
+        uint8_t redundant_pic_cnt_present_flag;                  // u(1)
+        uint8_t transform_8x8_mode_flag;
+        uint8_t pic_scaling_matrix_present_flag;
+        uint8_t pps_status_flag;
+
+        //// Keep here with 32-bits aligned
+        uint8_t	pic_scaling_list_present_flag[MAX_PIC_LIST_NUM];
+
+        qm_matrix_set	pps_qm;
+
+        uint8_t 		ScalingList4x4[6][16];
+        uint8_t 		ScalingList8x8[2][64];
+        uint8_t   	UseDefaultScalingMatrix4x4Flag[6+2];
+        uint8_t		UseDefaultScalingMatrix8x8Flag[6+2];
+
+    } pic_param_set, *pic_param_set_ptr, h264_PicParameterSet_t;
+
+    // Reordering argument: interpretation depends on reordering_of_pic_nums_idc
+    // (abs_diff_pic_num_minus1 for idc 0/1, long_term_pic_num for idc 2).
+    typedef union _list_reordering_num_t
+    {
+        int32_t abs_diff_pic_num_minus1;
+        int32_t long_term_pic_num;
+    } list_reordering_num_t;
+
+    // ref_pic_list_reordering() syntax from the slice header, one per reference list.
+    typedef struct _h264_Ref_Pic_List_Reordering				////size = 8*33+ 1 + 33
+    {
+        list_reordering_num_t list_reordering_num[MAX_NUM_REF_FRAMES+1];
+
+        uint8_t			ref_pic_list_reordering_flag;
+        uint8_t			reordering_of_pic_nums_idc[MAX_NUM_REF_FRAMES+1];							//UE
+
+    } h264_Ref_Pic_List_Reordering_t;
+
+    // Reason a field/frame is left incomplete ("dangling") in the DPB.
+    typedef enum _H264_DANGLING_TYPE
+    {
+        DANGLING_TYPE_LAST_FIELD,
+        DANGLING_TYPE_DPB_RESET,
+        DANGLING_TYPE_FIELD,
+        DANGLING_TYPE_FRAME,
+        DANGLING_TYPE_GAP_IN_FRAME
+
+    } H264_DANGLING_TYPE;
+
+
+    // dec_ref_pic_marking() syntax: up to NUM_MMCO_OPERATIONS memory-management
+    // control operations (MMCO) plus the IDR/adaptive-marking flags.
+    typedef struct _h264_Dec_Ref_Pic_Marking			//size = 17*4*2 + 17*3 + 4 + 1
+    {
+        int32_t		difference_of_pic_num_minus1[NUM_MMCO_OPERATIONS];
+        int32_t		long_term_pic_num[NUM_MMCO_OPERATIONS];
+
+        /// MMCO
+        uint8_t		memory_management_control_operation[NUM_MMCO_OPERATIONS];
+        uint8_t		max_long_term_frame_idx_plus1[NUM_MMCO_OPERATIONS];
+        uint8_t		long_term_frame_idx[NUM_MMCO_OPERATIONS];
+        uint8_t		long_term_reference_flag;
+
+        uint8_t		adaptive_ref_pic_marking_mode_flag;
+        uint8_t		dec_ref_pic_marking_count;
+        uint8_t		no_output_of_prior_pics_flag;
+
+        uint8_t		pad;
+    } h264_Dec_Ref_Pic_Marking_t;
+
+
+
+    // Snapshot of the previous slice header, kept for first-slice-of-new-picture detection.
+    typedef struct old_slice_par
+    {
+        int32_t		frame_num;
+        int32_t		pic_order_cnt_lsb;
+        int32_t		delta_pic_order_cnt_bottom;
+        int32_t		delta_pic_order_cnt[2];
+
+        uint8_t		field_pic_flag;
+        uint8_t		bottom_field_flag;
+        uint8_t		nal_ref_idc;
+        uint8_t		structure;
+
+        uint8_t		idr_flag;
+        uint8_t		idr_pic_id;
+        uint8_t		pic_parameter_id;
+        uint8_t		status;
+    } OldSliceParams;
+
+#ifdef VBP
+    // pred_weight_table() syntax: explicit weighted-prediction weights/offsets
+    // for both reference lists (exposed to the VBP client only).
+    typedef struct _h264__pred_weight_table
+    {
+        uint8_t luma_log2_weight_denom;
+        uint8_t chroma_log2_weight_denom;
+        uint8_t luma_weight_l0_flag;
+        int16_t luma_weight_l0[32];
+        int8_t luma_offset_l0[32];
+        uint8_t chroma_weight_l0_flag;
+        int16_t chroma_weight_l0[32][2];
+        int8_t chroma_offset_l0[32][2];
+
+        uint8_t luma_weight_l1_flag;
+        int16_t luma_weight_l1[32];
+        int8_t luma_offset_l1[32];
+        uint8_t chroma_weight_l1_flag;
+        int16_t chroma_weight_l1[32][2];
+        int8_t chroma_offset_l1[32][2];
+    } h264_pred_weight_table;
+#endif
+
+    // Fully parsed slice header, including the reference-list reordering and
+    // dec_ref_pic_marking sub-structures; sh_error records parse failures.
+    typedef struct _h264_Slice_Header
+    {
+        int32_t 		first_mb_in_slice;								//UE
+        int32_t		frame_num;											//UV
+        int32_t		pic_order_cnt_lsb;								//UV
+        int32_t		delta_pic_order_cnt_bottom;					//SE
+        int32_t		delta_pic_order_cnt[2];								//SE
+        int32_t		redundant_pic_cnt;									//UE
+
+        uint32_t		num_ref_idx_l0_active;								//UE
+        uint32_t		num_ref_idx_l1_active;								//UE
+
+        int32_t		slice_qp_delta;										//SE
+        int32_t		slice_qs_delta;										//SE
+        int32_t		slice_alpha_c0_offset_div2;						//SE
+        int32_t		slice_beta_offset_div2;								//SE
+        int32_t		slice_group_change_cycle;							//UV
+
+#ifdef VBP
+        h264_pred_weight_table  sh_predwttbl;
+#endif
+
+        ///// Flags or IDs
+        //h264_ptype_t	slice_type;											//UE
+        uint8_t			slice_type;
+        uint8_t 			nal_ref_idc;
+        uint8_t			structure;
+        uint8_t 			pic_parameter_id;									//UE
+
+        uint8_t			field_pic_flag;
+        uint8_t			bottom_field_flag;
+        uint8_t			idr_flag;											//UE
+        uint8_t			idr_pic_id;											//UE
+
+        uint8_t 			sh_error;
+        uint8_t			cabac_init_idc;										//UE
+        uint8_t			sp_for_switch_flag;
+        uint8_t			disable_deblocking_filter_idc;						//UE
+
+        uint8_t			direct_spatial_mv_pred_flag;
+        uint8_t			num_ref_idx_active_override_flag;
+        int16_t			current_slice_nr;
+
+        //// For Ref list reordering
+        h264_Dec_Ref_Pic_Marking_t sh_dec_refpic;
+        h264_Ref_Pic_List_Reordering_t sh_refpic_l0;
+        h264_Ref_Pic_List_Reordering_t sh_refpic_l1;
+
+    } h264_Slice_Header_t;
+
+
+#define   MAX_USER_DATA_SIZE              1024   // max buffered user-data SEI payload, in bytes
+    typedef struct _h264_user_data_t             // one buffered user-data SEI payload
+    {
+        h264_sei_payloadtype    user_data_type;  // which SEI payload this came from
+
+        int32_t    user_data_id;
+        int32_t    dsn;
+        int32_t    user_data_size;               // payload size; capped by MAX_USER_DATA_SIZE
+        int32_t    user_data[MAX_USER_DATA_SIZE>>2];   // payload stored as 32-bit words
+    } h264_user_data_t;
+
+// SPS DISPLAY parameters: seq_param_set_disp, *seq_param_set_disp_ptr;
+    typedef struct _SPS_DISP    // the subset of the SPS needed for display: resolution, cropping, VUI
+    {
+        ///// VUI info
+        vui_seq_parameters_t_used vui_seq_parameters;    //size =
+
+        ///// Resolution (in macroblock units, as coded in the SPS)
+        int16_t pic_width_in_mbs_minus1;
+        int16_t pic_height_in_map_units_minus1;
+
+        ///// Cropping (frame_crop_*_offset syntax elements; valid only when frame_cropping_flag is set)
+        int16_t frame_crop_rect_left_offset;
+        int16_t frame_crop_rect_right_offset;
+
+        int16_t frame_crop_rect_top_offset;
+        int16_t frame_crop_rect_bottom_offset;
+
+        ///// Flags
+        uint8_t frame_mbs_only_flag;
+        uint8_t mb_adaptive_frame_field_flag;
+        uint8_t direct_8x8_inference_flag;
+        uint8_t frame_cropping_flag;
+#ifdef VBP
+        uint8_t separate_colour_plane_flag;      // VBP builds only
+#endif
+
+        uint16_t vui_parameters_present_flag;
+        uint16_t chroma_format_idc;              // 0=mono, 1=4:2:0, 2=4:2:2, 3=4:4:4
+    } seq_param_set_disp, *seq_param_set_disp_ptr;
+
+
+////SPS: seq_param_set, *seq_param_set_ptr;
+
+    typedef struct _SPS_PAR_USED    // the SPS fields the decoder actually consumes (the unused VUI part lives in seq_param_set_all)
+    {
+        uint32_t    is_updated;     // set when this SPS slot has been (re)written; see h264_Parse_Check_Sps_Updated_Flag
+
+        /////////// Required for display section //////////////////////////
+        seq_param_set_disp sps_disp;
+
+        int32_t		expectedDeltaPerPOCCycle;                         // derived: sum of offset_for_ref_frame[], cached for POC type 1
+        int32_t 		offset_for_non_ref_pic;                           // se(v), -2^31 to (2^31)-1, 32-bit integer
+        int32_t 		offset_for_top_to_bottom_field;                   // se(v), -2^31 to (2^31)-1, 32-bit integer
+
+        /////////// IDC
+        uint8_t 		profile_idc;                                      // u(8), 0x77 for MP
+        uint8_t 		constraint_set_flags;                             // bit 0 to 3 for set0 to set3
+        uint8_t 		level_idc;                                        // u(8)
+        uint8_t 		seq_parameter_set_id;                             // ue(v), 0 to 31
+
+
+        uint8_t 		pic_order_cnt_type;                               // ue(v), 0 to 2
+        uint8_t 		log2_max_frame_num_minus4;                        // ue(v), 0 to 12
+        uint8_t 		log2_max_pic_order_cnt_lsb_minus4;                // ue(v), 0 to 12
+        uint8_t 		num_ref_frames_in_pic_order_cnt_cycle;            // ue(v), 0 to 255
+
+        //int32_t offset_for_ref_frame[MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE];   // se(v), -2^31 to (2^31)-1, 32-bit integer
+        uint8_t 		num_ref_frames;                                   // ue(v), 0 to 16,
+        uint8_t 		gaps_in_frame_num_value_allowed_flag;             // u(1)
+        // This is my addition, we should calculate this once and leave it with the sps
+        // as opposed to calculating it each time in h264_hdr_decoding_POC()
+
+        uint8_t 		delta_pic_order_always_zero_flag;                 // u(1)
+        uint8_t		residual_colour_transform_flag;
+
+        uint8_t		bit_depth_luma_minus8;
+        uint8_t		bit_depth_chroma_minus8;
+        uint8_t		lossless_qpprime_y_zero_flag;
+        uint8_t		seq_scaling_matrix_present_flag;
+
+        uint8_t		seq_scaling_list_present_flag[MAX_PIC_LIST_NUM];			//0-7
+
+        //// Combine the scaling matrix to word ( 24 + 32)
+        uint8_t 		ScalingList4x4[6][16];                            // six 4x4 lists, zig-zag order
+        uint8_t 		ScalingList8x8[2][64];                            // NOTE(review): only 2 8x8 lists stored here...
+        uint8_t		UseDefaultScalingMatrix4x4Flag[6];
+        uint8_t		UseDefaultScalingMatrix8x8Flag[6];                // ...but 6 flags — confirm the intended 8x8 list count
+
+    } seq_param_set_used, *seq_param_set_used_ptr;
+
+
+    typedef struct _SPS_PAR_ALL    // full SPS: the decoder-consumed part plus the VUI fields the decoder ignores
+    {
+
+        seq_param_set_used  sps_par_used;
+        vui_seq_parameters_t_not_used sps_vui_par_not_used;
+
+    } seq_param_set_all, *seq_param_set_all_ptr;
+
+
+///// Image control parameter////////////
+    typedef struct _h264_img_par    // per-picture decoding state: frame counters, POC derivation variables, resolution, flags
+    {
+        int32_t frame_num;				// decoding num of current frame
+        int32_t frame_count;				// count of decoded frames
+        int32_t current_slice_num;
+        int32_t gaps_in_frame_num;
+
+        // POC decoding (copies of the active SPS/slice-header values used by the POC computation)
+        int32_t num_ref_frames_in_pic_order_cnt_cycle;
+        int32_t delta_pic_order_always_zero_flag;
+        int32_t offset_for_non_ref_pic;
+        int32_t offset_for_top_to_bottom_field;
+
+        int32_t pic_order_cnt_lsb;
+        int32_t pic_order_cnt_msb;
+        int32_t delta_pic_order_cnt_bottom;
+        int32_t delta_pic_order_cnt[2];
+
+        int32_t PicOrderCntMsb;           // spec-named POC derivation variables (H.264 8.2.1)
+        int32_t CurrPicOrderCntMsb;
+        int32_t PrevPicOrderCntLsb;
+
+        int32_t FrameNumOffset;
+
+        int32_t PreviousFrameNum;
+        int32_t PreviousFrameNumOffset;
+
+        int32_t toppoc;                   // POC of top field
+        int32_t bottompoc;                // POC of bottom field
+        int32_t framepoc;                 // POC of the frame (min of the field POCs)
+        int32_t ThisPOC;
+
+        //int32_t sei_freeze_this_image;
+
+        ///////////////////// Resolutions (derived, in macroblocks)
+        int32_t PicWidthInMbs;
+        int32_t FrameHeightInMbs;
+
+        ///////////////////// MMCO (memory_management_control_operation == 5 tracking)
+        uint8_t last_has_mmco_5;
+        uint8_t curr_has_mmco_5;
+
+        /////////////////// Flags
+        uint8_t g_new_frame;
+        uint8_t g_new_pic;
+
+        uint8_t structure;              // frame / top-field / bottom-field of current picture
+        uint8_t second_field;           // Set to one if this is the second field of a set of paired fields...
+        uint8_t field_pic_flag;
+        uint8_t last_pic_bottom_field;
+
+        uint8_t bottom_field_flag;
+        uint8_t MbaffFrameFlag;
+        uint8_t no_output_of_prior_pics_flag;
+        uint8_t long_term_reference_flag;
+
+        uint8_t skip_this_pic;
+        uint8_t pic_order_cnt_type;
+        // Recovery
+        uint8_t recovery_point_found;   // set once a recovery-point SEI has been seen
+        uint8_t used_for_reference;
+    } h264_img_par;
+
+
+    typedef struct  _h264_slice_reg_data     // packed slice data emitted to the hardware BSD registers (offsets in comments)
+    {
+        uint32_t h264_bsd_slice_p1;      // 0x150
+        //uint32_t h264_mpr_list0[8];       // from 0x380 to 0x3BC
+        uint32_t h264_bsd_slice_p2;      // 0x154
+        uint32_t h264_bsd_slice_start;   // 0x158
+
+    } h264_slice_data;
+
+
+    typedef struct  _h264_pic_data       // packed picture data emitted to the hardware registers (offsets in comments)
+    {
+        uint32_t h264_dpb_init;          // 0x40
+        //info For current pic
+        uint32_t h264_cur_bsd_img_init;      // 0x140
+        uint32_t h264_cur_mpr_tf_poc;        // 0x300 (top-field POC)
+        uint32_t h264_cur_mpr_bf_poc;        // 0x304 (bottom-field POC)
+
+        //info For framess in DPB
+        //uint32_t h264_dpb_bsd_img_init[16];      //0x140
+        //uint32_t h264_dpb_mpr_tf_poc[16];        // 0x300
+        //uint32_t h264_dpb_mpr_bf_poc[16];        // 0x304
+    } h264_pic_data;
+
+    enum h264_workload_item_type    // H.264-specific workload item IDs, allocated after the decoder-specific base
+    {
+        VIDDEC_WORKLOAD_H264_SLICE_REG = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+        VIDDEC_WORKLOAD_H264_PIC_REG,
+        VIDDEC_WORKLOAD_H264_DPB_FRAME_POC,
+        VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET,
+        VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET,   // PWT = pred-weight table
+        VIDDEC_WORKLOAD_H264_PWT_ES_BYTES,
+        VIDDEC_WORKLOAD_H264_SCALING_MATRIX,
+        VIDDEC_WORKLOAD_H264_DEBUG
+    };
+
+
+
+////////////////////////////////////////////
+    /* Full Info set*/
+////////////////////////////////////////////
+    typedef struct _h264_Info    // top-level parser state: DPB, active parameter sets, current slice, image state, workload flags
+    {
+
+        h264_DecodedPictureBuffer           dpb;
+
+        //// Structures
+        //// need to guarantee active_SPS and active_PPS start from 4-bytes alignment address
+        seq_param_set_used	active_SPS;
+        pic_param_set			active_PPS;
+
+
+        h264_Slice_Header_t  SliceHeader;   // slice header currently being parsed
+        OldSliceParams       old_slice;     // previous slice header, for new-picture detection
+        sei_info             sei_information;
+
+        h264_img_par      img;
+
+        uint32_t          SPS_PADDR_GL;     // DDR physical addresses for the SPS/PPS store (see h264_Parse_Copy_*_To_DDR)
+        uint32_t          PPS_PADDR_GL;
+        uint32_t          OFFSET_REF_FRAME_PADDR_GL;
+        uint32_t				TMP_OFFSET_REFFRM_PADDR_GL;
+
+        uint32_t          h264_list_replacement;
+
+        uint32_t          h264_pwt_start_byte_offset;   // pred-weight-table span within the ES, for the workload
+        uint32_t          h264_pwt_start_bit_offset;
+        uint32_t          h264_pwt_end_byte_offset;
+        uint32_t          h264_pwt_end_bit_offset;
+        uint32_t          h264_pwt_enabled;
+
+        uint32_t          sps_valid;
+
+        uint8_t           slice_ref_list0[32];   // reference lists built for the current slice
+        uint8_t           slice_ref_list1[32];
+
+
+        uint8_t           qm_present_list;       // bitmask of scaling (quant-matrix) lists present
+        //h264_NAL_Unit_t
+        uint8_t           nal_unit_type;
+        uint8_t           old_nal_unit_type;
+        uint8_t    			got_start;
+
+        //workload
+        uint8_t           push_to_cur;
+        uint8_t           Is_first_frame_in_stream;
+        uint8_t           Is_SPS_updated;
+        uint8_t           number_of_first_au_info_nal_before_first_slice;
+
+        uint8_t           is_frame_boundary_detected_by_non_slice_nal;
+        uint8_t           is_frame_boundary_detected_by_slice_nal;
+        uint8_t           is_current_workload_done;
+        uint8_t			 primary_pic_type_plus_one;	  //AUD---[0,7]
+
+        //Error handling
+        uint8_t			sei_rp_received;      // recovery-point SEI seen
+        uint8_t			last_I_frame_idc;
+        uint8_t			sei_b_state_ready;
+        uint8_t			gop_err_flag;
+
+
+        uint32_t		wl_err_curr;             // workload error bits, current / next workload
+        uint32_t		wl_err_next;
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        uint32_t                sw_bail;     // NOTE(review): guard macro name typo ("CONCEALEMNT") kept — it must match the build flag
+#endif
+#endif
+    } h264_Info;
+
+
+
+    struct h264_viddec_parser    // parser context handed to the viddec framework: persistent DDR address + full parse state
+    {
+        uint32_t     sps_pps_ddr_paddr;   // physical address of the persistent SPS/PPS store in DDR
+        h264_Info    info;
+    };
+
+#ifdef __cplusplus
+}
+#endif
+
+#ifdef USE_AVC_SHORT_FORMAT
+#define MAX_OP  16
+
+typedef struct _dec_ref_pic_marking_t {   // compact dec_ref_pic_marking() capture for the short-format (AVC) path
+    union {
+        uint8_t flags;                    // all four flags packed in one byte
+        struct {
+            uint8_t idr_pic_flag:1;
+            uint8_t no_output_of_prior_pics_flag:1;
+            uint8_t long_term_reference_flag:1;
+            uint8_t adaptive_ref_pic_marking_mode_flag:1;
+        };
+    };
+    struct {                              // up to MAX_OP MMCO operations; union arm selected by the opcode
+        uint8_t memory_management_control_operation;
+        union {
+            struct {
+                uint8_t difference_of_pic_nums_minus1;
+            } op1;
+            struct {
+                uint8_t long_term_pic_num;
+            } op2;
+            struct {
+                uint8_t difference_of_pic_nums_minus1;
+                uint8_t long_term_frame_idx;
+            } op3;
+            struct {
+                uint8_t max_long_term_frame_idx_plus1;
+            } op4;
+            struct {
+                uint8_t long_term_frame_idx;
+            } op6;
+        };
+    } op[MAX_OP];
+} dec_ref_pic_marking_t;
+
+
+typedef struct _slice_header_t {   // minimal slice-header capture for the short-format (AVC) path
+    uint8_t nal_unit_type;
+    uint8_t pps_id;
+    uint8_t padding;                // keeps the following union byte-aligned
+    union {
+        uint8_t flags;
+        struct {
+            uint8_t field_pic_flag:1;
+            uint8_t bottom_field_flag:1;
+        };
+    };
+    uint32_t first_mb_in_slice;
+    uint32_t frame_num;
+    uint16_t idr_pic_id;
+    uint16_t pic_order_cnt_lsb;
+    int32_t delta_pic_order_cnt[2];
+    int32_t delta_pic_order_cnt_bottom;
+} slice_header_t;
+
+
+
+
+typedef struct _vbp_h264_sliceheader {    // what VBP exports per slice in short format: header + ref-pic marking
+    slice_header_t          slice_header;
+    dec_ref_pic_marking_t   ref_pic_marking;
+} vbp_h264_sliceheader;
+
+#endif
+
+
+
+#endif  //_H264_H_
+
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264parse.h b/mixvbp/vbp_plugin/h264/include/h264parse.h
new file mode 100755
index 0000000..2e7b817
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264parse.h
@@ -0,0 +1,179 @@
+#ifndef __H264PARSE_H_
+#define __H264PARSE_H_
+
+#include "h264.h"
+
+#ifndef MFD_FIRMWARE
+#define true 1
+#define false 0
+#endif
+
+////////////////////////////////////////////////////////////////////
+// The following part is only for Parser Debug
+///////////////////////////////////////////////////////////////////
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+    enum h264_debug_point_id    // IDs passed to MFD_PARSER_DEBUG; warnings start at 0xff000000, errors at 0xffff0000
+    {
+        WARNING_H264_GENERAL = 0xff000000,
+        WARNING_H264_DPB,
+        WARNING_H264_REFLIST,
+        WARNING_H264_SPS,
+        WARNING_H264_PPS,
+        WARNING_H264_SEI,
+        WARNING_H264_VCL,
+
+        ERROR_H264_GENERAL = 0xffff0000,
+        ERROR_H264_DPB,
+        ERROR_H264_REFLIST,
+        ERROR_H264_SPS,
+        ERROR_H264_PPS,
+        ERROR_H264_SEI,
+        ERROR_H264_VCL
+    };
+
+    static inline void MFD_PARSER_DEBUG(int debug_point_id)   // emit a parser debug marker; no-op unless H264_MFD_DEBUG is defined
+    {
+#ifdef H264_MFD_DEBUG
+
+        int p1,p2,p3,p4,p5,p6;
+
+        p1 = 0x0BAD;            // fixed 0x0BADC0DE signature in p1/p2, debug point ID in p3
+        p2 = 0xC0DE;
+        p3 = debug_point_id;
+        p4=p5=p6 = 0;
+
+        DEBUG_WRITE(p1,p2,p3,p4,p5,p6);
+#endif
+
+        debug_point_id = debug_point_id;   // self-assignment to silence unused-parameter warnings in non-debug builds
+
+        return;
+    }
+
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Init functions
+////////////////////////////////////////////////////////////////////
+    extern void h264_init_old_slice(h264_Info* pInfo);
+    extern void h264_init_img(h264_Info* pInfo);
+    extern void h264_init_Info(h264_Info* pInfo);
+    extern void h264_init_Info_under_sps_pps_level(h264_Info* pInfo);
+    extern void h264_init_sps_pps(struct h264_viddec_parser* parser, uint32_t *persist_mem);
+
+    extern void h264_update_old_slice(h264_Info * pInfo,h264_Slice_Header_t next_SliceHeader);
+    extern void h264_sei_stream_initialise (h264_Info* pInfo);
+    extern void h264_update_img_info(h264_Info * pInfo );
+    extern void h264_update_frame_type(h264_Info * pInfo );
+
+    extern int32_t h264_check_previous_frame_end(h264_Info * pInfo);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// bsd functions
+////////////////////////////////////////////////////////////////////
+    extern uint8_t h264_More_RBSP_Data(void *parent, h264_Info * pInfo);
+////// VLE and bit operation
+    extern uint32_t h264_get_codeNum(void *parent,h264_Info* pInfo);
+    extern int32_t h264_GetVLCElement(void *parent,h264_Info* pInfo, uint8_t bIsSigned);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// parse functions
+////////////////////////////////////////////////////////////////////
+
+//NAL
+    extern h264_Status h264_Parse_NAL_Unit(void *parent, h264_Info* pInfo, uint8_t *nal_ref_idc);
+
+////// Slice header
+    extern h264_Status h264_Parse_Slice_Layer_Without_Partitioning_RBSP(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+    extern h264_Status h264_Parse_Slice_Header_1(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+    extern h264_Status h264_Parse_Slice_Header_2(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+    extern h264_Status h264_Parse_Slice_Header_3(void *parent, h264_Info* pInfo, h264_Slice_Header_t *SliceHeader);
+
+
+////// SPS
+    extern h264_Status h264_Parse_SeqParameterSet(void *parent, h264_Info * pInfo,seq_param_set_used_ptr SPS, vui_seq_parameters_t_not_used_ptr pVUI_Seq_Not_Used, int32_t* pOffset_ref_frame);
+//extern h264_Status h264_Parse_SeqParameterSet_Extension(void *parent, h264_Info * pInfo);
+    extern h264_Status h264_Parse_PicParameterSet(void *parent, h264_Info * pInfo,h264_PicParameterSet_t* PictureParameterSet);
+
+////// SEI functions
+    h264_Status h264_Parse_Supplemental_Enhancement_Information_Message(void *parent,h264_Info* pInfo);
+    h264_Status h264_SEI_payload(void *parent, h264_Info* pInfo, h264_sei_payloadtype payloadType, int32_t payloadSize);
+
+//////
+    extern h264_Status h264_Scaling_List(void *parent, uint8_t *scalingList, int32_t sizeOfScalingList, uint8_t  *UseDefaultScalingMatrix, h264_Info* pInfo);
+    extern h264_Status h264_Parse_Ref_Pic_List_Reordering(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+    extern h264_Status h264_Parse_Pred_Weight_Table(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+    extern h264_Status h264_Parse_Dec_Ref_Pic_Marking(void *parent,h264_Info* pInfo,h264_Slice_Header_t *SliceHeader);
+
+
+
+///// Mem functions
+    extern void* h264_memset( void* buf, uint32_t c, uint32_t num );
+    extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+    extern void h264_Parse_Copy_Sps_To_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+    extern void h264_Parse_Copy_Sps_From_DDR(h264_Info* pInfo, seq_param_set_used_ptr SPS, uint32_t nSPSId);
+
+    extern void h264_Parse_Copy_Pps_To_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+    extern void h264_Parse_Copy_Pps_From_DDR(h264_Info* pInfo, pic_param_set_ptr PPS, uint32_t nPPSId);
+
+    extern void h264_Parse_Copy_Offset_Ref_Frames_To_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+    extern void h264_Parse_Copy_Offset_Ref_Frames_From_DDR(h264_Info* pInfo, int32_t* pOffset_ref_frames, uint32_t nSPSId);
+    extern uint32_t h264_Parse_Check_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+    extern void h264_Parse_Clear_Sps_Updated_Flag(h264_Info* pInfo, uint32_t nSPSId);
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// workload functions
+////////////////////////////////////////////////////////////////////
+
+    extern void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo );
+
+    extern void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo );
+
+    extern void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo );
+    extern void h264_parse_emit_eos( void *parent, h264_Info *pInfo );
+#ifdef __cplusplus
+}
+#endif
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions
+////////////////////////////////////////////////////////////////////
+extern int32_t h264_is_new_picture_start(h264_Info* pInfo, h264_Slice_Header_t cur_slice, h264_Slice_Header_t old_slice);
+extern int32_t h264_is_second_field(h264_Info * pInfo);
+///// Math functions
+uint32_t ldiv_mod_u(uint32_t a, uint32_t b, uint32_t * mod);
+uint32_t mult_u(uint32_t var1, uint32_t var2);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// utils functions outside h264
+////////////////////////////////////////////////////////////////////
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+extern void *memcpy(void *dest, const void *src, uint32_t n);
+extern uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap);
+extern int32_t viddec_pm_get_bits(void *parent, uint32_t *data, uint32_t num_bits);
+extern int32_t viddec_pm_peek_bits(void *parent, uint32_t *data, uint32_t num_bits);
+
+
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Second level parse functions
+////////////////////////////////////////////////////////////////////
+
+#endif  ////__H264PARSE_H_
+
+
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264parse_dpb.h b/mixvbp/vbp_plugin/h264/include/h264parse_dpb.h
new file mode 100755
index 0000000..f7935a4
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264parse_dpb.h
@@ -0,0 +1,109 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    H.264 DPB (decoded picture buffer) control header.
+//
+*/
+
+
+#ifndef _H264_DPB_CTL_H_
+#define _H264_DPB_CTL_H_
+
+
+#include "h264.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+////////////////////////////////////////////////////////////////////
+///////////////////////////// Parser control functions
+////////////////////////////////////////////////////////////////////
+
+///// Reference list
+    extern void h264_dpb_update_ref_lists(h264_Info * pInfo);
+    extern void h264_dpb_reorder_lists(h264_Info * pInfo);
+
+    extern void h264_dpb_insert_ref_lists(h264_DecodedPictureBuffer * p_dpb,int32_t NonExisting);
+
+///// POC
+    extern void h264_hdr_decoding_poc (h264_Info * pInfo,int32_t NonExisting, int32_t frame_num);
+    extern void h264_hdr_post_poc(h264_Info* pInfo,int32_t NonExisting, int32_t frame_num, int32_t use_old);
+
+///// DPB buffer management
+    extern void h264_init_dpb(h264_DecodedPictureBuffer * p_dpb);
+
+    extern void h264_dpb_unmark_for_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+    extern void h264_dpb_unmark_for_long_term_reference(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+    extern void h264_dpb_unmark_long_term_frame_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx);
+    extern void h264_dpb_unmark_long_term_field_for_reference_by_frame_idx(h264_DecodedPictureBuffer *p_dpb, int32_t long_term_frame_idx, int32_t fs_idc, int32_t polarity);
+    extern void h264_dpb_mark_pic_long_term(h264_Info * pInfo, int32_t long_term_frame_idx, int32_t picNumX);
+    extern void h264_dpb_mark_dangling_field(h264_DecodedPictureBuffer *p_dpb, int32_t fs_idc);
+
+    extern void h264_dpb_update_queue_dangling_field(h264_Info * pInfo);
+    extern void h264_dpb_is_used_for_reference(h264_DecodedPictureBuffer * p_dpb, int32_t * flag);
+
+
+    extern void h264_dpb_set_active_fs(h264_DecodedPictureBuffer * p_dpb,int32_t index);
+    extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+    extern void h264_dpb_idr_memory_management (h264_Info * pInfo,
+            seq_param_set_used_ptr active_sps,
+            int32_t no_output_of_prior_pics_flag);
+
+    extern void h264_dpb_init_frame_store(h264_Info * pInfo);
+    extern void h264_dpb_reset_dpb(h264_Info * pInfo,int32_t PicWidthInMbs, int32_t FrameHeightInMbs,
+                                   int32_t SizeChange, int32_t no_output_of_prior_pics_flag);
+
+    extern void h264_dpb_gaps_in_frame_num_mem_management(h264_Info * pInfo);
+
+    extern int32_t h264_dpb_assign_frame_store(h264_Info * pInfo, int32_t NonExisting);
+
+    extern void h264_dpb_get_smallest_poc(h264_DecodedPictureBuffer *p_dpb, int32_t *poc, int32_t *pos);
+    extern void h264_dpb_remove_unused_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t * flag);
+
+    extern void h264_dpb_sliding_window_memory_management(h264_DecodedPictureBuffer *p_dpb,
+            int32_t NonExisting,
+            int32_t num_ref_frames);
+    extern int32_t h264_dpb_queue_update(h264_Info * pInfo,
+                                         int32_t push,
+                                         int32_t direct,
+                                         int32_t frame_request,
+                                         int32_t num_ref_frames);
+
+    extern void h264_dpb_split_field (h264_DecodedPictureBuffer *p_dpb, h264_Info * pInfo);
+    extern void h264_dpb_combine_field(h264_DecodedPictureBuffer *p_dpb, int32_t use_old);
+
+    extern void h264_dpb_insert_picture_in_dpb(h264_Info * pInfo,
+            int32_t used_for_reference,
+            int32_t add2dpb,
+            int32_t NonExisting,
+            int32_t use_old);
+
+    extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+            int32_t NonExisting,
+            int32_t use_old);
+
+    extern void h264_dpb_adaptive_memory_management (h264_Info * pInfo);
+
+    extern int32_t h264_dpb_output_one_frame_from_dpb(h264_Info* pInfo,
+            int32_t direct, int32_t request, int32_t num_ref_frames);
+
+    extern void h264_dpb_remove_frame_from_dpb(h264_DecodedPictureBuffer *p_dpb, int32_t idx);
+    extern void h264_dpb_frame_output(h264_Info * pInfo,int32_t fs_idc, int32_t direct, int32_t * existing);
+
+//////////////////////////////////////////////////////////// Globals
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif  //_H264_DPB_CTL_H_
+
+
diff --git a/mixvbp/vbp_plugin/h264/include/h264parse_sei.h b/mixvbp/vbp_plugin/h264/include/h264parse_sei.h
new file mode 100755
index 0000000..f0a591d
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/include/h264parse_sei.h
@@ -0,0 +1,314 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    MPEG-4 header.
+//
+*/
+
+
+#ifndef _H264_SEI_H_
+#define _H264_SEI_H_
+
+#include "h264.h"
+
+
+//defines for SEI
+#define MAX_CPB_CNT					32         // max cpb_cnt_minus1 + 1 entries in buffering-period SEI
+#define MAX_NUM_CLOCK_TS			3          // max clock timestamps per pic-timing SEI
+#define MAX_PAN_SCAN_CNT			3          // max pan-scan rectangles per SEI
+#define MAX_NUM_SPARE_PICS			16         // max spare pictures per spare-pic SEI
+#define MAX_SUB_SEQ_LAYERS   		256
+#define MAX_SLICE_GRPS				1    // for high profile
+#define NUM_SPS						32
+#define MAX_NUM_REF_SUBSEQS			256
+
+
+#define SEI_SCAN_FORMAT_INTERLACED      0x1
+#define SEI_SCAN_FORMAT_PROGRESSIVE     0x3
+#define SEI_SCAN_FORMAT_VALID(r)        (r&0x1)    // bit 0: scan format known
+#define SEI_SCAN_FORMAT(r)              ((r&0x2)>>1)   // bit 1: 1 = progressive, 0 = interlaced
+
+typedef enum    // SEI payloadType values, in the order defined by H.264 Annex D (0 = buffering period, ...)
+{
+    SEI_BUF_PERIOD = 0,
+    SEI_PIC_TIMING,
+    SEI_PAN_SCAN,
+    SEI_FILLER_PAYLOAD,
+    SEI_REG_USERDATA,
+    SEI_UNREG_USERDATA,
+    SEI_RECOVERY_POINT,
+    SEI_DEC_REF_PIC_MARKING_REP,
+    SEI_SPARE_PIC,
+    SEI_SCENE_INFO,
+    SEI_SUB_SEQ_INFO,
+    SEI_SUB_SEQ_LAYER,
+    SEI_SUB_SEQ,
+    SEI_FULL_FRAME_FREEZE,
+    SEI_FULL_FRAME_FREEZE_RELEASE,
+    SEI_FULL_FRAME_SNAPSHOT,
+    SEI_PROGRESSIVE_SEGMENT_START,
+    SEI_PROGRESSIVE_SEGMENT_END,
+    SEI_MOTION_CONSTRAINED_SLICE_GRP_SET,
+    SEI_FILM_GRAIN_CHARACTERISTICS,
+    SEI_DEBLK_FILTER_DISPLAY_PREFERENCE,
+    SEI_STEREO_VIDEO_INFO,
+    SEI_RESERVED,          // anything at or beyond this value is treated as reserved
+} h264_sei_payloadtype;
+
+
+
+typedef struct _h264_SEI_buffering_period    // buffering-period SEI: initial CPB removal delays for NAL and VCL HRD
+{
+    int32_t seq_param_set_id;
+    int32_t initial_cpb_removal_delay_nal;
+    int32_t initial_cpb_removal_delay_offset_nal;
+    int32_t initial_cpb_removal_delay_vcl;
+    int32_t initial_cpb_removal_delay_offset_vcl;
+
+} h264_SEI_buffering_period_t;
+
+typedef struct _h264_SEI_pic_timing    // pic-timing SEI: CPB/DPB delays and picture structure
+{
+    int32_t cpb_removal_delay;
+    int32_t dpb_output_delay;
+    int32_t pic_struct;                // frame/field/repeat structure code
+} h264_SEI_pic_timing_t;
+
+#if 0
+int32_t clock_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t ct_type[MAX_NUM_CLOCK_TS];
+int32_t nuit_field_based_flag[MAX_NUM_CLOCK_TS];
+int32_t counting_type[MAX_NUM_CLOCK_TS];
+int32_t full_timestamp_flag[MAX_NUM_CLOCK_TS];
+int32_t discontinuity_flag[MAX_NUM_CLOCK_TS];
+int32_t cnt_dropped_flag[MAX_NUM_CLOCK_TS];
+int32_t n_frames[MAX_NUM_CLOCK_TS];
+int32_t seconds_value[MAX_NUM_CLOCK_TS];
+int32_t minutes_value[MAX_NUM_CLOCK_TS];
+int32_t hours_value[MAX_NUM_CLOCK_TS];
+int32_t seconds_flag[MAX_NUM_CLOCK_TS];
+int32_t minutes_flag[MAX_NUM_CLOCK_TS];
+int32_t hours_flag[MAX_NUM_CLOCK_TS];
+int32_t time_offset[MAX_NUM_CLOCK_TS];
+
+#endif
+
+typedef struct _h264_SEI_pan_scan_rectangle    // pan-scan SEI: up to MAX_PAN_SCAN_CNT display rectangles
+{
+    int32_t pan_scan_rect_id;
+    int32_t pan_scan_rect_cancel_flag;
+    int32_t pan_scan_cnt_minus1;
+    int32_t pan_scan_rect_left_offset[MAX_PAN_SCAN_CNT];
+    int32_t pan_scan_rect_right_offset[MAX_PAN_SCAN_CNT];
+    int32_t pan_scan_rect_top_offset[MAX_PAN_SCAN_CNT];
+    int32_t pan_scan_rect_bottom_offset[MAX_PAN_SCAN_CNT];
+    int32_t pan_scan_rect_repetition_period;
+} h264_SEI_pan_scan_rectangle_t;
+
+typedef struct _h264_SEI_filler_payload    // filler-payload SEI (content is discarded)
+{
+    int32_t ff_byte;
+} h264_SEI_filler_payload_t;
+
+typedef struct _h264_SEI_userdata_registered    // ITU-T T.35 registered user data SEI
+{
+    int32_t itu_t_t35_country_code;
+    int32_t itu_t_t35_country_code_extension_byte;
+    int32_t itu_t_t35_payload_byte;
+} h264_SEI_userdata_registered_t;
+
+typedef struct _h264_SEI_userdata_unregistered    // unregistered user data SEI, identified by a 128-bit UUID
+{
+    int32_t uuid_iso_iec_11578[4];
+    int32_t user_data_payload_byte;
+} h264_SEI_userdata_unregistered_t;
+
+typedef struct _h264_SEI_recovery_point    // recovery-point SEI: where decoding can resume cleanly after random access
+{
+    int32_t recovery_frame_cnt;
+    int32_t exact_match_flag;
+    int32_t broken_link_flag;
+    int32_t changing_slice_group_idc;
+} h264_SEI_recovery_point_t;
+
+typedef struct _h264_SEI_decoded_ref_pic_marking_repetition    // repeats the dec_ref_pic_marking() of an earlier picture
+{
+    int32_t original_idr_flag;
+    int32_t original_frame_num;
+    int32_t orignal_field_pic_flag;            // NOTE(review): identifier typo ("orignal") kept — changing it would break users
+    int32_t original_bottom_field_pic_flag;
+    int32_t no_output_of_prior_pics_flag;
+    int32_t long_term_reference_flag;
+    int32_t adaptive_ref_pic_marking_mode_flag;
+    int32_t memory_management_control_operation;				//UE
+    int32_t difference_of_pics_num_minus1;						//UE
+    int32_t long_term_pic_num;									//UE
+    int32_t long_term_frame_idx;								//UE
+    int32_t max_long_term_frame_idx_plus1;						//UE
+} h264_SEI_decoded_ref_pic_marking_repetition_t;
+
+typedef struct _h264_SEI_spare_picture    // spare-picture SEI: pictures usable as substitutes for lost references
+{
+    int32_t target_frame_num;
+    int32_t spare_field_flag;
+    int32_t target_bottom_field_flag;
+    int32_t num_spare_pics_minus1;
+    int32_t delta_spare_frame_num[MAX_NUM_SPARE_PICS];
+    int32_t spare_bottom_field_flag[MAX_NUM_SPARE_PICS];
+    int32_t spare_area_idc[MAX_NUM_SPARE_PICS];				// not complete
+} h264_SEI_spare_picture_t;
+
+typedef struct _h264_SEI_scene_info    // scene-info SEI
+{
+    int32_t scene_info_present_flag;
+    int32_t scene_id;
+    int32_t scene_transitioning_type;
+    int32_t second_scene_id;
+} h264_SEI_scene_info_t;
+
+typedef struct _h264_SEI_sub_sequence_info    // sub-sequence-info SEI
+{
+    int32_t sub_seq_layer_num;
+    int32_t sub_seq_id;
+    int32_t first_ref_pic_flag;
+    int32_t leading_non_ref_pic_flag;
+    int32_t last_pic_flag;
+    int32_t sub_seq_frame_num_flag;
+    int32_t sub_seq_frame_num;
+} h264_SEI_sub_sequence_info_t;
+
+typedef struct _h264_SEI_sub_sequence_layer    // sub-sequence-layer SEI: per-layer statistics
+{
+    int32_t num_sub_seq_layers_minus1;
+    int32_t accurate_statistics_flag[MAX_SUB_SEQ_LAYERS];
+    int32_t average_bit_rate[MAX_SUB_SEQ_LAYERS];
+    int32_t average_frame_rate[MAX_SUB_SEQ_LAYERS];
+} h264_SEI_sub_sequence_layer_t;
+
+typedef struct _h264_SEI_sub_sequence    // sub-sequence characteristics SEI
+{
+    int32_t sub_seq_layer_num;
+    int32_t sub_seq_id;
+    int32_t duration_flag;
+    int32_t sub_seq_duration;
+    int32_t average_rate_flag;
+    int32_t average_statistics_flag;
+    int32_t average_bit_rate;
+    int32_t average_frame_rate;
+    int32_t num_referenced_subseqs;
+    int32_t ref_sub_seq_layer_num;
+    int32_t ref_sub_seq_id;
+    int32_t ref_sub_seq_direction;
+} h264_SEI_sub_sequence_t;
+
+typedef struct _h264_SEI_full_frame_freeze    // full-frame-freeze SEI
+{
+    int32_t full_frame_freeze_repetition_period;
+} h264_SEI_full_frame_freeze_t;
+
+typedef struct _h264_SEI_full_frame_snapshot    // full-frame-snapshot SEI
+{
+    int32_t snapshot_id;
+} h264_SEI_full_frame_snapshot_t;
+
+typedef struct _h264_SEI_progressive_segment_start    // progressive-refinement-segment-start SEI
+{
+    int32_t progressive_refinement_id;
+    int32_t num_refinement_steps_minus1;
+} h264_SEI_progressive_segment_start_t;
+
+typedef struct _h264_SEI_progressive_segment_end    // progressive-refinement-segment-end SEI
+{
+    int32_t progressive_refinement_id;
+} h264_SEI_progressive_segment_end_t;
+
+typedef struct _h264_SEI_motion_constrained_slice_group    // motion-constrained-slice-group-set SEI
+{
+    int32_t num_slice_groups_in_set_minus1;
+    int32_t slice_group_id[MAX_SLICE_GRPS];
+    int32_t exact_sample_value_match_flag;
+    int32_t pan_scan_rect_flag;
+    int32_t pan_scan_rect_id;
+} h264_SEI_motion_constrained_slice_group_t;
+
+typedef struct _h264_SEI_deblocking_filter_display_pref    // deblocking-filter-display-preference SEI
+{
+    int32_t devlocking_display_preference_cancel_flag;    // NOTE(review): identifier typo ("devlocking") kept — renaming would break users
+    int32_t display_prior_to_deblocking_preferred_flag;
+    int32_t dec_frame_buffering_constraint_flag;
+    int32_t deblocking_display_preference_repetition_period;
+} h264_SEI_deblocking_filter_display_pref_t;
+
+typedef struct _h264_SEI_stereo_video_info    // stereo-video-info SEI
+{
+    int32_t field_views_flag;
+    int32_t top_field_is_left_view_flag;
+    int32_t curent_frame_is_left_view_flag;
+    int32_t next_frame_is_second_view_flag;
+    int32_t left_view_self_contained_flag;
+    int32_t right_view_self_contained_flag;
+} h264_SEI_stereo_video_info_t;
+
+typedef struct _h264_SEI_reserved    // catch-all for reserved SEI payload types
+{
+    int32_t reserved_sei_message_payload_byte;
+} h264_SEI_reserved_t;
+
+
+////////////////////////////
+// SEI Info
+/////////////////////////////
+
+typedef struct sei_info    // accumulated SEI-derived decoder state (recovery point, display freeze, scan format)
+{
+    int32_t recovery_point;     // non-zero once a recovery-point SEI has been parsed
+    int32_t recovery_frame_num;
+
+    int32_t capture_POC;
+    int32_t freeze_POC;
+    int32_t release_POC;        // The POC which when reached will allow display update to re-commence
+    int32_t disp_frozen;        // Indicates display is currently frozen
+    int32_t freeze_rep_period;
+    int32_t recovery_frame_cnt;
+    int32_t capture_fn;
+    int32_t recovery_fn;
+    int32_t broken_link;
+    int32_t scan_format;        // see SEI_SCAN_FORMAT_* macros above
+    int32_t broken_link_pic;
+} sei_info, *sei_info_ptr;
+
+/*typedef struct _h264_SEI
+{
+	h264_SEI_buffering_period_t buf_period;
+	h264_SEI_pic_timing_t pic_timing;
+	h264_SEI_pan_scan_rectangle_t pan_scan_timing;
+	h264_SEI_filler_payload_t filler_payload;
+	h264_SEI_userdata_registered_t userdata_reg;
+	h264_SEI_userdata_unregistered_t userdata_unreg;
+	h264_SEI_recovery_point_t recovery_point;
+	h264_SEI_decoded_ref_pic_marking_repetition_t dec_ref_pic_marking_rep;
+	h264_SEI_spare_picture_t spare_pic;
+	h264_SEI_scene_info_t scene_info;
+	h264_SEI_sub_sequence_info_t sub_sequence_info;
+	h264_SEI_sub_sequence_layer_t sub_sequence_layer;
+	h264_SEI_sub_sequence_t sub_sequence;
+	h264_SEI_full_frame_snapshot_t full_frame_snapshot;
+	h264_SEI_full_frame_t full_frame;
+	h264_SEI_progressive_segment_start_t progressive_segment_start;
+	h264_SEI_progressive_segment_end_t progressive_segment_end;
+	h264_SEI_motion_constrained_slice_group_t motion_constrained_slice_grp;
+	h264_SEI_deblocking_filter_display_pref_t deblk_filter_display_pref;
+	h264_SEI_stereo_video_info_t stereo_video_info;
+	h264_SEI_reserved_t reserved;
+}h264_SEI_t;
+*/
+
+
+#endif  //_H264_SEI_H_
+
+
diff --git a/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c
new file mode 100755
index 0000000..b564d8b
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/mix_vbp_h264_stubs.c
@@ -0,0 +1,574 @@
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+
+
+extern void* h264_memcpy( void* dest, void* src, uint32_t num );
+
+uint32_t cp_using_dma(uint32_t ddr_addr, uint32_t local_addr, uint32_t size, char to_ddr, char swap)
+{
+    if (swap != 0)
+    {
+        //g_warning("swap copying is not implemented.");
+    }
+
+    if (to_ddr)
+    {
+        memcpy((void*)ddr_addr, (void*)local_addr, size);
+    }
+    else
+    {
+        memcpy((void*)local_addr, (void*)ddr_addr, size);
+    }
+
+    return (0);
+}
+
+#if 0
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+    if (pInfo->Is_first_frame_in_stream) //new stream, fill new frame in cur
+    {
+
+        pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->push_to_cur = 1;
+
+    }
+    else  // move to next for new frame
+    {
+        pInfo->push_to_cur = 0;
+    }
+
+
+
+    //fill dpb management info
+
+
+
+
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+
+}
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+    ////
+    //// Now we can flush out all frames in DPB for display
+    if (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].is_used != 3)
+    {
+        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+    }
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+}
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+    pInfo->qm_present_list=0;
+}
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+#if 1
+    uint32_t  i, nitems=0;
+
+
+    if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+#endif
+}
+#else
+
+
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+    h264_slice_data 				slice_data = {};
+
+    uint32_t		i=0, nitems=0, data=0;
+    uint32_t 	bits_offset =0, byte_offset =0;
+    uint8_t    	is_emul =0;
+
+
+    ////////////////////// Update Reference list //////////////////
+    if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+    /////file ref list 0
+    // h264_parse_emit_ref_list(parent, pInfo, 0);
+
+    /////file ref list 1
+    //h264_parse_emit_ref_list(parent, pInfo, 1);
+
+    ///////////////////////////////////// Slice Data ////////////////////////////////
+    // h264_fill_slice_data(pInfo, &slice_data);
+
+    wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_H264_SLICE_REG);
+
+    wi.data.data_offset = slice_data.h264_bsd_slice_start;
+    wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+    wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+    if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        // viddec_pm_append_workitem( parent , &wi);
+    }
+    else
+    {
+        // viddec_pm_append_workitem_next( parent , &wi);
+    }
+
+
+    ///////////////////////////predict weight table item and data if have///////////////////////////
+    if (pInfo->h264_pwt_enabled)
+    {
+        wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+        wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+        wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+        wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent , &wi);
+
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            // viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+        }
+        else
+        {
+            //  viddec_pm_append_workitem_next( parent , &wi);
+
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            //  viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+        }
+    }
+
+
+    ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+    //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+    if (pInfo->active_PPS.entropy_coding_mode_flag)
+    {
+        if (0!=bits_offset)  {
+            data = data; // fix compilation warning
+            // don't skip byte-aligned bits as those bits are actually
+            // part of slice_data
+            //viddec_pm_get_bits(parent, &data, 8-bits_offset);
+        }
+    }
+    else
+    {
+        if (0!=bits_offset)  {
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+            wi.data.data_offset = bits_offset;
+            wi.data.data_payload[0]=0;
+            wi.data.data_payload[1]=0;
+
+            if (pInfo->push_to_cur) {			//cur is empty, fill new frame in cur
+                // viddec_pm_append_workitem( parent , &wi);
+            }
+            else {
+                //viddec_pm_append_workitem_next( parent , &wi);
+            }
+        }
+    }
+
+    if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        //viddec_pm_append_pixeldata( parent );
+    }
+    else
+    {
+        //viddec_pm_append_pixeldata_next( parent);
+    }
+
+    return;
+}
+
+
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+
+    const uint32_t             *pl;
+    uint32_t                   i=0,nitems=0;
+
+    h264_pic_data pic_data;
+
+    pInfo->qm_present_list=0;
+
+    //h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+    // h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+    // h264_fill_pic_data(pInfo, &pic_data);
+
+    // How many payloads must be generated
+    nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+    pl = (const uint32_t *) &pic_data;
+
+    // Dump slice data to an array of workitems,  to do pl access non valid mem
+    for ( i = 0; i < nitems; i++ )
+    {
+        wi.vwi_type           = (workload_item_type)VIDDEC_WORKLOAD_H264_PIC_REG;
+        wi.data.data_offset   = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+        wi.data.data_payload[0] = pl[0];
+        wi.data.data_payload[1] = pl[1];
+        pl += 2;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+
+            //  viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            //viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+
+    return;
+}
+
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+    uint32_t                   i=0,nitems=0;
+
+    ///////////////////////// Frame attributes//////////////////////////
+
+    //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+    if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+    {
+        //viddec_workload_t			*wl_cur = viddec_pm_get_header( parent );
+        //pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+        pInfo->push_to_cur = 1;
+        //h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+    }
+    else  // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+    {
+        //viddec_workload_t        *wl_next = viddec_pm_get_next_header (parent);
+
+        pInfo->push_to_cur = 0;
+        //h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+        pInfo->is_current_workload_done=1;
+    }
+
+    ///////////////////// SPS/////////////////////
+    // h264_parse_emit_sps(parent, pInfo);
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            // viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+        }
+
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+    /////////////////////update DPB frames/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id);
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+
+            if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                // viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                //viddec_pm_append_workitem_next( parent, &wi );
+            }
+        }
+    }
+
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+            case (FRAME): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            case (TOP_FIELD): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+
+            case (BOTTOM_FIELD): {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            default : {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+            }
+
+
+            if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+            {
+                //  viddec_pm_append_workitem( parent, &wi );
+            }
+            else
+            {
+                //viddec_pm_append_workitem_next( parent, &wi );
+            }
+
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if (pInfo->push_to_cur)
+        {
+            // viddec_workload_t        *wl_cur = viddec_pm_get_header (parent);
+            //  wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            // viddec_workload_t        *wl_next = viddec_pm_get_next_header (parent);
+            //wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
+}
+
+
+
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t	wi;
+
+    ////
+    //// Now we can flush out all frames in DPB for display
+    if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+    {
+        h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+    }
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //viddec_pm_append_workitem( parent, &wi );
+        }
+        else
+        {
+            //viddec_pm_append_workitem_next( parent, &wi );
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = (workload_item_type)(VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i]);
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            //  viddec_pm_append_workitem( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            // viddec_pm_append_workitem_next( parent, &wi );
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
+}
+#endif
diff --git a/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c
new file mode 100755
index 0000000..103841e
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/secvideo/baytrail/viddec_h264secure_parse.c
@@ -0,0 +1,802 @@
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include "h264.h"
+#include "h264parse.h"
+
+#include "h264parse_dpb.h"
+
+/* Init function which can be called to initialize local context on open and flush and preserve */
+void viddec_h264secure_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    if (!preserve)
+    {
+        /* we don't initialize this data if we want to preserve
+           sequence and gop information */
+        h264_init_sps_pps(parser,persist_mem);
+    }
+    /* picture level info which will always be initialized */
+    h264_init_Info_under_sps_pps_level(pInfo);
+#ifdef SW_ERROR_CONCEALEMNT
+   pInfo->sw_bail = 0;
+#endif
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+uint32_t viddec_h264secure_parse(void *parent, void *ctxt)
+{
+    struct h264_viddec_parser* parser = ctxt;
+
+    h264_Info * pInfo = &(parser->info);
+
+    h264_Status status = H264_STATUS_ERROR;
+
+
+    uint8_t nal_ref_idc = 0;
+
+    ///// Parse NAL Unit header
+    pInfo->img.g_new_frame = 0;
+    pInfo->push_to_cur = 1;
+    pInfo->is_current_workload_done =0;
+    pInfo->nal_unit_type = 0;
+
+    h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc);
+
+    ///// Check frame boundary for non-VCL delimiter
+    h264_check_previous_frame_end(pInfo);
+
+    //////// Parse valid NAL unit
+    switch ( pInfo->nal_unit_type )
+    {
+    case h264_NAL_UNIT_TYPE_IDR:
+        if (pInfo->got_start)	{
+            pInfo->img.recovery_point_found |= 1;
+        }
+
+        pInfo->sei_rp_received = 0;
+
+    case h264_NAL_UNIT_TYPE_SLICE:
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 1: Check start point
+        ////////////////////////////////////////////////////////////////////////////
+        //
+        /// Slice parsing must start from the valid start point( SPS, PPS,  IDR or recovery point or primary_I)
+        /// 1) No start point reached, append current ES buffer to workload and release it
+        /// 2) else, start parsing
+        //
+        //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)))
+        //{
+        //pInfo->img.recovery_point_found = 1;
+        //}
+    {
+
+        h264_Slice_Header_t next_SliceHeader;
+
+        /// Reset next slice header
+        h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t));
+        next_SliceHeader.nal_ref_idc = nal_ref_idc;
+
+        if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start))
+        {
+            pInfo->img.recovery_point_found |=4;
+        }
+        pInfo->primary_pic_type_plus_one = 0;
+
+
+
+#ifndef VBP
+        if (pInfo->img.recovery_point_found == 0) {
+            pInfo->img.structure = FRAME;
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+            break;
+        }
+#endif
+
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 2: Parsing slice header
+        ////////////////////////////////////////////////////////////////////////////
+        /// PWT
+        pInfo->h264_pwt_start_byte_offset=0;
+        pInfo->h264_pwt_start_bit_offset=0;
+        pInfo->h264_pwt_end_byte_offset=0;
+        pInfo->h264_pwt_end_bit_offset=0;
+        pInfo->h264_pwt_enabled =0;
+        /// IDR flag
+        next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR);
+
+
+        /// Pass slice header
+        status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader);
+
+        pInfo->sei_information.recovery_point = 0;
+
+        if (next_SliceHeader.sh_error & 3) {
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+
+            // Error type definition, refer to viddec_fw_common_defs.h
+            //		if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD			= (1 << 17)
+            //		if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD	   = (1 << 18)
+            //		if this is frame based, both 2 bits should be set
+            pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+
+            break;
+        }
+        pInfo->img.current_slice_num++;
+
+
+#ifdef DUMP_HEADER_INFO
+        dump_slice_header(pInfo, &next_SliceHeader);
+////h264_print_decoder_values(pInfo);
+#endif
+
+
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 3: Processing if new picture coming
+        //  1) if it's the second field
+        //	2) if it's a new frame
+        ////////////////////////////////////////////////////////////////////////////
+        //AssignQuantParam(pInfo);
+        if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader))
+        {
+            //
+            ///----------------- New Picture.boundary detected--------------------
+            //
+            pInfo->img.g_new_pic++;
+
+            //
+            // Complete previous picture
+            h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old
+            //h264_hdr_post_poc(0, 0, use_old);
+
+            //
+            // Update slice structures:
+            h264_update_old_slice(pInfo, next_SliceHeader);  	//cur->old; next->cur;
+
+            //
+            // 1) if resolution change: reset dpb
+            // 2) else: init frame store
+            h264_update_img_info(pInfo); //img, dpb
+
+            //
+            ///----------------- New frame.boundary detected--------------------
+            //
+            pInfo->img.second_field = h264_is_second_field(pInfo);
+            if (pInfo->img.second_field == 0)
+            {
+                pInfo->img.g_new_frame = 1;
+                h264_dpb_update_queue_dangling_field(pInfo);
+
+                //
+                /// DPB management
+                ///	1) check the gaps
+                ///	2) assign fs for non-exist frames
+                ///	3) fill the gaps
+                ///	4) store frame into DPB if ...
+                //
+                //if(pInfo->SliceHeader.redundant_pic_cnt)
+                {
+                    h264_dpb_gaps_in_frame_num_mem_management(pInfo);
+                }
+
+#ifdef DUMP_HEADER_INFO
+                dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num);
+#endif
+            }
+            //
+            /// Decoding POC
+            h264_hdr_decoding_poc (pInfo, 0, 0);
+
+            //
+            /// Init Frame Store for next frame
+            h264_dpb_init_frame_store (pInfo);
+            pInfo->img.current_slice_num = 1;
+
+            if (pInfo->SliceHeader.first_mb_in_slice != 0)
+            {
+                ////Come here means we have slice lost at the beginning, since no FMO support
+                pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17);
+            }
+
+            //
+            /// Emit out the New Frame
+            if (pInfo->img.g_new_frame)
+            {
+                h264_parse_emit_start_new_frame(parent, pInfo);
+            }
+
+            h264_parse_emit_current_pic(parent, pInfo);
+        }
+        else ///////////////////////////////////////////////////// If Not a picture start
+        {
+            //
+            /// Update slice structures: cur->old; next->cur;
+            h264_update_old_slice(pInfo, next_SliceHeader);
+
+            //
+            /// 1) if resolution change: reset dpb
+            /// 2) else: update img info
+            h264_update_img_info(pInfo);
+        }
+
+
+        //////////////////////////////////////////////////////////////
+        // Step 4: DPB reference list init and reordering
+        //////////////////////////////////////////////////////////////
+
+        //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field
+        h264_update_frame_type(pInfo);
+
+
+        h264_dpb_update_ref_lists( pInfo);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+#ifdef DUMP_HEADER_INFO
+        dump_ref_list(pInfo);
+#endif
+        /// Emit out the current "good" slice
+        h264_parse_emit_current_slice(parent, pInfo);
+
+    }
+    break;
+
+    ///// * Main profile doesn't support Data Partition, skipped.... *////
+    case h264_NAL_UNIT_TYPE_DPA:
+    case h264_NAL_UNIT_TYPE_DPB:
+    case h264_NAL_UNIT_TYPE_DPC:
+        //OS_INFO("***********************DP feature, not supported currently*******************\n");
+        pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+        status = H264_STATUS_NOTSUPPORT;
+        break;
+
+        //// * Parsing SEI info *////
+    case h264_NAL_UNIT_TYPE_SEI:
+        status = H264_STATUS_OK;
+
+        //OS_INFO("*****************************SEI**************************************\n");
+        if (pInfo->sps_valid) {
+            //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW
+            pInfo->number_of_first_au_info_nal_before_first_slice++;
+            /// parsing the SEI info
+            status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo);
+        }
+
+        //h264_rbsp_trailing_bits(pInfo);
+        break;
+    case h264_NAL_UNIT_TYPE_SPS:
+    {
+        //OS_INFO("*****************************SPS**************************************\n");
+        ///
+        /// Can not define local SPS since the Current local stack size limitation!
+        /// Could be changed after the limitation gone
+        ///
+        uint8_t  old_sps_id=0;
+        vui_seq_parameters_t_not_used vui_seq_not_used;
+
+        old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+        status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+        if (status == H264_STATUS_OK) {
+            h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+            pInfo->sps_valid = 1;
+
+            if (1==pInfo->active_SPS.pic_order_cnt_type) {
+                h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+            }
+
+#ifdef DUMP_HEADER_INFO
+            dump_sps(&(pInfo->active_SPS));
+#endif
+
+        }
+        ///// Restore the active SPS if new arrival's id changed
+        if (old_sps_id>=MAX_NUM_SPS) {
+            h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+            pInfo->active_SPS.seq_parameter_set_id = 0xff;
+        }
+        else {
+            if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id)  {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+            else  {
+                //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+                pInfo->active_SPS.seq_parameter_set_id = 0xff;
+            }
+        }
+
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+    }
+    break;
+    case h264_NAL_UNIT_TYPE_PPS:
+    {
+        //OS_INFO("*****************************PPS**************************************\n");
+
+        uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+        h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+        if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+        {
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+            if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+            {
+                pInfo->Is_SPS_updated = 1;
+            }
+            if (pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+                h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+                pInfo->got_start = 1;
+                if (pInfo->sei_information.recovery_point)
+                {
+                    pInfo->img.recovery_point_found |= 2;
+
+                    //// Enable the RP recovery if no IDR ---Cisco
+                    if ((pInfo->img.recovery_point_found & 1)==0)
+                        pInfo->sei_rp_received = 1;
+                }
+            }
+            else
+            {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+#ifdef DUMP_HEADER_INFO
+            dump_pps(&(pInfo->active_PPS));
+#endif
+        } else {
+            if (old_sps_id<MAX_NUM_SPS)
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            if (old_pps_id<MAX_NUM_PPS)
+                h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+        }
+
+    } //// End of PPS parsing
+    break;
+
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+
+        h264_parse_emit_eos(parent, pInfo);
+        h264_init_dpb(&(pInfo->dpb));
+
+        pInfo->is_current_workload_done=1;
+
+        /* picture level info which will always be initialized */
+        //h264_init_Info_under_sps_pps_level(pInfo);
+
+        ////reset the pInfo here
+        //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+        ///// primary_pic_type
+        {
+            uint32_t code = 0xff;
+            int32_t ret = 0;
+            ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+            if (ret != -1) {
+                //if(pInfo->got_start && (code == 0))
+                //{
+                //pInfo->img.recovery_point_found |= 4;
+                //}
+                pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1;
+                status = H264_STATUS_OK;
+            }
+            pInfo->number_of_first_au_info_nal_before_first_slice++;
+            break;
+        }
+#endif
+
+    case h264_NAL_UNIT_TYPE_Reserved1:
+    case h264_NAL_UNIT_TYPE_Reserved2:
+    case h264_NAL_UNIT_TYPE_Reserved3:
+    case h264_NAL_UNIT_TYPE_Reserved4:
+    case h264_NAL_UNIT_TYPE_Reserved5:
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_filler_data:
+        status = H264_STATUS_OK;
+        break;
+    case h264_NAL_UNIT_TYPE_ACP:
+        break;
+    case h264_NAL_UNIT_TYPE_SPS_extension:
+    case h264_NAL_UNIT_TYPE_unspecified:
+    case h264_NAL_UNIT_TYPE_unspecified2:
+        status = H264_STATUS_OK;
+        //nothing
+        break;
+    default:
+        status = H264_STATUS_OK;
+        break;
+    }
+
+    //pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+    switch ( pInfo->nal_unit_type )
+    {
+    case h264_NAL_UNIT_TYPE_IDR:
+    case h264_NAL_UNIT_TYPE_SLICE:
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+    case h264_NAL_UNIT_TYPE_SPS:
+    case h264_NAL_UNIT_TYPE_PPS:
+    case h264_NAL_UNIT_TYPE_SEI:
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+    case h264_NAL_UNIT_TYPE_Reserved1:
+    case h264_NAL_UNIT_TYPE_Reserved2:
+    case h264_NAL_UNIT_TYPE_Reserved3:
+    case h264_NAL_UNIT_TYPE_Reserved4:
+    case h264_NAL_UNIT_TYPE_Reserved5:
+    {
+        pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+        break;
+    }
+    default:
+        break;
+    }
+
+    return status;
+}
+
+/*
+ * viddec_h264secure_get_context_size()
+ * Reports the secure H.264 parser's memory requirements to the framework.
+ *  - context_size: per-instance parser state (struct h264_viddec_parser).
+ *  - persist_size: state that must survive flush/preserve cycles: storage
+ *    for all SPS and PPS sets plus per-SPS offset_for_ref_frame tables and
+ *    one extra scratch table (presumably for pic_order_cnt_type == 1
+ *    handling -- confirm against the SPS copy-to-DDR path).
+ */
+void viddec_h264secure_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(struct h264_viddec_parser);
+    size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all)
+                         + MAX_NUM_PPS * sizeof(pic_param_set)
+                         + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE
+                         + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/*
+ * viddec_h264secure_flush()
+ * Flushes the decoded picture buffer with output disabled, then resets the
+ * DPB bookkeeping fields so that the next slice is parsed against a clean
+ * DPB state.  'parent' is unused; kept for the parser-ops callback
+ * signature.
+ */
+void viddec_h264secure_flush(void *parent, void *ctxt)
+{
+    int i;
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    /* just flush dpb and disable output */
+    h264_dpb_flush_dpb(pInfo, 0, pInfo->img.second_field, pInfo->active_SPS.num_ref_frames);
+
+    /* reset the dpb to the initial state, avoid parser store
+       wrong data to dpb in next slice parsing */
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    for (i = 0; i < NUM_DPB_FRAME_STORES; i++)
+    {
+        /* mark every frame store slot as unused */
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+/*
+ * h264secure_Parse_Dec_Ref_Pic_Marking()
+ * Imports dec_ref_pic_marking() syntax from a pre-parsed secure slice
+ * header ('newdata', a vbp_h264_sliceheader) into SliceHeader, instead of
+ * reading it from the bitstream.
+ *  - IDR slices: copies no_output_of_prior_pics_flag and
+ *    long_term_reference_flag only.
+ *  - Non-IDR reference slices: when adaptive marking is enabled, copies
+ *    the MMCO list (operations 1..6) up to MAX_OP entries; MMCO 5 also
+ *    sets pInfo->img.curr_has_mmco_5.
+ * Returns H264_STATUS_ERROR if the MMCO list is not 0-terminated within
+ * MAX_OP operations, H264_STATUS_OK otherwise.
+ */
+h264_Status h264secure_Parse_Dec_Ref_Pic_Marking(h264_Info* pInfo, void *newdata, h264_Slice_Header_t*SliceHeader)
+{
+    vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata;
+
+    uint8_t i = 0;
+    uint32_t code;
+    if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        SliceHeader->sh_dec_refpic.no_output_of_prior_pics_flag = (uint8_t)sliceheader_p->ref_pic_marking.no_output_of_prior_pics_flag;
+        SliceHeader->sh_dec_refpic.long_term_reference_flag = (uint8_t)sliceheader_p->ref_pic_marking.long_term_reference_flag;
+        pInfo->img.long_term_reference_flag = SliceHeader->sh_dec_refpic.long_term_reference_flag;
+    }
+    else
+    {
+        SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag = sliceheader_p->ref_pic_marking.adaptive_ref_pic_marking_mode_flag;
+
+        ///////////////////////////////////////////////////////////////////////////////////////
+        //adaptive_ref_pic_marking_mode_flag Reference picture marking mode specified
+        //                              Sliding window reference picture marking mode: A marking mode
+        //                              providing a first-in first-out mechanism for short-term reference pictures.
+        //                              Adaptive reference picture marking mode: A reference picture
+        //                              marking mode providing syntax elements to specify marking of
+        //                              reference pictures as "unused for reference" and to assign long-term
+        //                              frame indices.
+        ///////////////////////////////////////////////////////////////////////////////////////
+
+        if (SliceHeader->sh_dec_refpic.adaptive_ref_pic_marking_mode_flag)
+        {
+            // Copy MMCO entries until the terminating operation 0, or until
+            // MAX_OP entries have been consumed (which is reported as an error).
+            do
+            {
+                if (i < MAX_OP)
+                {
+                    code = sliceheader_p->ref_pic_marking.op[i].memory_management_control_operation;
+                    SliceHeader->sh_dec_refpic.memory_management_control_operation[i] = code;
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 1)
+                    {
+                        // MMCO 1: mark a short-term picture as unused
+                        SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = sliceheader_p->ref_pic_marking.op[i].op1.difference_of_pic_nums_minus1;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 2)
+                    {
+                        // MMCO 2: mark a long-term picture as unused
+                        SliceHeader->sh_dec_refpic.long_term_pic_num[i] = sliceheader_p->ref_pic_marking.op[i].op2.long_term_pic_num;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 6)
+                    {
+                        // MMCO 6: assign a long-term index to the current picture
+                        SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->ref_pic_marking.op[i].op6.long_term_frame_idx;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 3) {
+                        // MMCO 3: convert a short-term picture to long-term
+                        SliceHeader->sh_dec_refpic.difference_of_pic_num_minus1[i] = sliceheader_p->ref_pic_marking.op[i].op3.difference_of_pic_nums_minus1;
+                        SliceHeader->sh_dec_refpic.long_term_frame_idx[i] = sliceheader_p->ref_pic_marking.op[i].op3.long_term_frame_idx;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 4)
+                    {
+                        // MMCO 4: set the maximum long-term frame index
+                        SliceHeader->sh_dec_refpic.max_long_term_frame_idx_plus1[i] = sliceheader_p->ref_pic_marking.op[i].op4.max_long_term_frame_idx_plus1;
+                    }
+
+                    if (SliceHeader->sh_dec_refpic.memory_management_control_operation[i] == 5)
+                    {
+                        // MMCO 5: mark all reference pictures unused (flush)
+                        pInfo->img.curr_has_mmco_5 = 1;
+                    }
+                }
+
+                if (i >= MAX_OP) {
+                    // MMCO list never terminated within MAX_OP entries
+                    return H264_STATUS_ERROR;
+                }
+            } while (SliceHeader->sh_dec_refpic.memory_management_control_operation[i++] != 0);
+        }
+    }
+
+    // NOTE(review): on the IDR path i is still 0 here, so the count records
+    // only MMCO entries -- confirm downstream consumers expect that.
+    SliceHeader->sh_dec_refpic.dec_ref_pic_marking_count = i;
+
+    return H264_STATUS_OK;
+}
+
+/*
+ * h264secure_Update_Slice_Header()
+ * Populates *SliceHeader from a pre-parsed secure-path slice header
+ * ('newdata', a vbp_h264_sliceheader) instead of parsing the bitstream:
+ * activates the referenced PPS/SPS pair, derives the picture structure
+ * (frame / top field / bottom field), validates first_mb_in_slice against
+ * the picture size, copies the POC syntax elements, and imports the
+ * dec_ref_pic_marking() data for reference slices (nal_ref_idc != 0).
+ *
+ * Returns H264_STATUS_OK on success, H264_STATUS_NOTSUPPORT when
+ * first_mb_in_slice is out of range or the ref-pic-marking data cannot
+ * be imported.
+ */
+uint32_t h264secure_Update_Slice_Header(h264_Info* pInfo, void *newdata, h264_Slice_Header_t *SliceHeader)
+{
+    h264_Status retStatus = H264_STATUS_OK;
+    vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) newdata;
+
+    ///// first_mb_in_slice
+    SliceHeader->first_mb_in_slice = sliceheader_p->slice_header.first_mb_in_slice;
+
+    SliceHeader->pic_parameter_id  = (uint8_t)sliceheader_p->slice_header.pps_id;
+
+    // Activate the PPS/SPS pair referenced by this slice.
+    // NOTE(review): the return status is assigned but not acted upon before
+    // the SPS fields are used below; this mirrors the legacy parser --
+    // confirm failures are handled elsewhere.
+    retStatus = h264_active_par_set(pInfo, SliceHeader);
+
+    switch (pInfo->active_SPS.profile_idc)
+    {
+        case h264_ProfileBaseline:
+        case h264_ProfileMain:
+        case h264_ProfileExtended:
+            // These profiles never carry the High-profile PPS extensions;
+            // force the extension fields to their implied defaults.
+            pInfo->active_PPS.transform_8x8_mode_flag=0;
+            pInfo->active_PPS.pic_scaling_matrix_present_flag =0;
+            pInfo->active_PPS.second_chroma_qp_index_offset = pInfo->active_PPS.chroma_qp_index_offset;
+            break;
+        default:
+            break;
+    }
+
+    int32_t max_mb_num=0;
+
+    SliceHeader->frame_num = (int32_t)sliceheader_p->slice_header.frame_num;
+
+    /// Picture structure: default to a frame picture.
+    SliceHeader->structure = FRAME;
+    SliceHeader->field_pic_flag = 0;
+    SliceHeader->bottom_field_flag = 0;
+
+    if (!(pInfo->active_SPS.sps_disp.frame_mbs_only_flag))
+    {
+        /// field_pic_flag is only present for interlaced-capable streams
+        SliceHeader->field_pic_flag = (uint8_t)sliceheader_p->slice_header.field_pic_flag;
+
+        if (SliceHeader->field_pic_flag)
+        {
+            SliceHeader->bottom_field_flag = (uint8_t)sliceheader_p->slice_header.bottom_field_flag;
+            SliceHeader->structure = SliceHeader->bottom_field_flag? BOTTOM_FIELD: TOP_FIELD;
+        }
+    }
+
+    ////// Check that first_mb_in_slice lies inside the picture
+    if (SliceHeader->structure == FRAME) {
+        max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs;
+    } else {
+        max_mb_num = pInfo->img.FrameHeightInMbs * pInfo->img.PicWidthInMbs/2;
+    }
+
+    // In MBAFF frame pictures the slice address counts MB pairs, so double it.
+    // NOTE(review): this tests pInfo->SliceHeader (the previous slice), not
+    // the header being built -- mirrors the legacy code, confirm intentional.
+    if (pInfo->active_SPS.sps_disp.mb_adaptive_frame_field_flag & (!(pInfo->SliceHeader.field_pic_flag))) {
+        SliceHeader->first_mb_in_slice <<=1;
+    }
+
+    if (SliceHeader->first_mb_in_slice >= max_mb_num) {
+        retStatus = H264_STATUS_NOTSUPPORT;
+        return retStatus;
+    }
+
+    if (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)
+    {
+        SliceHeader->idr_pic_id = sliceheader_p->slice_header.idr_pic_id;
+    }
+
+    //// Picture order count syntax, POC type 0
+    if (pInfo->active_SPS.pic_order_cnt_type == 0)
+    {
+        SliceHeader->pic_order_cnt_lsb = (uint32_t)sliceheader_p->slice_header.pic_order_cnt_lsb;
+
+        if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+        {
+            SliceHeader->delta_pic_order_cnt_bottom = sliceheader_p->slice_header.delta_pic_order_cnt_bottom;
+        }
+        else
+        {
+            SliceHeader->delta_pic_order_cnt_bottom = 0;
+        }
+    }
+
+    //// POC type 1 explicit deltas
+    if ((pInfo->active_SPS.pic_order_cnt_type == 1) && !(pInfo->active_SPS.delta_pic_order_always_zero_flag))
+    {
+        SliceHeader->delta_pic_order_cnt[0] = sliceheader_p->slice_header.delta_pic_order_cnt[0];
+        if ((pInfo->active_PPS.pic_order_present_flag) && !(SliceHeader->field_pic_flag))
+        {
+            SliceHeader->delta_pic_order_cnt[1] = sliceheader_p->slice_header.delta_pic_order_cnt[1];
+        }
+    }
+
+    ////
+    //// Import dec_ref_pic_marking() data for reference slices
+    ////
+    if (SliceHeader->nal_ref_idc != 0)
+    {
+        if (h264secure_Parse_Dec_Ref_Pic_Marking(pInfo, newdata, SliceHeader) != H264_STATUS_OK)
+        {
+            retStatus = H264_STATUS_NOTSUPPORT;
+            return retStatus;
+        }
+    }
+    retStatus = H264_STATUS_OK;
+    return retStatus;
+}
+/*
+ * viddec_h264secure_update()
+ * Secure-path update entry point.  Consumes a pre-parsed slice header
+ * (vbp_h264_sliceheader in 'data') rather than bitstream NAL bytes,
+ * refreshes the parser state, and runs the new-picture / new-frame
+ * boundary processing (store previous picture in the DPB, POC decoding,
+ * frame-store init).  'size' is unused.  Returns the status produced by
+ * h264secure_Update_Slice_Header().
+ */
+uint32_t viddec_h264secure_update(void *parent, void *data, uint32_t size)
+{
+    viddec_pm_cxt_t * parser_cxt = (viddec_pm_cxt_t *)parent;
+    struct h264_viddec_parser* parser = (struct h264_viddec_parser*) &parser_cxt->codec_data[0];
+    h264_Info * pInfo = &(parser->info);
+
+    h264_Status status = H264_STATUS_ERROR;
+    vbp_h264_sliceheader* sliceheader_p = (vbp_h264_sliceheader*) data;
+
+    // Reset per-NAL state; NAL type comes from the pre-parsed header byte
+    // (low 5 bits), nal_ref_idc from bits 5..6.
+    pInfo->img.g_new_frame = 0;
+    pInfo->push_to_cur = 1;
+    pInfo->is_current_workload_done =0;
+    pInfo->nal_unit_type = 0;
+    pInfo->nal_unit_type = sliceheader_p->slice_header.nal_unit_type & 0x1F;
+
+    h264_Slice_Header_t next_SliceHeader;
+
+    /// Reset next slice header
+    h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t));
+    next_SliceHeader.nal_ref_idc = (sliceheader_p->slice_header.nal_unit_type & 0x60) >> 5;
+
+    // A preceding access-unit delimiter with primary_pic_type 0 counts as a
+    // recovery point once a start point has been seen.
+    if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start))
+    {
+        pInfo->img.recovery_point_found |=4;
+    }
+    pInfo->primary_pic_type_plus_one = 0;
+
+    ////////////////////////////////////////////////////////////////////////////
+    // Step 2: Parsing slice header
+    ////////////////////////////////////////////////////////////////////////////
+    /// PWT (prediction weight table) bit positions: unused on this path
+    pInfo->h264_pwt_start_byte_offset=0;
+    pInfo->h264_pwt_start_bit_offset=0;
+    pInfo->h264_pwt_end_byte_offset=0;
+    pInfo->h264_pwt_end_bit_offset=0;
+    pInfo->h264_pwt_enabled =0;
+    /// IDR flag
+    next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR);
+
+    /// Pass slice header
+    status = h264secure_Update_Slice_Header(pInfo, sliceheader_p, &next_SliceHeader);
+
+    pInfo->sei_information.recovery_point = 0;
+    pInfo->img.current_slice_num++;
+
+
+    ////////////////////////////////////////////////////////////////////////////
+    // Step 3: Processing if new picture coming
+    //  1) if it's the second field
+    //  2) if it's a new frame
+    ////////////////////////////////////////////////////////////////////////////
+    //AssignQuantParam(pInfo);
+    if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader))
+    {
+        //
+        ///----------------- New Picture.boundary detected--------------------
+        //
+        pInfo->img.g_new_pic++;
+
+        //
+        // Complete previous picture
+        h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old
+
+        //
+        // Update slice structures:
+        h264_update_old_slice(pInfo, next_SliceHeader);  //cur->old; next->cur;
+
+        //
+        // 1) if resolution change: reset dpb
+        // 2) else: init frame store
+        h264_update_img_info(pInfo);  //img, dpb
+
+        //
+        ///----------------- New frame.boundary detected--------------------
+        //
+        pInfo->img.second_field = h264_is_second_field(pInfo);
+        if (pInfo->img.second_field == 0)
+        {
+            pInfo->img.g_new_frame = 1;
+            h264_dpb_update_queue_dangling_field(pInfo);
+            h264_dpb_gaps_in_frame_num_mem_management(pInfo);
+        }
+        /// Decoding POC
+        h264_hdr_decoding_poc (pInfo, 0, 0);
+        //
+        /// Init Frame Store for next frame
+        h264_dpb_init_frame_store (pInfo);
+        pInfo->img.current_slice_num = 1;
+
+        if (pInfo->SliceHeader.first_mb_in_slice != 0)
+        {
+            ////Come here means we have slice lost at the beginning, since no FMO support
+            pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17);
+        }
+    }
+    else ///////////////////////////////////////////////////// If Not a picture start
+    {
+        /// Update slice structures: cur->old; next->cur;
+        h264_update_old_slice(pInfo, next_SliceHeader);
+        /// 1) if resolution change: reset dpb
+        /// 2) else: update img info
+        h264_update_img_info(pInfo);
+    }
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_parse.c b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c
new file mode 100755
index 0000000..c55db6b
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/viddec_h264_parse.c
@@ -0,0 +1,607 @@
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include "h264.h"
+#include "h264parse.h"
+
+#include "h264parse_dpb.h"
+
+/* Init function which can be called to initialize the local context on open,
+   flush and preserve.  When 'preserve' is non-zero, the SPS/PPS state held in
+   'persist_mem' is kept and only picture-level state is reset.  The function
+   is exported under VBP builds and file-local otherwise. */
+#ifdef VBP
+void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_h264_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    if (!preserve)
+    {
+        /* we don't initialize this data if we want to preserve
+           sequence and gop information */
+        h264_init_sps_pps(parser,persist_mem);
+    }
+    /* picture level info which will always be initialized */
+    h264_init_Info_under_sps_pps_level(pInfo);
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+   /* clear the software error-concealment bail-out flag */
+   pInfo->sw_bail = 0;
+#endif
+#endif
+    return;
+}
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+#ifdef VBP
+uint32_t viddec_h264_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_h264_parse(void *parent, void *ctxt)
+#endif
+{
+    struct h264_viddec_parser* parser = ctxt;
+
+    h264_Info * pInfo = &(parser->info);
+
+    h264_Status status = H264_STATUS_ERROR;
+
+
+    uint8_t nal_ref_idc = 0;
+
+    ///// Parse NAL Unit header
+    pInfo->img.g_new_frame = 0;
+    pInfo->push_to_cur = 1;
+    pInfo->is_current_workload_done =0;
+    pInfo->nal_unit_type = 0;
+
+    h264_Parse_NAL_Unit(parent, pInfo, &nal_ref_idc);
+
+    ///// Check frame bounday for non-vcl elimitter
+    h264_check_previous_frame_end(pInfo);
+
+    //OS_INFO("========================nal_type: %d=================\n", pInfo->nal_unit_type);
+    //DEBUG_WRITE(pInfo->nal_unit_type, pInfo->got_start, pInfo->wl_err_flag, pInfo->is_current_workload_done, 0, 0);
+#if 0
+    devh_SVEN_WriteModuleEvent( NULL,
+                                SVEN_MODULE_EVENT_GV_FW_PARSER_DEBUG_P0,
+                                pInfo->got_start,pInfo->nal_unit_type,  pInfo->wl_err_curr, pInfo->is_current_workload_done, 0, pInfo->img.frame_num);
+#endif
+
+    //////// Parse valid NAL unit
+    switch ( pInfo->nal_unit_type )
+    {
+    case h264_NAL_UNIT_TYPE_IDR:
+        if (pInfo->got_start)	{
+            pInfo->img.recovery_point_found |= 1;
+        }
+
+        pInfo->sei_rp_received = 0;
+
+    case h264_NAL_UNIT_TYPE_SLICE:
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 1: Check start point
+        ////////////////////////////////////////////////////////////////////////////
+        //
+        /// Slice parsing must start from the valid start point( SPS, PPS,  IDR or recovery point or primary_I)
+        /// 1) No start point reached, append current ES buffer to workload and release it
+        /// 2) else, start parsing
+        //
+        //if(pInfo->got_start && ((pInfo->sei_information.recovery_point) || (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR)))
+        //{
+        //pInfo->img.recovery_point_found = 1;
+        //}
+    {
+
+        h264_Slice_Header_t next_SliceHeader;
+
+        /// Reset next slice header
+        h264_memset(&next_SliceHeader, 0x0, sizeof(h264_Slice_Header_t));
+        next_SliceHeader.nal_ref_idc = nal_ref_idc;
+
+        if ( (1==pInfo->primary_pic_type_plus_one)&&(pInfo->got_start))
+        {
+            pInfo->img.recovery_point_found |=4;
+        }
+        pInfo->primary_pic_type_plus_one = 0;
+
+
+
+#ifndef VBP
+        if (pInfo->img.recovery_point_found == 0) {
+            pInfo->img.structure = FRAME;
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+            break;
+        }
+#endif
+
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 2: Parsing slice header
+        ////////////////////////////////////////////////////////////////////////////
+        /// PWT
+        pInfo->h264_pwt_start_byte_offset=0;
+        pInfo->h264_pwt_start_bit_offset=0;
+        pInfo->h264_pwt_end_byte_offset=0;
+        pInfo->h264_pwt_end_bit_offset=0;
+        pInfo->h264_pwt_enabled =0;
+        /// IDR flag
+        next_SliceHeader.idr_flag = (pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR);
+
+
+        /// Pass slice header
+        status = h264_Parse_Slice_Layer_Without_Partitioning_RBSP(parent, pInfo, &next_SliceHeader);
+
+        pInfo->sei_information.recovery_point = 0;
+
+        if (next_SliceHeader.sh_error & 3) {
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+
+            // Error type definition, refer to viddec_fw_common_defs.h
+            //		if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD			= (1 << 17)
+            //		if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD	   = (1 << 18)
+            //		if this is frame based, both 2 bits should be set
+            pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+
+            break;
+        }
+        pInfo->img.current_slice_num++;
+
+
+#ifdef DUMP_HEADER_INFO
+        dump_slice_header(pInfo, &next_SliceHeader);
+////h264_print_decoder_values(pInfo);
+#endif
+
+
+        ////////////////////////////////////////////////////////////////////////////
+        // Step 3: Processing if new picture coming
+        //  1) if it's the second field
+        //	2) if it's a new frame
+        ////////////////////////////////////////////////////////////////////////////
+        //AssignQuantParam(pInfo);
+        if (h264_is_new_picture_start(pInfo, next_SliceHeader, pInfo->SliceHeader))
+        {
+            //
+            ///----------------- New Picture.boundary detected--------------------
+            //
+            pInfo->img.g_new_pic++;
+
+            //
+            // Complete previous picture
+            h264_dpb_store_previous_picture_in_dpb(pInfo, 0, 0); //curr old
+            //h264_hdr_post_poc(0, 0, use_old);
+
+            //
+            // Update slice structures:
+            h264_update_old_slice(pInfo, next_SliceHeader);  	//cur->old; next->cur;
+
+            //
+            // 1) if resolution change: reset dpb
+            // 2) else: init frame store
+            h264_update_img_info(pInfo);								//img, dpb
+
+            //
+            ///----------------- New frame.boundary detected--------------------
+            //
+            pInfo->img.second_field = h264_is_second_field(pInfo);
+            if (pInfo->img.second_field == 0)
+            {
+                pInfo->img.g_new_frame = 1;
+                h264_dpb_update_queue_dangling_field(pInfo);
+
+                //
+                /// DPB management
+                ///	1) check the gaps
+                ///	2) assign fs for non-exist frames
+                ///	3) fill the gaps
+                ///	4) store frame into DPB if ...
+                //
+                //if(pInfo->SliceHeader.redundant_pic_cnt)
+                {
+                    h264_dpb_gaps_in_frame_num_mem_management(pInfo);
+                }
+
+#ifdef DUMP_HEADER_INFO
+                dump_new_picture_attr(pInfo, pInfo->SliceHeader.frame_num);
+#endif
+            }
+            //
+            /// Decoding POC
+            h264_hdr_decoding_poc (pInfo, 0, 0);
+
+            //
+            /// Init Frame Store for next frame
+            h264_dpb_init_frame_store (pInfo);
+            pInfo->img.current_slice_num = 1;
+
+            if (pInfo->SliceHeader.first_mb_in_slice != 0)
+            {
+                ////Come here means we have slice lost at the beginning, since no FMO support
+                pInfo->SliceHeader.sh_error |= (pInfo->SliceHeader.structure << 17);
+            }
+
+            //
+            /// Emit out the New Frame
+            if (pInfo->img.g_new_frame)
+            {
+                h264_parse_emit_start_new_frame(parent, pInfo);
+            }
+
+            h264_parse_emit_current_pic(parent, pInfo);
+        }
+        else ///////////////////////////////////////////////////// If Not a picture start
+        {
+            //
+            /// Update slice structures: cur->old; next->cur;
+            h264_update_old_slice(pInfo, next_SliceHeader);
+
+            //
+            /// 1) if resolution change: reset dpb
+            /// 2) else: update img info
+            h264_update_img_info(pInfo);
+        }
+
+
+        //////////////////////////////////////////////////////////////
+        // Step 4: DPB reference list init and reordering
+        //////////////////////////////////////////////////////////////
+
+        //////////////////////////////////////////////// Update frame Type--- IDR/I/P/B for frame or field
+        h264_update_frame_type(pInfo);
+
+
+        h264_dpb_update_ref_lists( pInfo);
+
+#ifdef VBP
+#ifdef SW_ERROR_CONCEALEMNT
+        if ((pInfo->dpb.ltref_frames_in_buffer + pInfo->dpb.ref_frames_in_buffer ) > pInfo->active_SPS.num_ref_frames)
+        {
+            pInfo->sw_bail = 1;
+        }
+#endif
+#endif
+#ifdef DUMP_HEADER_INFO
+        dump_ref_list(pInfo);
+#endif
+        /// Emit out the current "good" slice
+        h264_parse_emit_current_slice(parent, pInfo);
+
+    }
+    break;
+
+    ///// * Main profile doesn't support Data Partition, skipped.... *////
+    case h264_NAL_UNIT_TYPE_DPA:
+    case h264_NAL_UNIT_TYPE_DPB:
+    case h264_NAL_UNIT_TYPE_DPC:
+        //OS_INFO("***********************DP feature, not supported currently*******************\n");
+        pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+        status = H264_STATUS_NOTSUPPORT;
+        break;
+
+        //// * Parsing SEI info *////
+    case h264_NAL_UNIT_TYPE_SEI:
+        status = H264_STATUS_OK;
+
+        //OS_INFO("*****************************SEI**************************************\n");
+        if (pInfo->sps_valid) {
+            //h264_user_data_t user_data; /// Replace with tmp buffer while porting to FW
+            pInfo->number_of_first_au_info_nal_before_first_slice++;
+            /// parsing the SEI info
+            status = h264_Parse_Supplemental_Enhancement_Information_Message(parent, pInfo);
+        }
+
+        //h264_rbsp_trailing_bits(pInfo);
+        break;
+    case h264_NAL_UNIT_TYPE_SPS:
+    {
+        //OS_INFO("*****************************SPS**************************************\n");
+        ///
+        /// Can not define local SPS since the Current local stack size limitation!
+        /// Could be changed after the limitation gone
+        ///
+        uint8_t  old_sps_id=0;
+        vui_seq_parameters_t_not_used vui_seq_not_used;
+
+        old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+
+
+        status = h264_Parse_SeqParameterSet(parent, pInfo, &(pInfo->active_SPS), &vui_seq_not_used, (int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL);
+        if (status == H264_STATUS_OK) {
+            h264_Parse_Copy_Sps_To_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_SPS.seq_parameter_set_id);
+            pInfo->sps_valid = 1;
+
+            if (1==pInfo->active_SPS.pic_order_cnt_type) {
+                h264_Parse_Copy_Offset_Ref_Frames_To_DDR(pInfo,(int32_t *)pInfo->TMP_OFFSET_REFFRM_PADDR_GL,pInfo->active_SPS.seq_parameter_set_id);
+            }
+
+#ifdef DUMP_HEADER_INFO
+            dump_sps(&(pInfo->active_SPS));
+#endif
+
+        }
+        ///// Restore the active SPS if new arrival's id changed
+        if (old_sps_id>=MAX_NUM_SPS) {
+            h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set_used));
+            pInfo->active_SPS.seq_parameter_set_id = 0xff;
+        }
+        else {
+            if (old_sps_id!=pInfo->active_SPS.seq_parameter_set_id)  {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+            else  {
+                //h264_memset(&(pInfo->active_SPS), 0x0, sizeof(seq_param_set));
+                pInfo->active_SPS.seq_parameter_set_id = 0xff;
+            }
+        }
+
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+    }
+    break;
+    case h264_NAL_UNIT_TYPE_PPS:
+    {
+        //OS_INFO("*****************************PPS**************************************\n");
+
+        uint32_t old_sps_id = pInfo->active_SPS.seq_parameter_set_id;
+        uint32_t old_pps_id = pInfo->active_PPS.pic_parameter_set_id;
+
+        h264_memset(&pInfo->active_PPS, 0x0, sizeof(pic_param_set));
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+
+        if (h264_Parse_PicParameterSet(parent, pInfo, &pInfo->active_PPS)== H264_STATUS_OK)
+        {
+            h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), pInfo->active_PPS.seq_parameter_set_id);
+            if (old_sps_id != pInfo->active_SPS.seq_parameter_set_id)
+            {
+                pInfo->Is_SPS_updated = 1;
+            }
+            if (pInfo->active_SPS.seq_parameter_set_id != 0xff) {
+                h264_Parse_Copy_Pps_To_DDR(pInfo, &pInfo->active_PPS, pInfo->active_PPS.pic_parameter_set_id);
+                pInfo->got_start = 1;
+                if (pInfo->sei_information.recovery_point)
+                {
+                    pInfo->img.recovery_point_found |= 2;
+
+                    //// Enable the RP recovery if no IDR ---Cisco
+                    if ((pInfo->img.recovery_point_found & 1)==0)
+                        pInfo->sei_rp_received = 1;
+                }
+            }
+            else
+            {
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            }
+#ifdef DUMP_HEADER_INFO
+            dump_pps(&(pInfo->active_PPS));
+#endif
+        } else {
+            if (old_sps_id<MAX_NUM_SPS)
+                h264_Parse_Copy_Sps_From_DDR(pInfo, &(pInfo->active_SPS), old_sps_id);
+            if (old_pps_id<MAX_NUM_PPS)
+                h264_Parse_Copy_Pps_From_DDR(pInfo, &(pInfo->active_PPS), old_pps_id);
+        }
+
+    } //// End of PPS parsing
+    break;
+
+
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+
+        h264_parse_emit_eos(parent, pInfo);
+        h264_init_dpb(&(pInfo->dpb));
+
+        pInfo->is_current_workload_done=1;
+
+        /* picture level info which will always be initialized */
+        //h264_init_Info_under_sps_pps_level(pInfo);
+
+        ////reset the pInfo here
+        //viddec_h264_init(ctxt, (uint32_t *)parser->sps_pps_ddr_paddr, false);
+
+
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+#if 1
+        ///// primary_pic_type
+        {
+            uint32_t code = 0xff;
+            int32_t ret = 0;
+            ret = viddec_pm_get_bits(parent, (uint32_t *)&(code), 3);
+
+            if (ret != -1) {
+                //if(pInfo->got_start && (code == 0))
+                //{
+                //pInfo->img.recovery_point_found |= 4;
+                //}
+                pInfo->primary_pic_type_plus_one = (uint8_t)(code)+1;
+                status = H264_STATUS_OK;
+            }
+            pInfo->number_of_first_au_info_nal_before_first_slice++;
+            break;
+        }
+#endif
+
+    case h264_NAL_UNIT_TYPE_Reserved1:
+    case h264_NAL_UNIT_TYPE_Reserved2:
+    case h264_NAL_UNIT_TYPE_Reserved3:
+    case h264_NAL_UNIT_TYPE_Reserved4:
+    case h264_NAL_UNIT_TYPE_Reserved5:
+        status = H264_STATUS_OK;
+        pInfo->number_of_first_au_info_nal_before_first_slice++;
+        break;
+
+    case h264_NAL_UNIT_TYPE_filler_data:
+        status = H264_STATUS_OK;
+        break;
+    case h264_NAL_UNIT_TYPE_ACP:
+        break;
+    case h264_NAL_UNIT_TYPE_SPS_extension:
+    case h264_NAL_UNIT_TYPE_unspecified:
+    case h264_NAL_UNIT_TYPE_unspecified2:
+        status = H264_STATUS_OK;
+        //nothing
+        break;
+    default:
+        status = H264_STATUS_OK;
+        break;
+    }
+
+    //pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+    switch ( pInfo->nal_unit_type )
+    {
+    case h264_NAL_UNIT_TYPE_IDR:
+    case h264_NAL_UNIT_TYPE_SLICE:
+    case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
+    case h264_NAL_UNIT_TYPE_SPS:
+    case h264_NAL_UNIT_TYPE_PPS:
+    case h264_NAL_UNIT_TYPE_SEI:
+    case h264_NAL_UNIT_TYPE_EOSeq:
+    case h264_NAL_UNIT_TYPE_EOstream:
+    case h264_NAL_UNIT_TYPE_Reserved1:
+    case h264_NAL_UNIT_TYPE_Reserved2:
+    case h264_NAL_UNIT_TYPE_Reserved3:
+    case h264_NAL_UNIT_TYPE_Reserved4:
+    case h264_NAL_UNIT_TYPE_Reserved5:
+    {
+        pInfo->old_nal_unit_type = pInfo->nal_unit_type;
+        break;
+    }
+    default:
+        break;
+    }
+
+    return status;
+}
+
+
+
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+#ifndef VBP
+/*
+ * Report whether the parser has detected the start of a new frame.
+ * Returns 1 when pInfo->img.g_new_frame is set, 0 otherwise.
+ * Compiled only for the non-VBP (firmware workload) configuration.
+ */
+static uint32_t viddec_h264_is_frame_start(void *ctxt)
+{
+    struct h264_viddec_parser* parser = ctxt;
+    uint32_t ret = 0;
+
+    h264_Info * pInfo = &(parser->info);
+
+    if (pInfo->img.g_new_frame) {
+        ret = 1;
+    }
+
+    return ret;
+}
+#endif
+
+#ifndef VBP
+/*
+ * Decide whether the current workload is complete.
+ *
+ * parent                - parser-manager handle used to fetch the workload header
+ * ctxt                  - h264_viddec_parser context
+ * next_sc               - next start code; EOS or DISCONTINUITY forces completion
+ * codec_specific_errors - out: accumulated workload error flags for this frame
+ *
+ * Returns VIDDEC_PARSE_FRMDONE when the workload is finished (either the
+ * parser marked it done or the stream was forced to complete), otherwise
+ * VIDDEC_PARSE_SUCESS. Compiled only for the non-VBP configuration.
+ */
+uint32_t viddec_h264_wkld_done(void *parent, void *ctxt, unsigned int next_sc,
+                               uint32_t *codec_specific_errors)
+{
+    struct h264_viddec_parser* parser = ctxt;
+    uint32_t ret = VIDDEC_PARSE_SUCESS;
+    h264_Info * pInfo = &(parser->info);
+    uint8_t is_stream_forced_to_complete=false;
+
+    /* EOS or a discontinuity terminates the workload regardless of parser state. */
+    is_stream_forced_to_complete = (VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc);
+
+    if (is_stream_forced_to_complete || (pInfo->is_current_workload_done))
+    {
+        viddec_workload_t 		 *wl;
+        viddec_frame_attributes_t *attrs;
+
+        wl = viddec_pm_get_header( parent );
+        attrs = &wl->attrs;
+
+        /* Sanity-clamp implausible content dimensions (outside 32..2048) to the
+           minimum and flag the frame as not decodable. */
+        if ((attrs->cont_size.width < 32) || (attrs->cont_size.width > 2048) || (attrs->cont_size.height < 32) || (attrs->cont_size.height>2048))
+        {
+            attrs->cont_size.width = 32;
+            attrs->cont_size.height = 32;
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (FRAME << FIELD_ERR_OFFSET);
+        }
+
+        /* Report the current frame's errors, then rotate next-frame errors in. */
+        *codec_specific_errors = pInfo->wl_err_curr;
+        pInfo->wl_err_curr = pInfo->wl_err_next;
+        pInfo->wl_err_next = 0;
+
+        if (is_stream_forced_to_complete)
+        {
+            /* Flush any pending output before terminating the stream. */
+            h264_parse_emit_eos(parent, pInfo);
+        }
+        ret = VIDDEC_PARSE_FRMDONE;
+    }
+
+    return ret;
+}
+#endif
+
+#ifdef VBP
+/*
+ * Report the memory requirements of the H.264 parser.
+ * context_size: the parser's working context structure.
+ * persist_size: persistent (DDR) storage for all SPS and PPS sets plus the
+ *               per-SPS pic-order-count cycle offset tables.
+ */
+void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
+#else
+static void viddec_h264_get_context_size(viddec_parser_memory_sizes_t *size)
+#endif
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(struct h264_viddec_parser);
+    size->persist_size = MAX_NUM_SPS * sizeof(seq_param_set_all)
+                         + MAX_NUM_PPS * sizeof(pic_param_set)
+                         + MAX_NUM_SPS * sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE
+                         + sizeof(int32_t) * MAX_NUM_REF_FRAMES_IN_PIC_ORDER_CNT_CYCLE;
+}
+
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+/* ------------------------------------------------------------------------------------------ */
+#ifdef VBP
+/*
+ * Flush the decoded picture buffer without producing output, then reset it
+ * to its initial (empty) state so the next slice cannot reference stale
+ * frame stores. `parent` is unused here; `ctxt` is the parser context.
+ */
+void viddec_h264_flush(void *parent, void *ctxt)
+#else
+static void viddec_h264_flush(void *parent, void *ctxt)
+#endif
+{
+    int i;
+    struct h264_viddec_parser* parser = ctxt;
+    h264_Info * pInfo = &(parser->info);
+
+    /* just flush dpb and disable output */
+    h264_dpb_flush_dpb(pInfo, 0, pInfo->img.second_field, pInfo->active_SPS.num_ref_frames);
+
+    /* reset the dpb to the initial state, avoid parser store
+       wrong data to dpb in next slice parsing */
+    h264_DecodedPictureBuffer *p_dpb = &pInfo->dpb;
+    for (i = 0; i < NUM_DPB_FRAME_STORES; i++)
+    {
+        p_dpb->fs[i].fs_idc = MPD_DPB_FS_NULL_IDC;
+        p_dpb->fs_dpb_idc[i] = MPD_DPB_FS_NULL_IDC;
+    }
+    p_dpb->used_size = 0;
+    p_dpb->fs_dec_idc = MPD_DPB_FS_NULL_IDC;
+    p_dpb->fs_non_exist_idc = MPD_DPB_FS_NULL_IDC;
+
+    return;
+}
+
+#ifndef VBP
+/*
+ * Populate the generic parser ops vtable with the H.264 entry points.
+ * Compiled only for the non-VBP configuration, where the framework
+ * discovers the codec through this function.
+ */
+void viddec_h264_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init = viddec_h264_init;
+
+    ops->parse_syntax = viddec_h264_parse;
+    ops->get_cxt_size = viddec_h264_get_context_size;
+    ops->is_wkld_done = viddec_h264_wkld_done;
+    ops->is_frame_start = viddec_h264_is_frame_start;
+    ops->flush = viddec_h264_flush;
+    return;
+}
+#endif
+
diff --git a/mixvbp/vbp_plugin/h264/viddec_h264_workload.c b/mixvbp/vbp_plugin/h264/viddec_h264_workload.c
new file mode 100755
index 0000000..54c96db
--- /dev/null
+++ b/mixvbp/vbp_plugin/h264/viddec_h264_workload.c
@@ -0,0 +1,1195 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "h264.h"
+#include "h264parse.h"
+#include "viddec_fw_item_types.h"
+#include "h264parse_dpb.h"
+
+
+#include "viddec_fw_workload.h"
+#include <auto_eas/gen4_mfd.h>
+#include "viddec_pm_utils_bstream.h"
+
+// picture parameter 1
+#define PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT(w)            (((uint32_t)w)&0x1)
+#define PUT_BSD_PP1_SLICE_TYPE_BITS(w)                    ((((uint32_t)w)&0x7)<<1)
+#define PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(w)           ((((uint32_t)w)&0x3)<<4)
+#define PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(w)             ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(w)                ((((uint32_t)w)&0x3F)<<8)
+#define PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(w)                ((((uint32_t)w)&0x3F)<<16)
+
+// picture parameter 2
+#define PUT_BSD_PP2_CABAC_INIT_IDC_BITS(w)                (((uint32_t)w)&0x3)
+#define PUT_BSD_PP2_QP_BITS(w)                            ((((uint32_t)w)&0x3F)<<2)
+#define PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(w)               ((((uint32_t)w)&0x3)<<8)
+#define PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(w)          ((((uint32_t)w)&0xF)<<10)
+#define PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(w)              ((((uint32_t)w)&0xF)<<14)
+#define PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(w)                ((((uint32_t)w)&0x1)<<18)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(w)              ((((uint32_t)w)&0x1F)<<19)
+#define PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(w)            ((((uint32_t)w)&0x1F)<<24)
+
+
+// slice start parameter
+#define PUT_BSD_SS_START_ADDR_BITS(w)                      (((uint32_t)w)&0x7fff)         // 14:0  current slice start address
+#define PUT_BSD_SS_SKIP_FS_IDC_BITS(w)                    ((((uint32_t)w)&0x3f)<<16)      // [5:0], [4:0] frame store idc, [5] - 0: top-filed, 1: bottom field
+#define PUT_BSD_SS_SKIP_TYPE_BIT(w)                       ((((uint32_t)w)&0x1)<<24)       // 0: P-skip, 1: I-skip
+#define PUT_BSD_SS_SKIP_REWIND_BITS(w)                    ((((uint32_t)w)&0xf)<<28)       // number of MB or MBAFF pairs to rewind before skip
+
+//h264_dpb_init
+#define PUT_FRAME_WIDTH_MB_BITS(w)                        (((uint32_t)w)&0x7F)
+#define PUT_FRAME_HEIGHT_MB_BITS(w)                       ((((uint32_t)w)&0x7F)<<16)
+
+//dpb lut table init
+//#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w)             ((((uint32_t)w)&0x1F)<<8)
+
+//h264 img init
+#define PUT_BSD_IMAGE_STRUCTURE_BITS(w)                   (((uint32_t)w)&0x3)
+#define PUT_BSD_IMAGE_IDR_BIT(w)                          ((((uint32_t)w)&0x1)<<2)
+#define PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(w)             ((((uint32_t)w)&0x1)<<3)
+#define PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(w)     ((((uint32_t)w)&0x1)<<4)
+#define PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(w)  ((((uint32_t)w)&0x1)<<5)
+#define PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(w)            ((((uint32_t)w)&0x1)<<6)
+#define PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(w)          ((((uint32_t)w)&0x1)<<7)
+#define PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(w)             ((((uint32_t)w)&0x1F)<<8)
+
+#define PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(w)    ((((uint32_t)w)&0x1)<<13)
+#define PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(w)            ((((uint32_t)w)&0x1)<<14)
+#define PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(w)      ((((uint32_t)w)&0x1)<<15)
+#define PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT(w)            ((((uint32_t)w)&0x1)<<16)
+#define PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(w)             ((((uint32_t)w)&0xFF)<<17)
+#define PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(w)        ((((uint32_t)w)&0x1)<<25)
+
+
+extern void h264_dpb_store_previous_picture_in_dpb(h264_Info * pInfo,
+        int32_t NonExisting,
+        int32_t use_old);
+
+extern void h264_dpb_flush_dpb (h264_Info * pInfo,int32_t output_all, int32_t keep_complement, int32_t num_ref_frames);
+
+
+
+/*
+ * Translate sequence-level parser state into workload frame attributes.
+ * Sets the content size from the macroblock dimensions, clears the
+ * attributes that are filled in later at slice level, and shrinks the
+ * reported height by the SPS bottom-crop offset when frame cropping is
+ * signalled. NOTE(review): only the bottom crop is applied here — left,
+ * right and top crop offsets are not used; presumably handled elsewhere.
+ */
+void h264_translate_parser_info_to_frame_attributes(viddec_workload_t *wl, h264_Info *pInfo)
+{
+
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+
+
+    //// Cont_size: picture dimensions in pixels (16 pixels per macroblock)
+    attrs->cont_size.height       = pInfo->img.FrameHeightInMbs*16;
+    attrs->cont_size.width        = pInfo->img.PicWidthInMbs*16;
+
+    //// The following attributes will be updated in slice level
+    attrs->h264.used_for_reference = 0;
+    attrs->h264.top_field_first = 0;
+    attrs->h264.top_field_poc = 0;
+    attrs->h264.bottom_field_poc = 0;
+    attrs->h264.field_pic_flag = 0;
+
+#if 1
+/// Double check the size late!!!!!
+    //attrs->h264.cropped_size.width = pInfo->img.PicWidthInMbs*16;
+    //attrs->h264.cropped_size.height = pInfo->img.PicWidthInMbs*16;
+
+    if ( (pInfo->active_SPS.sps_disp.frame_cropping_flag) &&
+            (pInfo->active_SPS.sps_disp.chroma_format_idc < 4))
+    {
+        int32_t CropUnitX, CropUnitY;
+        int32_t SubWidthC, SubHeightC;
+
+        /* Crop units per H.264 spec 7.4.2.1.1: monochrome crops in luma
+           samples; otherwise derived from the chroma subsampling factors. */
+        if (pInfo->active_SPS.sps_disp.chroma_format_idc == 0)
+        {
+            CropUnitX = 1;
+            CropUnitY = 2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag;
+        }
+        else
+        {
+            SubWidthC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >> 1);
+            SubHeightC = 2 - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) >>1)
+                         - ((pInfo->active_SPS.sps_disp.chroma_format_idc - 1) & 0x1);
+            CropUnitX = SubWidthC;
+            CropUnitY = SubHeightC * (2 - pInfo->active_SPS.sps_disp.frame_mbs_only_flag);
+        }
+
+        /* Only shrink if the crop leaves a positive height. */
+        if ((int32_t)attrs->cont_size.height >(pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY))
+        {
+            attrs->cont_size.height -= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY);
+            //attrs->h264.cropped_size.height-= (pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset*CropUnitY);
+        }
+    }
+/// Pan-Scan Info
+
+#endif
+
+}
+
+
+/*
+ * Update the slice-level frame attributes of the active workload.
+ * Chooses the current or next workload header depending on push_to_cur,
+ * then derives the frame/field picture types from the packed pic_type
+ * bits of the frame store being decoded, and refreshes the reference
+ * flag, the top/bottom POCs, the top-field-first guess, and field_pic_flag.
+ */
+static void h264_parse_update_frame_attributes(void *parent, h264_Info *pInfo)
+{
+    viddec_workload_t        *wl_cur,  *wl_next;
+    viddec_frame_attributes_t *attrs;
+    uint8_t    frame_type=0;
+
+
+    if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        wl_cur = viddec_pm_get_header( parent );
+        attrs = &wl_cur->attrs;
+    }
+    else
+    {
+        wl_next = viddec_pm_get_next_header (parent);
+        attrs = &wl_next->attrs;
+    }
+
+    /////////update frame type
+    // The structure bit distinguishes a frame picture (one type) from a
+    // field pair (separate top/bottom types below).
+    if ((pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&(0x1 << FRAME_TYPE_STRUCTRUE_OFFSET))
+    {
+        frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_FRAME_OFFSET)) )>> FRAME_TYPE_FRAME_OFFSET;
+        switch (frame_type)
+        {
+        case FRAME_TYPE_IDR:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_IDR;
+            break;
+        case FRAME_TYPE_I:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_I;
+            break;
+        case FRAME_TYPE_P:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_P;
+            break;
+        case FRAME_TYPE_B:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_B;
+            break;
+        default:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+            break;
+        }
+
+        attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID;
+    }
+    else
+    {
+        // Field pair: top field type goes into frame_type ...
+        frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_TOP_OFFSET)) )>> FRAME_TYPE_TOP_OFFSET;
+        switch (frame_type)
+        {
+        case FRAME_TYPE_IDR:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_IDR;
+            break;
+        case FRAME_TYPE_I:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_I;
+            break;
+        case FRAME_TYPE_P:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_P;
+            break;
+        case FRAME_TYPE_B:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_B;
+            break;
+        default:
+            attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+            break;
+
+        }
+
+        // ... and the bottom field type into bottom_field_type.
+        frame_type = ( (pInfo->dpb.fs[pInfo->dpb.fs_dec_idc].pic_type)&((0x7 << FRAME_TYPE_BOTTOM_OFFSET)) )>> FRAME_TYPE_BOTTOM_OFFSET;
+        switch (frame_type)
+        {
+        case FRAME_TYPE_IDR:
+            attrs->bottom_field_type = VIDDEC_FRAME_TYPE_IDR;
+            break;
+        case FRAME_TYPE_I:
+            attrs->bottom_field_type = VIDDEC_FRAME_TYPE_I;
+            break;
+        case FRAME_TYPE_P:
+            attrs->bottom_field_type = VIDDEC_FRAME_TYPE_P;
+            break;
+        case FRAME_TYPE_B:
+            attrs->bottom_field_type = VIDDEC_FRAME_TYPE_B;
+            break;
+        default:
+            attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID;
+            break;
+
+        }
+    }
+
+    /////////update is_reference flag (sticky across slices of the picture)
+    attrs->h264.used_for_reference |= (pInfo->SliceHeader.nal_ref_idc == 0)? 0: 1;
+
+    /////////update POC
+    attrs->h264.top_field_poc = pInfo->img.toppoc;
+    attrs->h264.bottom_field_poc = pInfo->img.bottompoc;
+
+    //////// update TFF: top field displayed first when its POC is not later
+    if (attrs->h264.top_field_poc <= attrs->h264.bottom_field_poc) {
+        attrs->h264.top_field_first = 1;
+    } else {
+        attrs->h264.top_field_first = 0;
+    }
+
+    /////// update field_pic_flag
+    //attrs->h264.field_pic_flag |= (pInfo->SliceHeader.field_pic_flag << pInfo->SliceHeader.bottom_field_flag);
+    attrs->h264.field_pic_flag |= pInfo->SliceHeader.field_pic_flag;
+
+    return;
+}
+
+
+/*
+ * Pack slice-header fields into the three hardware slice words
+ * (h264_bsd_slice_p1, h264_bsd_slice_p2, h264_bsd_slice_start) using the
+ * PUT_BSD_* bit-layout macros defined at the top of this file.
+ */
+static void h264_fill_slice_data(h264_Info *pInfo, h264_slice_data * p_slice_data)
+{
+    uint32_t data=0;
+    uint32_t first_mb_in_slice =0;
+
+
+
+    ////////////fill pic parameters 1: disposable flag, slice type,
+    //           weighted-prediction controls, active reference list sizes
+    data =   PUT_BSD_PP1_IMG_DISPOSABLE_FLAG_BIT( (pInfo->SliceHeader.nal_ref_idc == 0) ) +
+             PUT_BSD_PP1_SLICE_TYPE_BITS(pInfo->SliceHeader.slice_type) +
+             PUT_BSD_PP1_WEIGHTED_BIPRED_IDC_BITS(pInfo->active_PPS.weighted_bipred_idc) +
+             PUT_BSD_PP1_WEIGHTED_PRED_FLAG_BIT(pInfo->active_PPS.weighted_pred_flag)  +
+             PUT_BSD_PP1_NUM_REF_IDX_L0_BITS(pInfo->SliceHeader.num_ref_idx_l0_active)  +
+             PUT_BSD_PP1_NUM_REF_IDX_L1_BITS(pInfo->SliceHeader.num_ref_idx_l1_active);
+    p_slice_data->h264_bsd_slice_p1 = data;
+
+
+    ///////////fill pic parameters 2: CABAC init, slice QP (delta + PPS base
+    //          + 26), deblocking controls, direct mode, chroma QP offsets
+    data =   PUT_BSD_PP2_CABAC_INIT_IDC_BITS(pInfo->SliceHeader.cabac_init_idc) +
+             PUT_BSD_PP2_QP_BITS( (pInfo->SliceHeader.slice_qp_delta + pInfo->active_PPS.pic_init_qp_minus26+26) ) +
+             PUT_BSD_PP2_DISABLE_DBF_IDC_BITS(pInfo->SliceHeader.disable_deblocking_filter_idc) +
+             PUT_BSD_PP2_ALPHA_C0_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_alpha_c0_offset_div2) +
+             PUT_BSD_PP2_BETA_OFFSET_DIV2_BITS(pInfo->SliceHeader.slice_beta_offset_div2) +
+             PUT_BSD_PP2_IMG_DIRECT_TYPE_BIT(pInfo->SliceHeader.direct_spatial_mv_pred_flag) +
+             PUT_BSD_PP2_CHROMA_QP_OFFSET_BITS(pInfo->active_PPS.chroma_qp_index_offset) +
+             PUT_BSD_PP2_CHROMA_QP_OFFSET_2_BITS(pInfo->active_PPS.second_chroma_qp_index_offset);
+
+    p_slice_data->h264_bsd_slice_p2 = data;
+
+    /////////fill slice start: first MB address plus skip/rewind controls
+    first_mb_in_slice = pInfo->SliceHeader.first_mb_in_slice;
+
+    data =   PUT_BSD_SS_START_ADDR_BITS(first_mb_in_slice);
+    data |=  PUT_BSD_SS_SKIP_FS_IDC_BITS( pInfo->h264_list_replacement) |
+             PUT_BSD_SS_SKIP_TYPE_BIT(0) |
+             PUT_BSD_SS_SKIP_REWIND_BITS((pInfo->img.MbaffFrameFlag? 2: 3));
+
+    p_slice_data->h264_bsd_slice_start = data;
+
+}
+
+
+/*
+ * Emit workload items for the six 4x4 scaling lists (i = 0..5).
+ * For each list the source is resolved in precedence order: fall-back
+ * (FB_QM) by default, overridden by an SPS list when present, which is in
+ * turn overridden by a PPS list. A present list also sets the matching bit
+ * in pInfo->qm_present_list. SPS/PPS lists are emitted as two 8-byte
+ * workload items; DEFAULT_QM emits a single zero payload; FB_QM emits
+ * nothing.
+ */
+static void h264_parse_emit_4X4_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+
+    uint32_t                   i=0, n_items=0;
+    uint32_t                   qm_type=0;
+
+
+    for ( i = 0; i < 6; i++ )
+    {
+        qm_type = FB_QM;
+        if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+        {
+            if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_SPS.UseDefaultScalingMatrix4x4Flag[i]) {
+                    qm_type = DEFAULT_QM;
+                } else {
+                    qm_type = SPS_QM;
+                }
+            }
+        }
+
+        if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+        {
+            if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+                if (pInfo->active_PPS.UseDefaultScalingMatrix4x4Flag[i]) {
+                    qm_type = DEFAULT_QM;
+                } else {
+                    qm_type = PPS_QM;
+                }
+            }
+            else
+            {
+                // PPS matrix present but this list absent: lists other than
+                // 0 and 3 revert to the fall-back matrix.
+                if ((i != 0) && (i != 3) && (i < 6)) {
+                    pInfo->qm_present_list  &= ~((0x1)<<i);
+                    qm_type = FB_QM;
+                }
+            }
+        }
+
+
+        ///////////////////// Emit out Scaling_matrix//////////////////////
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+        //    data_offset    0x aa  bb  cc  dd
+        //    bb    is the workload item offset
+        //    cc    is the qm_type
+        //    dd    is the matrix number
+        //
+        switch (qm_type)
+        {
+        case (SPS_QM): {
+
+            // Two items of 8 coefficients each, packed little-endian per word.
+            for (n_items =0; n_items<2; n_items++)
+            {
+                wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+                wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+0]))+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+3]))<<24);
+                wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+4]))+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList4x4[i][n_items*8+7]))<<24);
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+
+            break;
+        }
+        case (PPS_QM): {
+
+            for (n_items =0; n_items<2; n_items++)
+            {
+                wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+                wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+0]))+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+1]))<<8)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+2]))<<16)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+3]))<<24);
+                wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+4]))+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+5]))<<8)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+6]))<<16)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList4x4[i][n_items*8+7]))<<24);
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+
+            break;
+        }
+        case (DEFAULT_QM):
+        {
+            // Default matrix: hardware supplies the values, payload is zero.
+            wi.data.data_offset = i + (DEFAULT_QM << 4);
+            wi.data.data_payload[0] = 0;
+            wi.data.data_payload[1] = 0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            break;
+        }
+        default:
+        {
+            break;
+        }
+        }
+    }
+
+}
+
+/*
+ * Emit workload items for the two 8x8 scaling lists (i = 6..7; list index
+ * i-6 into the ScalingList8x8 arrays). Same SPS-then-PPS precedence as the
+ * 4x4 variant; SPS/PPS lists are emitted as eight 8-byte workload items,
+ * DEFAULT_QM as a single zero payload, FB_QM emits nothing.
+ * NOTE(review): unlike the 4x4 variant there is no else-branch clearing
+ * qm_present_list when the PPS matrix is present but this list is absent —
+ * confirm this asymmetry is intentional.
+ */
+static void h264_parse_emit_8X8_scaling_matrix( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+
+    uint32_t                   i=0, n_items=0;
+    uint32_t                   qm_type=0;
+
+    for ( i = 6; i < 8; i++ )
+    {
+        qm_type = FB_QM;
+        if (pInfo->active_SPS.seq_scaling_matrix_present_flag) // check sps first
+        {
+            if (pInfo->active_SPS.seq_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_SPS.UseDefaultScalingMatrix8x8Flag[i-6])
+                {
+                    qm_type = DEFAULT_QM;
+                }
+                else
+                {
+                    qm_type = SPS_QM;
+                }
+            }
+        }
+
+        if (pInfo->active_PPS.pic_scaling_matrix_present_flag) // then check pps
+        {
+            if (pInfo->active_PPS.pic_scaling_list_present_flag[i])
+            {
+                pInfo->qm_present_list |= ((0x1)<<i);
+
+                if (pInfo->active_PPS.UseDefaultScalingMatrix8x8Flag[i-6])
+                {
+                    qm_type = DEFAULT_QM;
+                }
+                else
+                {
+                    qm_type = PPS_QM;
+                }
+            }
+        }
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_SCALING_MATRIX;
+
+        //    data_offset    0x aa  bb  cc  dd
+        //    bb    is the workload item offset
+        //    cc    is the qm_type
+        //    dd    is the matrix number
+        //
+        switch (qm_type)
+        {
+        case (SPS_QM):
+        {
+            // Eight items of 8 coefficients each, packed little-endian per word.
+            for (n_items =0; n_items<8; n_items++)
+            {
+                wi.data.data_offset = i + (SPS_QM << 4) + (n_items <<8);
+                wi.data.data_payload[0] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+0]))+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+                wi.data.data_payload[1] = ((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+4]))+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+                                          (((uint32_t)(pInfo->active_SPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+            break;
+        }
+        case (PPS_QM):
+        {
+            for (n_items =0; n_items<8; n_items++)
+            {
+                wi.data.data_offset = i + (PPS_QM << 4) + (n_items <<8);
+                wi.data.data_payload[0] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+0]))+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+1]))<<8)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+2]))<<16)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+3]))<<24);
+                wi.data.data_payload[1] = ((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+4]))+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+5]))<<8)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+6]))<<16)+
+                                          (((uint32_t)(pInfo->active_PPS.ScalingList8x8[i-6][n_items*8+7]))<<24);
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+            break;
+        }
+        case (DEFAULT_QM):
+        {
+            // Default matrix: hardware supplies the values, payload is zero.
+            wi.data.data_offset = i + (DEFAULT_QM << 4);
+            wi.data.data_payload[0] = 0;
+            wi.data.data_payload[1] = 0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            break;
+        }
+        default: {
+            break;
+        }
+        }
+    }
+
+}
+
+
+
+/*
+ * Pack picture-level parser state into the hardware picture words:
+ * h264_dpb_init (frame dimensions in MBs), h264_cur_bsd_img_init
+ * (structure/IDR/MBAFF/entropy/QM flags plus the target frame store idc),
+ * and the top/bottom POC words selected by the picture structure.
+ */
+static void h264_fill_pic_data(h264_Info *pInfo, h264_pic_data * p_pic_data)
+{
+    uint32_t data=0;
+    uint32_t dec_idc =0;
+    uint32_t frame_structure =0;
+
+    //fill h264_dpb_init
+    data =   PUT_FRAME_WIDTH_MB_BITS(pInfo->dpb.PicWidthInMbs) +
+             PUT_FRAME_HEIGHT_MB_BITS(pInfo->dpb.FrameHeightInMbs);
+
+    p_pic_data->h264_dpb_init = data;
+
+    ////////////////////////////////fill current pic info
+    data = 0;
+    dec_idc = pInfo->dpb.fs_dec_idc;
+    frame_structure = pInfo->img.structure;
+    // FRAME is remapped to 0 for the 2-bit hardware structure field.
+    if (frame_structure == FRAME)
+        frame_structure=0;
+    //data =  PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+    //p_pic_data->h264_cur_bsd_img_init= data;
+
+    data  =  PUT_BSD_IMAGE_STRUCTURE_BITS(frame_structure)  +
+             PUT_BSD_IMAGE_IDR_BIT(pInfo->nal_unit_type == h264_NAL_UNIT_TYPE_IDR) +
+             PUT_BSD_IMAGE_MBAFF_FRAME_FLAG_BIT(pInfo->img.MbaffFrameFlag) +
+             PUT_BSD_IMAGE_ENTROPY_CODING_MODE_FLAG_BIT(pInfo->active_PPS.entropy_coding_mode_flag) +
+             PUT_BSD_IMAGE_CONSTRAINED_INTRA_PRED_FLAG_BIT(pInfo->active_PPS.constrained_intra_pred_flag) +
+             PUT_BSD_IMG_FRAME_MBS_ONLY_FLAG_BIT(pInfo->active_SPS.sps_disp.frame_mbs_only_flag) +
+             PUT_BSD_IMG_DIRECT_8X8_INFER_FLAG_BIT(pInfo->active_SPS.sps_disp.direct_8x8_inference_flag) +
+             PUT_HPD_BSD_IMG_TRANSFORM_8X8_MODE_FLAG_BIT(pInfo->active_PPS.transform_8x8_mode_flag) +
+             PUT_HPD_BSD_IMG_MONOCHROME_FLAG_BIT(((pInfo->active_SPS.sps_disp.chroma_format_idc==0)? 0x1: 0x0)) +
+             PUT_HPD_BSD_IMG_GREY_NONEXISTING_FLAG_BIT(0x0) +
+             PUT_HPD_BSD_IMG_QM_PRESENT_FLAG_BIT((pInfo->active_PPS.pic_scaling_matrix_present_flag||pInfo->active_SPS.seq_scaling_matrix_present_flag)) +
+             PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->qm_present_list) +
+             PUT_HPD_BSD_IMG_MONOCHROME_PWT_FLAG_BIT(0x1) +
+             PUT_BSD_IMAGE_FRAME_STORE_IDC_BITS(pInfo->dpb.fs[dec_idc].fs_idc);
+
+    p_pic_data->h264_cur_bsd_img_init= data;
+
+    //to do: add qm list
+    //PUT_HPD_BSD_IMG_QM_LIST_FLAGS_BITS(pInfo->img.q .qm_present_list) +
+    //printf("structure = %d, tpoc = %d, bpoc = %d\n", pInfo->img.structure, pInfo->img.toppoc, pInfo->img.bottompoc);
+
+    // Only the POC(s) belonging to the coded structure are valid;
+    // the other word is zeroed.
+    if (pInfo->img.structure == FRAME)
+    {
+        // Write down POC
+        p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc;
+        p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc;
+    } else if (pInfo->img.structure == TOP_FIELD)
+    {
+        // Write down POC
+        p_pic_data->h264_cur_mpr_tf_poc = pInfo->img.toppoc;
+        p_pic_data->h264_cur_mpr_bf_poc = 0;
+    }
+    else if (pInfo->img.structure ==  BOTTOM_FIELD)
+    {
+        // Write down POC
+        p_pic_data->h264_cur_mpr_tf_poc = 0;
+        p_pic_data->h264_cur_mpr_bf_poc = pInfo->img.bottompoc;
+    }
+    else
+    {
+        // Write down POC
+        p_pic_data->h264_cur_mpr_tf_poc = 0;
+        p_pic_data->h264_cur_mpr_bf_poc = 0;
+    }
+
+    return;
+}
+
+// Emit sequence-level metadata into the workload, but only when the active
+// SPS has changed (pInfo->Is_SPS_updated): the SPS fields themselves, the
+// optional cropping rectangle, the optional VUI display parameters, and the
+// optional VUI timing info. Each item is appended to the current workload
+// when pInfo->push_to_cur is set, otherwise to the next one. Clears
+// Is_SPS_updated once everything has been emitted.
+static void h264_parse_emit_sps(void *parent, h264_Info *pInfo)
+{
+    viddec_workload_item_t     wi;
+
+    if (pInfo->Is_SPS_updated)
+    {
+        // --- Core sequence info ---
+        viddec_fw_reset_workload_item(&wi);
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+        viddec_fw_h264_sps_set_profile_idc(&(wi.h264_sps), pInfo->active_SPS.profile_idc);
+        viddec_fw_h264_sps_set_level_idc(&(wi.h264_sps), pInfo->active_SPS.level_idc);
+        viddec_fw_h264_sps_set_chroma_format_idc(&(wi.h264_sps), pInfo->active_SPS.sps_disp.chroma_format_idc);
+        viddec_fw_h264_sps_set_num_ref_frames(&(wi.h264_sps), pInfo->active_SPS.num_ref_frames);
+        viddec_fw_h264_sps_set_gaps_in_frame_num_value_allowed_flag(&(wi.h264_sps), pInfo->active_SPS.gaps_in_frame_num_value_allowed_flag);
+        viddec_fw_h264_sps_set_frame_mbs_only_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_mbs_only_flag);
+        viddec_fw_h264_sps_set_frame_cropping_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.frame_cropping_flag);
+        viddec_fw_h264_sps_set_vui_parameters_present_flag(&(wi.h264_sps), pInfo->active_SPS.sps_disp.vui_parameters_present_flag);
+        wi.h264_sps.pic_width_in_mbs_minus1 = pInfo->active_SPS.sps_disp.pic_width_in_mbs_minus1;
+        wi.h264_sps.pic_height_in_map_units_minus1 = pInfo->active_SPS.sps_disp.pic_height_in_map_units_minus1;
+
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+
+        // --- Optional frame cropping rectangle ---
+        viddec_fw_reset_workload_item(&wi);
+        if (pInfo->active_SPS.sps_disp.frame_cropping_flag)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_CROPPING;
+            viddec_fw_h264_cropping_set_left(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_left_offset);
+            viddec_fw_h264_cropping_set_right(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_right_offset);
+            viddec_fw_h264_cropping_set_top(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_top_offset);
+            viddec_fw_h264_cropping_set_bottom(&(wi.h264_cropping), pInfo->active_SPS.sps_disp.frame_crop_rect_bottom_offset);
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+        // --- Optional VUI (display) parameters ---
+        viddec_fw_reset_workload_item(&wi);
+        if (pInfo->active_SPS.sps_disp.vui_parameters_present_flag == 1)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+            viddec_fw_h264_vui_set_aspect_ratio_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag);
+            viddec_fw_h264_vui_set_video_signal_type_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag);
+            viddec_fw_h264_vui_set_pic_struct_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.pic_struct_present_flag);
+            viddec_fw_h264_vui_set_timing_info_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag);
+            viddec_fw_h264_vui_set_nal_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag);
+            viddec_fw_h264_vui_set_vcl_hrd_parameters_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag);
+
+            // SAR width/height are only meaningful for the Extended_SAR idc.
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag == 1)
+            {
+                viddec_fw_h264_vui_set_aspect_ratio_idc(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc);
+                if (h264_AR_Extended_SAR == pInfo->active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc)
+                {
+                    viddec_fw_h264_vui_set_sar_width(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_width);
+                    viddec_fw_h264_vui_set_sar_height(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.sar_height);
+                }
+            }
+
+
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+            {
+                viddec_fw_h264_vui_set_colour_description_present_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag);
+                if (pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+                {
+                    viddec_fw_h264_vui_set_colour_primaries(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.colour_primaries);
+                    viddec_fw_h264_vui_set_transfer_characteristics(&(wi.h264_vui),  pInfo->active_SPS.sps_disp.vui_seq_parameters.transfer_characteristics);
+                }
+                viddec_fw_h264_vui_set_video_format(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.video_format);
+            }
+
+            if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+            {
+                viddec_fw_h264_vui_set_fixed_frame_rate_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.fixed_frame_rate_flag);
+            }
+
+            // low_delay_hrd_flag is only present in the bitstream when either
+            // HRD parameter set was signalled.
+            if ( (pInfo->active_SPS.sps_disp.vui_seq_parameters.nal_hrd_parameters_present_flag == 1)
+                    || (pInfo->active_SPS.sps_disp.vui_seq_parameters.vcl_hrd_parameters_present_flag == 1))
+            {
+                viddec_fw_h264_vui_set_low_delay_hrd_flag(&(wi.h264_vui), pInfo->active_SPS.sps_disp.vui_seq_parameters.low_delay_hrd_flag);
+            }
+
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+
+        // --- Optional VUI timing info (frame-rate derivation) ---
+        viddec_fw_reset_workload_item(&wi);
+
+        if (pInfo->active_SPS.sps_disp.vui_seq_parameters.timing_info_present_flag == 1)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_VUI_TIMING_INFO;
+
+            wi.h264_vui_time_info.num_units_in_tick = pInfo->active_SPS.sps_disp.vui_seq_parameters.num_units_in_tick;
+            wi.h264_vui_time_info.time_scale = pInfo->active_SPS.sps_disp.vui_seq_parameters.time_scale;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+        // SPS consumed; do not re-emit until the parser flags a new update.
+        pInfo->Is_SPS_updated =0;
+
+    }
+
+    return;
+}
+
+
+
+
+// Pack reference picture list 0 or 1 (selected by list_id) into
+// VIDDEC_WORKLOAD_H264_REFR_LIST_0/1 workload items. Each of the 32 slots
+// is one byte: the list entry (masked to 7 bits) for a valid reference,
+// pInfo->h264_list_replacement for a non-existent ("gap") frame, or 0x80
+// to mark an unused slot. Bytes are packed little-end-first into 32-bit
+// words; one workitem carrying two words is appended per 8 slots.
+// Emits nothing when the slice type has no active entries for this list.
+static void h264_parse_emit_ref_list( void *parent, h264_Info *pInfo, uint32_t list_id)
+{
+    uint32_t  i=0, nitems=0, byte_index=0, data=0, data_writed=0;
+    uint8_t    *p_list;
+    viddec_workload_item_t     wi;
+
+    // NOTE(review): wi is not cleared via viddec_fw_reset_workload_item()
+    // here, unlike the other emit paths; presumably every field consumed
+    // downstream (vwi_type, data.data_offset, data.data_payload[0..1]) is
+    // assigned below before the first append — confirm.
+    if (0 == list_id)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_0;
+
+        // List 0 is active for P and B slices. A reordered list from the
+        // slice header takes precedence over the default DPB list.
+        if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+            if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list0;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_0;
+            }
+        }
+        else
+        {
+            nitems =0;
+            p_list = pInfo->dpb.listX_0;
+        }
+    }
+    else
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_REFR_LIST_1;
+
+        // List 1 is only active for B slices.
+        if ( h264_PtypeB==pInfo->SliceHeader.slice_type)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l1_active;
+            if (pInfo->SliceHeader.sh_refpic_l1.ref_pic_list_reordering_flag)
+            {
+                p_list = pInfo->slice_ref_list1;
+            }
+            else
+            {
+                p_list = pInfo->dpb.listX_1;
+            }
+        }
+        else
+        {
+            nitems = 0;
+            p_list = pInfo->dpb.listX_1;
+        }
+
+    }
+
+    // Nothing to emit for this list (e.g. I slice, or list 1 on a P slice).
+    if (0 == nitems)
+    {
+        return;
+    }
+
+    byte_index =0;
+    data_writed=0;
+
+
+    for (i=0; i < 32; i++)
+    {
+        if (byte_index == 0) data = 0;
+
+        if (i<nitems)
+        {
+            // Low 5 bits of the list entry select the DPB frame store.
+            if ( viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[ (p_list[i]&0x1f) ])))
+            {
+                // Substitute the precomputed replacement id for gap frames.
+                data |= (pInfo->h264_list_replacement) << byte_index;
+            }
+            else
+            {
+                data |= (p_list[i] & 0x7f) << byte_index;
+            }
+        }
+        else
+        {
+            // 0x80 flags an unused slot.
+            data |= (0x80) << byte_index;
+        }
+
+
+        // Word complete every 4 bytes; append a workitem after every
+        // second word so each item carries a 64-bit payload pair.
+        if (byte_index == 24)
+        {
+            byte_index = 0;
+            wi.data.data_offset = data_writed&(~0x1);
+            wi.data.data_payload[data_writed&0x1]=data;
+
+            data =0;
+
+            if (data_writed&0x1)
+            {
+                //cur is empty, fill new frame in cur
+                viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+            }
+            data_writed ++;
+        }
+        else
+        {
+            byte_index += 8;
+        }
+    }
+
+}
+
+
+
+// Emit everything needed for the current slice: updated frame attributes,
+// error status bits for corrupt slices, reference lists 0/1, the packed
+// slice register data, the optional prediction-weight-table bit offsets
+// and ES bytes, and finally the slice pixel (ES) data itself.
+void h264_parse_emit_current_slice( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+    h264_slice_data 				slice_data;
+
+    uint32_t		i=0, nitems=0, data=0;
+    uint32_t 	bits_offset =0, byte_offset =0;
+    uint8_t    	is_emul =0;
+
+    ////////////////////// Update frame attributes/////////////////
+    h264_parse_update_frame_attributes(parent,pInfo);
+
+
+    if (pInfo->SliceHeader.sh_error) {
+        // Error type definition, refer to viddec_fw_common_defs.h
+        //		if error in top field, VIDDEC_FW_WORKLOAD_ERR_TOPFIELD			= (1 << 17)
+        //		if error in bottom field, VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD	   = (1 << 18)
+        //		if this is frame based, both 2 bits should be set
+
+        // Record the error on whichever workload this slice will land in.
+        if (pInfo->push_to_cur) {
+            pInfo->wl_err_curr |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_curr |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+        } else {
+            pInfo->wl_err_next |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+            pInfo->wl_err_next |= (pInfo->SliceHeader.structure << FIELD_ERR_OFFSET);
+        }
+    }
+
+
+    ////////////////////// Update Reference list //////////////////
+    // Find the first reference entry whose DPB frame store actually exists;
+    // its id (with bit 7 set) is later substituted for any non-existent
+    // entries when the lists are packed in h264_parse_emit_ref_list().
+    if ( (h264_PtypeB==pInfo->SliceHeader.slice_type)||(h264_PtypeP==pInfo->SliceHeader.slice_type) )
+    {
+        if (pInfo->SliceHeader.sh_refpic_l0.ref_pic_list_reordering_flag)
+        {
+            nitems = pInfo->SliceHeader.num_ref_idx_l0_active;
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->slice_ref_list0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->slice_ref_list0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+        else
+        {
+            nitems = pInfo->dpb.listXsize[0];
+
+            for (i=0; i<nitems; i++)
+            {
+                if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[pInfo->dpb.listX_0[i]&0x1f]))==0)
+                {
+                    pInfo->h264_list_replacement = (pInfo->dpb.listX_0[i]&0xFF)|0x80;
+                    break;
+                }
+            }
+        }
+
+    }
+    else
+    {
+        nitems =0;
+    }
+    /////file ref list 0
+    h264_parse_emit_ref_list(parent, pInfo, 0);
+
+    /////file ref list 1
+    h264_parse_emit_ref_list(parent, pInfo, 1);
+
+    ///////////////////////////////////// Slice Data ////////////////////////////////
+    h264_fill_slice_data(pInfo, &slice_data);
+
+    wi.vwi_type = VIDDEC_WORKLOAD_H264_SLICE_REG;
+
+    wi.data.data_offset = slice_data.h264_bsd_slice_start;
+    wi.data.data_payload[0] = slice_data.h264_bsd_slice_p1;
+    wi.data.data_payload[1] = slice_data.h264_bsd_slice_p2;
+
+    //cur is empty, fill new frame in cur
+    viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+
+    ///////////////////////////predict weight table item and data if have///////////////////////////
+    // data_offset carries the PWT byte length; the misc tags carry the raw
+    // ES bytes of the weight table for the decoder to re-parse.
+    if (pInfo->h264_pwt_enabled)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_BITS_OFFSET;
+        wi.data.data_offset = pInfo->h264_pwt_end_byte_offset- pInfo->h264_pwt_start_byte_offset+1;
+        wi.data.data_payload[0] = pInfo->h264_pwt_start_bit_offset;
+        wi.data.data_payload[1] = pInfo->h264_pwt_end_bit_offset;
+
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent , &wi, false);
+
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,1);
+        }
+        else
+        {
+            viddec_pm_append_workitem( parent , &wi, true);
+
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_PWT_ES_BYTES;
+            wi.es.es_flags = 0;
+            viddec_pm_append_misc_tags(parent, pInfo->h264_pwt_start_byte_offset, pInfo->h264_pwt_end_byte_offset,&wi,0);
+        }
+    }
+
+
+    ////////////////////////////////// Update ES Buffer for Slice ///////////////////////
+    viddec_pm_get_au_pos(parent, &bits_offset, &byte_offset, &is_emul);
+
+    //OS_INFO("DEBUG---entropy_coding_mode_flag:%d, bits_offset: %d\n", pInfo->active_PPS.entropy_coding_mode_flag, bits_offset);
+
+    if (pInfo->active_PPS.entropy_coding_mode_flag)
+    {
+        // CABAC: consume the remaining bits of the current byte so the
+        // slice data handed to the decoder starts byte-aligned.
+        if (0!=bits_offset)  {
+            viddec_pm_get_bits(parent, &data, 8-bits_offset);
+        }
+    }
+    else
+    {
+        // CAVLC: slice data may start mid-byte; tell the decoder the bit
+        // offset instead of consuming bits.
+        if (0!=bits_offset)  {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_SH_BITS_OFFSET;
+            wi.data.data_offset = bits_offset;
+            wi.data.data_payload[0]=0;
+            wi.data.data_payload[1]=0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+    {
+        viddec_pm_append_pixeldata( parent );
+    }
+    else
+    {
+        viddec_pm_append_pixeldata_next( parent);
+    }
+
+    return;
+}
+
+
+// Emit the per-picture decoder data: 4x4 and 8x8 scaling matrices first,
+// then the filled h264_pic_data structure dumped verbatim as a series of
+// VIDDEC_WORKLOAD_H264_PIC_REG items, two 32-bit words per item, with
+// data_offset giving the byte offset of each pair inside the struct.
+void h264_parse_emit_current_pic( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+
+    const uint32_t             *pl;
+    uint32_t                   i=0,nitems=0;
+
+    h264_pic_data pic_data;
+
+    pInfo->qm_present_list=0;
+
+    h264_parse_emit_4X4_scaling_matrix(parent, pInfo);
+    h264_parse_emit_8X8_scaling_matrix(parent, pInfo);
+
+    h264_fill_pic_data(pInfo, &pic_data);
+
+    // How many payloads must be generated
+    nitems = (sizeof(h264_pic_data) + 7) / 8; // In QWORDs rounded up
+
+    pl = (const uint32_t *) &pic_data;
+
+    // Dump the struct to an array of workitems. NOTE(review): because
+    // nitems rounds up, the final pl[1] read may touch up to 4 bytes past
+    // the struct when its size is not a multiple of 8 — presumably padding
+    // makes this benign; confirm. The (unsigned int) casts also truncate
+    // pointers on 64-bit builds; the small intra-struct offset still comes
+    // out right, but uintptr_t would be cleaner.
+    for ( i = 0; i < nitems; i++ )
+    {
+        wi.vwi_type           = VIDDEC_WORKLOAD_H264_PIC_REG;
+        wi.data.data_offset   = (unsigned int)pl - (unsigned int)&pic_data; // offset within struct
+        wi.data.data_payload[0] = pl[0];
+        wi.data.data_payload[1] = pl[1];
+        pl += 2;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+
+    return;
+}
+
+// Begin a new frame's workload. Decides whether this frame's items go to
+// the current workload (first frame in stream, or boundary already found
+// by a non-slice NAL) or to the next one, records frame attributes, emits
+// the SPS if updated, then walks the DPB bookkeeping lists: frames to
+// display, release, and drop; the active DPB entries and their POCs; and
+// finally flags the reference-frame allocation for the incoming frame.
+void h264_parse_emit_start_new_frame( void *parent, h264_Info *pInfo )
+{
+
+    viddec_workload_item_t     wi;
+    uint32_t                   i=0,nitems=0;
+
+    ///////////////////////// Frame attributes//////////////////////////
+
+    //Push data into current workload if first frame or frame_boundary already detected by non slice nal
+    if ( (pInfo->Is_first_frame_in_stream)||(pInfo->is_frame_boundary_detected_by_non_slice_nal))
+    {
+        viddec_workload_t			*wl_cur = viddec_pm_get_header( parent );
+        //pInfo->img.g_new_frame = 0;
+        pInfo->Is_first_frame_in_stream =0;
+        pInfo->is_frame_boundary_detected_by_non_slice_nal=0;
+        pInfo->push_to_cur = 1;
+        h264_translate_parser_info_to_frame_attributes(wl_cur, pInfo);
+    }
+    else  // move to cur if frame boundary detected by previous non slice nal, or move to next if not
+    {
+        viddec_workload_t        *wl_next = viddec_pm_get_next_header (parent);
+
+        pInfo->push_to_cur = 0;
+        h264_translate_parser_info_to_frame_attributes(wl_next, pInfo);
+
+        pInfo->is_current_workload_done=1;
+    }
+
+    ///////////////////// SPS/////////////////////
+    h264_parse_emit_sps(parent, pInfo);
+
+    /////////////////////display frames/////////////////////
+    // One workitem per frame the DPB wants shown, keyed by frame id.
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DISPLAY_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_RELEASE_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    /////////////////////flush frames (do not display)/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_dropped;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_REF_FRAME_DROPOUT_0 + pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_dropped[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_dropped =0;
+
+    /////////////////////update DPB frames/////////////////////
+    // Advertise every frame store currently in use (existing frames only).
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_DPB_ACTIVE_FRAME_0+fs_id;
+            wi.ref_frame.reference_id = fs_id;
+            wi.ref_frame.luma_phys_addr = 0;
+            wi.ref_frame.chroma_phys_addr = 0;
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    /////////////////////update dpb frames info (poc)/////////////////////
+    // Emit top/bottom POC per frame store; unused fields report 0.
+    nitems = pInfo->dpb.used_size;
+    for (i=0; i<nitems; i++)
+    {
+        uint8_t fs_id = pInfo->dpb.fs_dpb_idc[i];
+
+        if (viddec_h264_get_is_non_existent(&(pInfo->dpb.fs[fs_id])) == 0)
+        {
+            wi.vwi_type = VIDDEC_WORKLOAD_H264_DPB_FRAME_POC;
+            wi.data.data_offset = fs_id;
+            //printf("is_used = %d, tpoc = %d, bpoc = %d\n", pInfo->dpb.fs[fs_id].is_used, pInfo->dpb.fs[fs_id].top_field.poc, pInfo->dpb.fs[fs_id].bottom_field.poc);
+
+            switch (viddec_h264_get_is_used(&(pInfo->dpb.fs[fs_id])))
+            {
+            case (FRAME): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            case (TOP_FIELD): {
+                wi.data.data_payload[0] = pInfo->dpb.fs[fs_id].top_field.poc;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+
+            case (BOTTOM_FIELD): {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = pInfo->dpb.fs[fs_id].bottom_field.poc;
+                break;
+            };
+
+            default : {
+                wi.data.data_payload[0] = 0;
+                wi.data.data_payload[1] = 0;
+                break;
+            };
+            }
+            //cur is empty, fill new frame in cur
+            viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+        }
+    }
+
+    /////////////////////Alloc buffer for current Existing frame/////////////////////
+    // Mark the workload's reference-frame field with the id (5 bits) of the
+    // frame store to be allocated for this frame.
+    if (0!=pInfo->dpb.frame_numbers_need_to_be_allocated)
+    {
+        if (pInfo->push_to_cur)
+        {
+            viddec_workload_t        *wl_cur = viddec_pm_get_header (parent);
+            wl_cur->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+        else
+        {
+            viddec_workload_t        *wl_next = viddec_pm_get_next_header (parent);
+            wl_next->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (pInfo->dpb.frame_id_need_to_be_allocated & 0x1f);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_allocated =0;
+
+    return;
+}
+
+
+
+// End-of-stream handling: emit the EOS boundary workitem, close out any
+// dangling field left in the frame store being decoded, flush the whole
+// DPB, then emit display and release items for every frame still held.
+void h264_parse_emit_eos( void *parent, h264_Info *pInfo )
+{
+
+    uint32_t nitems=0, i=0;
+    viddec_workload_item_t	wi;
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_EOS_BEGIN_BOUNDARY;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+
+    //cur is empty, fill new frame in cur
+    viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+
+    //// Now we can flush out all frames in DPB for display
+
+    // A frame store that is not fully used (!= 3, i.e. not both fields)
+    // at EOS is a dangling field and must be marked before flushing.
+    if (MPD_DPB_FS_NULL_IDC != pInfo->dpb.fs_dec_idc)
+    {
+        if (viddec_h264_get_is_used(&(pInfo->dpb.fs[pInfo->dpb.fs_dec_idc])) != 3)
+        {
+            h264_dpb_mark_dangling_field(&pInfo->dpb, pInfo->dpb.fs_dec_idc);  //, DANGLING_TYPE_GAP_IN_FRAME
+        }
+    }
+
+
+    h264_dpb_store_previous_picture_in_dpb(pInfo, 0,0);
+    h264_dpb_flush_dpb(pInfo, 1, 0, pInfo->active_SPS.num_ref_frames);
+
+
+    /////////////////////display frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_displayed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_DISPLAY_FRAME_0 + pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_displayed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+        //cur is empty, fill new frame in cur
+        viddec_pm_append_workitem( parent, &wi , !pInfo->push_to_cur);
+    }
+    pInfo->dpb.frame_numbers_need_to_be_displayed =0;
+
+
+    /////////////////////release frames/////////////////////
+    nitems = pInfo->dpb.frame_numbers_need_to_be_removed;
+
+    for (i=0; i<nitems; i++)
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_EOS_RELEASE_FRAME_0 + pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.reference_id = pInfo->dpb.frame_id_need_to_be_removed[i];
+        wi.ref_frame.luma_phys_addr = 0;
+        wi.ref_frame.chroma_phys_addr = 0;
+
+        // NOTE(review): the two branches pass different error arguments to
+        // viddec_pm_set_next_frame_error_on_eos (constant NOTDECODABLE vs
+        // the accumulated wl_err_next) — looks intentional but asymmetric;
+        // confirm against the workload manager's expectations.
+        if (pInfo->push_to_cur) //cur is empty, fill new frame in cur
+        {
+            viddec_pm_append_workitem( parent, &wi , false);
+            viddec_pm_set_next_frame_error_on_eos(parent, VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+        }
+        else
+        {
+            viddec_pm_append_workitem( parent, &wi , true);
+            viddec_pm_set_next_frame_error_on_eos(parent, pInfo->wl_err_next);
+        }
+    }
+    pInfo->dpb.frame_numbers_need_to_be_removed =0;
+
+    return;
+}
+
+
+
+
+
+
diff --git a/mixvbp/vbp_plugin/mp2/include/mpeg2.h b/mixvbp/vbp_plugin/mp2/include/mpeg2.h
new file mode 100755
index 0000000..4600f39
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/include/mpeg2.h
@@ -0,0 +1,195 @@
+#ifndef _MPEG2_H
+#define _MPEG2_H
+
+/**
+ * mpeg2.h
+ * -------
+ * This file contains all the necessary enumerations and structures needed from
+ * the MPEG-2 Specification (ISO/IEC 13818-2). Field names follow the spec's
+ * syntax element names.
+ */
+
+/* Max Pan-Scan offsets */
+#define MPEG2_MAX_VID_OFFSETS 3
+
+/* Quantization matrix size (8x8 = 64 coefficients) */
+#define MPEG2_QUANT_MAT_SIZE  64
+
+/* MPEG2 Start Code Values (the byte following the 00 00 01 prefix) */
+typedef enum {
+    MPEG2_SC_PICTURE           = 0x00,
+    MPEG2_SC_SLICE_HDR         = 0x01,
+    MPEG2_SC_SLICE_MIN         = 0x01,
+    MPEG2_SC_SLICE_MAX         = 0xAF,
+    MPEG2_SC_USER_DATA         = 0xB2,
+    MPEG2_SC_SEQ_HDR           = 0xB3,
+    MPEG2_SC_SEQ_ERR           = 0xB4,
+    MPEG2_SC_EXT               = 0xB5,
+    MPEG2_SC_SEQ_END           = 0xB7,
+    MPEG2_SC_GROUP             = 0xB8,
+    MPEG2_SC_SYS_MIN           = 0xB9,
+    MPEG2_SC_SYS_MAX           = 0xFF,
+    MPEG2_SC_ALL               = 0xFF
+} mpeg2_start_codes;
+
+/* MPEG2 Extension Start Code ID (extension_start_code_identifier) */
+typedef enum {
+    MPEG2_EXT_SEQ              = 1,
+    MPEG2_EXT_SEQ_DISP         = 2,
+    MPEG2_EXT_QUANT_MAT        = 3,
+    MPEG2_EXT_COPYRIGHT        = 4,
+    MPEG2_EXT_SEQ_SCAL         = 5,
+    MPEG2_EXT_PIC_DISP         = 7,
+    MPEG2_EXT_PIC_CODING       = 8,
+    MPEG2_EXT_PIC_SPA_SCAL     = 9,
+    MPEG2_EXT_PIC_TEMP_SCAL    = 10,
+    MPEG2_EXT_ALL              = 11
+} mpeg2_ext_start_codes;
+
+/* MPEG2 Picture Coding Type Values (picture_coding_type) */
+typedef enum {
+    MPEG2_PC_TYPE_FORBIDDEN    = 0,
+    MPEG2_PC_TYPE_I            = 1,
+    MPEG2_PC_TYPE_P            = 2,
+    MPEG2_PC_TYPE_B            = 3
+} mpeg2_picture_type;
+
+/* MPEG2 Picture Structure Type Values (picture_structure) */
+typedef enum {
+    MPEG2_PIC_STRUCT_RESERVED  = 0,
+    MPEG2_PIC_STRUCT_TOP       = 1,
+    MPEG2_PIC_STRUCT_BOTTOM    = 2,
+    MPEG2_PIC_STRUCT_FRAME     = 3
+} mpeg2_picture_structure;
+
+/* MPEG2 Chroma Format Values (chroma_format) */
+typedef enum {
+    MPEG2_CF_RESERVED   = 0,
+    MPEG2_CF_420        = 1,
+    MPEG2_CF_422        = 2,
+    MPEG2_CF_444        = 3
+} mpeg2_chroma_format;
+
+/* MPEG2 Parser Structures */
+/* Sequence Header Info */
+struct mpeg2_sequence_hdr_info
+{
+    uint32_t   horizontal_size_value;
+    uint32_t   vertical_size_value;
+    uint32_t   aspect_ratio_information;
+    uint32_t   frame_rate_code;
+    uint32_t   bit_rate_value;
+    uint32_t   vbv_buffer_size_value;
+    uint32_t   constrained_parameters_flag;
+};
+
+/* Group of Pictures Header Info */
+struct mpeg2_gop_hdr_info
+{
+    uint32_t   closed_gop;
+    uint32_t   broken_link;
+};
+
+/* Picture Header */
+struct mpeg2_picture_hdr_info
+{
+    uint32_t   temporal_reference;
+    uint32_t   picture_coding_type;
+    uint32_t   full_pel_forward_vect;
+    uint32_t   forward_f_code;
+    uint32_t   full_pel_backward_vect;
+    uint32_t   backward_f_code;
+};
+
+/* Sequence Extension Info */
+struct mpeg2_sequence_ext_info
+{
+    uint32_t   profile_and_level_indication;
+    uint32_t   progressive_sequence;
+    uint32_t   chroma_format;
+    uint32_t   horizontal_size_extension;
+    uint32_t   vertical_size_extension;
+    uint32_t   bit_rate_extension;
+    uint32_t   vbv_buffer_size_extension;
+    uint32_t   frame_rate_extension_n;
+    uint32_t   frame_rate_extension_d;
+};
+
+/* Sequence Display Extension Info */
+struct mpeg2_sequence_disp_ext_info
+{
+    uint32_t  video_format;
+    uint32_t  colour_description;
+    uint32_t  colour_primaries;
+    uint32_t  transfer_characteristics;
+    uint32_t  display_horizontal_size;
+    uint32_t  display_vertical_size;
+};
+
+/* Sequence scalable extension Info */
+struct mpeg2_sequence_scal_ext_info
+{
+    uint32_t  scalable_mode;
+};
+
+/* Picture Coding Extension */
+struct mpeg2_picture_coding_ext_info
+{
+    uint32_t fcode00;
+    uint32_t fcode01;
+    uint32_t fcode10;
+    uint32_t fcode11;
+    uint32_t intra_dc_precision;
+    uint32_t picture_structure;
+    uint32_t top_field_first;
+    uint32_t frame_pred_frame_dct;
+    uint32_t concealment_motion_vectors;
+    uint32_t q_scale_type;
+    uint32_t intra_vlc_format;
+    uint32_t alternate_scan;
+    uint32_t repeat_first_field;
+    uint32_t chroma_420_type;
+    uint32_t progressive_frame;
+    uint32_t composite_display_flag;
+};
+
+/* Picture Display Extension (pan-scan offsets) */
+struct mpeg2_picture_disp_ext_info
+{
+    uint32_t frame_center_horizontal_offset[MPEG2_MAX_VID_OFFSETS];
+    uint32_t frame_center_vertical_offset[MPEG2_MAX_VID_OFFSETS];
+};
+
+/* Quantization Matrix Extension (which matrices are present) */
+struct mpeg2_quant_ext_info
+{
+    uint32_t load_intra_quantiser_matrix;
+    uint32_t load_non_intra_quantiser_matrix;
+    uint32_t load_chroma_intra_quantiser_matrix;
+    uint32_t load_chroma_non_intra_quantiser_matrix;
+};
+
+/* Quantization Matrices */
+struct mpeg2_quant_matrices
+{
+    uint8_t intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+    uint8_t non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+    uint8_t chroma_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+    uint8_t chroma_non_intra_quantiser_matrix[MPEG2_QUANT_MAT_SIZE];
+};
+
+/* MPEG2 Info: aggregate of all parsed headers/extensions for the stream */
+struct mpeg2_info
+{
+    struct mpeg2_sequence_hdr_info         seq_hdr;
+    struct mpeg2_gop_hdr_info              gop_hdr;
+    struct mpeg2_picture_hdr_info          pic_hdr;
+    struct mpeg2_sequence_ext_info         seq_ext;
+    struct mpeg2_sequence_disp_ext_info    seq_disp_ext;
+    struct mpeg2_sequence_scal_ext_info    seq_scal_ext;
+    struct mpeg2_picture_coding_ext_info   pic_cod_ext;
+    struct mpeg2_picture_disp_ext_info     pic_disp_ext;
+    struct mpeg2_quant_ext_info            qnt_ext;
+    struct mpeg2_quant_matrices            qnt_mat;
+};
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h b/mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h
new file mode 100755
index 0000000..22d6236
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/include/viddec_mpeg2.h
@@ -0,0 +1,231 @@
+#ifndef _VIDDEC_MPEG2_H
+#define _VIDDEC_MPEG2_H
+
+/**
+ * viddec_mpeg2.h
+ * --------------
+ * This header file contains all the necessary state information and function
+ * prototypes for the MPEG2 parser. This header also defines the debug macros
+ * used by the MPEG2 parser to emit debug messages in host mode.
+ */
+
+#include "viddec_fw_debug.h"
+#include "viddec_parser_ops.h"
+#include "mpeg2.h"
+
+/* Debug Print Macros */
+#define MPEG2_DEB(x...)        DEB("MPEG2_Parser: "x)
+#define MPEG2_FA_DEB(x...)     DEB("MPEG2_Frame_attribute: "x)
+
+/* Bit masks */
+#define MPEG2_BIT_MASK_11      0x7ff /* Used for masking Height and Width */
+#define MPEG2_BIT_MASK_8       0xff  /* Used fro masking start code byte */
+#define MPEG2_BIT_MASK_4       0xf   /* Used for masking Level */
+#define MPEG2_BIT_MASK_3       0x7   /* Used for masking Profile */
+
+/* MPEG2 Start code and prefix size */
+#define MPEG2_SC_AND_PREFIX_SIZE 32
+
+/* Number of DMEM Workload Items */
+#define MPEG2_NUM_DMEM_WL_ITEMS 2
+
+/* Number of Quantization Matrix Workload Items */
+#define MPEG2_NUM_QMAT_WL_ITEMS 32
+
+/* Maximum supported content size */
+#define MPEG2_MAX_CONTENT_WIDTH  2048
+#define MPEG2_MAX_CONTENT_HEIGHT 2048
+
+/* Others */
+#define MPEG2_BITS_EIGHT        8
+
+
+/* MPEG2 Stream Levels */
+typedef enum {
+    MPEG2_LEVEL_SEQ = 0,
+    MPEG2_LEVEL_GOP,
+    MPEG2_LEVEL_PIC
+} mpeg2_stream_levels;
+
+/* MPEG2 Headers and Extensions */
+typedef enum {
+    MPEG2_HEADER_NONE           = 0,
+    MPEG2_HEADER_SEQ            = 1 << 0,
+    MPEG2_HEADER_SEQ_EXT        = 1 << 1,
+    MPEG2_HEADER_SEQ_DISP_EXT   = 1 << 2,
+    MPEG2_HEADER_GOP            = 1 << 3,
+    MPEG2_HEADER_PIC            = 1 << 4,
+    MPEG2_HEADER_PIC_COD_EXT    = 1 << 5,
+    MPEG2_HEADER_PIC_DISP_EXT   = 1 << 6,
+    MPEG2_HEADER_SEQ_SCAL_EXT   = 1 << 7
+} mpeg2_headers;
+
+/* MPEG2 Parser Status Codes */
+typedef enum {
+    MPEG2_SUCCESS            = 0, /* No error */
+    MPEG2_FRAME_COMPLETE     = 1, /* Frame parsing complete found */
+    MPEG2_PARSE_ERROR        = 2, /* Failure in parsing */
+} mpeg2_status;
+
+/* MPEG2 Current Workload Status Codes */
+typedef enum {
+    MPEG2_WL_EMPTY          = 0,
+    MPEG2_WL_DMEM_DATA      = (1 << 0),
+    MPEG2_WL_REF_INFO       = (1 << 1),
+    MPEG2_WL_PARTIAL_SLICE  = (1 << 2),
+    MPEG2_WL_DANGLING_FIELD = (1 << 3),
+    MPEG2_WL_COMPLETE       = (1 << 4),
+    MPEG2_WL_MISSING_TF     = (1 << 5),
+    MPEG2_WL_MISSING_BF     = (1 << 6),
+    MPEG2_WL_UNSUPPORTED    = (1 << 7),
+    /* Error codes */
+    MPEG2_WL_CORRUPTED_SEQ_HDR      = (1 << 8),
+    MPEG2_WL_CORRUPTED_SEQ_EXT      = (1 << 9),
+    MPEG2_WL_CORRUPTED_SEQ_DISP_EXT = (1 << 10),
+    MPEG2_WL_CORRUPTED_GOP_HDR      = (1 << 11),
+    MPEG2_WL_CORRUPTED_PIC_HDR      = (1 << 12),
+    MPEG2_WL_CORRUPTED_PIC_COD_EXT  = (1 << 13),
+    MPEG2_WL_CORRUPTED_PIC_DISP_EXT = (1 << 14),
+    MPEG2_WL_CORRUPTED_QMAT_EXT     = (1 << 15),
+    /* Error concealment codes */
+    MPEG2_WL_CONCEALED_PIC_COD_TYPE = (1 << 16),
+    MPEG2_WL_CONCEALED_PIC_STRUCT   = (1 << 17),
+    MPEG2_WL_CONCEALED_CHROMA_FMT   = (1 << 18),
+    /* Type of dangling field */
+    MPEG2_WL_DANGLING_FIELD_TOP     = (1 << 24),
+    MPEG2_WL_DANGLING_FIELD_BOTTOM  = (1 << 25),
+    MPEG2_WL_REPEAT_FIELD           = (1 << 26),
+} mpeg2_wl_status_codes;
+
/* MPEG2 Parser Workload types */
/* Workload item identifiers emitted by the MPEG2 parser; the explicit    */
/* initializers anchor each group into the generic VIDDEC_WORKLOAD_*      */
/* ranges declared elsewhere. */
typedef enum
{
    /* MPEG2 Decoder Specific data */
    VIDDEC_WORKLOAD_MPEG2_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,

    /* MPEG2 Quantization Matrix data */
    VIDDEC_WORKLOAD_MPEG2_QMAT,

    /* Past reference frame */
    VIDDEC_WORKLOAD_MPEG2_REF_PAST = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,

    /* Future reference frame */
    VIDDEC_WORKLOAD_MPEG2_REF_FUTURE,

    /* Use current frame as reference */
    VIDDEC_WORKLOAD_MPEG2_REF_CURRENT_FRAME,

    /* User Data */
    VIDDEC_WORKLOAD_MPEG2_USERDATA = VIDDEC_WORKLOAD_USERDATA
} viddec_mpeg2_workloads;
+
/* MPEG2 Decoder Specific Workitems */
/* Packed header fields handed to the decoder as the DMEM workload item.  */
struct mpeg2_workitems
{
    /* Core Sequence Info 1 */
    uint32_t csi1;

    /* Core Sequence Info 2 */
    uint32_t csi2;

    /* Core Picture Info 1 */
    uint32_t cpi1;

    /* Core Picture Coding Extension Info 1 */
    uint32_t cpce1;

    /* Quantization Matrices (one byte per coefficient, raster order) */
    /*  0-15: Intra Quantization Matrix */
    /* 16-31: Non-Intra Quantization Matrix */
    /* 32-47: Chroma Intra Quantization Matrix */
    /* 48-63: Chroma Non-Intra Quantization Matrix */
    uint32_t qmat[MPEG2_QUANT_MAT_SIZE];
};
+
/* MPEG2 Video Parser Context */
/* All state the MPEG2 parser carries across start codes: the parsed      */
/* stream metadata, the work items being assembled, and a set of status   */
/* flags that drive error handling and workload emission. */
struct viddec_mpeg2_parser
{
    /* MPEG2 Metadata Structure */
    struct mpeg2_info info;

    /* MPEG2 Workitems */
    struct mpeg2_workitems wi;

    /* Workload Status (bitmask of mpeg2_wl_status_codes) */
    uint32_t  mpeg2_wl_status;

    /* Last parsed start code */
    int32_t   mpeg2_last_parsed_sc;

    /* Last parsed slice start code. Used to start emitting workload items. */
    int32_t   mpeg2_last_parsed_slice_sc;

    /* Current sequence headers parsed (bitmask of MPEG2_HEADER_* flags) */
    uint8_t   mpeg2_curr_seq_headers;

    /* Current frame headers parsed (bitmask of MPEG2_HEADER_* flags) */
    uint8_t   mpeg2_curr_frame_headers;

    /* Flag to indicate a valid sequence header was successfully parsed for */
    /* the current stream. */
    uint8_t   mpeg2_valid_seq_hdr_parsed;

    /* Flag to indicate if quantization matrices are updated */
    uint8_t   mpeg2_custom_qmat_parsed;

    /* Flag to indicate if reference table is updated with an entry */
    uint8_t   mpeg2_ref_table_updated;

    /* Flag to indicate if the stream is MPEG2 (set when a sequence */
    /* extension directly follows a sequence header) */
    uint8_t   mpeg2_stream;

    /* Flag to indicate if the previous picture metadata is parsed */
    uint8_t   mpeg2_pic_metadata_complete;

    /* Number of active pan scan offsets */
    uint8_t   mpeg2_num_pan_scan_offsets;

    /* Indicates the current stream level (Sequence/GOP/Picture) */
    /* Used for identifying the level for User Data */
    uint8_t   mpeg2_stream_level;

    /* Flag to indicate if the current picture is interlaced or not */
    uint8_t   mpeg2_picture_interlaced;

    /* Flag to indicate if the current field for interlaced picture is first */
    /* field or not. This flag is used only when mpeg2_picture_interlaced is */
    /* set to 1. */
    uint8_t   mpeg2_first_field;

    /* Flag to indicate if the current parsed data has start of a frame */
    uint8_t   mpeg2_frame_start;

    /* Temporal reference of the previous picture - Used to detect dangling fields */
    uint32_t  mpeg2_prev_temp_ref;

    /* Previous picture structure - Used to identify the type of missing field */
    uint8_t   mpeg2_prev_picture_structure;

    /* Flag to decide whether to use the current or next workload to dump workitems */
    uint8_t   mpeg2_use_next_workload;
    /* NOTE(review): presumably marks that the first slice of the current */
    /* picture is being handled - confirm against the slice parsing code. */
    uint8_t   mpeg2_first_slice_flag;
};
+
/* External Function Declarations */
/* NOTE(review): this memset prototype (int32_t c, uint32_t n) does not   */
/* match the standard C signature (int c, size_t n); it presumably mirrors */
/* the firmware runtime's implementation - confirm before building against */
/* a hosted libc. */
extern void *memset(void *s, int32_t c, uint32_t n);

/* MPEG2 Parser Function Prototypes */
/* Each parser takes the parser-manager handle (parent) and the MPEG2     */
/* parser context (ctxt) and stores results in the context. */
void     viddec_mpeg2_translate_attr            (void *parent, void *ctxt);
void     viddec_mpeg2_emit_workload             (void *parent, void *ctxt);
void     viddec_mpeg2_parse_seq_hdr             (void *parent, void *ctxt);
void     viddec_mpeg2_parse_gop_hdr             (void *parent, void *ctxt);
void     viddec_mpeg2_parse_pic_hdr             (void *parent, void *ctxt);
void     viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt);
void     viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt);
void     viddec_mpeg2_parse_ext                 (void *parent, void *ctxt);

/* MPEG2 wrapper functions for workload operations */
/* flag selects the current (0) or next (non-zero) workload. */
void    viddec_mpeg2_append_workitem        (void *parent, viddec_workload_item_t *wi, uint8_t flag);
void    viddec_mpeg2_append_pixeldata       (void *parent, uint8_t flag);
viddec_workload_t*  viddec_mpeg2_get_header (void *parent, uint8_t flag);
+#endif
diff --git a/mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c b/mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c
new file mode 100755
index 0000000..0394ec8
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/mix_vbp_mpeg2_stubs.c
@@ -0,0 +1,32 @@
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t flag)
+{
+    return;
+}
+
/* Stub: workload emission is a no-op in this build. */
void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
{
    (void)parent;
    (void)ctxt;
}
+
/* Stub: pixel data append is a no-op in this build. */
void viddec_mpeg2_append_pixeldata(void *parent, uint8_t flag)
{
    (void)parent;
    (void)flag;
}
+
+viddec_workload_t*  viddec_mpeg2_get_header (void *parent, uint8_t flag)
+{
+    viddec_workload_t *ret;
+    if (flag)
+    {
+        ret = viddec_pm_get_next_header(parent);
+    }
+    else
+    {
+        ret = viddec_pm_get_header(parent);
+    }
+    return ret;
+}
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c
new file mode 100755
index 0000000..310f986
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_frame_attr.c
@@ -0,0 +1,121 @@
+/**
+ * viddec_mpeg2_frame_attr.c
+ * -------------------------
+ * This is a helper file for viddec_mpeg2_workload.c to translate the data
+ * stored in the parser context into frame attributes in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_print_attr() - Prints collected frame attributes             */
+static inline void viddec_mpeg2_print_attr(viddec_frame_attributes_t *attr)
+{
+    unsigned int index = 0;
+
+    MPEG2_FA_DEB("Content_Size=%dx%d\n",        attr->cont_size.width,
+                 attr->cont_size.height);
+    MPEG2_FA_DEB("Repeat=%d\n",                 attr->mpeg2.repeat_first_field);
+    MPEG2_FA_DEB("Frame_Type=%d\n",             attr->frame_type);
+    MPEG2_FA_DEB("Temporal_Reference=%d\n",     attr->mpeg2.temporal_ref);
+    MPEG2_FA_DEB("Top_Field_First=%d\n",        attr->mpeg2.top_field_first);
+    MPEG2_FA_DEB("Progressive_Frame=%d\n",      attr->mpeg2.progressive_frame);
+    MPEG2_FA_DEB("Picture_Struct=%d\n",         attr->mpeg2.picture_struct);
+    MPEG2_FA_DEB("Pan_Scan_Offsets=%d\n",       attr->mpeg2.number_of_frame_center_offsets);
+
+    for (index = 0; index < attr->mpeg2.number_of_frame_center_offsets; index++)
+    {
+        MPEG2_FA_DEB("\tPan_Scan_Offset_%d= %dx%d\n", index,
+                     attr->mpeg2.frame_center_offset[index].horz,
+                     attr->mpeg2.frame_center_offset[index].vert);
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_set_default_values() - Resets attributes that are optional   */
+/* in the bitstream to their default values.                                 */
+static inline void viddec_mpeg2_set_default_values(viddec_frame_attributes_t *attrs)
+{
+    unsigned int index = 0;
+
+    attrs->mpeg2.number_of_frame_center_offsets = 0;
+    for (index = 0; index < MPEG2_MAX_VID_OFFSETS ; index++)
+    {
+        attrs->mpeg2.frame_center_offset[index].horz = 0;
+        attrs->mpeg2.frame_center_offset[index].vert = 0;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_translate_attr() - Translates metadata parsed into frame     */
+/* attributes in the workload                                                */
+void viddec_mpeg2_translate_attr(void *parent, void *ctxt)
+{
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get workload */
+    viddec_workload_t *wl = viddec_pm_get_header( parent );
+
+    /* Get attributes in workload */
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+    /* Get the default values for optional attributes */
+    viddec_mpeg2_set_default_values(attrs);
+
+    /* Populate attributes from parser context */
+    /* Content Size */
+    attrs->cont_size.height         = ((parser->info.seq_ext.vertical_size_extension << 12)
+                                       | parser->info.seq_hdr.vertical_size_value);
+    attrs->cont_size.width          = ((parser->info.seq_ext.horizontal_size_extension << 12)
+                                       | parser->info.seq_hdr.horizontal_size_value);
+
+    /* Repeat field */
+    attrs->mpeg2.repeat_first_field = parser->info.pic_cod_ext.repeat_first_field;
+
+    /* Temporal Reference */
+    attrs->mpeg2.temporal_ref       = parser->info.pic_hdr.temporal_reference;
+
+    /* Top field first */
+    attrs->mpeg2.top_field_first    = parser->info.pic_cod_ext.top_field_first;
+
+    /* Progressive frame */
+    attrs->mpeg2.progressive_frame  = parser->info.pic_cod_ext.progressive_frame;
+
+    /* Picture Structure */
+    attrs->mpeg2.picture_struct     = parser->info.pic_cod_ext.picture_structure;
+
+    /* Populate the frame type */
+    switch (parser->info.pic_hdr.picture_coding_type)
+    {
+    case MPEG2_PC_TYPE_I:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_I;
+        break;
+    case MPEG2_PC_TYPE_P:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_P;
+        break;
+    case MPEG2_PC_TYPE_B:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_B;
+        break;
+    default:
+        attrs->frame_type = VIDDEC_FRAME_TYPE_INVALID;
+    }
+
+    /* Update PanScan data */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_DISP_EXT)
+    {
+        unsigned int index = 0;
+        attrs->mpeg2.number_of_frame_center_offsets = parser->mpeg2_num_pan_scan_offsets;
+        for (index = 0; index < parser->mpeg2_num_pan_scan_offsets; index++)
+        {
+            attrs->mpeg2.frame_center_offset[index].horz = parser->info.pic_disp_ext.frame_center_horizontal_offset[index];
+            attrs->mpeg2.frame_center_offset[index].vert = parser->info.pic_disp_ext.frame_center_vertical_offset[index];
+        }
+    }
+
+    /* Print frame attributes */
+    viddec_mpeg2_print_attr(attrs);
+
+    return;
+}
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c
new file mode 100755
index 0000000..037d6de
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_metadata.c
@@ -0,0 +1,1039 @@
+/**
+ * viddec_mpeg2_metadata.c
+ * -----------------------
+ * This file contains all the routines to parse the information from MPEG2
+ * elementary stream and store it in the parser context. Based on the data
+ * parsed, the state information in the context is updated.
+ *
+ * Headers currently parsed from MPEG2 stream include:
+ * - Sequence Header
+ * - Sequence Extension
+ * - Sequence Display Extension
+ * - GOP Header
+ * - Picture Header
+ * - Picture Coding Extension
+ * - Quantization Matrix Extension
+ * - Picture Display Extension
+ *
+ * The slice data is parsed and appended into workload in viddec_mpeg2_parse.c
+ */
+
+#include "viddec_mpeg2.h"
+
/* Default quantization matrix values (raster order) as specified by the */
/* MPEG2 standard for streams that do not carry custom matrices. */
const uint8_t mpeg2_default_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
    8, 16, 19, 22, 26, 27, 29, 34,
    16, 16, 22, 24, 27, 29, 34, 37,
    19, 22, 26, 27, 29, 34, 34, 38,
    22, 22, 26, 27, 29, 34, 37, 40,
    22, 26, 27, 29, 32, 35, 40, 48,
    26, 27, 29, 32, 35, 40, 48, 58,
    26, 27, 29, 34, 38, 46, 56, 69,
    27, 29, 35, 38, 46, 56, 69, 83
};
/* Non-intra default: flat matrix of 16s. */
const uint8_t mpeg2_default_non_intra_quant_matrix[MPEG2_QUANT_MAT_SIZE] = {
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16,
    16, 16, 16, 16, 16, 16, 16, 16
};

/* Matrix for converting scan order: entry i is the raster position of the */
/* i-th transmitted coefficient (used for inverse zigzag in               */
/* mpeg2_get_quant_matrix). */
const uint8_t mpeg2_classic_scan[MPEG2_QUANT_MAT_SIZE] = {
    0,  1,  8, 16,  9,  2,  3, 10,
    17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63
};
const uint8_t mpeg2_alternate_scan[MPEG2_QUANT_MAT_SIZE] = {
    0,  8, 16, 24,  1,  9,  2, 10,
    17, 25, 32, 40, 48, 56, 57, 49,
    41, 33, 26, 18,  3, 11,  4, 12,
    19, 27, 34, 42, 50, 58, 35, 43,
    51, 59, 20, 28,  5, 13,  6, 14,
    21, 29, 36, 44, 52, 60, 37, 45,
    53, 61, 22, 30,  7, 15, 23, 31,
    38, 46, 54, 62, 39, 47, 55, 63
};

/* Look-up tables for macro block address increment VLC */
const uint8_t mb_addr_inc_tab1[16] = {
    0, 0, 7, 6, 5, 5, 4, 4,
    3, 3, 3, 3, 2, 2, 2, 2
};
const uint8_t mb_addr_inc_tab2[8] = {
    13, 12, 11, 10, 9, 9, 8, 8
};
const uint8_t mb_addr_inc_tab3[40] = {
    33, 32, 31, 30, 29, 28, 27, 26,
    25, 24, 23, 22, 21, 21, 20, 20,
    19, 19, 18, 18, 17, 17, 16, 16,
    15, 15, 15, 15, 15, 15, 15, 15,
    14, 14, 14, 14, 14, 14, 14, 14
};
+
+/* viddec_mpeg2_copy_default_matrix() - Copies quantization matrix from src  */
+/* to dst                                                                    */
+static inline void mpeg2_copy_matrix(const uint8_t *src, uint8_t *dst)
+{
+    register uint32_t index = 0;
+    for (index=0; index < MPEG2_QUANT_MAT_SIZE; index++)
+        dst[index] = src[index];
+}
+
+/* viddec_mpeg2_copy_matrix() - Copies next 64bytes in the stream into given */
+/* matrix                                                                    */
+static inline int32_t mpeg2_get_quant_matrix(void *parent, uint8_t *matrix, uint32_t alternate_scan)
+{
+    int32_t ret = 1;
+    uint32_t index = 0, code = 0;
+    const uint8_t  *zigzag_scan = (const uint8_t *) mpeg2_classic_scan;
+
+    if (alternate_scan)
+    {
+        zigzag_scan = (const uint8_t *) mpeg2_alternate_scan;
+    }
+
+    /* Start extracting matrix co-efficients and copy them in */
+    /* inverse zigzag scan order */
+    for (index = 0; index < MPEG2_QUANT_MAT_SIZE; index++)
+    {
+        ret = viddec_pm_get_bits(parent, &code, MPEG2_BITS_EIGHT);
+        /* Quantization values cannot be zero. If zero value if found, */
+        /* further parsing is stopped and the existing values are used.*/
+        if ((ret != 1) || (code == 0))
+        {
+            ret = -1;
+            break;
+        }
+        matrix[zigzag_scan[index]] = (uint8_t)(code & 0xFF);
+    }
+
+    return ret;
+}
+
/* viddec_mpeg2_parse_seq_hdr() - Parse sequence header metadata and store   */
/* in parser context.                                                        */
/* Every viddec_pm_get_bits()/viddec_pm_skip_bits() result is OR-ed into     */
/* ret_code; a final value of exactly 1 means all reads succeeded. The bit   */
/* reads below must stay in stream order.                                    */
void viddec_mpeg2_parse_seq_hdr(void *parent, void *ctxt)
{
    int32_t ret_code = 0;

    /* Get MPEG2 Parser context */
    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;

    /* Get Horizontal Frame Size (12 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.horizontal_size_value, 12);

    /* Get Vertical Frame Size (12 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vertical_size_value, 12);

    /* Get Frame Aspect Ratio (4 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.aspect_ratio_information, 4);

    /* Get Frame Rate (4 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.frame_rate_code, 4);

    /* Get Bit Rate (18 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.bit_rate_value, 18);

    /* Skip Marker bit */
    ret_code |= viddec_pm_skip_bits(parent, 1);

    /* Get VBV Buffer Size Value (10 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.vbv_buffer_size_value, 10);

    /* Get Constrained Parameters Flag */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_hdr.constrained_parameters_flag, 1);

    /* Quantization Matrix Support */
    /* Get Intra Quantizer matrix, if available, or use default values. The */
    /* chroma matrix mirrors the luma one at this point (a later chroma     */
    /* extension may override it). */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
    if (parser->info.qnt_ext.load_intra_quantiser_matrix)
    {
        ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.intra_quantiser_matrix, 0);
        mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
    }
    else
    {
        /* Keep previously parsed custom matrices if any; otherwise defaults. */
        if (!parser->mpeg2_custom_qmat_parsed)
        {
            mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.intra_quantiser_matrix);
            mpeg2_copy_matrix(mpeg2_default_intra_quant_matrix, parser->info.qnt_mat.chroma_intra_quantiser_matrix);
        }
    }

    /* Get Non-Intra Quantizer matrix, if available, or use default values */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
    if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
    {
        ret_code |= mpeg2_get_quant_matrix(parent, parser->info.qnt_mat.non_intra_quantiser_matrix, 0);
        mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
    }
    else
    {
        if (!parser->mpeg2_custom_qmat_parsed)
        {
            mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.non_intra_quantiser_matrix);
            mpeg2_copy_matrix(mpeg2_default_non_intra_quant_matrix, parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
        }
    }

    /* Error handling */
    /* The return value from get_bits() function is accumulated. If the return value is not 1, */
    /* then there was an error getting the required information from the stream and the status */
    /* is updated for the current workload. */
    if (ret_code == 1)
    {
        /* This flag indicates a valid sequence header has been parsed and so even if */
        /* a sequence header is corrupted in the future, this valid sequence header   */
        /* could be reused. */
        parser->mpeg2_valid_seq_hdr_parsed = true;
        /* This flag indicates a valid custom quantization matrix has been parsed.  */
        /* So, if in the future, there is an error parsing quantization matrix, the */
        /* parser will use the previously parsed custom values. */
        if ((parser->info.qnt_ext.load_intra_quantiser_matrix)
                || (parser->info.qnt_ext.load_non_intra_quantiser_matrix))
        {
            parser->mpeg2_custom_qmat_parsed = true;
        }
        MPEG2_DEB("Seqeunce header parsed successfully.\n");
    }
    else
    {
        /* Setting status to mark parser error while emitting the current workload. */
        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_HDR;
        MPEG2_DEB("Sequence header corrupted.\n");
    }

    /* mpeg2_stream stays false until a sequence extension confirms MPEG2. */
    parser->mpeg2_stream               = false;
    parser->mpeg2_curr_seq_headers    |= MPEG2_HEADER_SEQ;
    parser->mpeg2_curr_frame_headers  |= MPEG2_HEADER_SEQ;
    parser->mpeg2_stream_level         = MPEG2_LEVEL_SEQ;

    return;
}
+
+/* viddec_mpeg2_parse_gop_hdr() - Parse group of pictures header info and    */
+/* store it in parser context                                                */
+void viddec_mpeg2_parse_gop_hdr(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Skip first 25 bits */
+    /* Skip time_code */
+    ret_code |= viddec_pm_skip_bits(parent, 25);
+
+    /* Get closed gop info */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.closed_gop, 1);
+
+    /* Get broken link info */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.gop_hdr.broken_link, 1);
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("GOP Header parsed successfully.\n");
+    }
+    else
+    {
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_GOP_HDR;
+        MPEG2_DEB("GOP header corrupted.\n");
+    }
+
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_GOP;
+    parser->mpeg2_stream_level        = MPEG2_LEVEL_GOP;
+
+    return;
+}
+
/* viddec_mpeg2_parse_pic_hdr() - Parse picture header info and store it in  */
/* parser context.                                                           */
/* Read errors are accumulated in ret_code; semantic violations (illegal     */
/* coding type, wrong type after a GOP/sequence header) are concealed by     */
/* defaulting to an I picture so decoding can continue.                      */
void viddec_mpeg2_parse_pic_hdr(void *parent, void *ctxt)
{
    int32_t ret_code = 0, found_error = 0;

    /* Get MPEG2 Parser context */
    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;

    /* Get Temporal Reference info (10 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.temporal_reference, 10);

    /* Get Picture Coding type (3 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.picture_coding_type, 3);

    /* Error Handling and Concealment */
    /* Picture coding type should be one of I, P or B */
    if ((parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I) &&
            (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_P) &&
            (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_B))
    {
        found_error = 1;
    }
    /* The first frame after a gop header should be a coded I picture as per */
    /* section 6.3.1 in MPEG2 Specification. */
    else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
    {
        if (parser->info.pic_hdr.picture_coding_type != MPEG2_PC_TYPE_I)
        {
            found_error = 1;
        }
    }
    /* The first frame after a sequence header cannot be a coded B picture as per */
    /* section 6.1.1.6 in MPEG2 Specification. */
    else if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
    {
        if (parser->info.pic_hdr.picture_coding_type == MPEG2_PC_TYPE_B)
        {
            found_error = 1;
        }
    }

    /* If there is an error parsing picture coding type, do error concealment and continue. */
    if ((ret_code != 1) || (found_error))
    {
        if (found_error)
        {
            /* Setting status to mark parser error while emitting the current workload. */
            parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
            MPEG2_DEB("Picture header corrupted.\n");
        }

        /* Error concealment for picture coding type - Default to I picture. */
        parser->info.pic_hdr.picture_coding_type = MPEG2_PC_TYPE_I;
        parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_COD_TYPE;
        MPEG2_DEB("Picture Coding Type corrupted. Concealing to I type.\n");
    }

    /* Skip next 16 bits */
    /* Skip vbv_delay */
    ret_code |= viddec_pm_skip_bits(parent, 16);

    /* If Picture Coding type is either P or B then */
    /* Get forward vector code */
    if ((MPEG2_PC_TYPE_P == parser->info.pic_hdr.picture_coding_type) ||
            (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type))
    {
        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_forward_vect, 1);
        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.forward_f_code, 3);
    }
    else
    {
        parser->info.pic_hdr.full_pel_forward_vect = 0;
        parser->info.pic_hdr.forward_f_code        = 0;
    }

    /* If Picture coding type is B then */
    /*    Get backward vector code */
    if (MPEG2_PC_TYPE_B == parser->info.pic_hdr.picture_coding_type)
    {
        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.full_pel_backward_vect, 1);
        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_hdr.backward_f_code, 3);
    }
    else
    {
        parser->info.pic_hdr.full_pel_backward_vect = 0;
        parser->info.pic_hdr.backward_f_code        = 0;
    }

    if (ret_code == 1)
    {
        MPEG2_DEB("Picture header parsed successfully.\n")
    }
    else
    {
        /* Setting status to mark parser error while emitting the current workload. */
        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_HDR;
        MPEG2_DEB("Picture header corrupted.\n");
    }

    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC;
    parser->mpeg2_stream_level        = MPEG2_LEVEL_PIC;

    return;
}
+
/* viddec_mpeg2_parse_ext_seq() - Parse Sequence extension metadata and      */
/* store in parser context.                                                  */
/* A reserved chroma format is concealed to 4:2:0; all read results are      */
/* accumulated in ret_code and checked at the end.                           */
void viddec_mpeg2_parse_ext_seq(void *parent, void *ctxt)
{
    int32_t ret_code = 0;

    /* Get MPEG2 Parser context */
    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;

    /* Get Profile and Level info (8 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.profile_and_level_indication, 8);

    /* Get Progressive Sequence Flag */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.progressive_sequence, 1);

    /* Get Chroma Format (2 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.chroma_format, 2);

    /* Error Concealment */
    /* If there is an error parsing chroma format, do error concealment and continue. */
    if ((ret_code != 1) || (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED))
    {
        if (parser->info.seq_ext.chroma_format == MPEG2_CF_RESERVED)
        {
            /* Setting status to mark parser error while emitting the current workload. */
            parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
            MPEG2_DEB("Sequence extension corrupted.\n")
        }

        /* Error concealment for chroma format - Default to 4:2:0 */
        parser->info.seq_ext.chroma_format = MPEG2_CF_420;
        parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_CHROMA_FMT;
        MPEG2_DEB("Chroma Format corrupted. Concealing to 4:2:0.\n");
    }

    /* Get Content Size Extension Data (2+2 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.horizontal_size_extension, 2);
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vertical_size_extension, 2);

    /* Get Bit Rate Extension (12 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.bit_rate_extension, 12);

    /* Skip Marker bit */
    ret_code |= viddec_pm_skip_bits(parent, 1);

    /* Get VBV Buffer Size Extension Data (8 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.vbv_buffer_size_extension, 8);

    /* Skip 1 bit */
    /* Skip low_delay */
    ret_code |= viddec_pm_skip_bits(parent, 1);

    /* Get Frame Rate extension data (2+5 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_n, 2);
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_ext.frame_rate_extension_d, 5);

    if (ret_code == 1)
    {
        MPEG2_DEB("Sequence extension header parsed successfully.\n")
    }
    else
    {
        /* Setting status to mark parser error while emitting the current workload. */
        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_EXT;
        MPEG2_DEB("Sequence extension corrupted.\n")
    }

    /* Check if the last parsed start code was that of sequence header. */
    /* If true, seq extension followed seq header => MPEG2 Stream */
    parser->mpeg2_stream = (parser->mpeg2_last_parsed_sc == MPEG2_SC_SEQ_HDR) ? true:false;
    parser->mpeg2_curr_seq_headers   |= MPEG2_HEADER_SEQ_EXT;
    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_EXT;

    return;
}
+
/* viddec_mpeg2_parse_ext_seq_disp() - Parse Sequence Display extension      */
/* metadata and store in parser context.                                     */
void viddec_mpeg2_parse_ext_seq_disp(void *parent, void *ctxt)
{
    int32_t ret_code = 0;

    /* Get MPEG2 Parser context */
    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;

    /* Get video format (3 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.video_format, 3);

    /* Check if color description info is present */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_description, 1);

    /* If color description is found, get color primaries info */
    /* and transfer characteristics */
    if (parser->info.seq_disp_ext.colour_description)
    {
        ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.colour_primaries, 8);
        ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.transfer_characteristics, 8);
        /* The skipped 8 bits are matrix_coefficients per the MPEG2 spec */
        /* (13818-2, sequence_display_extension) - not stored by this parser. */
        ret_code |= viddec_pm_skip_bits(parent, 8);
    }

    /* Get Display Horizontal Size (14 bits), skip marker bit, then */
    /* Display Vertical Size (14 bits) */
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_horizontal_size, 14);
    ret_code |= viddec_pm_skip_bits(parent, 1);
    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_disp_ext.display_vertical_size, 14);

    if (ret_code == 1)
    {
        MPEG2_DEB("Sequence display extension parsed successfully.\n");
    }
    else
    {
        /* Setting status to mark parser error while emitting the current workload. */
        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_SEQ_DISP_EXT;
        MPEG2_DEB("Sequence display extension corrupted.\n")
    }

    /* Set flag to indicate Sequence Display Extension is present */
    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_DISP_EXT;
    parser->mpeg2_curr_seq_headers   |= MPEG2_HEADER_SEQ_DISP_EXT;

    return;
}
+
+/* viddec_mpeg2_parse_ext_seq_scal() - Parse Sequence Scalable extension     */
+/* metadata and store in parser context                                      */
+void viddec_mpeg2_parse_ext_seq_scal(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get video format */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.seq_scal_ext.scalable_mode, 2);
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Sequence scalable extension parsed successfully.\n");
+    }
+
+    /* Set flag to indicate Sequence Display Extension is present */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_SEQ_SCAL_EXT;
+    parser->mpeg2_curr_seq_headers   |= MPEG2_HEADER_SEQ_SCAL_EXT;
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_pic() - Parse Picture Coding extension             */
+/* metadata and store in parser context                                      */
+/*                                                                           */
+/* Besides plain field extraction this routine:                              */
+/*  - conceals an invalid picture_structure by defaulting to frame picture,  */
+/*  - detects dangling fields (a first field whose pair never arrived) via   */
+/*    the temporal reference and redirects subsequent workitems to the next  */
+/*    workload, and                                                          */
+/*  - maintains the interlaced / first-field bookkeeping flags.              */
+/* NOTE(review): ret_code OR-accumulates viddec_pm_get_bits() results and    */
+/* success is tested as (ret_code == 1), so the getter presumably returns 1  */
+/* on success -- confirm against the pm utility implementation.              */
+void viddec_mpeg2_parse_ext_pic(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0, found_error = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get Forward/Backward, Horizontal/Vertical codes */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode00, 4);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode01, 4);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode10, 4);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.fcode11, 4);
+
+    /* Get Intra DC Precision */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_dc_precision, 2);
+
+    /* Get Picture Structure */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.picture_structure,  2);
+
+    /* Error Handling and Concealment */
+    /* Picture structure should be frame, top field or bottom field */
+    if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_RESERVED)
+    {
+        found_error = 1;
+    }
+    /* All pictures in progressive sequence should be frame picture */
+    else if (parser->info.seq_ext.progressive_sequence)
+    {
+        if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+        {
+            found_error = 1;
+        }
+    }
+
+    /* If there is an error parsing picture structure, do error concealment and continue. */
+    /* Any failed bit read (ret_code != 1) also triggers the concealment path. */
+    if ((ret_code != 1) || (found_error))
+    {
+        if (found_error)
+        {
+            /* Setting status to mark parser error while emitting the current workload. */
+            parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+            MPEG2_DEB("Picture coding extension corrupted.\n");
+        }
+
+        /* Error concealment for picture structure - Default to frame picture. */
+        parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+        parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+        MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+    }
+
+    /* Get flags */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.top_field_first, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.frame_pred_frame_dct, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.concealment_motion_vectors, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.q_scale_type, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.intra_vlc_format, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.alternate_scan, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.repeat_first_field, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.chroma_420_type, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.progressive_frame, 1);
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_cod_ext.composite_display_flag, 1);
+
+    /* Error concealment for frame picture */
+    /* If any frame-oriented flag is set while picture_structure says field, */
+    /* trust the flags and conceal picture_structure to frame picture.       */
+    if ((parser->info.pic_cod_ext.top_field_first)
+            || (parser->info.pic_cod_ext.frame_pred_frame_dct)
+            || (parser->info.pic_cod_ext.repeat_first_field)
+            || (parser->info.pic_cod_ext.progressive_frame))
+    {
+        if (parser->info.pic_cod_ext.picture_structure != MPEG2_PIC_STRUCT_FRAME)
+        {
+            parser->info.pic_cod_ext.picture_structure = MPEG2_PIC_STRUCT_FRAME;
+            parser->mpeg2_wl_status |= MPEG2_WL_CONCEALED_PIC_STRUCT;
+            MPEG2_DEB("Picture Structure corrupted. Concealing to Frame picture.\n");
+        }
+    }
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Picture coding extension parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_COD_EXT;
+        MPEG2_DEB("Picture coding extension corrupted.\n");
+    }
+
+    /* Dangling field detection */
+    /* If the previous picture is the first field, then the temporal reference number */
+    /* should match with the second field. Otherwise, one of the fields in the previous */
+    /* picture is missing and dangling field error is marked. The workload containing */
+    /* the previous picture is emitted out and current picture data is added to the next */
+    /* workload. The mpeg2_use_next_workload variable is used as a flag to direct the */
+    /* items into the current/next workload. */
+    if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+    {
+        if (parser->mpeg2_prev_temp_ref != parser->info.pic_hdr.temporal_reference)
+        {
+            /* Mark dangling field info in workload status */
+            parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD;
+            if (parser->mpeg2_prev_picture_structure == MPEG2_PIC_STRUCT_BOTTOM)
+            {
+                parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_TOP;
+            }
+            else
+            {
+                parser->mpeg2_wl_status |= MPEG2_WL_DANGLING_FIELD_BOTTOM;
+            }
+            /* Set flag stating current workload is done */
+            parser->mpeg2_pic_metadata_complete = true;
+            /* Set flag to use the next workload for adding workitems for */
+            /* the current frame */
+            parser->mpeg2_use_next_workload = true;
+            /* Toggle first field flag to compensate for missing field */
+            parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+        }
+        else
+        {
+            /* Same field repeated */
+            if (parser->mpeg2_prev_picture_structure == parser->info.pic_cod_ext.picture_structure)
+            {
+                /* Mark unsupported in workload status */
+                parser->mpeg2_wl_status |= MPEG2_WL_REPEAT_FIELD;
+            }
+        }
+    }
+
+    /* Set context variables for interlaced picture handling */
+    if (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_FRAME)
+    {
+        /* Frame picture found. Reset variables used for interlaced fields picture. */
+        parser->mpeg2_picture_interlaced = false;
+        parser->mpeg2_first_field        = false;
+        parser->mpeg2_use_next_workload  = false;
+    }
+    else
+    {
+        /* Interlaced fields picture found. */
+        parser->mpeg2_picture_interlaced = true;
+        parser->mpeg2_first_field = (parser->mpeg2_first_field) ? false : true;
+    }
+
+    /* Set flags */
+    /* mpeg2_frame_start is raised for a frame picture or the first field of */
+    /* an interlaced pair.                                                   */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_COD_EXT;
+    parser->mpeg2_prev_temp_ref = parser->info.pic_hdr.temporal_reference;
+    parser->mpeg2_prev_picture_structure = parser->info.pic_cod_ext.picture_structure;
+    if ((!parser->mpeg2_picture_interlaced)
+            || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+    {
+        parser->mpeg2_frame_start = true;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_pic_disp() - Parse Picture Display extension      */
+/* metadata and store in parser context                                      */
+void viddec_mpeg2_parse_ext_pic_disp(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+    uint32_t idx = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* The number of frame centre offsets depends on progressive_sequence,   */
+    /* picture_structure and the repeat-first-field/top-field-first flags.   */
+    if (parser->info.seq_ext.progressive_sequence)
+    {
+        parser->mpeg2_num_pan_scan_offsets =
+            (parser->info.pic_cod_ext.repeat_first_field)
+                ? ((parser->info.pic_cod_ext.top_field_first) ? 3 : 2)
+                : 1;
+    }
+    else if ((parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_TOP)
+             || (parser->info.pic_cod_ext.picture_structure == MPEG2_PIC_STRUCT_BOTTOM))
+    {
+        /* A field picture carries exactly one offset. */
+        parser->mpeg2_num_pan_scan_offsets = 1;
+    }
+    else
+    {
+        /* Interlaced frame picture. */
+        parser->mpeg2_num_pan_scan_offsets =
+            (parser->info.pic_cod_ext.repeat_first_field) ? 3 : 2;
+    }
+
+    /* Read each horizontal/vertical offset pair; each 16-bit value is       */
+    /* followed by a marker bit that is skipped.                             */
+    for (idx = 0; idx < parser->mpeg2_num_pan_scan_offsets; idx++)
+    {
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_horizontal_offset[idx], 16);
+        ret_code |= viddec_pm_skip_bits(parent, 1);
+        ret_code |= viddec_pm_get_bits(parent, &parser->info.pic_disp_ext.frame_center_vertical_offset[idx], 16);
+        ret_code |= viddec_pm_skip_bits(parent, 1);
+    }
+
+    if (ret_code != 1)
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_PIC_DISP_EXT;
+        MPEG2_DEB("Picture display extension corrupted.\n");
+    }
+    else
+    {
+        MPEG2_DEB("Picture display extension parsed successfully.\n");
+    }
+
+    /* Set flag to indicate picture display extension is found */
+    parser->mpeg2_curr_frame_headers |= MPEG2_HEADER_PIC_DISP_EXT;
+    return;
+}
+
+/* viddec_mpeg2_parse_ext_quant() - Parse Quantization Matrix extension      */
+/* metadata and store in parser context                                      */
+/* Each luma matrix that is loaded is also copied into the corresponding     */
+/* chroma matrix; an explicitly loaded chroma matrix then overwrites that    */
+/* copy.                                                                     */
+void viddec_mpeg2_parse_ext_quant(void *parent, void *ctxt)
+{
+    int32_t ret_code = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Quantization Matrix Support */
+    /* Get Intra Quantizer matrix, if available or use default values */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+        mpeg2_copy_matrix(parser->info.qnt_mat.intra_quantiser_matrix,
+                          parser->info.qnt_mat.chroma_intra_quantiser_matrix);
+    }
+
+    /* Get Non-Intra Quantizer matrix, if available */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_non_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_non_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.non_intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+        mpeg2_copy_matrix(parser->info.qnt_mat.non_intra_quantiser_matrix,
+                          parser->info.qnt_mat.chroma_non_intra_quantiser_matrix);
+    }
+
+    /* Get Chroma Intra Quantizer matrix, if available */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.chroma_intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+    }
+
+    /* Get Chroma Non-Intra Quantizer matrix, if available */
+    ret_code |= viddec_pm_get_bits(parent, &parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix, 1);
+    if (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix)
+    {
+        ret_code |= mpeg2_get_quant_matrix(parent,
+                                           parser->info.qnt_mat.chroma_non_intra_quantiser_matrix,
+                                           parser->info.pic_cod_ext.alternate_scan);
+    }
+
+    if (ret_code == 1)
+    {
+        MPEG2_DEB("Quantization matrix extension parsed successfully.\n");
+    }
+    else
+    {
+        /* Setting status to mark parser error while emitting the current workload. */
+        parser->mpeg2_wl_status |= MPEG2_WL_CORRUPTED_QMAT_EXT;
+        MPEG2_DEB("Quantization matrix extension corrupted.\n");
+    }
+
+    /* Log when any custom quantization matrix was present (no flag is set here). */
+    if ( (parser->info.qnt_ext.load_intra_quantiser_matrix) ||
+            (parser->info.qnt_ext.load_non_intra_quantiser_matrix) ||
+            (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) ||
+            (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) )
+    {
+        MPEG2_DEB("Custom quantization matrix found.\n");
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_parse_ext() - Parse extension metadata and store in parser   */
+/* context                                                                   */
+void viddec_mpeg2_parse_ext(void *parent, void *ctxt)
+{
+    uint32_t extension_id = 0;
+
+    /* The 4-bit extension identifier selects which extension payload follows. */
+    viddec_pm_get_bits(parent, &extension_id, 4);
+
+    /* Dispatch to the matching extension parser; unknown identifiers are    */
+    /* silently ignored.                                                     */
+    switch (extension_id)
+    {
+    case MPEG2_EXT_SEQ:          /* Sequence Extension Info */
+        viddec_mpeg2_parse_ext_seq(parent, ctxt);
+        break;
+
+    case MPEG2_EXT_SEQ_DISP:     /* Sequence Display Extension info */
+        viddec_mpeg2_parse_ext_seq_disp(parent, ctxt);
+        break;
+
+    case MPEG2_EXT_SEQ_SCAL:     /* Sequence Scalable Extension */
+        viddec_mpeg2_parse_ext_seq_scal(parent, ctxt);
+        break;
+
+    case MPEG2_EXT_PIC_CODING:   /* Picture Coding Extension */
+        viddec_mpeg2_parse_ext_pic(parent, ctxt);
+        break;
+
+    case MPEG2_EXT_PIC_DISP:     /* Picture Display Extension */
+        viddec_mpeg2_parse_ext_pic_disp(parent, ctxt);
+        break;
+
+    case MPEG2_EXT_QUANT_MAT:    /* Quantization Matrix Extension */
+        viddec_mpeg2_parse_ext_quant(parent, ctxt);
+        break;
+
+    default:
+        break;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_parse_and_append_user_data() - Parse user data and append    */
+/* it to the workload as SEQ/GOP/PIC level user-data workitems.              */
+void viddec_mpeg2_parse_and_append_user_data(void *parent, void *ctxt)
+{
+    uint32_t user_data = 0;
+    viddec_workload_item_t wi;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Set the user data level (SEQ/GOP/PIC) in the workitem type. */
+    switch (parser->mpeg2_stream_level)
+    {
+    case MPEG2_LEVEL_SEQ:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+        break;
+    }
+    case MPEG2_LEVEL_GOP:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+        break;
+    }
+    case MPEG2_LEVEL_PIC:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+        break;
+    }
+    default:
+    {
+        wi.vwi_type = VIDDEC_WORKLOAD_INVALID;
+        break;
+    }
+    }
+
+    /* Read 1 byte of user data and store it in workitem for the current      */
+    /* stream level (SEQ/GOP/PIC). Keep adding data payloads till it reaches  */
+    /* size 11. When it is 11, the maximum user data payload size, append the */
+    /* workitem. This loop is repeated till all user data is extracted and    */
+    /* appended. */
+    /* The memset zeroes the whole user_data member, including its size, so   */
+    /* no separate size reset is needed before the loop.                      */
+    memset(&(wi.user_data), 0, sizeof(wi.user_data));
+    while (viddec_pm_get_bits(parent, &user_data, MPEG2_BITS_EIGHT) != -1)
+    {
+        /* Store the valid byte in data payload */
+        wi.user_data.data_payload[wi.user_data.size] = user_data;
+        wi.user_data.size++;
+
+        /* When size exceeds payload size, append workitem and continue */
+        if (wi.user_data.size >= 11)
+        {
+            viddec_pm_setup_userdata(&wi);
+            viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+            viddec_fw_reset_workload_item(&wi);
+            wi.user_data.size = 0;
+        }
+    }
+    /* If size is not 0, append remaining user data. */
+    if (wi.user_data.size > 0)
+    {
+        viddec_pm_setup_userdata(&wi);
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+        wi.user_data.size = 0;
+    }
+
+    MPEG2_DEB("User data @ Level %d found.\n", parser->mpeg2_stream_level);
+    return;
+}
+
+/* get_mb_addr_increment() - Decode the macroblock_address_increment VLC     */
+/* from the 11 bits peeked into *data.  *data is shifted/offset in place to  */
+/* form an index into the appropriate lookup table.                          */
+/* NOTE(review): the final branch computes *data - 24 on an unsigned value;  */
+/* inputs below 24 would wrap and index mb_addr_inc_tab3 far out of range -- */
+/* confirm callers only pass valid VLC prefixes (see ISO/IEC 13818-2 B.1).   */
+static inline uint32_t get_mb_addr_increment(uint32_t *data)
+{
+    if (*data >= 1024)
+    {
+        /* Shortest code ('1'): increment of 1. */
+        return 1;
+    }
+    else if (*data >= 128)
+    {
+        *data >>= 6;
+        return mb_addr_inc_tab1[*data];
+    }
+    else if (*data >= 64)
+    {
+        *data >>= 3;
+        *data -= 8;
+        return mb_addr_inc_tab2[*data];
+    }
+    else
+    {
+        *data -= 24;
+        return mb_addr_inc_tab3[*data];
+    }
+}
+
+/* viddec_mpeg2_get_first_mb_number() - Compute the number of the first      */
+/* macroblock in the current slice.  The slice row comes from the slice      */
+/* start code value; the column comes from decoding macroblock_escape runs   */
+/* and the macroblock_address_increment VLC after skipping the slice header  */
+/* fields.                                                                   */
+static void viddec_mpeg2_get_first_mb_number(void *parent, void *ctxt, uint32_t *first_mb)
+{
+    uint32_t mb_row = 0, mb_width = 0, prev_mb_addr = 0;
+    uint32_t temp = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    *first_mb = 0;
+    /* Slice start codes are 0x01..0xAF; the low byte encodes (row + 1). */
+    mb_row   = ((parser->mpeg2_last_parsed_slice_sc & 0xFF) - 1);
+    mb_width = parser->info.seq_hdr.horizontal_size_value >> 4;
+    prev_mb_addr = (mb_row * mb_width) - 1;
+
+    /* Skip slice start code */
+    viddec_pm_skip_bits(parent, 32);
+
+    if (parser->info.seq_hdr.vertical_size_value > 2800)
+    {
+        /* Get 3 bits of slice_vertical_position_extension */
+        viddec_pm_get_bits(parent, &temp, 3);
+        mb_row += (temp << 7);
+    }
+
+    /* Skip priority_breakpoint if sequence scalable extension is present */
+    if (parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_SCAL_EXT)
+    {
+        /* Skip 7 bits if scalable mode is 00 (Data partition) */
+        if (parser->info.seq_scal_ext.scalable_mode == 0)
+        {
+            viddec_pm_skip_bits(parent, 7);
+        }
+    }
+
+    /* Skip quantizer_scale */
+    viddec_pm_skip_bits(parent, 5);
+
+    /* Skip a few bits with slice information */
+    temp = 0;
+    viddec_pm_peek_bits(parent, &temp, 1);
+    if (temp == 0x1)
+    {
+        /* Skip intra_slice_flag(1), intra_slice(1) and reserved_bits(7) */
+        viddec_pm_skip_bits(parent, 9);
+        temp=0;
+        viddec_pm_peek_bits(parent, &temp, 1);
+        while (temp == 0x1)
+        {
+            /* Skip extra_bit_slice(1) and extra_information_slice(8) */
+            viddec_pm_skip_bits(parent, 9);
+            temp=0;
+            viddec_pm_peek_bits(parent, &temp, 1);
+        }
+    }
+
+    /* Skip extra_bit_slice flag */
+    viddec_pm_skip_bits(parent, 1);
+
+    /* Increment prev_mb_addr by 33 for every 11 bits of macroblock_escape string */
+    temp=0;
+    viddec_pm_peek_bits(parent, &temp, 11);
+    while (temp == 0x8)
+    {
+        viddec_pm_skip_bits(parent, 11);
+        prev_mb_addr += 33;
+        temp=0;
+        viddec_pm_peek_bits(parent, &temp, 11);
+    }
+
+    /* Get the mb_addr_increment and add it to prev_mb_addr to get the current mb number. */
+    *first_mb = prev_mb_addr + get_mb_addr_increment(&temp);
+    MPEG2_DEB("First MB number in slice is 0x%08X.\n", *first_mb);
+
+    return;
+}
+
+/* Parse slice data to get the number of macroblocks in the current slice and then */
+/* append as pixel data. */
+void viddec_mpeg2_parse_and_append_slice_data(void *parent, void *ctxt)
+{
+    uint32_t bit_off=0, start_byte=0, first_mb = 0;
+    uint8_t  is_emul=0;
+    viddec_workload_item_t wi;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get current byte position */
+    viddec_pm_get_au_pos(parent, &bit_off, &start_byte, &is_emul);
+
+    /* Populate wi type */
+    /* The first macroblock number is packed into the upper 16 bits of the    */
+    /* ES flags.  NOTE(review): only vwi_type and es.es_flags are set here;   */
+    /* presumably viddec_pm_append_misc_tags() fills in the rest of wi --     */
+    /* confirm.                                                               */
+    viddec_mpeg2_get_first_mb_number(parent, ctxt, &first_mb);
+    wi.vwi_type = VIDDEC_WORKLOAD_PIXEL_ES;
+    wi.es.es_flags = (first_mb << 16);
+
+    /* Append data from given byte position to the end ((unsigned)-1) as     */
+    /* pixel data; the last argument presumably selects current vs next      */
+    /* workload (note the negation of mpeg2_use_next_workload) -- confirm.   */
+    viddec_pm_append_misc_tags(parent, start_byte, (unsigned int) -1, &wi, !parser->mpeg2_use_next_workload);
+    return;
+}
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c
new file mode 100755
index 0000000..83d5340
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_parse.c
@@ -0,0 +1,380 @@
+/**
+ * viddec_mpeg2_parse.c
+ * --------------------
+ * This file acts as the main interface between the parser manager and MPEG2
+ * parser. All the operations done by the MPEG2 parser are defined here and
+ * function pointers for each operation are returned to the parser manager.
+ */
+
+#include "viddec_mpeg2.h"
+
+/* viddec_mpeg2_parser_init() - Initializes parser context. */
+/* When 'preserve' is non-zero only picture-level header info is cleared     */
+/* and the sequence-level headers plus the stream-type flags survive;        */
+/* otherwise the entire header info block and all flags are reset.           */
+static void viddec_mpeg2_parser_init
+(
+    void        *ctxt,
+    uint32_t    *persist_mem,
+    uint32_t     preserve
+)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Avoid compiler warning */
+    /* (persist_mem is unused: persist_size is reported as 0.) */
+    persist_mem = persist_mem;
+
+    /* Initialize state variables */
+    parser->mpeg2_pic_metadata_complete         =  false;
+    parser->mpeg2_picture_interlaced            =  false;
+    parser->mpeg2_first_field                   =  false;
+    parser->mpeg2_frame_start                   =  false;
+    parser->mpeg2_ref_table_updated             =  false;
+    parser->mpeg2_use_next_workload             =  false;
+    parser->mpeg2_first_slice_flag              =  false;
+    parser->mpeg2_curr_frame_headers            =  MPEG2_HEADER_NONE;
+    parser->mpeg2_last_parsed_sc                =  MPEG2_SC_ALL;
+    parser->mpeg2_last_parsed_slice_sc          =  MPEG2_SC_SLICE_MAX;
+    parser->mpeg2_wl_status                     =  MPEG2_WL_EMPTY;
+    parser->mpeg2_prev_picture_structure        =  MPEG2_PIC_STRUCT_FRAME;
+    parser->mpeg2_prev_temp_ref                 =  0;
+    parser->mpeg2_num_pan_scan_offsets          =  0;
+
+    if (preserve)
+    {
+        /* Init all picture level header info */
+        memset(&parser->info.pic_hdr, 0, sizeof(struct mpeg2_picture_hdr_info));
+        memset(&parser->info.pic_cod_ext, 0, sizeof(struct mpeg2_picture_coding_ext_info));
+        memset(&parser->info.pic_disp_ext, 0, sizeof(struct mpeg2_picture_disp_ext_info));
+    }
+    else
+    {
+        /* Init all header info */
+        memset(&parser->info, 0, sizeof(struct mpeg2_info));
+
+        parser->mpeg2_stream                        =  false;
+        parser->mpeg2_custom_qmat_parsed            =  false;
+        parser->mpeg2_valid_seq_hdr_parsed          =  false;
+        parser->mpeg2_curr_seq_headers              =  MPEG2_HEADER_NONE;
+    }
+
+    MPEG2_DEB("MPEG2 Parser: Context Initialized.\n");
+
+    return;
+}
+
+/* viddec_mpeg2_get_context_size() - Reports the context and persistent     */
+/* memory requirements of the MPEG2 parser to the parser manager.            */
+static void viddec_mpeg2_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    /* No persistent memory is used; all state lives in the parser context. */
+    size->persist_size = 0;
+    size->context_size = sizeof(struct viddec_mpeg2_parser);
+}
+
+/* viddec_mpeg2_get_error_code() - Returns the error code for the current */
+/* workload. */
+/* Folds the accumulated workload status bits (dangling/repeated fields,  */
+/* incomplete workload, missing reference/DMEM/sequence info, corrupted   */
+/* headers) into VIDDEC_FW_* error flags.  Also normalizes an unknown     */
+/* frame type to I and clears reference info on non-decodable frames.     */
+static void viddec_mpeg2_get_error_code
+(
+    struct viddec_mpeg2_parser  *parser,
+    viddec_workload_t           *wl,
+    uint32_t                    *error_code
+)
+{
+    *error_code = 0;
+
+    /* Dangling field error */
+    if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD)
+    {
+        *error_code |= VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD;
+        /* The missing field is the opposite of the one previously seen. */
+        if (parser->mpeg2_wl_status & MPEG2_WL_DANGLING_FIELD_TOP)
+        {
+            *error_code |= VIDDEC_FW_WORKLOAD_ERR_TOPFIELD;
+        }
+        else
+        {
+            *error_code |= VIDDEC_FW_WORKLOAD_ERR_BOTTOMFIELD;
+        }
+    }
+
+    /* Repeated same field */
+    if (parser->mpeg2_wl_status & MPEG2_WL_REPEAT_FIELD)
+    {
+        *error_code |= (VIDDEC_FW_WORKLOAD_ERR_DANGLING_FLD
+                        | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE);
+    }
+
+    /* If workload is not complete, set non-decodeable flag */
+    if (!(parser->mpeg2_wl_status & MPEG2_WL_COMPLETE))
+    {
+        *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    }
+
+    /* If reference info is not updated, set missing reference flag */
+    if (!(parser->mpeg2_wl_status & MPEG2_WL_REF_INFO))
+    {
+        *error_code |= VIDDEC_FW_WORKLOAD_ERR_MISSING_REFERENCE;
+    }
+
+    /* Missing DMEM data flag and irrecoverable flag is set */
+    if (!(parser->mpeg2_wl_status & MPEG2_WL_DMEM_DATA))
+    {
+        *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_DMEM
+                         | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+    }
+
+    /* Missing sequence header and irrecoverable flag is set */
+    if ((!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ))
+            && (!parser->mpeg2_valid_seq_hdr_parsed))
+    {
+        *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_MISSING_SEQ_INFO
+                         | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+    }
+
+    /* Unsupported features found in stream */
+    if (parser->mpeg2_wl_status & MPEG2_WL_UNSUPPORTED)
+    {
+        *error_code |= ( VIDDEC_FW_WORKLOAD_ERR_UNSUPPORTED
+                         | VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE ) ;
+    }
+
+    /* If frame type is unknown, default to I frame. */
+    if ((wl->attrs.frame_type != VIDDEC_FRAME_TYPE_I)
+            && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_P)
+            && (wl->attrs.frame_type != VIDDEC_FRAME_TYPE_B))
+    {
+        wl->attrs.frame_type = VIDDEC_FRAME_TYPE_I;
+    }
+
+    /* If there is a mismatch between the frame type and reference information */
+    /* then mark the workload as not decodable */
+    /* (B frames must not be references; I/P frames must be references.)       */
+    if (wl->attrs.frame_type == VIDDEC_FRAME_TYPE_B)
+    {
+        if (wl->is_reference_frame != 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    }
+    else
+    {
+        if (wl->is_reference_frame == 0) *error_code |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+    }
+
+    /* For non-decodable frames, do not set reference info so that the workload */
+    /* manager does not increment ref count. */
+    if (*error_code & VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE)
+    {
+        wl->is_reference_frame = 0;
+    }
+
+    /* Corrupted header notification */
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_HDR)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_HDR;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_SEQ_DISP_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_SEQ_DISP_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_GOP_HDR)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_GOP_HDR;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_HDR)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_HDR;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_COD_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_COD_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_PIC_DISP_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_PIC_DISP_EXT;
+    if (parser->mpeg2_wl_status & MPEG2_WL_CORRUPTED_QMAT_EXT)
+        *error_code |= VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT;
+
+    MPEG2_DEB("Workload error code: 0x%8X.\n", *error_code);
+    return;
+}
+
+/* viddec_mpeg2_is_start_frame() - Tells the parser manager whether the     */
+/* most recently parsed chunk of data contains the start of a frame.         */
+static uint32_t viddec_mpeg2_is_start_frame
+(
+    void    *ctxt
+)
+{
+    return ((struct viddec_mpeg2_parser *) ctxt)->mpeg2_frame_start;
+}
+
+/* viddec_mpeg2_is_workload_done() - Returns current frame parsing status */
+/* to the parser manager. */
+/* A frame is done when the next start code opens a new frame (picture,   */
+/* sequence or GOP header) and the picture metadata is complete, or when  */
+/* EOS/discontinuity forces completion.  On completion the codec error    */
+/* code is computed and the per-frame status/header state is reset.       */
+static uint32_t viddec_mpeg2_is_workload_done
+(
+    void            *parent,
+    void            *ctxt,
+    unsigned int    next_sc,
+    uint32_t        *codec_specific_errors
+)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+    uint32_t ret = VIDDEC_PARSE_SUCESS;
+    uint32_t frame_boundary = 0;
+    uint8_t force_frame_complete = 0;
+
+    /* Detect Frame Boundary */
+    frame_boundary = ((MPEG2_SC_PICTURE == next_sc) || (MPEG2_SC_SEQ_HDR == next_sc) || (MPEG2_SC_GROUP == next_sc));
+    if (frame_boundary)
+    {
+        parser->mpeg2_first_slice_flag = false;
+    }
+
+    force_frame_complete = ((VIDDEC_PARSE_EOS == next_sc) || (VIDDEC_PARSE_DISCONTINUITY == next_sc));
+
+    if (force_frame_complete || (frame_boundary && (parser->mpeg2_pic_metadata_complete)))
+    {
+        /* A forced completion skips the per-frame state updates below. */
+        if (!force_frame_complete)
+        {
+            parser->mpeg2_wl_status            |= MPEG2_WL_COMPLETE;
+            parser->mpeg2_last_parsed_slice_sc  =  MPEG2_SC_SLICE_MAX;
+            parser->mpeg2_pic_metadata_complete = false;
+            parser->mpeg2_first_slice_flag = false;
+        }
+
+        viddec_mpeg2_get_error_code(parser, wl, codec_specific_errors);
+        parser->mpeg2_wl_status          = MPEG2_WL_EMPTY;
+        parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+        /* Reset mpeg2_use_next_workload flag if it is set */
+        if (parser->mpeg2_use_next_workload)
+        {
+            viddec_pm_set_late_frame_detect(parent);
+            parser->mpeg2_use_next_workload  = false;
+        }
+        ret = VIDDEC_PARSE_FRMDONE;
+    }
+    return ret;
+}
+
+/* viddec_mpeg2_parse() - Parse metadata info from the buffer for the prev */
+/* start code found. */
+/* Peeks the next start code, strips its prefix (except for slice start    */
+/* codes) and dispatches to the matching header/extension parser.  Slice   */
+/* start codes trigger workload emission and slice-data append.            */
+static mpeg2_status viddec_mpeg2_parse
+(
+    void    *parent,
+    void    *ctxt
+)
+{
+    uint32_t current_sc = 0, sc_bits = MPEG2_SC_AND_PREFIX_SIZE;
+    int32_t  ret = MPEG2_SUCCESS;
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Reset frame start flag. For Mpeg1 we want to set frame start after
+     we parsed the picture header, since there is no extension*/
+    parser->mpeg2_frame_start =  (!parser->mpeg2_stream) && (parser->mpeg2_last_parsed_sc == MPEG2_SC_PICTURE);
+
+    /* Peek at current start code - First 32 bits of the stream */
+    ret = viddec_pm_peek_bits(parent, &current_sc, sc_bits);
+    if (ret == -1)
+    {
+        MPEG2_DEB("Unable to get start code.\n");
+        return MPEG2_PARSE_ERROR;
+    }
+    current_sc &= MPEG2_BIT_MASK_8;
+    MPEG2_DEB("Start Code found = 0x%.8X\n", current_sc);
+
+    /* Get rid of the start code prefix for all start codes except slice */
+    /* start codes. */
+    if ((current_sc < MPEG2_SC_SLICE_MIN) || (current_sc > MPEG2_SC_SLICE_MAX))
+    {
+        viddec_pm_skip_bits(parent, sc_bits);
+    }
+
+    /* Parse Metadata based on the start code found */
+    switch ( current_sc )
+    {
+        /* Sequence Start Code */
+    case MPEG2_SC_SEQ_HDR:
+    {
+        /* A new sequence invalidates previously seen sequence headers. */
+        parser->mpeg2_curr_seq_headers = MPEG2_HEADER_NONE;
+        viddec_mpeg2_parse_seq_hdr(parent, ctxt);
+    }
+    break;
+
+    /* Picture Start Code */
+    case MPEG2_SC_PICTURE:
+    {
+        viddec_mpeg2_parse_pic_hdr(parent, ctxt);
+    }
+    break;
+
+    /* Extension Code */
+    case MPEG2_SC_EXT:
+    {
+        viddec_mpeg2_parse_ext(parent, ctxt);
+    }
+    break;
+
+    /* Group of Pictures Header */
+    case MPEG2_SC_GROUP:
+    {
+        viddec_mpeg2_parse_gop_hdr(parent, ctxt);
+    }
+    break;
+
+    /* Unused Start Code */
+    case MPEG2_SC_SEQ_END:
+    case MPEG2_SC_SEQ_ERR:
+        break;
+
+        /* User Data */
+    case MPEG2_SC_USER_DATA:
+    {
+        viddec_mpeg2_parse_and_append_user_data(parent, ctxt);
+    }
+    break;
+
+    default:
+    {
+        /* Slice Data - Append slice data to the workload */
+        if ((current_sc >= MPEG2_SC_SLICE_MIN) &&
+                (current_sc <= MPEG2_SC_SLICE_MAX))
+        {
+            /* Emit the workload once per picture, on the first slice. */
+            if (!parser->mpeg2_first_slice_flag)
+            {
+                /* At this point, all the metadata required by the MPEG2 */
+                /* hardware for decoding is extracted and stored. So the */
+                /* metadata can be packed into workitems and emitted out.*/
+                viddec_mpeg2_emit_workload(parent, ctxt);
+
+                /* If the current picture is progressive or it is the */
+                /* second field of interlaced field picture then, set */
+                /* the workload done flag. */
+                if ((!parser->mpeg2_picture_interlaced)
+                        || ((parser->mpeg2_picture_interlaced) && (!parser->mpeg2_first_field)))
+                {
+                    parser->mpeg2_pic_metadata_complete = true;
+                }
+                else if ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field))
+                {
+                    parser->mpeg2_curr_frame_headers = MPEG2_HEADER_NONE;
+                }
+
+                parser->mpeg2_first_slice_flag = true;
+            }
+            parser->mpeg2_last_parsed_slice_sc = current_sc;
+            viddec_mpeg2_parse_and_append_slice_data(parent, ctxt);
+            parser->mpeg2_wl_status |= MPEG2_WL_PARTIAL_SLICE;
+        }
+    }
+    } /* Switch */
+
+    /* Save last parsed start code */
+    parser->mpeg2_last_parsed_sc = current_sc;
+    return ret;
+}
+
+/* viddec_mpeg2_get_ops() - Register parser ops with the parser manager. */
+void viddec_mpeg2_get_ops(viddec_parser_ops_t *ops)
+{
+    /* Wire up every MPEG2 parser entry point the manager needs. */
+    ops->get_cxt_size   = viddec_mpeg2_get_context_size;
+    ops->init           = viddec_mpeg2_parser_init;
+    ops->parse_syntax   = viddec_mpeg2_parse;
+    ops->is_frame_start = viddec_mpeg2_is_start_frame;
+    ops->is_wkld_done   = viddec_mpeg2_is_workload_done;
+}
+
diff --git a/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c
new file mode 100755
index 0000000..42a42a9
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp2/viddec_mpeg2_workload.c
@@ -0,0 +1,461 @@
+/**
+ * viddec_mpeg2_workload.c
+ * -----------------------
+ * This file packs the data parsed and stored in the context into workload and
+ * emits it out. The current list of workitems emitted into the workload
+ * include:
+ *
+ * - DMEM - Register Data
+ * - Past and Future picture references
+ * - Quantization matrix data
+ *
+ * Slice data gets appended into the workload in viddec_mpeg2_parse.c
+ *
+ * Also, the frame attributes are updated in the workload.
+ */
+
+#include "viddec_mpeg2.h"
+#include "viddec_fw_item_types.h"
+
+/* viddec_mpeg2_append_workitem() - Appends a workitem to a workload.       */
+/* Dispatches to viddec_pm_append_workitem_next() when next_wl is non-zero, */
+/* otherwise to viddec_pm_append_workitem() for the current workload.       */
+void viddec_mpeg2_append_workitem(void *parent, viddec_workload_item_t *wi, uint8_t next_wl)
+{
+    if (next_wl)
+    {
+        viddec_pm_append_workitem_next(parent, wi);
+    }
+    else
+    {
+        viddec_pm_append_workitem(parent, wi);
+    }
+    return;
+}
+
+/* viddec_mpeg2_get_header() - Returns the workload header to operate on.  */
+/* Selects the next workload's header when next_wl is non-zero, otherwise  */
+/* the current workload's header.                                          */
+viddec_workload_t* viddec_mpeg2_get_header(void *parent, uint8_t next_wl)
+{
+    viddec_workload_t *ret;
+    if (next_wl)
+    {
+        ret = viddec_pm_get_next_header(parent);
+    }
+    else
+    {
+        ret = viddec_pm_get_header(parent);
+    }
+    return ret;
+}
+
+/* viddec_mpeg2_set_seq_ext_defaults() - Sets non-zero default values for    */
+/* sequence extension items in case sequence extension is not present.       */
+static void viddec_mpeg2_set_seq_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+    /* Defaults: progressive sequence with 4:2:0 chroma sampling. */
+    parser->info.seq_ext.progressive_sequence = true;
+    parser->info.seq_ext.chroma_format        = MPEG2_CF_420;
+}
+
+/* viddec_mpeg2_set_pic_cod_ext_defaults() - Sets non-zero default values for*/
+/* picture coding extension items in case picture coding extension is not    */
+/* present.                                                                  */
+static void viddec_mpeg2_set_pic_cod_ext_defaults(struct viddec_mpeg2_parser *parser)
+{
+    /* Defaults: progressive frame picture with frame-based prediction/DCT. */
+    parser->info.pic_cod_ext.picture_structure    = MPEG2_PIC_STRUCT_FRAME;
+    parser->info.pic_cod_ext.frame_pred_frame_dct = true;
+    parser->info.pic_cod_ext.progressive_frame    = true;
+}
+
+/* viddec_mpeg2_pack_qmat() - Packs the 256 byte quantization matrix data    */
+/* 64 32-bit values.                                                         */
+#ifdef MFDBIGENDIAN
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+    /* Quantization Matrix Support */
+    /* Populate Quantization Matrices */
+    uint32_t index = 0;
+    uint32_t *qmat_packed, *qmat_unpacked;
+
+    /* When transferring the quantization matrix data from the parser */
+    /* context into workload items, we are packing four 8 bit         */
+    /* quantization values into one DWORD (32 bits). To do this, the  */
+    /* array of values of type uint8_t, is typecast as uint32 * and   */
+    /* read.                                                          */
+    /* On big-endian targets the native byte order already matches    */
+    /* the packed layout, so whole 32-bit words are copied directly.  */
+    qmat_packed   = (uint32_t *) parser->wi.qmat;
+    qmat_unpacked = (uint32_t *) &parser->info.qnt_mat;
+
+    for (index=0; index<MPEG2_QUANT_MAT_SIZE; index++)
+    {
+        qmat_packed[index] = qmat_unpacked[index];
+    }
+    return;
+}
+#else
+static void viddec_mpeg2_pack_qmat(struct viddec_mpeg2_parser *parser)
+{
+    /* Quantization Matrix Support */
+    /* Populate Quantization Matrices */
+    uint32_t index = 0;
+    uint32_t *qmat_packed;
+    uint8_t  *qmat_unpacked;
+
+    /* When transferring the quantization matrix data from the parser */
+    /* context into workload items, we are packing four 8 bit         */
+    /* quantization values into one DWORD (32 bits). To do this, the  */
+    /* array of values of type uint8_t, is typecast as uint32 * and   */
+    /* read.                                                          */
+    /* On little-endian targets each DWORD is assembled explicitly in */
+    /* big-endian order: byte 0 lands in the most significant byte.   */
+    qmat_packed   = (uint32_t *) parser->wi.qmat;
+    qmat_unpacked = (uint8_t *) &parser->info.qnt_mat;
+
+    for (index=0; index<MPEG2_QUANT_MAT_SIZE; index++)
+    {
+        qmat_packed[index] =
+            (((uint32_t)qmat_unpacked[(index<<2)+0])<< 24) |
+            (((uint32_t)qmat_unpacked[(index<<2)+1])<< 16) |
+            (((uint32_t)qmat_unpacked[(index<<2)+2])<<  8) |
+            (((uint32_t)qmat_unpacked[(index<<2)+3])<<  0) ;
+    }
+    return;
+}
+#endif
+
+/* viddec_mpeg2_trans_metadata_workitems() - Transfers the metadata stored   */
+/* in parser context into workitems by bit masking. These workitems are then */
+/* sent through emitter                                                      */
+/* The four register images (csi1/csi2/cpi1/cpce1) are rebuilt from scratch  */
+/* on every call; missing sequence/picture extensions are filled with        */
+/* defaults first so the packed fields are always well-defined.              */
+static void viddec_mpeg2_trans_metadata_workitems(void *ctxt)
+{
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Reset register values */
+    parser->wi.csi1  = 0x0;
+    parser->wi.csi2  = 0x0;
+    parser->wi.cpi1  = 0x0;
+    parser->wi.cpce1 = 0x0;
+
+    /* Set defaults for missing fields */
+    if (!(parser->mpeg2_curr_seq_headers & MPEG2_HEADER_SEQ_EXT))
+    {
+        viddec_mpeg2_set_seq_ext_defaults(parser);
+    }
+    if (!(parser->mpeg2_curr_frame_headers & MPEG2_HEADER_PIC_COD_EXT))
+    {
+        viddec_mpeg2_set_pic_cod_ext_defaults(parser);
+    }
+
+    /* Populate Core Sequence Info 1 */
+    parser->wi.csi1 |= (parser->mpeg2_stream) <<  1;
+    parser->wi.csi1 |= (parser->info.seq_hdr.constrained_parameters_flag) <<  2;
+    parser->wi.csi1 |= (parser->info.seq_ext.progressive_sequence) <<  3;
+    parser->wi.csi1 |= (parser->info.seq_ext.chroma_format) << 16;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_intra_quantiser_matrix) << 19;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_non_intra_quantiser_matrix) << 20;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_intra_quantiser_matrix) << 21;
+    parser->wi.csi1 |= (parser->info.qnt_ext.load_chroma_non_intra_quantiser_matrix) << 22;
+    MPEG2_DEB("Core Sequence Info 1: 0x%.8X\n", parser->wi.csi1);
+
+    /* Populate Core Sequence Info 2 */
+    parser->wi.csi2 |= (parser->info.seq_hdr.horizontal_size_value & MPEG2_BIT_MASK_11);
+    parser->wi.csi2 |= (parser->info.seq_hdr.vertical_size_value & MPEG2_BIT_MASK_11) << 14;
+    MPEG2_DEB("Core Sequence Info 2: 0x%.8X\n", parser->wi.csi2);
+
+    /* Populate Core Picture Info */
+    parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_forward_vect);
+    parser->wi.cpi1 |= (parser->info.pic_hdr.forward_f_code) <<  1;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.full_pel_backward_vect) <<  4;
+    parser->wi.cpi1 |= (parser->info.pic_hdr.backward_f_code) <<  5;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode00) <<  8;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode01) << 12;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode10) << 16;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.fcode11) << 20;
+    parser->wi.cpi1 |= (parser->info.pic_cod_ext.intra_dc_precision) << 24;
+    /* picture_coding_type is biased by -1 so the packed field is 0-based */
+    parser->wi.cpi1 |= (parser->info.pic_hdr.picture_coding_type-1) << 26;
+    MPEG2_DEB("Core Picture Info 1: 0x%.8X\n", parser->wi.cpi1);
+
+    /* Populate Core Picture Extension Info */
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.composite_display_flag);
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.progressive_frame) <<  1;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.chroma_420_type) <<  2;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.repeat_first_field) <<  3;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.alternate_scan) <<  4;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.intra_vlc_format) <<  5;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.q_scale_type) <<  6;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.concealment_motion_vectors) <<  7;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.frame_pred_frame_dct) <<  8;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.top_field_first) <<  9;
+    parser->wi.cpce1 |= (parser->info.pic_cod_ext.picture_structure) << 10;
+    MPEG2_DEB("Core Picture Ext Info 1: 0x%.8X\n", parser->wi.cpce1);
+
+    return;
+}
+
+/* mpeg2_emit_display_frame() - Sends the frame id as a workload item.       */
+/* The reference id and physical addresses are emitted as zero; the actual   */
+/* buffer addresses are resolved downstream of the parser.                   */
+static inline void mpeg2_emit_frameid(void *parent, int32_t wl_type, uint8_t flag)
+{
+    viddec_workload_item_t wi;
+    wi.vwi_type = wl_type;
+
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* mpeg2_send_ref_reorder() - Reorders reference frames */
+/* Emits a REFERENCE_FRAME_REORDER workitem; each nibble-coded entry maps a  */
+/* reorder slot to a reference table index.                                  */
+static inline void mpeg2_send_ref_reorder(void *parent, uint8_t flag)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    /* Reorder index 1 to index 0 only */
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+    viddec_mpeg2_append_workitem( parent, &wi, flag );
+}
+
+/* viddec_mpeg2_manage_ref() - Manages frame references by inserting the     */
+/* past and future references (if any) for every frame inserted in the       */
+/* workload.                                                                 */
+static void viddec_mpeg2_manage_ref(void *parent, void *ctxt)
+{
+    /* NOTE(review): frame_id is hard-coded to 1; presumably the consumer */
+    /* resolves actual buffer ids - confirm against the workload user.    */
+    int32_t frame_id = 1;
+    int32_t frame_type;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+    viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+    wl->is_reference_frame = 0;
+
+    /* Identify the frame type (I, P or B) */
+    frame_type = parser->info.pic_hdr.picture_coding_type;
+
+    /* Send reference frame information based on whether the picture is a */
+    /* frame picture or field picture. */
+    if ((!parser->mpeg2_picture_interlaced)
+            || ((parser->mpeg2_picture_interlaced) && (parser->mpeg2_first_field)))
+    {
+        /* Check if we need to reorder frame references/send frame for display */
+        /* in case of I or P type */
+        if (frame_type != MPEG2_PC_TYPE_B)
+        {
+            /* Checking reorder */
+            if (parser->mpeg2_ref_table_updated)
+            {
+                mpeg2_send_ref_reorder(parent, parser->mpeg2_use_next_workload);
+            }
+        }
+
+        /* Send reference frame workitems */
+        switch (frame_type)
+        {
+        case MPEG2_PC_TYPE_I:
+        {
+            /* I pictures reference no other frames - nothing to emit. */
+            break;
+        }
+        case MPEG2_PC_TYPE_P:
+        {
+            mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+            break;
+        }
+        case MPEG2_PC_TYPE_B:
+        {
+            mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_PAST, parser->mpeg2_use_next_workload);
+            mpeg2_emit_frameid(parent, VIDDEC_WORKLOAD_MPEG2_REF_FUTURE, parser->mpeg2_use_next_workload);
+            /* no break: last case, falls out of the switch */
+        }
+        }
+
+        /* Set reference information updated flag */
+        if (!parser->mpeg2_picture_interlaced)
+        {
+            parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+        }
+    }
+    else
+    {
+        /* Set reference information updated flag for second field */
+        parser->mpeg2_wl_status |= MPEG2_WL_REF_INFO;
+    }
+
+    /* Set the reference frame flags for I and P types */
+    if (frame_type != MPEG2_PC_TYPE_B)
+    {
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+        parser->mpeg2_ref_table_updated = true;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_check_unsupported() - Check for unsupported feature in the stream */
+/* Currently only the content dimensions are validated; anything beyond the  */
+/* maximum supported width/height marks the workload as unsupported.         */
+static void viddec_mpeg2_check_unsupported(void *parent, void *ctxt)
+{
+    unsigned int unsupported_feature_found = 0;
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Get workload */
+    viddec_workload_t *wl = viddec_mpeg2_get_header( parent, parser->mpeg2_use_next_workload );
+
+    /* Get attributes in workload */
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+
+    /* Check for unsupported content size */
+    unsupported_feature_found |= (attrs->cont_size.height > MPEG2_MAX_CONTENT_HEIGHT);
+    unsupported_feature_found |= (attrs->cont_size.width  > MPEG2_MAX_CONTENT_WIDTH);
+
+    /* Update parser status, if found */
+    if (unsupported_feature_found)
+    {
+        parser->mpeg2_wl_status |= MPEG2_WL_UNSUPPORTED;
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_append_metadata() - Appends meta data from the stream.       */
+/* Emits sequence header, sequence extension, display extension and GOP      */
+/* workitems - each only if the corresponding header was seen with the       */
+/* current frame (tracked by the mpeg2_curr_frame_headers bitmask).          */
+void viddec_mpeg2_append_metadata(void *parent, void *ctxt)
+{
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    viddec_workload_item_t  wi;
+
+    /* Append sequence info, if found with current frame */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+        viddec_fw_mp2_sh_set_horizontal_size_value       ( &(wi.mp2_sh) , parser->info.seq_hdr.horizontal_size_value);
+        viddec_fw_mp2_sh_set_vertical_size_value         ( &(wi.mp2_sh) , parser->info.seq_hdr.vertical_size_value);
+        viddec_fw_mp2_sh_set_aspect_ratio_information    ( &(wi.mp2_sh) , parser->info.seq_hdr.aspect_ratio_information);
+        viddec_fw_mp2_sh_set_frame_rate_code             ( &(wi.mp2_sh) , parser->info.seq_hdr.frame_rate_code);
+        viddec_fw_mp2_sh_set_bit_rate_value              ( &(wi.mp2_sh) , parser->info.seq_hdr.bit_rate_value);
+        viddec_fw_mp2_sh_set_vbv_buffer_size_value       ( &(wi.mp2_sh) , parser->info.seq_hdr.vbv_buffer_size_value);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append sequence extension info, if found with current frame */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_EXT)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_MPEG2_SEQ_EXT;
+
+        viddec_fw_mp2_se_set_profile_and_level_indication( &(wi.mp2_se) , parser->info.seq_ext.profile_and_level_indication);
+        viddec_fw_mp2_se_set_progressive_sequence        ( &(wi.mp2_se) , parser->info.seq_ext.progressive_sequence);
+        viddec_fw_mp2_se_set_chroma_format               ( &(wi.mp2_se) , parser->info.seq_ext.chroma_format);
+        viddec_fw_mp2_se_set_horizontal_size_extension   ( &(wi.mp2_se) , parser->info.seq_ext.horizontal_size_extension);
+        viddec_fw_mp2_se_set_vertical_size_extension     ( &(wi.mp2_se) , parser->info.seq_ext.vertical_size_extension);
+        viddec_fw_mp2_se_set_bit_rate_extension          ( &(wi.mp2_se) , parser->info.seq_ext.bit_rate_extension);
+        viddec_fw_mp2_se_set_vbv_buffer_size_extension   ( &(wi.mp2_se) , parser->info.seq_ext.vbv_buffer_size_extension);
+        viddec_fw_mp2_se_set_frame_rate_extension_n      ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_n);
+        viddec_fw_mp2_se_set_frame_rate_extension_d      ( &(wi.mp2_se) , parser->info.seq_ext.frame_rate_extension_d);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append Display info, if present */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_SEQ_DISP_EXT)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+        viddec_fw_mp2_sde_set_video_format            ( &(wi.mp2_sde) , parser->info.seq_disp_ext.video_format);
+        viddec_fw_mp2_sde_set_color_description       ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_description);
+        viddec_fw_mp2_sde_set_color_primaries         ( &(wi.mp2_sde) , parser->info.seq_disp_ext.colour_primaries);
+        viddec_fw_mp2_sde_set_transfer_characteristics( &(wi.mp2_sde) , parser->info.seq_disp_ext.transfer_characteristics);
+        viddec_fw_mp2_sde_set_display_horizontal_size ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_horizontal_size);
+        viddec_fw_mp2_sde_set_display_vertical_size   ( &(wi.mp2_sde) , parser->info.seq_disp_ext.display_vertical_size);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    /* Append GOP info, if present */
+    if (parser->mpeg2_curr_frame_headers & MPEG2_HEADER_GOP)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO;
+
+        viddec_fw_mp2_gop_set_closed_gop ( &(wi.mp2_gop) , parser->info.gop_hdr.closed_gop);
+        viddec_fw_mp2_gop_set_broken_link( &(wi.mp2_gop) , parser->info.gop_hdr.broken_link);
+
+        viddec_mpeg2_append_workitem(parent, &wi, parser->mpeg2_use_next_workload);
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_append_workitems() - Appends decoder specific workitems      */
+/* to the workload starting at the address and length specified.             */
+/* Each workitem carries two consecutive 32-bit payload words plus the byte  */
+/* offset of that pair from the start of the source buffer, so num_items     */
+/* items consume num_items * 2 dwords of source data.                        */
+static void viddec_mpeg2_append_workitems
+(
+    void *parent,
+    uint32_t* address,
+    int workitem_type,
+    int num_items,
+    uint8_t flag
+)
+{
+    int32_t                  index=0;
+    const uint32_t*          initial_address = address;
+    viddec_workload_item_t   wi;
+
+    for (index=0; index < num_items; index++)
+    {
+        wi.vwi_type = workitem_type;
+        wi.data.data_offset = (char *) address - (const char *) initial_address;
+        wi.data.data_payload[0] = address[0];
+        wi.data.data_payload[1] = address[1];
+        address += 2;
+
+        viddec_mpeg2_append_workitem(parent, &wi, flag);
+    }
+
+    return;
+}
+
+/* viddec_mpeg2_emit_workload() - Emits MPEG2 parser generated work load     */
+/* items.                                                                    */
+/* Items include: MPEG2 DMEM Data, Quantization Matrices.                    */
+/* Pixel ES data sent separately whenever parser sees slice data             */
+void viddec_mpeg2_emit_workload(void *parent, void *ctxt)
+{
+    MPEG2_DEB("Emitting workloads.\n");
+
+    /* Get MPEG2 Parser context */
+    struct viddec_mpeg2_parser *parser = (struct viddec_mpeg2_parser *) ctxt;
+
+    /* Append meta data workitems */
+    viddec_mpeg2_append_metadata(parent, ctxt);
+
+    /* Transfer metadata into attributes */
+    viddec_mpeg2_translate_attr(parent, ctxt);
+
+    /* Check for unsupported features in the stream and update parser status */
+    /* (must run after translate_attr, since it reads the attributes)        */
+    viddec_mpeg2_check_unsupported(parent, ctxt);
+
+    /* Transfer all stored metadata into MPEG2 Hardware Info */
+    viddec_mpeg2_trans_metadata_workitems(parser);
+
+    /* Send MPEG2 DMEM workitems */
+    viddec_mpeg2_append_workitems(parent,
+                                  (uint32_t *) &parser->wi,
+                                  VIDDEC_WORKLOAD_MPEG2_DMEM,
+                                  MPEG2_NUM_DMEM_WL_ITEMS,
+                                  parser->mpeg2_use_next_workload);
+    parser->mpeg2_wl_status |= MPEG2_WL_DMEM_DATA;
+    MPEG2_DEB("Adding %d items as DMEM Data.\n", MPEG2_NUM_DMEM_WL_ITEMS);
+
+    /* Send MPEG2 Quantization Matrix workitems, if updated */
+    /* NOTE(review): despite the comment above, the QMAT items are emitted */
+    /* unconditionally on every call - confirm whether gating on a qmat-   */
+    /* updated flag was intended.                                          */
+    viddec_mpeg2_pack_qmat(parser);
+    viddec_mpeg2_append_workitems(parent,
+                                  (uint32_t *) parser->wi.qmat,
+                                  VIDDEC_WORKLOAD_MPEG2_QMAT,
+                                  MPEG2_NUM_QMAT_WL_ITEMS,
+                                  parser->mpeg2_use_next_workload);
+    MPEG2_DEB("Adding %d items as QMAT Data.\n", MPEG2_NUM_QMAT_WL_ITEMS);
+
+    /* Manage reference frames */
+    viddec_mpeg2_manage_ref(parent, ctxt);
+
+    return;
+}
+
diff --git a/mixvbp/vbp_plugin/mp4/Android.mk b/mixvbp/vbp_plugin/mp4/Android.mk
new file mode 100755
index 0000000..da9ed15
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/Android.mk
@@ -0,0 +1,28 @@
+# Builds libmixvbp_mpeg4: the MPEG-4 Part 2 (incl. short video header)
+# parser plugin for LibMIX VBP, linked against the core libmixvbp library.
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES :=                           \
+	viddec_mp4_visualobject.c            \
+	viddec_mp4_decodevideoobjectplane.c  \
+	viddec_mp4_parse.c                   \
+	viddec_fw_mp4_workload.c             \
+	viddec_mp4_videoobjectplane.c        \
+	viddec_parse_sc_mp4.c                \
+	viddec_mp4_shortheader.c             \
+	viddec_mp4_videoobjectlayer.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES :=							   \
+	$(MIXVBP_DIR)/include		    \
+	$(LOCAL_PATH)/include \
+        $(MIXVBP_DIR)/vbp_manager/include
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_mpeg4
+
+LOCAL_SHARED_LIBRARIES :=		\
+	libmixvbp
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h b/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h
new file mode 100755
index 0000000..bb772d4
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/include/viddec_fw_mp4.h
@@ -0,0 +1,231 @@
+#ifndef VIDDEC_FW_MP4_H
+#define VIDDEC_FW_MP4_H
+
+#include "viddec_fw_workload.h"
+
+// Roles a frame can play in the MP4 reference frame table.
+enum viddec_fw_mp4_ref_frame_id
+{
+    VIDDEC_MP4_FRAME_CURRENT = 0,
+    VIDDEC_MP4_FRAME_PAST = 1,
+    VIDDEC_MP4_FRAME_FUTURE = 2,
+    VIDDEC_MP4_FRAME_MAX = 3,
+};
+
+// MP4-specific workload item types, aliased onto the generic reference
+// frame source and decoder-specific item ranges.
+enum mp4_workload_item_type
+{
+    VIDDEC_WORKLOAD_MP4_PAST_FRAME = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+    VIDDEC_WORKLOAD_MP4_FUTURE_FRAME,
+    VIDDEC_WORKLOAD_MP4_VOL_INFO = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+    VIDDEC_WORKLOAD_MP4_VOP_INFO,
+    VIDDEC_WORKLOAD_MP4_BVOP_INFO,
+    VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+    VIDDEC_WORKLOAD_MP4_IQUANT,
+    VIDDEC_WORKLOAD_MP4_NIQUANT,
+    VIDDEC_WORKLOAD_MP4_SVH,
+};
+
+// VOP coding types (I/P/B/Sprite) as reported in the vop_info workitem.
+enum viddec_fw_mp4_vop_coding_type_t
+{
+    VIDDEC_MP4_VOP_TYPE_I = 0,
+    VIDDEC_MP4_VOP_TYPE_P,
+    VIDDEC_MP4_VOP_TYPE_B,
+    VIDDEC_MP4_VOP_TYPE_S
+};
+
+// This structure contains the information extracted from the Video Object Layer.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOL_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Flags extracted from the Video Object Layer
+    // 0:0 - short_video_header
+    // 1:2 - vol_shape
+    // 3:3 - interlaced
+    // 4:4 - obmc_disable
+    // 5:5 - quarter_sample
+    // 6:6 - resync_marker_disable
+    // 7:7 - data_partitioned
+    // 8:8 - reversible_vlc
+#define viddec_fw_mp4_get_reversible_vlc(x)      viddec_fw_bitfields_extract((x)->vol_flags,  8, 0x1)
+#define viddec_fw_mp4_set_reversible_vlc(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 8, 0x1)
+#define viddec_fw_mp4_get_data_partitioned(x)      viddec_fw_bitfields_extract((x)->vol_flags,  7, 0x1)
+#define viddec_fw_mp4_set_data_partitioned(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 7, 0x1)
+#define viddec_fw_mp4_get_resync_marker_disable(x)      viddec_fw_bitfields_extract((x)->vol_flags,  6, 0x1)
+#define viddec_fw_mp4_set_resync_marker_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 6, 0x1)
+#define viddec_fw_mp4_get_quarter_sample(x)      viddec_fw_bitfields_extract((x)->vol_flags,  5, 0x1)
+#define viddec_fw_mp4_set_quarter_sample(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 5, 0x1)
+#define viddec_fw_mp4_get_obmc_disable(x)      viddec_fw_bitfields_extract((x)->vol_flags,  4, 0x1)
+#define viddec_fw_mp4_set_obmc_disable(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 4, 0x1)
+#define viddec_fw_mp4_get_interlaced(x)      viddec_fw_bitfields_extract((x)->vol_flags,  3, 0x1)
+#define viddec_fw_mp4_set_interlaced(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 3, 0x1)
+#define viddec_fw_mp4_get_vol_shape(x)      viddec_fw_bitfields_extract((x)->vol_flags,  1, 0x3)
+#define viddec_fw_mp4_set_vol_shape(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 1, 0x3)
+#define viddec_fw_mp4_get_short_video_header_flag(x)      viddec_fw_bitfields_extract((x)->vol_flags,  0, 0x1)
+#define viddec_fw_mp4_set_short_video_header_flag(x, val) viddec_fw_bitfields_insert((x)->vol_flags, val, 0, 0x1)
+    unsigned int vol_flags;
+
+    // Size extracted from the Video Object Layer
+    // 0:12 - height
+    // 13:25 - width
+    // NOTE(review): the accessor macros below place width at bits 13:25 and
+    // height at bits 0:12; the original comment listed the two swapped.
+    // MFD_MPG4VD_MB_PER_ROW can be calculated as (width+15) >> 4
+    // MFD_MPG4VD_MB_ROWS can be calculated as (height+15) >> 4
+#define viddec_fw_mp4_get_vol_width(x)      viddec_fw_bitfields_extract((x)->vol_size,  13, 0x1FFF)
+#define viddec_fw_mp4_set_vol_width(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 13, 0x1FFF)
+#define viddec_fw_mp4_get_vol_height(x)      viddec_fw_bitfields_extract((x)->vol_size,  0, 0x1FFF)
+#define viddec_fw_mp4_set_vol_height(x, val) viddec_fw_bitfields_insert((x)->vol_size, val, 0, 0x1FFF)
+    unsigned int vol_size;
+
+    // Sprite, time increments and quantization details from the Video Object Layer
+    // 0:15 - vop_time_increment_resolution
+    // 16:17 - sprite_enable
+    // 18:23 - sprite_warping_points
+    // 24:25 - sprite_warping_accuracy
+    // 26:29 - quant_precision
+    // 30:30 - quant_type
+#define viddec_fw_mp4_get_quant_type(x)      viddec_fw_bitfields_extract((x)->vol_item,  30, 0x1)
+#define viddec_fw_mp4_set_quant_type(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 30, 0x1)
+#define viddec_fw_mp4_get_quant_precision(x)      viddec_fw_bitfields_extract((x)->vol_item,  26, 0xF)
+#define viddec_fw_mp4_set_quant_precision(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 26, 0xF)
+#define viddec_fw_mp4_get_sprite_warping_accuracy(x)      viddec_fw_bitfields_extract((x)->vol_item,  24, 0x3)
+#define viddec_fw_mp4_set_sprite_warping_accuracy(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 24, 0x3)
+#define viddec_fw_mp4_get_sprite_warping_points(x)      viddec_fw_bitfields_extract((x)->vol_item,  18, 0x3F)
+#define viddec_fw_mp4_set_sprite_warping_points(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 18, 0x3F)
+#define viddec_fw_mp4_get_sprite_enable(x)      viddec_fw_bitfields_extract((x)->vol_item,  16, 0x3)
+#define viddec_fw_mp4_set_sprite_enable(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 16, 0x3)
+#define viddec_fw_mp4_get_vop_time_increment_resolution(x)      viddec_fw_bitfields_extract((x)->vol_item,  0, 0xFFFF)
+#define viddec_fw_mp4_set_vop_time_increment_resolution(x, val) viddec_fw_bitfields_insert((x)->vol_item, val, 0, 0xFFFF)
+    unsigned int vol_item;
+
+} viddec_fw_mp4_vol_info_t;
+
+// This structure contains the information extracted from the Video Object Plane.
+// (The original comment said "Video Object Layer" - a copy-paste slip; the
+// fields below are per-VOP.)
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_VOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame Info - to populate register MFD_MPG4VD_BSP_FRAME_INFO
+    // 0:4 - current_frame_id
+    // 5:5 - current_field_frame
+    // 6:10 - future_frame_id
+    // 11:11 - future_field_frame
+    // 12:16 - past_frame_id
+    // 17:17 - past_field_frame
+#define viddec_fw_mp4_get_past_field_frame(x)      viddec_fw_bitfields_extract((x)->frame_info,  17, 0x1)
+#define viddec_fw_mp4_set_past_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 17, 0x1)
+#define viddec_fw_mp4_get_past_frame_id(x)         viddec_fw_bitfields_extract((x)->frame_info,  12, 0x1F)
+#define viddec_fw_mp4_set_past_frame_id(x, val)    viddec_fw_bitfields_insert((x)->frame_info, val, 12, 0x1F)
+#define viddec_fw_mp4_get_future_field_frame(x)      viddec_fw_bitfields_extract((x)->frame_info,  11, 0x1)
+#define viddec_fw_mp4_set_future_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 11, 0x1)
+#define viddec_fw_mp4_get_future_frame_id(x)         viddec_fw_bitfields_extract((x)->frame_info,  6, 0x1F)
+#define viddec_fw_mp4_set_future_frame_id(x, val)    viddec_fw_bitfields_insert((x)->frame_info, val, 6, 0x1F)
+#define viddec_fw_mp4_get_current_field_frame(x)      viddec_fw_bitfields_extract((x)->frame_info,  5, 0x1)
+#define viddec_fw_mp4_set_current_field_frame(x, val) viddec_fw_bitfields_insert((x)->frame_info, val, 5, 0x1)
+#define viddec_fw_mp4_get_current_frame_id(x)         viddec_fw_bitfields_extract((x)->frame_info,  0, 0x1F)
+#define viddec_fw_mp4_set_current_frame_id(x, val)    viddec_fw_bitfields_insert((x)->frame_info, val, 0, 0x1F)
+    unsigned int frame_info;
+
+    // Video Object Plane Info
+    // 0:1 - vop_coding_type
+    // 2:2 - vop_rounding_type
+    // 3:5 - intra_dc_vlc_thr
+    // 6:6 - top_field_first
+    // 7:7 - alternate_vertical_scan_flag
+    // 8:16 - vop_quant
+    // 17:19 - vop_fcode_forward
+    // 20:22 - vop_fcode_backward
+    // 23:31 - quant_scale
+#define viddec_fw_mp4_get_vop_quant_scale(x)      viddec_fw_bitfields_extract((x)->vop_data, 23, 0x1FF)
+#define viddec_fw_mp4_set_vop_quant_scale(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 23, 0x1FF)
+#define viddec_fw_mp4_get_vop_fcode_backward(x)      viddec_fw_bitfields_extract((x)->vop_data, 20, 0x7)
+#define viddec_fw_mp4_set_vop_fcode_backward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 20, 0x7)
+#define viddec_fw_mp4_get_vop_fcode_forward(x)      viddec_fw_bitfields_extract((x)->vop_data, 17, 0x7)
+#define viddec_fw_mp4_set_vop_fcode_forward(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 17, 0x7)
+#define viddec_fw_mp4_get_vop_quant(x)      viddec_fw_bitfields_extract((x)->vop_data, 8, 0x1FF)
+#define viddec_fw_mp4_set_vop_quant(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 8, 0x1FF)
+#define viddec_fw_mp4_get_alternate_vertical_scan_flag(x)      viddec_fw_bitfields_extract((x)->vop_data, 7, 0x1)
+#define viddec_fw_mp4_set_alternate_vertical_scan_flag(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 7, 0x1)
+#define viddec_fw_mp4_get_top_field_first(x)      viddec_fw_bitfields_extract((x)->vop_data, 6, 0x1)
+#define viddec_fw_mp4_set_top_field_first(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 6, 0x1)
+#define viddec_fw_mp4_get_intra_dc_vlc_thr(x)      viddec_fw_bitfields_extract((x)->vop_data, 3, 0x7)
+#define viddec_fw_mp4_set_intra_dc_vlc_thr(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 3, 0x7)
+#define viddec_fw_mp4_get_vop_rounding_type(x)      viddec_fw_bitfields_extract((x)->vop_data, 2, 0x1)
+#define viddec_fw_mp4_set_vop_rounding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 2, 0x1)
+#define viddec_fw_mp4_get_vop_coding_type(x)      viddec_fw_bitfields_extract((x)->vop_data, 0, 0x3)
+#define viddec_fw_mp4_set_vop_coding_type(x, val) viddec_fw_bitfields_insert((x)->vop_data, val, 0, 0x3)
+    unsigned int vop_data;
+
+    // No of bits used in first byte of MB data
+    unsigned int bit_offset;
+
+} viddec_fw_mp4_vop_info_t;
+
+// This structure contains the B-VOP timing information used for direct-mode
+// motion vector scaling. (The original comment said "Video Object Layer" -
+// a copy-paste slip; the fields below are B-VOP timing values.)
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_BVOP_INFO, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Frame period = T(first B-VOP after VOL) - T(past reference of first B-VOP after VOL)
+    unsigned int Tframe;
+
+    // TRD is the difference in temporal reference of the temporally next reference VOP with
+    // temporally previous reference VOP, assuming B-VOPs or skipped VOPs in between.
+    unsigned int TRD;
+
+    // TRB is the difference in temporal reference of the B-VOP and the previous reference VOP.
+    unsigned int TRB;
+
+} viddec_fw_mp4_bvop_info_t;
+
+// This structure contains the information extracted from the sprite trajectory.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SPRT_TRAJ,
+// using the fields vwi_payload in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Sprite Trajectory can have dmv_codes for each warping point.
+    // 0:13 - warping_mv_code_dv
+    // 14:27 - warping_mv_code_du
+    // 28:31 - warping_point_index - identifies which warping point the warping code refers to.
+    // The default value for index is 0xF which should be treated as invalid.
+    // Unlike the other accessors in this header, these macros operate on a
+    // single packed dmv code word, not on the struct pointer.
+#define viddec_fw_mp4_get_warping_point_index(x)      viddec_fw_bitfields_extract((x), 28, 0xF)
+#define viddec_fw_mp4_set_warping_point_index(x, val) viddec_fw_bitfields_insert((x), val, 28, 0xF)
+#define viddec_fw_mp4_get_warping_mv_code_du(x)      viddec_fw_bitfields_extract((x), 14, 0x3FFF)
+#define viddec_fw_mp4_set_warping_mv_code_du(x, val) viddec_fw_bitfields_insert((x), val, 14, 0x3FFF)
+#define viddec_fw_mp4_get_warping_mv_code_dv(x)      viddec_fw_bitfields_extract((x), 0, 0x3FFF)
+#define viddec_fw_mp4_set_warping_mv_code_dv(x, val) viddec_fw_bitfields_insert((x), val, 0, 0x3FFF)
+    // One packed code word per warping point (up to three points).
+    unsigned int warping_mv_code[3];
+} viddec_fw_mp4_sprite_trajectory_t;
+
+// IQUANT entries will be populated in the workload using items of type VIDDEC_WORKLOAD_MP4_IQUANT and the
+// vwi_payload array. The entries will be in the order in which they need to be programmed in the registers.
+// There is no need for a separate structure for these values.
+
+// This structure contains the information extracted from the Video Plane with Short Header.
+// This info will be populated in the workload as item type VIDDEC_WORKLOAD_MP4_SVH, using
+// the "vwi_payload" array in viddec_workload_item_t.
+// TODO: Add default values in the comments for each item
+typedef struct
+{
+    // Video Plane with Short Header
+    // 0:7 - temporal_reference
+    // 8:19 - num_macroblocks_in_gob
+    // 20:24 - num_gobs_in_vop
+    // 25:27 - num_rows_in_gob
+#define viddec_fw_mp4_get_num_rows_in_gob(x)      viddec_fw_bitfields_extract((x)->svh_data, 25, 0x7)
+#define viddec_fw_mp4_set_num_rows_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 25, 0x7)
+#define viddec_fw_mp4_get_num_gobs_in_vop(x)      viddec_fw_bitfields_extract((x)->svh_data, 20, 0x1F)
+#define viddec_fw_mp4_set_num_gobs_in_vop(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 20, 0x1F)
+#define viddec_fw_mp4_get_num_macroblocks_in_gob(x)      viddec_fw_bitfields_extract((x)->svh_data, 8, 0xFFF)
+#define viddec_fw_mp4_set_num_macroblocks_in_gob(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 8, 0xFFF)
+#define viddec_fw_mp4_get_temporal_reference(x)      viddec_fw_bitfields_extract((x)->svh_data, 0, 0xFF)
+#define viddec_fw_mp4_set_temporal_reference(x, val) viddec_fw_bitfields_insert((x)->svh_data, val, 0, 0xFF)
+    unsigned int svh_data;
+
+    // Padding so the item fills all three vwi_payload words.
+    unsigned int pad1;
+    unsigned int pad2;
+} viddec_fw_mp4_svh_t;
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c b/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c
new file mode 100755
index 0000000..c9ec2fb
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_fw_mp4_workload.c
@@ -0,0 +1,377 @@
+#ifndef VBP
+#include <string.h>
+
+#include "viddec_fw_workload.h"
+#include "viddec_parser_ops.h"
+#include "viddec_fw_mp4.h"
+#include "viddec_mp4_parse.h"
+
+// Fill the workload's frame attributes (container size, frame type,
+// top_field_first) from the parsed VOL/VOP state.
+// Always returns MP4_STATUS_OK; the result variable exists for interface
+// symmetry with the other emit helpers.
+uint32_t viddec_fw_mp4_populate_attr(viddec_workload_t *wl, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_frame_attributes_t *attr = &(wl->attrs);
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    // Start from a clean slate so any attribute not set below is zero.
+    memset(attr, 0, sizeof(viddec_frame_attributes_t));
+
+    attr->cont_size.width = vol->video_object_layer_width;
+    attr->cont_size.height = vol->video_object_layer_height;
+
+    // Translate vop_coding_type
+    switch (vol->VideoObjectPlane.vop_coding_type)
+    {
+    case MP4_VOP_TYPE_B:
+        attr->frame_type = VIDDEC_FRAME_TYPE_B;
+        break;
+    case MP4_VOP_TYPE_P:
+        attr->frame_type = VIDDEC_FRAME_TYPE_P;
+        break;
+    case MP4_VOP_TYPE_S:
+        attr->frame_type = VIDDEC_FRAME_TYPE_S;
+        break;
+    case MP4_VOP_TYPE_I:
+        attr->frame_type = VIDDEC_FRAME_TYPE_I;
+        break;
+    default:
+        // Unknown coding type: frame_type keeps the memset value (0).
+        break;
+    } // switch on vop_coding_type
+
+    attr->mpeg4.top_field_first = vol->VideoObjectPlane.top_field_first;
+
+    return result;
+} // viddec_fw_mp4_populate_attr
+
+// Pack the parsed Video Object Layer state into a viddec_fw_mp4_vol_info_t
+// and append it to the workload as a VIDDEC_WORKLOAD_MP4_VOL_INFO item.
+// Returns the status of viddec_pm_append_workitem().
+// NOTE(review): only vwi_type and vwi_payload[0..2] of wi are initialized;
+// assumes the appender reads nothing else of the item — confirm.
+uint32_t viddec_fw_mp4_insert_vol_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_vol_info_t vol_info;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    memset(&vol_info, 0, sizeof(viddec_fw_mp4_vol_info_t));
+
+    // Get vol_flags
+    viddec_fw_mp4_set_reversible_vlc(&vol_info, vol->reversible_vlc);
+    viddec_fw_mp4_set_data_partitioned(&vol_info, vol->data_partitioned);
+    viddec_fw_mp4_set_resync_marker_disable(&vol_info, vol->resync_marker_disable);
+    viddec_fw_mp4_set_quarter_sample(&vol_info, vol->quarter_sample);
+    viddec_fw_mp4_set_obmc_disable(&vol_info, vol->obmc_disable);
+    viddec_fw_mp4_set_interlaced(&vol_info, vol->interlaced);
+    viddec_fw_mp4_set_vol_shape(&vol_info, vol->video_object_layer_shape);
+    viddec_fw_mp4_set_short_video_header_flag(&vol_info, vol->short_video_header);
+
+    // Get vol_size
+    viddec_fw_mp4_set_vol_width(&vol_info, vol->video_object_layer_width);
+    viddec_fw_mp4_set_vol_height(&vol_info, vol->video_object_layer_height);
+
+    // Get vol_item
+    viddec_fw_mp4_set_quant_type(&vol_info, vol->quant_type);
+    viddec_fw_mp4_set_quant_precision(&vol_info, vol->quant_precision);
+    viddec_fw_mp4_set_sprite_warping_accuracy(&vol_info, vol->sprite_info.sprite_warping_accuracy);
+    viddec_fw_mp4_set_sprite_warping_points(&vol_info, vol->sprite_info.no_of_sprite_warping_points);
+    viddec_fw_mp4_set_sprite_enable(&vol_info, vol->sprite_enable);
+    viddec_fw_mp4_set_vop_time_increment_resolution(&vol_info, vol->vop_time_increment_resolution);
+
+
+    // The three packed DWORDS become the item payload, in register order.
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOL_INFO;
+    wi.vwi_payload[0] = vol_info.vol_flags;
+    wi.vwi_payload[1] = vol_info.vol_size;
+    wi.vwi_payload[2] = vol_info.vol_item;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_vol_workitem
+
+// Pack the parsed Video Object Plane header into a viddec_fw_mp4_vop_info_t
+// and append it to the workload as a VIDDEC_WORKLOAD_MP4_VOP_INFO item,
+// including the bit offset of the first bit after the VOP header.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_vop_info_t vop_info;
+    mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+    uint32_t byte = 0;
+    unsigned char is_emul;
+
+    memset(&vop_info, 0, sizeof(viddec_fw_mp4_vop_info_t));
+
+    // Get frame_info
+    viddec_fw_mp4_set_past_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_2].is_field);
+    viddec_fw_mp4_set_past_frame_id(&vop_info, VIDDEC_MP4_FRAME_PAST);
+    viddec_fw_mp4_set_future_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_1].is_field);
+    viddec_fw_mp4_set_future_frame_id(&vop_info, VIDDEC_MP4_FRAME_FUTURE);
+    viddec_fw_mp4_set_current_field_frame(&vop_info, parser->ref_frame[VIDDEC_MP4_INDX_0].is_field);
+    viddec_fw_mp4_set_current_frame_id(&vop_info, VIDDEC_MP4_FRAME_CURRENT);
+
+    // HW has a limitation that the enums for PAST(1), FUTURE(2) and CURRENT(0) cannot be changed and
+    // the spec does not support field pictures. Hence the field_frame bits are always zero.
+    // This gives us the constant 0x10200.
+    vop_info.frame_info = 0x10200;
+
+    // Get vop_data
+    // Quant scale is in the video_packet_header or the gob_layer - both of which are parsed by the BSP
+    viddec_fw_mp4_set_vop_quant_scale(&vop_info, 0);
+    viddec_fw_mp4_set_vop_fcode_backward(&vop_info, vop->vop_fcode_backward);
+    viddec_fw_mp4_set_vop_fcode_forward(&vop_info, vop->vop_fcode_forward);
+    viddec_fw_mp4_set_vop_quant(&vop_info, vop->vop_quant);
+    viddec_fw_mp4_set_alternate_vertical_scan_flag(&vop_info, vop->alternate_vertical_scan_flag);
+    viddec_fw_mp4_set_top_field_first(&vop_info, vop->top_field_first);
+    viddec_fw_mp4_set_intra_dc_vlc_thr(&vop_info, vop->intra_dc_vlc_thr);
+    viddec_fw_mp4_set_vop_rounding_type(&vop_info, vop->vop_rounding_type);
+    viddec_fw_mp4_set_vop_coding_type(&vop_info, vop->vop_coding_type);
+
+    // Get vol_item
+    // NOTE(review): this status is overwritten by the append below, so a
+    // failure of viddec_pm_get_au_pos is silently ignored — confirm intended.
+    result = viddec_pm_get_au_pos(parent, &vop_info.bit_offset, &byte, &is_emul);
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_VOP_INFO;
+    wi.vwi_payload[0] = vop_info.frame_info;
+    wi.vwi_payload[1] = vop_info.vop_data;
+    wi.vwi_payload[2] = vop_info.bit_offset;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_vop_workitem
+
+// Pack the short-video-header (H.263) plane state into a viddec_fw_mp4_svh_t
+// and append it to the workload as a VIDDEC_WORKLOAD_MP4_SVH item.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_vpsh_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_svh_t svh_info;
+    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
+
+    memset(&svh_info, 0, sizeof(viddec_fw_mp4_svh_t));
+
+    // Get svh_data
+    viddec_fw_mp4_set_temporal_reference(&svh_info, svh->temporal_reference);
+    viddec_fw_mp4_set_num_macroblocks_in_gob(&svh_info, svh->num_macroblocks_in_gob);
+    viddec_fw_mp4_set_num_gobs_in_vop(&svh_info, svh->num_gobs_in_vop);
+    viddec_fw_mp4_set_num_rows_in_gob(&svh_info, svh->num_rows_in_gob);
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SVH;
+    wi.vwi_payload[0] = svh_info.svh_data;
+    // pad1/pad2 are zero from the memset; they just fill the payload.
+    wi.vwi_payload[1] = svh_info.pad1;
+    wi.vwi_payload[2] = svh_info.pad2;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_vpsh_workitem
+
+// Append the GMC sprite trajectory (warping motion vectors) to the workload.
+// Each VIDDEC_WORKLOAD_MP4_SPRT_TRAJ item carries up to 3 warping points;
+// with more than 3 points two items are emitted.  Unused entries are marked
+// with 0xF in the top nibble.  Returns early with MP4_STATUS_OK when the VOL
+// declares no warping points.
+uint32_t viddec_fw_mp4_insert_sprite_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    viddec_fw_mp4_sprite_trajectory_t sprite_info;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t *vop = &(parser->info.VisualObject.VideoObject.VideoObjectPlane);
+    uint8_t no_of_entries_per_item = 3;
+    uint8_t no_of_sprite_workitems = 0;
+    uint8_t warp_index = 0;
+    int i, j;
+
+    if (!vol->sprite_info.no_of_sprite_warping_points)
+        return result;
+
+    no_of_sprite_workitems = (vol->sprite_info.no_of_sprite_warping_points > 3) ? 2 : 1;
+
+    for (i=0; i<no_of_sprite_workitems; i++)
+    {
+        memset(&sprite_info, 0, sizeof(viddec_fw_mp4_sprite_trajectory_t));
+
+        for (j=0; j<no_of_entries_per_item; j++)
+        {
+            if (warp_index < vol->sprite_info.no_of_sprite_warping_points)
+            {
+                // NOTE(review): the inner guard bounds reads of the vop
+                // warping arrays; assumes those arrays hold 4 entries and
+                // no_of_sprite_warping_points never exceeds 4 — confirm.
+                // An entry with warp_index >= 4 would be left all-zero
+                // rather than getting the 0xF "unused" marker below.
+                if (warp_index < 4)
+                {
+                    viddec_fw_mp4_set_warping_point_index(sprite_info.warping_mv_code[j], warp_index);
+                    viddec_fw_mp4_set_warping_mv_code_du(sprite_info.warping_mv_code[j], vop->warping_mv_code_du[warp_index]);
+                    viddec_fw_mp4_set_warping_mv_code_dv(sprite_info.warping_mv_code[j], vop->warping_mv_code_dv[warp_index]);
+                }
+            }
+            else
+            {
+                // Mark the slot as unused (index nibble = 0xF).
+                sprite_info.warping_mv_code[j] = 0xF << 28;
+            }
+            warp_index++;
+        }
+
+        wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_SPRT_TRAJ;
+        wi.vwi_payload[0] = sprite_info.warping_mv_code[0];
+        wi.vwi_payload[1] = sprite_info.warping_mv_code[1];
+        wi.vwi_payload[2] = sprite_info.warping_mv_code[2];
+
+        result = viddec_pm_append_workitem(parent, &wi, false);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_sprite_workitem
+
+// Append the B-VOP timing triple (Tframe, TRD, TRB) computed by
+// mp4_DecodeVideoObjectPlane() as a VIDDEC_WORKLOAD_MP4_BVOP_INFO item.
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_bvop_workitem(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    mp4_VideoObjectLayer_t *vol = &(parser->info.VisualObject.VideoObject);
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_BVOP_INFO;
+    wi.vwi_payload[0] = vol->Tframe;
+    wi.vwi_payload[1] = vol->TRD;
+    wi.vwi_payload[2] = vol->TRB;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_bvop_workitem
+
+// Append a 64-entry 8-bit quantization matrix to the workload.
+// The 64 bytes occupy 16 DWORDS; each work item carries 3 payload DWORDS,
+// so 6 items are emitted: 5 full items plus a final item holding the single
+// remaining DWORD.  intra_quant_flag selects the item type
+// (VIDDEC_WORKLOAD_MP4_IQUANT vs VIDDEC_WORKLOAD_MP4_NIQUANT).
+// qmat must point to at least 16 readable DWORDS.
+// Returns the status of the last viddec_pm_append_workitem() call.
+uint32_t viddec_fw_mp4_insert_qmat(void *parent, uint8_t intra_quant_flag, uint32_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+    uint8_t i;
+
+    // No of items = (64/4 Dwords / 3 entries per workload item)
+    // 64 8b entries => 64 * 8 / 32 DWORDS => 64/4 DWORDS => 16 DWORDS
+    // Each item can store 3 DWORDS, 16 DWORDS => 16/3 items => 6 items
+    for (i=0; i<6; i++)
+    {
+        memset(&wi, 0, sizeof(viddec_workload_item_t));
+
+        if (intra_quant_flag)
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_IQUANT;
+        else
+            wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_NIQUANT;
+
+        // The last iteration (i == 5) carries only the one remaining DWORD
+        // (16 = 5*3 + 1).  The original code tested i == 6, which the loop
+        // never reaches, so the final item read qmat[16] and qmat[17] —
+        // two DWORDS past the end of the 16-DWORD matrix.
+        if (i == 5)
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = 0;
+            wi.vwi_payload[2] = 0;
+        }
+        else
+        {
+            wi.vwi_payload[0] = qmat[0];
+            wi.vwi_payload[1] = qmat[1];
+            wi.vwi_payload[2] = qmat[2];
+        }
+
+        qmat += 3;
+
+        result = viddec_pm_append_workitem(parent, &wi, false);
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_qmat
+
+// Emit the intra and/or non-intra inverse-quantization matrices that the
+// bitstream declared as loaded.
+// NOTE(review): when both matrices are present, the first insert's status is
+// overwritten by the second — an intra-matrix failure is lost; confirm intended.
+uint32_t viddec_fw_mp4_insert_inversequant_workitem(void *parent, mp4_VOLQuant_mat_t *qmat)
+{
+    uint32_t result = MP4_STATUS_OK;
+
+    if (qmat->load_intra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, true, (uint32_t *) &(qmat->intra_quant_mat));
+    }
+
+    if (qmat->load_nonintra_quant_mat)
+    {
+        result = viddec_fw_mp4_insert_qmat(parent, false, (uint32_t *) &(qmat->nonintra_quant_mat));
+    }
+
+    return result;
+} // viddec_fw_mp4_insert_inversequant_workitem
+
+// Append a placeholder VIDDEC_WORKLOAD_MP4_PAST_FRAME reference item;
+// id and physical addresses are zero (filled in by a later stage).
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_past_frame_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_PAST_FRAME;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_past_frame_workitem
+
+// Append a placeholder VIDDEC_WORKLOAD_MP4_FUTURE_FRAME reference item;
+// id and physical addresses are zero (filled in by a later stage).
+// Returns the status of viddec_pm_append_workitem().
+uint32_t viddec_fw_mp4_insert_future_frame_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    wi.vwi_type = (workload_item_type)VIDDEC_WORKLOAD_MP4_FUTURE_FRAME;
+    wi.ref_frame.reference_id = 0;
+    wi.ref_frame.luma_phys_addr = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_future_frame_workitem
+
+// Append a reference-frame reorder item that shifts the reference table:
+// slot 0 takes the frame from slot 1 (first byte of 0x01010203), slots 1..7
+// keep their identity mapping.  Returns the append status.
+uint32_t viddec_fw_mp4_insert_reorder_workitem(void *parent)
+{
+    uint32_t result = MP4_STATUS_OK;
+    viddec_workload_item_t wi;
+
+    // Move frame at location 1 of the reference table to location 0
+    wi.vwi_type = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset = 0;
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203;
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607;
+
+    result = viddec_pm_append_workitem(parent, &wi, false);
+
+    return result;
+} // viddec_fw_mp4_insert_reorder_workitem
+
+// Top-level workload emitter for one MPEG-4 frame: populates frame
+// attributes, then appends VOL/VOP/sprite/quant (and, for short video
+// header, SVH) items, reference reorder and reference-frame items as the
+// coding type requires, and finally the pixel data.
+// Intermediate statuses are overwritten; the caller receives the status of
+// viddec_pm_append_pixeldata() (pre-existing behavior, unchanged here).
+uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt)
+{
+    uint32_t result = 0;
+    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+
+    result = viddec_fw_mp4_populate_attr(wl, parser);
+    result = viddec_fw_mp4_insert_vol_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_vop_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_sprite_workitem(parent, parser);
+    result = viddec_fw_mp4_insert_inversequant_workitem(parent, &(parser->info.VisualObject.VideoObject.quant_mat_info));
+
+    if (parser->info.VisualObject.VideoObject.short_video_header)
+        result = viddec_fw_mp4_insert_vpsh_workitem(parent, parser);
+
+    // An uncoded VOP is a skipped frame.
+    if (!parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded)
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+
+    // Send reference re-order tag for all reference frame types
+    if (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type != MP4_VOP_TYPE_B)
+    {
+        result = viddec_fw_mp4_insert_reorder_workitem(parent);
+    }
+
+    // Handle vop_coding_type based information
+    switch (parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type)
+    {
+    case MP4_VOP_TYPE_B:
+        result = viddec_fw_mp4_insert_bvop_workitem(parent, parser);
+        result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+        result = viddec_fw_mp4_insert_future_frame_workitem(parent);
+        break;
+    case MP4_VOP_TYPE_P:
+    case MP4_VOP_TYPE_S:
+        result = viddec_fw_mp4_insert_past_frame_workitem(parent);
+        // Deliberate fall-thru to type I
+    case MP4_VOP_TYPE_I:
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (1 & WORKLOAD_REFERENCE_FRAME_BMASK);
+        // Swap reference information
+        parser->ref_frame[VIDDEC_MP4_INDX_2] = parser->ref_frame[VIDDEC_MP4_INDX_1];
+        parser->ref_frame[VIDDEC_MP4_INDX_1] = parser->ref_frame[VIDDEC_MP4_INDX_0];
+        break;  // (a duplicated, unreachable second "break;" was removed here)
+    default:
+        break;
+    } // switch on vop_coding_type
+
+    result = viddec_pm_append_pixeldata(parent);
+
+    return result;
+} // viddec_fw_mp4_emit_workload
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c
new file mode 100755
index 0000000..021678e
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.c
@@ -0,0 +1,98 @@
+#include "viddec_mp4_decodevideoobjectplane.h"
+
+// Derive the VOP presentation time from the parsed headers and maintain the
+// VOL synchronization times, then compute the B-VOP timing values
+// (Tframe, TRB, TRD) used for direct-mode motion compensation.
+// Always returns MP4_STATUS_OK.
+mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t* pInfo)
+{
+    mp4_Status_t status = MP4_STATUS_OK;
+    uint32_t vop_time=0;
+//    mp4_VisualObject_t *vo = &(pInfo->VisualObject);
+    mp4_VideoObjectLayer_t *vol = &(pInfo->VisualObject.VideoObject);
+    mp4_GroupOfVideoObjectPlane_t *gvop = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);
+    mp4_VideoObjectPlane_t *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+
+    // set VOP time
+    if (vol->short_video_header)
+    {
+        // Short video header: time is derived from the 8-bit temporal
+        // reference in units of 1001 (29.97 fps clock ticks).
+        vop_time = vol->vop_sync_time +
+                   pInfo->VisualObject.VideoObject.VideoObjectPlaneH263.temporal_reference * 1001;
+
+//        if (vo->currentFrame.time > vop_time)
+        // NOTE(review): the guard above is commented out, so this wrap-around
+        // adjustment (temporal_reference rolls over at 256) now runs on EVERY
+        // short-header VOP — confirm this is the intended legacy behavior.
+        {
+            vol->vop_sync_time += 256 * 1001;
+            vop_time += 256 * 1001;
+        }
+    }
+    else
+    {
+        if (vop->vop_coding_type == MP4_VOP_TYPE_B)
+        {
+            // B-VOPs use their own sync base so they do not disturb the
+            // anchor-frame timeline.
+            vop_time = vol->vop_sync_time_b + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
+        }
+        else
+        {
+            // A GOV header may advance the time base.
+            if (gvop->time_base > vol->vop_sync_time)
+                vol->vop_sync_time = gvop->time_base;
+
+            vop_time = vol->vop_sync_time + vop->modulo_time_base * vol->vop_time_increment_resolution + vop->vop_time_increment;
+
+            // Keep the B-VOP sync base from falling behind the anchor base.
+            if (vol->vop_sync_time_b < vol->vop_sync_time)
+                vol->vop_sync_time_b = vol->vop_sync_time;
+
+            if (vop->modulo_time_base != 0)
+                vol->vop_sync_time = vop_time - vop->vop_time_increment;
+        }
+    }
+
+    if (vop->vop_coded)
+    {
+        switch (vop->vop_coding_type)
+        {
+        case MP4_VOP_TYPE_S:
+            // Only GMC sprite S-VOPs act as anchors.
+            if (vol->sprite_enable != MP4_SPRITE_GMC)
+                break;
+            // Deliberate fall-through from this case
+        case MP4_VOP_TYPE_I:
+        case MP4_VOP_TYPE_P:
+            // set past and future time for B-VOP
+            vol->pastFrameTime = vol->futureFrameTime;
+            vol->futureFrameTime = vop_time;
+            break;
+        default:
+            break;
+        }
+    }
+
+    // NOTE(review): this repeats the vop_coded test of the block above (the
+    // wider condition was retired, see the commented remnant) — the two
+    // blocks could be merged; kept as-is to preserve the legacy structure.
+    if (vop->vop_coded)
+//     || (vop_time != vo->currentFrame.time && vop_time != vo->pastFrame.time && vop_time != vo->futureFrame.time) )
+    {
+        if (vop->vop_coding_type == MP4_VOP_TYPE_B)
+        {
+            if (!vol->Tframe)
+                vol->Tframe = (int) (vop_time); // - vo->pastFrame.time);
+
+            if (vop->vop_coded)
+            {
+                // TRB: time from past anchor to this B-VOP.
+                // TRD: time between the two anchors.
+                vol->TRB = (int) (vop_time - vol->pastFrameTime);
+                vol->TRD = (int) (vol->futureFrameTime - vol->pastFrameTime);
+
+                // defense from bad streams when B-VOPs are before Past and/or Future
+                if (vol->TRB <= 0)
+                    vol->TRB = 1;
+
+                if (vol->TRD <= 0)
+                    vol->TRD = 2;
+
+                if (vol->TRD <= vol->TRB)
+                {
+                    vol->TRB = 1;
+                    vol->TRD = 2;
+                }
+
+                if (vol->Tframe >= vol->TRD)
+                    vol->Tframe = vol->TRB;
+            }
+        }
+    }
+
+    return status;
+} // mp4_DecodeVideoObjectPlane
+
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h
new file mode 100755
index 0000000..e03bcb0
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_decodevideoobjectplane.h
@@ -0,0 +1,10 @@
+#ifndef VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
+#define VIDDEC_MP4_DECODEVIDEOOBJECTPLANE_H
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+// Compute VOP timing (sync times, B-VOP Tframe/TRB/TRD) from the parsed
+// headers in pInfo.  Always returns MP4_STATUS_OK.
+mp4_Status_t mp4_DecodeVideoObjectPlane(mp4_Info_t *pInfo);
+
+//void mp4_copy_info_to_dmem(mp4_Info_t *pInfo, mp4_MBHWInterface *ptr_parameters);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c
new file mode 100755
index 0000000..85eab1a
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.c
@@ -0,0 +1,191 @@
+#include <string.h>
+#include <vbp_common.h>
+
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+#include "viddec_mp4_decodevideoobjectplane.h"
+#include "viddec_mp4_shortheader.h"
+#include "viddec_mp4_videoobjectlayer.h"
+#include "viddec_mp4_videoobjectplane.h"
+#include "viddec_mp4_visualobject.h"
+
+// Report the memory the parser framework must allocate for this codec:
+// one viddec_mp4_parser_t of context, no persistent memory.
+void viddec_mp4_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(viddec_mp4_parser_t);
+    size->persist_size = 0;
+    return;
+} // viddec_mp4_get_context_size
+
+// (Re)initialize the parser context.  With preserve set, sequence-level
+// state parsed up to the VOL is kept and only per-frame state is cleared
+// (e.g. across a seek/flush); otherwise the whole context is reset.
+void viddec_mp4_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+    viddec_mp4_parser_t *parser = (viddec_mp4_parser_t *) ctxt;
+
+    // Self-assignment silences the unused-parameter warning; no persistent
+    // memory is used (see viddec_mp4_get_context_size).
+    persist_mem = persist_mem;
+    parser->is_frame_start = false;
+    parser->prev_sc = MP4_SC_INVALID;
+    parser->current_sc = MP4_SC_INVALID;
+    parser->cur_sc_prefix = false;
+    parser->next_sc_prefix = false;
+    parser->ignore_scs = false;
+
+    if (preserve)
+    {
+        // Need to maintain information till VOL
+        parser->sc_seen &= MP4_SC_SEEN_VOL;
+        parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+
+        // Reset only frame related data
+        memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlane), 0, sizeof(mp4_VideoObjectPlane_t));
+        memset(&(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263), 0, sizeof(mp4_VideoObjectPlaneH263));
+    }
+    else
+    {
+        parser->sc_seen = MP4_SC_SEEN_INVALID;
+        parser->bitstream_error = MP4_BS_ERROR_NONE;
+        memset(&(parser->info), 0, sizeof(mp4_Info_t));
+    }
+
+    return;
+} // viddec_mp4_init
+
+// Run VOP timing decode on the parsed headers.
+// NOTE(review): the name is legacy — after the restructure no workload is
+// emitted here and the parent argument is unused.
+static uint32_t viddec_mp4_decodevop_and_emitwkld(void *parent, void *ctxt)
+{
+    int status = MP4_STATUS_OK;
+    viddec_mp4_parser_t *cxt = (viddec_mp4_parser_t *)ctxt;
+
+    status = mp4_DecodeVideoObjectPlane(&(cxt->info));
+
+    return status;
+} // viddec_mp4_decodevop_and_emitwkld
+
+// Main parse entry point: reads the next start code from the bitstream and
+// dispatches to the matching header parser (VOS, VO, VOL, GOV, VOP, user
+// data, or H.263 short video header).  cur_sc_prefix selects between normal
+// MPEG-4 start codes (32-bit, 00 00 01 xx) and short-video-header codes
+// (22-bit).  Updates sc_seen/is_frame_start bookkeeping in the context.
+// Returns VIDDEC_PARSE_SUCESS, or VIDDEC_PARSE_ERROR when no start code
+// can be read.
+uint32_t viddec_mp4_parse(void *parent, void *ctxt)
+{
+    uint32_t sc=0;
+    viddec_mp4_parser_t *cxt;
+    uint8_t is_svh=0;
+    int32_t getbits=0;
+    int32_t status = 0;
+
+    cxt = (viddec_mp4_parser_t *)ctxt;
+    is_svh = (cxt->cur_sc_prefix) ? false: true;
+    if ((getbits = viddec_pm_peek_bits(parent, &sc, 32)) == -1)
+    {
+        DEB("Start code not found\n");
+        return VIDDEC_PARSE_ERROR;
+    }
+
+    if (!is_svh)
+    {
+        // Normal MPEG-4 path: consume the 32-bit start code and keep only
+        // the code byte; 0x100 marks it as a long start code.
+        viddec_pm_get_bits(parent, &sc, 32);
+        sc = sc & 0xFF;
+        cxt->current_sc = sc;
+        cxt->current_sc |= 0x100;
+        DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+
+        switch (sc)
+        {
+        case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+        {
+            status = mp4_Parse_VisualSequence(parent, cxt);
+            cxt->prev_sc = MP4_SC_VISUAL_OBJECT_SEQUENCE;
+            DEB("MP4_VISUAL_OBJECT_SEQUENCE_SC: \n");
+            break;
+        }
+        case MP4_SC_VISUAL_OBJECT_SEQUENCE_EC:
+        {/* Not required to do anything */
+            break;
+        }
+        case MP4_SC_USER_DATA:
+        {   /* Copy userdata to user-visible buffer (EMIT) */
+            status = mp4_Parse_UserData(parent, cxt);
+            DEB("MP4_USER_DATA_SC: \n");
+            break;
+        }
+        case MP4_SC_GROUP_OF_VOP:
+        {
+            status = mp4_Parse_GroupOfVideoObjectPlane(parent, cxt);
+            cxt->prev_sc = MP4_SC_GROUP_OF_VOP;
+            DEB("MP4_GROUP_OF_VOP_SC:0x%.8X\n", status);
+            break;
+        }
+        case MP4_SC_VIDEO_SESSION_ERROR:
+        {/* Not required to do anything?? */
+            break;
+        }
+        case MP4_SC_VISUAL_OBJECT:
+        {
+            status = mp4_Parse_VisualObject(parent, cxt);
+            cxt->prev_sc = MP4_SC_VISUAL_OBJECT;
+            DEB("MP4_VISUAL_OBJECT_SC: status=%.8X\n", status);
+            break;
+        }
+        case MP4_SC_VIDEO_OBJECT_PLANE:
+        {
+            /* We must decode the VOP Header information, it does not end  on a byte boundary, so we need to emit
+               a starting bit offset after parsing the header. */
+            status = mp4_Parse_VideoObjectPlane(parent, cxt);
+            status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+            // TODO: Fix this for interlaced
+            cxt->is_frame_start = true;
+            cxt->sc_seen |= MP4_SC_SEEN_VOP;
+
+            DEB("MP4_VIDEO_OBJECT_PLANE_SC: status=0x%.8X\n", status);
+            break;
+        }
+        case MP4_SC_STUFFING:
+        {
+            break;
+        }
+        default:
+        {
+            if ( (sc >=  MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (sc <=  MP4_SC_VIDEO_OBJECT_LAYER_MAX) )
+            {
+                status = mp4_Parse_VideoObjectLayer(parent, cxt);
+                cxt->sc_seen = MP4_SC_SEEN_VOL;
+                cxt->prev_sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+                DEB("MP4_VIDEO_OBJECT_LAYER_MIN_SC:status=0x%.8X\n", status);
+                // Normalize so prev_sc compares equal for the whole VOL range.
+                sc = MP4_SC_VIDEO_OBJECT_LAYER_MIN;
+            }
+            // sc is unsigned and will be >= 0, so no check needed for sc >= MP4_SC_VIDEO_OBJECT_MIN
+            else if (sc <= MP4_SC_VIDEO_OBJECT_MAX)
+            {
+                // If there is more data, it is short video header, else the next start code is expected to be VideoObjectLayer
+                getbits = viddec_pm_get_bits(parent, &sc, 22);
+                if (getbits != -1)
+                {
+                    cxt->current_sc = sc;
+                    status = mp4_Parse_VideoObject_svh(parent, cxt);
+                    status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+                    cxt->sc_seen = MP4_SC_SEEN_SVH;
+                    cxt->is_frame_start = true;
+                    DEB("MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+                    DEB("MP4_VIDEO_OBJECT_MIN_SC:status=0x%.8X\n", status);
+                }
+            }
+            else
+            {
+                DEB("UNKWON Cod:0x%08X\n", sc);
+            }
+        }
+        break;
+        }
+    }
+    else
+    {
+        // Short-video-header path: start codes are 22 bits wide.
+        viddec_pm_get_bits(parent, &sc, 22);
+        cxt->current_sc = sc;
+        DEB("current_sc=0x%.8X, prev_sc=0x%x\n", sc, cxt->prev_sc);
+        status = mp4_Parse_VideoObject_svh(parent, cxt);
+        status = viddec_mp4_decodevop_and_emitwkld(parent, cxt);
+        cxt->sc_seen = MP4_SC_SEEN_SVH;
+        cxt->is_frame_start = true;
+        DEB("SVH: MP4_SCS_SVH: status=0x%.8X 0x%.8X %.8X\n", status, cxt->current_sc, sc);
+    }
+
+    // Current sc becomes the previous sc
+    // NOTE(review): this overwrites the case-specific prev_sc assignments
+    // made inside the switch; in the SVH path sc is the 22-bit code —
+    // assumes downstream only compares prev_sc against full values; confirm.
+    cxt->prev_sc = sc;
+
+    return VIDDEC_PARSE_SUCESS;
+} // viddec_mp4_parse
+
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h
new file mode 100755
index 0000000..305e09b
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_parse.h
@@ -0,0 +1,529 @@
+#ifndef VIDDEC_MP4_PARSE_H
+#define VIDDEC_MP4_PARSE_H
+
+#include "viddec_fw_mp4.h"
+
+/* Macros for MP4 start code detection */
+#define FIRST_STARTCODE_BYTE        0x00
+#define SECOND_STARTCODE_BYTE       0x00
+#define THIRD_STARTCODE_BYTE        0x01
+#define SHORT_THIRD_STARTCODE_BYTE  0x80
+#define SC_BYTE_MASK0               0x00ff0000
+#define SC_BYTE_MASK1               0x000000ff
+
/* Parser status codes.  All non-OK values are distinct single bits so that
   several failure conditions can be OR-ed into one status word. */
typedef enum
{
    MP4_STATUS_OK               =  0,   /* Success */
    MP4_STATUS_PARSE_ERROR      = (1 << 0),   /* Invalid syntax */
    MP4_STATUS_NOTSUPPORT       = (1 << 1),   /* unsupported feature */
    MP4_STATUS_REQD_DATA_ERROR  = (1 << 2),   /* supported data either invalid or missing */
} mp4_Status_t;

/* Feature codes describing how the current VOP is coded. */
typedef enum
{
    MP4_VOP_FEATURE_DEFAULT =  0,   // Default VOP features, no code image update needed
    MP4_VOP_FEATURE_SVH     =  1,   // VOP has Short Video Header
    MP4_VOP_FEATURE_DP      =  2    // VOP is Data Partitioned
} mp4_Vop_feature;

/* MPEG-4 start code values: Table 6-3.
   Each value is the byte that follows the 0x000001 start-code prefix. */
typedef enum
{
    MP4_SC_VIDEO_OBJECT_MIN          = 0x00,
    MP4_SC_VIDEO_OBJECT_MAX          = 0x1F,
    MP4_SC_VIDEO_OBJECT_LAYER_MIN    = 0x20,
    MP4_SC_VIDEO_OBJECT_LAYER_MAX    = 0x2F,
    MP4_SC_FGS_BP_MIN                = 0x40, /* Unsupported */
    MP4_SC_FGS_BP_MAX                = 0x5F, /* Unsupported */
    MP4_SC_VISUAL_OBJECT_SEQUENCE    = 0xB0,
    MP4_SC_VISUAL_OBJECT_SEQUENCE_EC = 0xB1,
    MP4_SC_USER_DATA                 = 0xB2,
    MP4_SC_GROUP_OF_VOP              = 0xB3,
    MP4_SC_VIDEO_SESSION_ERROR       = 0xB4,
    MP4_SC_VISUAL_OBJECT             = 0xB5,
    MP4_SC_VIDEO_OBJECT_PLANE        = 0xB6,
    MP4_SC_SLICE                     = 0xB7, /* Unsupported */
    MP4_SC_EXTENSION                 = 0xB8, /* Unsupported */
    MP4_SC_FGS_VOP                   = 0xB9, /* Unsupported */
    MP4_SC_FBA_OBJECT                = 0xBA, /* Unsupported */
    MP4_SC_FBA_OBJECT_PLANE          = 0xBB, /* Unsupported */
    MP4_SC_MESH_OBJECT               = 0xBC, /* Unsupported */
    MP4_SC_MESH_OBJECT_PLANE         = 0xBD, /* Unsupported */
    MP4_SC_STILL_TEXTURE_OBJECT      = 0xBE, /* Unsupported */
    MP4_SC_TEXTURE_SPATIAL_LAYER     = 0xBF, /* Unsupported */
    MP4_SC_TEXTURE_SNR_LAYER         = 0xC0, /* Unsupported */
    MP4_SC_TEXTURE_TILE              = 0xC1, /* Unsupported */
    MP4_SC_TEXTURE_SHAPE_LAYER       = 0xC2, /* Unsupported */
    MP4_SC_STUFFING                  = 0xC3,
    MP4_SC_SYTEM_MIN                 = 0xC6, /* Unsupported */
    MP4_SC_SYTEM_MAX                 = 0xFF, /* Unsupported */
    MP4_SC_INVALID                   = 0x100, /* Invalid */
} mp4_start_code_values_t;
+
/* MPEG-4 code values
   ISO/IEC 14496-2:2004 table 6-6: visual_object_type */
enum
{
    MP4_VISUAL_OBJECT_TYPE_VIDEO     = 1,
    MP4_VISUAL_OBJECT_TYPE_TEXTURE   = 2,
    MP4_VISUAL_OBJECT_TYPE_MESH      = 3,
    MP4_VISUAL_OBJECT_TYPE_FBA       = 4,
    MP4_VISUAL_OBJECT_TYPE_3DMESH    = 5
};

/* ISO/IEC 14496-2:2004 table 6-7: video_format */
enum
{
    MP4_VIDEO_FORMAT_COMPONENT      = 0,
    MP4_VIDEO_FORMAT_PAL            = 1,
    MP4_VIDEO_FORMAT_NTSC           = 2,
    MP4_VIDEO_FORMAT_SECAM          = 3,
    MP4_VIDEO_FORMAT_MAC            = 4,
    MP4_VIDEO_FORMAT_UNSPECIFIED    = 5
};

/* ISO/IEC 14496-2:2004 table 6-8..10: colour_primaries / transfer / matrix */
enum
{
    MP4_VIDEO_COLORS_FORBIDDEN         = 0,
    MP4_VIDEO_COLORS_ITU_R_BT_709      = 1,
    MP4_VIDEO_COLORS_UNSPECIFIED       = 2,
    MP4_VIDEO_COLORS_RESERVED          = 3,
    MP4_VIDEO_COLORS_ITU_R_BT_470_2_M  = 4,
    MP4_VIDEO_COLORS_ITU_R_BT_470_2_BG = 5,
    MP4_VIDEO_COLORS_SMPTE_170M        = 6,
    MP4_VIDEO_COLORS_SMPTE_240M        = 7,
    MP4_VIDEO_COLORS_GENERIC_FILM      = 8
};

/* ISO/IEC 14496-2:2004 table 6-11: video_object_type_indication */
enum
{
    MP4_VIDEO_OBJECT_TYPE_SIMPLE                     = 1,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_SCALABLE            = 2,
    MP4_VIDEO_OBJECT_TYPE_CORE                       = 3,
    MP4_VIDEO_OBJECT_TYPE_MAIN                       = 4,
    MP4_VIDEO_OBJECT_TYPE_NBIT                       = 5,
    MP4_VIDEO_OBJECT_TYPE_2DTEXTURE                  = 6,
    MP4_VIDEO_OBJECT_TYPE_2DMESH                     = 7,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_FACE                = 8,
    MP4_VIDEO_OBJECT_TYPE_STILL_SCALABLE_TEXTURE     = 9,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_REAL_TIME_SIMPLE  = 10,
    MP4_VIDEO_OBJECT_TYPE_CORE_SCALABLE              = 11,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_CODING_EFFICIENCY = 12,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_SCALABLE_TEXTURE  = 13,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_FBA                 = 14,
    MP4_VIDEO_OBJECT_TYPE_SIMPLE_STUDIO              = 15,
    MP4_VIDEO_OBJECT_TYPE_CORE_STUDIO                = 16,
    MP4_VIDEO_OBJECT_TYPE_ADVANCED_SIMPLE            = 17,
    MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE  = 18
};

/*  ISO/IEC 14496-2:2004 table 6.17 (maximum defined video_object_layer_shape_extension) */
#define MP4_SHAPE_EXT_NUM 13

/* ISO/IEC 14496-2:2004 table 6-14: aspect_ratio_info */
enum
{
    MP4_ASPECT_RATIO_FORBIDDEN  = 0,
    MP4_ASPECT_RATIO_1_1        = 1,
    MP4_ASPECT_RATIO_12_11      = 2,
    MP4_ASPECT_RATIO_10_11      = 3,
    MP4_ASPECT_RATIO_16_11      = 4,
    MP4_ASPECT_RATIO_40_33      = 5,
    MP4_ASPECT_RATIO_EXTPAR     = 15
};

/* ISO/IEC 14496-2:2004 table 6-15: chroma_format (only 4:2:0 is legal here) */
#define MP4_CHROMA_FORMAT_420    1

/* ISO/IEC 14496-2:2004 table 6-16: video_object_layer_shape */
enum
{
    MP4_SHAPE_TYPE_RECTANGULAR  = 0,
    MP4_SHAPE_TYPE_BINARY       = 1,
    MP4_SHAPE_TYPE_BINARYONLY   = 2,
    MP4_SHAPE_TYPE_GRAYSCALE    = 3
};

/* ISO/IEC 14496-2:2004 table 6-19: sprite_enable */
#define MP4_SPRITE_STATIC   1
#define MP4_SPRITE_GMC      2

/* ISO/IEC 14496-2:2004 table 6-24: vop_coding_type */
enum
{
    MP4_VOP_TYPE_I  = 0,
    MP4_VOP_TYPE_P  = 1,
    MP4_VOP_TYPE_B  = 2,
    MP4_VOP_TYPE_S  = 3,
};

/* ISO/IEC 14496-2:2004 table 6-26: sprite_transmit_mode */
enum
{
    MP4_SPRITE_TRANSMIT_MODE_STOP   = 0,
    MP4_SPRITE_TRANSMIT_MODE_PIECE  = 1,
    MP4_SPRITE_TRANSMIT_MODE_UPDATE = 2,
    MP4_SPRITE_TRANSMIT_MODE_PAUSE  = 3
};
+
/* ISO/IEC 14496-2:2004 table 7-3: binary alpha block (BAB) coding types */
enum
{
    MP4_BAB_TYPE_MVDSZ_NOUPDATE  = 0,
    MP4_BAB_TYPE_MVDSNZ_NOUPDATE = 1,
    MP4_BAB_TYPE_TRANSPARENT     = 2,
    MP4_BAB_TYPE_OPAQUE          = 3,
    MP4_BAB_TYPE_INTRACAE        = 4,
    MP4_BAB_TYPE_MVDSZ_INTERCAE  = 5,
    MP4_BAB_TYPE_MVDSNZ_INTERCAE = 6
};

/* Resync markers separating DC/MV data in data-partitioned VOPs. */
#define MP4_DC_MARKER  0x6B001 // 110 1011 0000 0000 0001
#define MP4_MV_MARKER  0x1F001 //   1 1111 0000 0000 0001


/* ISO/IEC 14496-2:2004 table G.1: profile_and_level_indication codes */
enum
{
    MP4_SIMPLE_PROFILE_LEVEL_1                     = 0x01,
    MP4_SIMPLE_PROFILE_LEVEL_2                     = 0x02,
    MP4_SIMPLE_PROFILE_LEVEL_3                     = 0x03,
    MP4_SIMPLE_PROFILE_LEVEL_4a                    = 0x04,
    MP4_SIMPLE_PROFILE_LEVEL_5                     = 0x05,
    MP4_SIMPLE_PROFILE_LEVEL_6                     = 0x06,
    MP4_SIMPLE_PROFILE_LEVEL_0                     = 0x08,
    MP4_CORE_PROFILE_LEVEL_1                       = 0x21,
    MP4_CORE_PROFILE_LEVEL_2                       = 0x22,
    MP4_MAIN_PROFILE_LEVEL_2                       = 0x32,
    MP4_MAIN_PROFILE_LEVEL_3                       = 0x33,
    MP4_MAIN_PROFILE_LEVEL_4                       = 0x34,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_1  = 0x91,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_2  = 0x92,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_3  = 0x93,
    MP4_ADVANCED_REAL_TIME_SIMPLE_PROFILE_LEVEL_4  = 0x94,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_1 = 0xB1,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_2 = 0xB2,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_3 = 0xB3,
    MP4_ADVANCED_CODING_EFFICIENCY_PROFILE_LEVEL_4 = 0xB4,
    MP4_ADVANCED_CORE_PROFILE_LEVEL_1              = 0xC1,
    MP4_ADVANCED_CORE_PROFILE_LEVEL_2              = 0xC2,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0            = 0xF0,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1            = 0xF1,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2            = 0xF2,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3            = 0xF3,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4            = 0xF4,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5            = 0xF5,
    MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B           = 0xF7
};
+
/* Group Of Video Object Plane (GOV) header fields. */
typedef struct
{
    uint8_t closed_gov;
    uint8_t broken_link;
    uint8_t time_code_hours;
    uint8_t time_code_minutes;
    uint8_t time_code_seconds;
    uint8_t dummy1;     /* explicit padding; not read from the bitstream */
    uint16_t dummy2;    /* explicit padding; not read from the bitstream */
    uint32_t time_base;
} mp4_GroupOfVideoObjectPlane_t;


/* Video Object Plane (VOP) header fields. */
typedef struct
{
    uint8_t     vop_coding_type;    /* one of MP4_VOP_TYPE_* */
    uint32_t    modulo_time_base;
    uint16_t    vop_time_increment;
    uint8_t     vop_coded;

    uint16_t    vop_id;
    uint16_t    vop_id_for_prediction;
    uint8_t     is_vop_id_for_prediction_indication;
    uint8_t     vop_rounding_type;
    uint8_t     vop_reduced_resolution;
    uint8_t     align_dummy;        /* explicit padding */

    uint16_t    vop_width;
    uint16_t    vop_height;
    uint16_t    vop_horizontal_mc_spatial_ref;
    uint16_t    vop_vertical_mc_spatial_ref;

    uint8_t     background_composition;
    uint8_t     change_conv_ratio_disable;
    uint8_t     is_vop_constant_alpha;
    uint8_t     vop_constant_alpha_value;
    uint8_t     intra_dc_vlc_thr;
    uint8_t     top_field_first;
    uint8_t     alternate_vertical_scan_flag;
    uint8_t     sprite_transmit_mode;   /* one of MP4_SPRITE_TRANSMIT_MODE_* */

    int32_t     brightness_change_factor;
    uint16_t    vop_quant;
    uint8_t     vop_fcode_forward;
    uint8_t     vop_fcode_backward;

    uint16_t    warping_mv_code_du[4];
    uint16_t    warping_mv_code_dv[4];

} mp4_VideoObjectPlane_t;

/* VOL control parameters (vbv/bit-rate side information). */
typedef struct
{
    uint8_t     chroma_format;
    uint8_t     low_delay;
    uint8_t     vbv_parameters;
    uint8_t     align_dummy1;       /* explicit padding */
    uint32_t    bit_rate;
    uint32_t    vbv_buffer_size;
    uint32_t    vbv_occupancy;
} mp4_VOLControlParameters_t;
+
/* Video Object Plane with short header (H.263 baseline picture header)
 * fields.  The #if 0 block of GOB-layer scratch fields that used to live in
 * the middle of this struct was dead code and has been removed; the
 * remaining layout is unchanged. */
typedef struct _mp4_VideoObjectPlaneH263
{
    uint8_t         temporal_reference;
    uint8_t         split_screen_indicator;
    uint8_t         document_camera_indicator;
    uint8_t         full_picture_freeze_release;
    uint8_t         source_format;          /* 1..5 standard sizes, 6 custom (CPFMT) */
    uint8_t         picture_coding_type;
    uint8_t         vop_quant;
    uint16_t        num_gobs_in_vop;
    uint16_t        num_macroblocks_in_gob;
    uint8_t         num_rows_in_gob;
    uint8_t         vop_rounding_type;
    //the following are required for PLUSPTYPE
    uint8_t         ufep;
    uint16_t        pixel_aspect_ratio_code;
    uint16_t        picture_width_indication;
    uint16_t        picture_height_indication;
} mp4_VideoObjectPlaneH263;
+
/* Sprite-related fields read from the VOL header. */
typedef struct
{
    uint16_t                  sprite_width;
    uint16_t                  sprite_height;
    uint16_t                  sprite_left_coordinate;
    uint16_t                  sprite_top_coordinate;
    uint16_t                  no_of_sprite_warping_points;
    uint16_t                  sprite_warping_accuracy;
    uint16_t                  sprite_brightness_change;
    uint16_t                  low_latency_sprite_enable;
} mp4_VOLSpriteInfo_t;

/* Quantization-matrix load flags and the 8x8 matrices (raster order). */
typedef struct
{
    uint8_t                  load_intra_quant_mat;
    uint8_t                  load_nonintra_quant_mat;
    uint16_t                 align_dummy1;   /* explicit padding */
    uint8_t                  intra_quant_mat[64];
    uint8_t                  nonintra_quant_mat[64];
} mp4_VOLQuant_mat_t;
+
/* Video Object Layer (VOL) state, including the nested GOV/VOP/short-header
   sub-structures.  For short-video-header streams most of these fields are
   populated with fixed defaults by mp4_Parse_VideoObject_svh(). */
typedef struct
{
    uint8_t                     video_object_layer_id; /* Last 4 bits of start code. */
    uint8_t                     short_video_header;
    uint8_t                     random_accessible_vol;
    uint8_t                     video_object_type_indication;

    uint8_t                     is_object_layer_identifier;
    uint8_t                     video_object_layer_verid;
    uint8_t                     video_object_layer_priority;
    uint8_t                     aspect_ratio_info;

    uint8_t                     aspect_ratio_info_par_width;
    uint8_t                     aspect_ratio_info_par_height;
    uint8_t                     align_dummy1;   /* explicit padding */
    uint8_t                     is_vol_control_parameters;

    mp4_VOLControlParameters_t  VOLControlParameters;

    uint8_t                     video_object_layer_shape;
    uint16_t                    vop_time_increment_resolution;
    uint8_t                     vop_time_increment_resolution_bits;

    uint8_t                     fixed_vop_rate;
    uint16_t                    fixed_vop_time_increment;
    uint16_t                    video_object_layer_width;
    uint16_t                    video_object_layer_height;
    uint8_t                     interlaced;

    uint8_t                     obmc_disable;
    uint8_t                     sprite_enable;
    mp4_VOLSpriteInfo_t         sprite_info;
    uint8_t                     not_8_bit;
    uint8_t                     quant_precision;

    uint8_t                     bits_per_pixel;
    uint8_t                     quant_type;
    mp4_VOLQuant_mat_t          quant_mat_info;
    uint8_t                     quarter_sample;
    uint8_t                     complexity_estimation_disable;

    uint8_t                     resync_marker_disable;
    uint8_t                     data_partitioned;
    uint8_t                     reversible_vlc;
    uint8_t                     newpred_enable;

    uint8_t                     reduced_resolution_vop_enable;  // verid != 1
    uint8_t                     scalability;
    uint8_t                     low_latency_sprite_enable;

    mp4_GroupOfVideoObjectPlane_t  GroupOfVideoObjectPlane;
    mp4_VideoObjectPlane_t      VideoObjectPlane;
    mp4_VideoObjectPlaneH263    VideoObjectPlaneH263;

    // for interlaced B-VOP direct mode
    uint32_t                         Tframe;
    // for B-VOP direct mode
    uint32_t                         TRB, TRD;
    // time increment of past and future VOP for B-VOP
    uint32_t                      pastFrameTime, futureFrameTime;
    // VOP global time
    uint32_t                      vop_sync_time, vop_sync_time_b;

} mp4_VideoObjectLayer_t;
+
/* video_signal_type fields from the VisualObject header. */
typedef struct
{
    uint8_t is_video_signal_type;
    uint8_t video_format;               /* MP4_VIDEO_FORMAT_* */
    uint8_t video_range;
    uint8_t is_colour_description;
    uint8_t colour_primaries;           /* MP4_VIDEO_COLORS_* */
    uint8_t transfer_characteristics;
    uint8_t matrix_coefficients;
} mp4_VideoSignalType_t;

/* Per-frame timing record used for B-VOP direct-mode bookkeeping. */
typedef struct _mp4_Frame {
    long long int    time;
} mp4_Frame;

/* VisualObject header state plus the contained VOL and frame timing. */
typedef struct
{
    uint8_t                 is_visual_object_identifier;
    uint8_t                 visual_object_verid;
    uint8_t                 visual_object_priority;
    uint8_t                 visual_object_type;   /* MP4_VISUAL_OBJECT_TYPE_* */
    mp4_VideoSignalType_t   VideoSignalType;
    mp4_VideoObjectLayer_t  VideoObject;

    mp4_Frame               currentFrame;      // current
    mp4_Frame               pastFrame;      // reference in past
    mp4_Frame               futureFrame;      // reference in future
} mp4_VisualObject_t;

/* Top-level parsed-stream info: the visual object plus the stream's
   profile_and_level_indication (table G.1). */
typedef struct
{
    mp4_VisualObject_t    VisualObject;
    uint8_t               profile_and_level_indication;
} mp4_Info_t;
+
/* Bit flags recording which start codes were seen in the current workload
   (stored in viddec_mp4_parser_t.sc_seen). */
enum
{
    MP4_SC_SEEN_INVALID = 0x0,
    MP4_SC_SEEN_VOL = 0x1,
    MP4_SC_SEEN_VOP = 0x2,
    MP4_SC_SEEN_SVH = 0x4,
};

/* Bit flags for viddec_mp4_parser_t.bitstream_error: header (HDR) vs.
   frame (FRM) scope, split into parse / non-decodable / unsupported. */
enum
{
    MP4_BS_ERROR_NONE =          (0 << 0),
    MP4_BS_ERROR_HDR_PARSE =     (1 << 0),
    MP4_BS_ERROR_HDR_NONDEC =    (1 << 1),
    MP4_BS_ERROR_HDR_UNSUP =     (1 << 2),
    MP4_BS_ERROR_FRM_PARSE =     (1 << 3),
    MP4_BS_ERROR_FRM_NONDEC =    (1 << 4),
    MP4_BS_ERROR_FRM_UNSUP =     (1 << 5),
};

/* All header-scope error bits. */
#define MP4_HDR_ERROR_MASK (MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC | MP4_BS_ERROR_HDR_UNSUP)

/* Reference-frame slot indices for the ref_frame[] array below. */
typedef enum
{
    VIDDEC_MP4_INDX_0 = 0,
    VIDDEC_MP4_INDX_1 = 1,
    VIDDEC_MP4_INDX_2 = 2,
    VIDDEC_MP4_INDX_MAX = 3,
} viddec_fw_mp4_ref_index_t;

/* Per-reference-frame bookkeeping. */
typedef struct
{
    uint8_t is_field;
} viddec_mp4_ref_info_t;
+
/* Persistent context for the MPEG-4 Part 2 / H.263 short-header parser.
   One instance lives for the duration of a stream. */
typedef struct
{
    // The relevant bitstream data for current stream
    mp4_Info_t info;

    // The previous start code (without the prefix)
    uint32_t   prev_sc;

    // The current start code (without the prefix)
    // TODO: Revisit for SVH
    uint32_t   current_sc;

    // Indicates if we look for both short and long video header or just the long video header
    // If false, sc detection looks for both short and long video headers.
    // If true, long video header has been seen and sc detection does not look for short video header any more.
    uint8_t    ignore_scs;

    // Indicates if the current start code prefix is long (if true).
    uint8_t    cur_sc_prefix;

    // Indicates if the next start code prefix is long (if true).
    uint8_t    next_sc_prefix;

    // Indicates start of a frame
    uint8_t    is_frame_start;

    // Indicates which start codes were seen for this workload (MP4_SC_SEEN_* flags)
    uint8_t    sc_seen;

    // Indicates bitstream errors if any (MP4_BS_ERROR_* flags)
    uint16_t    bitstream_error;

    // Reference frame information
    viddec_mp4_ref_info_t ref_frame[VIDDEC_MP4_INDX_MAX];

} viddec_mp4_parser_t;
+
/* Abort the enclosing loop when a bitstream read failed
 * (viddec_pm_get_bits() returns -1 on failure), recording a parse error.
 * NOTE: deliberately NOT wrapped in do{...}while(0) — the 'break' must
 * terminate the CALLER's do/while parse loop, not a macro-local one. */
#define BREAK_GETBITS_FAIL(x, ret) {            \
        if(x == -1){                            \
            ret = MP4_STATUS_PARSE_ERROR;       \
            break;}                             \
    }

/* Same as BREAK_GETBITS_FAIL, but records that required data is missing. */
#define BREAK_GETBITS_REQD_MISSING(x, ret) {            \
        if(x == -1){                            \
            ret = MP4_STATUS_REQD_DATA_ERROR;       \
            break;}                             \
    }
+
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+uint32_t viddec_fw_mp4_emit_workload(void *parent, void *ctxt);
+
+void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c
new file mode 100755
index 0000000..11c82d4
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.c
@@ -0,0 +1,367 @@
+#include <vbp_common.h>
+#include "viddec_mp4_shortheader.h"
+
/* Frame geometry and GOB layout derived from the H.263 source_format code. */
typedef struct
{
    uint16_t vop_width;
    uint16_t vop_height;
    uint16_t num_macroblocks_in_gob;
    uint16_t num_gobs_in_vop;
    uint8_t  num_rows_in_gob;
} svh_src_fmt_params_t;

/* Defaults for source_format 1..5 (sub-QCIF, QCIF, CIF, 4CIF, 16CIF),
 * indexed by (source_format - 1).  Made 'static': the element type is
 * declared only in this translation unit, so the table has no legitimate
 * external users and should not pollute the global namespace. */
static const svh_src_fmt_params_t svh_src_fmt_defaults[5] =
{
    {128,    96,   8,  6, 1},
    {176,   144,  11,  9, 1},
    {352,   288,  22, 18, 1},
    {704,   576,  88, 18, 2},
    {1408, 1152, 352, 18, 4},
};
+
+mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_Status_t ret = MP4_STATUS_OK;
+    unsigned int data;
+    mp4_VideoObjectPlaneH263 *svh = &(parser->info.VisualObject.VideoObject.VideoObjectPlaneH263);
+    int32_t getbits = 0;
+    uint8_t pei = 0;
+    uint8_t optional_indicators_8bits = 0;
+
+    do
+    {
+        //temporal reference
+        getbits = viddec_pm_get_bits(parent, &data, 8);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        svh->temporal_reference = (data & 0xff);
+        //marker bit
+        getbits = viddec_pm_get_bits(parent, &data, 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        if ( 1 != (data & 0x1))
+        {
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+        //zero bit
+        getbits = viddec_pm_get_bits(parent, &data, 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        if ( 0 != (data & 0x1))
+        {
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+        //split_screen_indicator, document_camera_indicator, full_picture_freeze_release
+        getbits = viddec_pm_get_bits(parent, &data, 3);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        //source format
+        getbits = viddec_pm_get_bits(parent, &data, 3);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        svh->source_format = (data & 0x7);
+        if (svh->source_format == 0 || svh->source_format == 6)
+        {
+            DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n");
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+
+        if (svh->source_format != 7)
+        {
+            //picture coding type
+            getbits = viddec_pm_get_bits(parent, &data, 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            svh->picture_coding_type = (data & 0x1);
+            //reserved zero bits
+            getbits = viddec_pm_get_bits(parent, &data, 4);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            if ( 0 != (data & 0xf))
+            {
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+            //vop quant
+            getbits = viddec_pm_get_bits(parent, &data, 5);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            svh->vop_quant = (data & 0x1f);
+            //cpm
+            getbits = viddec_pm_get_bits(parent, &data, 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            if ( 0 != (data & 0x1))
+            {
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+        }
+        else //extended PTYPE (PLUSPTYPE)
+        {
+            //ufep
+            getbits = viddec_pm_get_bits(parent, &data, 3);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            svh->ufep = (data & 0x7); //ufep
+            if (svh->ufep == 1 || svh->ufep == 0)
+            {
+                //OPPTYPE
+                if (svh->ufep == 1)
+                {
+                    //source format
+                    getbits = viddec_pm_get_bits(parent, &data, 3);
+                    BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                    svh->source_format = (data & 0x7);
+                    if (svh->source_format < 1 || svh->source_format > 6)
+                    {
+                        DEB("Error: bad value of source_format\n");
+                        ret = MP4_STATUS_PARSE_ERROR;
+                        break;
+                    }
+                    //optional indicators
+                    getbits = viddec_pm_get_bits(parent, &data, 8);
+                    BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                    optional_indicators_8bits = data;
+                    //reserved zero bits
+                    getbits = viddec_pm_get_bits(parent, &data, 3);
+                    BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                    if ( 0 != (data & 0x7))
+                    {
+                        ret = MP4_STATUS_PARSE_ERROR;
+                        break;
+                    }
+                    //marker bit
+                    getbits = viddec_pm_get_bits(parent, &data, 1);
+                    BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                    if ( 1 != (data & 0x1))
+                    {
+                        ret = MP4_STATUS_PARSE_ERROR;
+                        break;
+                    }
+                    //reserved zero bits
+                    getbits = viddec_pm_get_bits(parent, &data, 3);
+                    BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                    if ( 0 != (data & 0x7))
+                    {
+                        ret = MP4_STATUS_PARSE_ERROR;
+                        break;
+                    }
+                }
+
+                //MPPTYPE
+                //picture coding type
+                getbits = viddec_pm_get_bits(parent, &data, 3);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->picture_coding_type = (data & 0x7);
+                if (svh->picture_coding_type > 1)
+                {
+                    DEB("Info: only support I and P frames\n");
+                    ret = MP4_STATUS_NOTSUPPORT;
+                    break;
+                }
+                //optional RPR mode
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 0 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //optional PRU mode
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 0 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //vop rounding type
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->vop_rounding_type = (data & 0x1);
+                //reserved zero bits
+                getbits = viddec_pm_get_bits(parent, &data, 2);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 0 != (data & 0x3))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //marker bit
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 1 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+            }
+            else
+            {
+                DEB("Info: don't support to handle the other case of Update Full Extended PTYPE\n");
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+
+            //cpm
+            getbits = viddec_pm_get_bits(parent, &data, 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            if ( 0 != (data & 0x1))
+            {
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+
+            //CPFMT
+            if (svh->ufep == 1 && svh->source_format == 6)
+            {   //Pixel Aspect Ratio
+                getbits = viddec_pm_get_bits(parent, &data, 4);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->pixel_aspect_ratio_code = (data & 0xf);
+                //Picture Width Indication
+                getbits = viddec_pm_get_bits(parent, &data, 9);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->picture_width_indication = (data & 0x1ff);
+                //marker bit
+                getbits = viddec_pm_get_bits(parent, &data, 1);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                if ( 1 != (data & 0x1))
+                {
+                    ret = MP4_STATUS_PARSE_ERROR;
+                    break;
+                }
+                //Picture Height Indication
+                getbits = viddec_pm_get_bits(parent, &data, 9);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                svh->picture_height_indication = (data & 0x1ff);
+
+                if (svh->pixel_aspect_ratio_code == 0xf)
+                {
+                    //EPAR
+                    viddec_pm_get_bits(parent, &data, 16);
+                }
+            }
+
+            //custom PCF
+            if (optional_indicators_8bits & 0x80) {
+                viddec_pm_get_bits(parent, &data, 8);
+                viddec_pm_get_bits(parent, &data, 2);
+            }
+
+            viddec_pm_get_bits(parent, &data, 5);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            svh->vop_quant = (data & 0x1f);
+        }
+        //PEI
+        do
+        {
+            getbits = viddec_pm_get_bits(parent, &data, 1); // pei
+            BREAK_GETBITS_FAIL(getbits, ret);
+            pei = (data & 0x1);
+            if (0 != pei)
+            {
+                getbits = viddec_pm_get_bits(parent, &data, 8); // psupp
+                BREAK_GETBITS_FAIL(getbits, ret);
+            }
+        } while ( 1 == pei);
+
+        // Anything after this needs to be fed to the decoder as PIXEL_ES
+    } while (0);
+
+    return ret;
+}
+
+mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_Status_t             ret=MP4_STATUS_OK;
+    mp4_Info_t              *pInfo = &(parser->info);
+    mp4_VideoSignalType_t *vst = &(pInfo->VisualObject.VideoSignalType);
+    mp4_VideoObjectLayer_t  *vol = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t  *vop = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+    mp4_VideoObjectPlaneH263 *svh = &(pInfo->VisualObject.VideoObject.VideoObjectPlaneH263);
+    uint8_t index = 0;
+    uint8_t k = 0;
+
+    ret = mp4_Parse_VideoObjectPlane_svh(parent, parser);
+    if (ret == MP4_STATUS_OK)
+    {
+        // Populate defaults for the svh
+        vol->short_video_header = 1;
+        vol->video_object_layer_shape = MP4_SHAPE_TYPE_RECTANGULAR;
+        vol->obmc_disable = 1;
+        vol->quant_type = 0;
+        vol->resync_marker_disable = 1;
+        vol->data_partitioned = 0;
+        vol->reversible_vlc = 0;
+        vol->interlaced = 0;
+        vol->complexity_estimation_disable = 1;
+        vol->scalability = 0;
+        vol->not_8_bit = 0;
+        vol->bits_per_pixel = 8;
+        vol->quant_precision = 5;
+        vol->vop_time_increment_resolution = 30000;
+        vol->fixed_vop_time_increment = 1001;
+        vol->aspect_ratio_info = MP4_ASPECT_RATIO_12_11;
+
+        vop->vop_rounding_type = svh->vop_rounding_type;
+        vop->vop_fcode_forward = 1;
+        vop->vop_coded = 1;
+        vop->vop_coding_type = svh->picture_coding_type ? MP4_VOP_TYPE_P: MP4_VOP_TYPE_I;
+        vop->vop_quant = svh->vop_quant;
+
+        vst->colour_primaries = 1;
+        vst->transfer_characteristics = 1;
+        vst->matrix_coefficients = 6;
+
+        if (svh->source_format >= 1 && svh->source_format <= 5)
+        {
+            index = svh->source_format - 1;
+            vol->video_object_layer_width = svh_src_fmt_defaults[index].vop_width;
+            vol->video_object_layer_height = svh_src_fmt_defaults[index].vop_height;
+            svh->num_macroblocks_in_gob = svh_src_fmt_defaults[index].num_macroblocks_in_gob;
+            svh->num_gobs_in_vop = svh_src_fmt_defaults[index].num_gobs_in_vop;
+            svh->num_rows_in_gob = svh_src_fmt_defaults[index].num_rows_in_gob;
+        }
+        else if (svh->source_format == 6) //custom format
+        {
+            vol->video_object_layer_width = (svh->picture_width_indication + 1)*4;
+            vol->video_object_layer_height = (svh->picture_height_indication)*4;
+            if (vol->video_object_layer_height < 404)
+            {
+                k = 1;
+            }
+            else if (vol->video_object_layer_height < 804)
+            {
+                k = 2;
+            }
+            else
+            {
+                k = 4;
+            }
+	     svh->num_macroblocks_in_gob = (((vol->video_object_layer_width + 15) & ~15) /16)*k;
+            svh->num_gobs_in_vop = (((vol->video_object_layer_height + 15) & ~15)/(16*k));
+            svh->num_rows_in_gob = k;
+        }
+        else
+        {
+            DEB("Error: Bad value for VideoPlaneWithShortHeader.source_format\n");
+            ret = MP4_STATUS_NOTSUPPORT;
+            return ret;
+        }
+    }
+
+    mp4_set_hdr_bitstream_error(parser, false, ret);
+
+    // POPULATE WORKLOAD ITEM
+    {
+        viddec_workload_item_t wi;
+
+        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_PLANE_SHORT;
+
+        wi.mp4_vpsh.info = 0;
+        wi.mp4_vpsh.pad1 = 0;
+        wi.mp4_vpsh.pad2 = 0;
+
+        viddec_fw_mp4_vpsh_set_source_format(&wi.mp4_vpsh, svh->source_format);
+
+        ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
+        if (ret == 1)
+            ret = MP4_STATUS_OK;
+    }
+
+    return ret;
+}
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h
new file mode 100755
index 0000000..0d3181a
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_shortheader.h
@@ -0,0 +1,10 @@
#ifndef VIDDEC_MP4_SHORTHEADER_H
#define VIDDEC_MP4_SHORTHEADER_H
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Parses a short-video-header (H.263 baseline style) VOP from the bitstream
 * owned by 'parent' into 'cxt'. Returns an mp4_Status_t code. */
mp4_Status_t mp4_Parse_VideoObjectPlane_svh(void *parent, viddec_mp4_parser_t *cxt);

/* Parses short-video-header sequence-level data (source format, derived
 * width/height and GOB layout) into 'cxt'. Returns an mp4_Status_t code. */
mp4_Status_t mp4_Parse_VideoObject_svh(void *parent, viddec_mp4_parser_t *cxt);

#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
new file mode 100755
index 0000000..c50ef3c
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.c
@@ -0,0 +1,629 @@
+#include <string.h>
+#include <vbp_common.h>
+#include "viddec_mp4_videoobjectlayer.h"
#ifndef VBP
/* Default intra quantization matrix from MPEG-4 Part 2 (ISO/IEC 14496-2),
 * stored in raster-scan order. Used when load_intra_quant_mat == 0. */
const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
    8, 17, 18, 19, 21, 23, 25, 27,
    17, 18, 19, 21, 23, 25, 27, 28,
    20, 21, 22, 23, 24, 26, 28, 30,
    21, 22, 23, 24, 26, 28, 30, 32,
    22, 23, 24, 26, 28, 30, 32, 35,
    23, 24, 26, 28, 30, 32, 35, 38,
    25, 26, 28, 30, 32, 35, 38, 41,
    27, 28, 30, 32, 35, 38, 41, 45
};
/* Default non-intra (inter) quantization matrix, raster-scan order.
 * Used when load_nonintra_quant_mat == 0. */
const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
    16, 17, 18, 19, 20, 21, 22, 23,
    17, 18, 19, 20, 21, 22, 23, 24,
    18, 19, 20, 21, 22, 23, 24, 25,
    19, 20, 21, 22, 23, 24, 26, 27,
    20, 21, 22, 23, 25, 26, 27, 28,
    21, 22, 23, 24, 26, 27, 28, 30,
    22, 23, 24, 26, 27, 28, 30, 31,
    23, 24, 25, 27, 28, 30, 31, 33
};

#else
/* NOTE(review): the VBP builds carry the same default coefficients but in a
 * different element order — presumably pre-permuted for the scan order the
 * downstream consumer expects. Confirm against the code that reads these
 * tables before changing either variant. */
const unsigned char mp4_DefaultIntraQuantMatrix[64] = {
    8, 17, 17, 20, 18, 18, 19, 19,
    21, 21, 22, 22, 22, 21, 21, 23,
    23, 23, 23, 23, 23, 25, 24, 24,
    24, 24, 25, 25, 27, 27, 26, 26,
    26, 26, 26, 27, 28, 28, 28, 28,
    28, 28, 28, 30, 30, 30, 30, 30,
    30, 32, 32, 32, 32, 32, 35, 35,
    35, 35, 38, 38, 38, 41, 41, 45
};

const unsigned char mp4_DefaultNonIntraQuantMatrix[64] = {
    16, 17, 17, 18, 18, 18, 19, 19,
    19, 19, 20, 20, 20, 20, 20, 21,
    21, 21, 21, 21, 21, 22, 22, 22,
    22, 22, 22, 22, 23, 23, 23, 23,
    23, 23, 23, 23, 24, 24, 24, 25,
    24, 24, 24, 25, 26, 26, 26, 26,
    25, 27, 27, 27, 27, 27, 28, 28,
    28, 28, 30, 30, 30, 31, 31, 33
};

#endif
/* Classical zigzag scan order: maps scan position -> raster index in an
 * 8x8 block. Used when de-serializing quant matrices from the bitstream. */
const unsigned char mp4_ClassicalZigzag[64] = {
    0,   1,  8, 16,  9,  2,  3, 10, 17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34, 27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36, 29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46, 53, 60, 61, 54, 47, 55, 62, 63
};
+
/* Returns the number of bits needed to represent a macroblock number in
 * [0, nmb-1], i.e. the bit-width of (nmb - 1), with a minimum of 1 bit. */
static inline int mp4_GetMacroBlockNumberSize(int nmb)
{
    int bits = 1;
    int v = nmb - 1;

    v >>= 1;                /* first shift unconditionally: at least 1 bit */
    while (v)
    {
        v >>= 1;
        bits++;
    }
    return bits;
}
+
/* Copies 'len' bytes from a default quant-matrix table into 'dst'.
 * memcpy replaces the original hand-rolled byte loop; <string.h> is
 * already included at the top of this file. */
static inline void mp4_copy_default_table(const uint8_t *src, uint8_t *dst, uint32_t len)
{
    memcpy(dst, src, len);
}
+
+
+static inline mp4_Status_t mp4_Parse_QuantMatrix(void *parent, uint8_t *pQM)
+{
+    uint32_t i,code=0;
+    uint8_t last=0;
+    int32_t                 getbits=0;
+    mp4_Status_t            ret = MP4_STATUS_OK;
+
+    for (i = 0; i < 64; i ++)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 8);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        if (code == 0) break;
+        pQM[mp4_ClassicalZigzag[i]] = (uint8_t)(code & 0xFF);
+    }
+    last = pQM[mp4_ClassicalZigzag[i-1]];
+    for (; i < 64; i ++)
+    {
+        pQM[mp4_ClassicalZigzag[i]] = last;
+    }
+    return ret;;
+}
+
/* True iff video_object_type_indication is in the valid range [1, 18].
 * BUG FIX: the original used '||', which is true for every uint8_t value,
 * so the caller's "force unknown type to 1" path could never trigger. */
static inline uint8_t mp4_pvt_valid_object_type_indication(uint8_t val)
{
    return ((1 <= val) && (val <= 18));
}
+
/* True iff 'val' is a legal video_object_layer_verid: 1, 2, 4 or 5. */
static inline uint8_t mp4_pvt_valid_object_layer_verid(uint8_t val)
{
    return (uint8_t)(val == 1 || val == 2 || val == 4 || val == 5);
}
+
/* Parses the vol_control_parameters section of a Video Object Layer header:
 * chroma format, low_delay, and optional VBV (video buffering verifier)
 * parameters. Only 4:2:0 chroma is supported; anything else is coerced to
 * 4:2:0 and flagged as an unsupported-header bitstream error.
 * Returns MP4_STATUS_OK, MP4_STATUS_NOTSUPPORT, or a parse-error status. */
static mp4_Status_t
mp4_pvt_VOL_volcontrolparameters(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_VOLControlParameters_t *cxt = &(parser->info.VisualObject.VideoObject.VOLControlParameters);
    mp4_Status_t            ret = MP4_STATUS_PARSE_ERROR;
    int32_t                 getbits=0;
    uint32_t                code=0;

    do
    {
        /* 4 bits: chroma_format(2) + low_delay(1) + vbv_parameters(1) */
        getbits = viddec_pm_get_bits(parent, &(code), 4);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        cxt->chroma_format = (code >> 2) & 0x3;
        cxt->low_delay = ((code & 0x2) > 0);
        cxt->vbv_parameters = code & 0x1;

        if (cxt->chroma_format != MP4_CHROMA_FORMAT_420)
        {
            DEB("Warning: mp4_Parse_VideoObject:vol_control_parameters.chroma_format != 4:2:0\n");
            cxt->chroma_format= MP4_CHROMA_FORMAT_420;
            parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
            ret = MP4_STATUS_NOTSUPPORT;
        }

        if (cxt->vbv_parameters)
        {/* TODO: Check for validity of marker bits */
            getbits = viddec_pm_get_bits(parent, &(code), 32);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* 32 bits= firsthalf(15) + M + LatterHalf(15) + M */
            cxt->bit_rate = (code & 0xFFFE) >> 1; // Get rid of 1 marker bit
            cxt->bit_rate |= ((code & 0xFFFE0000) >> 2); // Get rid of 2 marker bits

            if (cxt->bit_rate == 0)
            {
                DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.bit_rate = 0\n");
                parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
                ret = MP4_STATUS_NOTSUPPORT;
                // Do we need to really break here? Why not just set an error and proceed
                //break;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 19);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* 19 bits= firsthalf(15) + M + LatterHalf(3)*/
            cxt->vbv_buffer_size = code & 0x7;
            cxt->vbv_buffer_size |= ( (code >> 4) & 0x7FFF);
            if (cxt->vbv_buffer_size == 0)
            {
                DEB("Error: mp4_Parse_VideoObject:vidObjLay->VOLControlParameters.vbv_buffer_size = 0\n");
                parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
                ret = MP4_STATUS_NOTSUPPORT;
                // Do we need to really break here? Why not just set an error and proceed
                //break;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 28);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* 28 bits= firsthalf(11) + M + LatterHalf(15) + M */
            code = code >>1;
            cxt->vbv_occupancy = code & 0x7FFF;
            code = code >>16;
            cxt->vbv_occupancy |= (code & 0x07FF);
        }
        ret = MP4_STATUS_OK;
    } while (0);

    return ret;
}
+
/* Returns the number of bits needed to represent 'val' (minimum 1: the
 * loop body always runs once, so count(0) == 1, matching the original). */
static uint32_t mp4_pvt_count_number_of_bits(uint32_t val)
{
    uint32_t bits = 1;

    val >>= 1;
    while (val != 0)
    {
        val >>= 1;
        bits++;
    }
    return bits;
}
+
/* Parses sprite information from the VOL header. Only GMC (global motion
 * compensation) sprites are supported: static sprites — and any GMC sprite
 * with brightness change — are rejected as unsupported. */
static mp4_Status_t
mp4_Parse_VOL_sprite(void *parent,  viddec_mp4_parser_t *parser)
{
    mp4_VideoObjectLayer_t  *vidObjLay = (&parser->info.VisualObject.VideoObject);
    mp4_VOLSpriteInfo_t     *cxt = &(vidObjLay->sprite_info);
    uint32_t                sprite_enable = vidObjLay->sprite_enable;
    uint32_t                code;
    mp4_Status_t            ret = MP4_STATUS_PARSE_ERROR;
    int32_t                 getbits=0;

    do {
        if ((sprite_enable == MP4_SPRITE_STATIC) ||
                (sprite_enable == MP4_SPRITE_GMC))
        {
            if (sprite_enable != MP4_SPRITE_GMC)
            {
                /* This is not a supported type by HW */
                DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable);
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }

            /* 9 bits: no_of_sprite_warping_points(6) +
             * sprite_warping_accuracy(2) + sprite_brightness_change(1) */
            getbits = viddec_pm_get_bits(parent, &(code), 9);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            cxt->sprite_brightness_change = code & 0x1;
            cxt->sprite_warping_accuracy = (code >> 1) & 0x3;
            cxt->no_of_sprite_warping_points = code >> 3;
            if (cxt->no_of_sprite_warping_points > 1)
            {
                DEB("Warning: mp4_Parse_VideoObject:bad no_of_sprite_warping_points %d\n",
                    cxt->no_of_sprite_warping_points);
            }

            if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && (cxt->sprite_brightness_change))
            {
                DEB("Error: mp4_Parse_VideoObject:sprite_brightness_change should be 0 for GMC sprites\n");
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }

            /* NOTE(review): this branch is unreachable — the first guard in
             * this block already broke out for sprite_enable != GMC. Kept
             * as-is; candidate for removal in a later cleanup. */
            if (vidObjLay->sprite_enable != MP4_SPRITE_GMC)
            {
                DEB("ERROR: mp4_Parse_VideoObject:sprite_enable = %.2X\n", sprite_enable);
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }
        }
        ret = MP4_STATUS_OK;
    } while (0);

    return ret;
}
+
+static mp4_Status_t mp4_Parse_VOL_quant_mat(void *parent, mp4_VideoObjectLayer_t  *vidObjLay)
+{
+    uint32_t                code;
+    mp4_Status_t            ret = MP4_STATUS_PARSE_ERROR;
+    int32_t                 getbits=0;
+    mp4_VOLQuant_mat_t      *quant = &(vidObjLay->quant_mat_info);
+
+    do {
+        getbits = viddec_pm_get_bits(parent, &(code), 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        quant->load_intra_quant_mat = code;
+        if (quant->load_intra_quant_mat)
+        {
+            mp4_Parse_QuantMatrix(parent, &(quant->intra_quant_mat[0]));
+        }
+        else
+        {
+            mp4_copy_default_table((const uint8_t *)&mp4_DefaultIntraQuantMatrix[0], (uint8_t *)&(quant->intra_quant_mat[0]), 64);
+        }
+
+        getbits = viddec_pm_get_bits(parent, &(code), 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        quant->load_nonintra_quant_mat = code;
+        if (quant->load_nonintra_quant_mat)
+        {
+            mp4_Parse_QuantMatrix(parent, &(quant->nonintra_quant_mat[0]));
+        }
+        else
+        {
+            mp4_copy_default_table((const uint8_t *)&mp4_DefaultNonIntraQuantMatrix[0], (uint8_t *)&(quant->nonintra_quant_mat[0]), 64);
+        }
+        ret = MP4_STATUS_OK;
+    } while (0);
+    return ret;
+}
+
/* Parses the remainder of a VOL header for non-binary-only shapes:
 * dimensions (rectangular only), interlace/OBMC flags, sprite info, bit
 * depth, quant matrices, and the feature flags that this parser/HW does not
 * support (complexity estimation, NEWPRED, scalability). Field order follows
 * the MPEG-4 Part 2 VideoObjectLayer() syntax, so the reads below must stay
 * in exactly this sequence. */
static mp4_Status_t mp4_Parse_VOL_notbinaryonly(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t                code;
    mp4_Info_t              *pInfo = &(parser->info);
    mp4_VideoObjectLayer_t  *vidObjLay = &(pInfo->VisualObject.VideoObject);
    mp4_Status_t            ret = MP4_STATUS_PARSE_ERROR;
    int32_t                 getbits=0;

    do {
        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR)
        {
            /* TODO: check for validity of marker bits */
            /* 29 bits: M + width(13) + M + height(13) + M (markers skipped) */
            getbits = viddec_pm_get_bits(parent, &(code), 29);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->video_object_layer_height = (code >> 1) & 0x1FFF;
            vidObjLay->video_object_layer_width = (code >> 15) & 0x1FFF;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->interlaced = ((code & 0x2) > 0);
        vidObjLay->obmc_disable = ((code & 0x1) > 0);

        {
            /* sprite_enable is 1 bit in verid 1 streams, 2 bits otherwise */
            uint32_t num_bits=1;
            if (vidObjLay->video_object_layer_verid != 1) num_bits=2;
            getbits = viddec_pm_get_bits(parent, &(code), num_bits);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->sprite_enable = code;
        }

        ret = mp4_Parse_VOL_sprite(parent, parser);
        if (ret != MP4_STATUS_OK)
        {
            break;
        }

        if ((vidObjLay->video_object_layer_verid != 1) &&
                (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR))
        {
            /*  not supported shape*/
            DEB("Error: mp4_Parse_VideoObject: sadct_disable, not supp\n");
            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
            break;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_FAIL(getbits, ret);
        vidObjLay->not_8_bit = (code  > 0 );
        if (vidObjLay->not_8_bit)
        {
            /*  8 bit is only supported mode*/
            DEB("Error: mp4_Parse_VideoObject: not_8_bit, not supp\n");
            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
            break;
        }
        else
        {/* We use default values since only 8 bit mode is supported */
            vidObjLay->quant_precision = 5;
            vidObjLay->bits_per_pixel = 8;
        }

        if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)
        {
            /* Should not get here as shape is checked earlier */
            DEB("Error: mp4_Parse_VideoObject: GRAYSCALE, not supp\n");
            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
            break;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->quant_type = code;
        if (vidObjLay->quant_type)
        {
            /* quant_type == 1 means MPEG-style quantization with
             * optional custom matrices */
            ret = mp4_Parse_VOL_quant_mat(parent, vidObjLay);
            if (ret != MP4_STATUS_OK)
            {
                break;
            }
        }

        if (vidObjLay->video_object_layer_verid != 1)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->quarter_sample = code;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->complexity_estimation_disable = code;
        if (!vidObjLay->complexity_estimation_disable)
        {/*  complexity estimation not supported */
            DEB("Error: mp4_Parse_VideoObject: vidObjLay->complexity_estimation_disable, not supp\n");
            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
            break;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 2);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->resync_marker_disable = ((code & 0x2) > 0);
        vidObjLay->data_partitioned = code & 0x1;
        if (vidObjLay->data_partitioned)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->reversible_vlc = code;
        }

        if (vidObjLay->video_object_layer_verid != 1)
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidObjLay->newpred_enable = code;
            if (vidObjLay->newpred_enable)
            {
                DEB("Error: NEWPRED mode is not supported\n");
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_FAIL(getbits, ret);
            vidObjLay->reduced_resolution_vop_enable = code;
        }

        getbits = viddec_pm_get_bits(parent, &(code), 1);
        BREAK_GETBITS_FAIL(getbits, ret);
        vidObjLay->scalability = code;
        if (vidObjLay->scalability)
        {
            DEB("Error: VOL scalability is not supported\n");
            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
            break;
        }

        // No need to parse further - none of the fields are interesting to parser/decoder/user
        ret = MP4_STATUS_OK;
    } while (0);
    return ret;
}
+
/* Top-level parser for a Video Object Layer (VOL) header. Validates the
 * object type, verid, aspect ratio, optional vol_control_parameters, shape
 * (rectangular only), and VOP timing, then delegates the rest to
 * mp4_Parse_VOL_notbinaryonly(). On any failure the header bitstream-error
 * state is updated and MP4_BS_ERROR_HDR_NONDEC is set. */
mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *parser)
{
    uint32_t                code;
    mp4_Info_t              *pInfo = &(parser->info);
    mp4_VisualObject_t      *visObj = &(pInfo->VisualObject);
    mp4_VideoObjectLayer_t  *vidObjLay = &(pInfo->VisualObject.VideoObject);
    mp4_Status_t            ret = MP4_STATUS_PARSE_ERROR;
    int32_t                 getbits=0;

//DEB("entering mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret);

    // Trying to parse more header data as it is more important than frame data
    if (parser->bitstream_error > MP4_HDR_ERROR_MASK)
        return ret;

    do {
        vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE;

        vidObjLay->short_video_header = 0;
        /* The low nibble of the start code carries the layer id */
        vidObjLay->video_object_layer_id = (parser->current_sc & 0xF);

        /* 9 bits: random_accessible_vol(1) + video_object_type_indication(8) */
        getbits = viddec_pm_get_bits(parent, &code, 9);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);
        vidObjLay->video_object_type_indication = code & 0xFF;
        vidObjLay->random_accessible_vol = ((code & 0x100) > 0);

        if (!mp4_pvt_valid_object_type_indication(vidObjLay->video_object_type_indication))
        {        /* Streams with "unknown" type mismatch with ref */
            DEB("Warning: video_object_type_indication = %d, forcing to 1\n",
                vidObjLay->video_object_type_indication);
            vidObjLay->video_object_type_indication = 1;
        }

        if (vidObjLay->video_object_type_indication == MP4_VIDEO_OBJECT_TYPE_FINE_GRANULARITY_SCALABLE)
        {/* This is not a supported type by HW */
            DEB("ERROR: mp4_Parse_VideoObject:video_object_type_indication = %.2X\n",
                vidObjLay->video_object_type_indication);
            ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
            break;
        }
        else
        {
            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->is_object_layer_identifier = code;
            /* Default the layer verid from the visual object verid,
             * falling back to 1 when that value is itself invalid */
            vidObjLay->video_object_layer_verid =
                (mp4_pvt_valid_object_layer_verid(visObj->visual_object_verid)) ? visObj->visual_object_verid : 1;

            if (vidObjLay->is_object_layer_identifier)
            {
                /* 7 bits: verid(4) + priority(3) */
                getbits = viddec_pm_get_bits(parent, &(code), 7);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjLay->video_object_layer_priority = code & 0x7;
                vidObjLay->video_object_layer_verid = (code >> 3) & 0xF;
                if (!mp4_pvt_valid_object_layer_verid(vidObjLay->video_object_layer_verid))
                {
                    DEB("Error: mp4_Parse_VideoObject:is_identifier = %d, expected[1,5]\n",
                        vidObjLay->video_object_layer_verid);
                    ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                    break;
                }
                /* Video object layer ID supercedes visual object ID */
                visObj->visual_object_verid = vidObjLay->video_object_layer_verid;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 4);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->aspect_ratio_info = code & 0xF;
            if (vidObjLay->aspect_ratio_info == MP4_ASPECT_RATIO_EXTPAR)
            {
                /* Extended PAR: explicit 8-bit width and height follow */
                getbits = viddec_pm_get_bits(parent, &(code), 16);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjLay->aspect_ratio_info_par_width = (code >> 8) & 0xFF;
                vidObjLay->aspect_ratio_info_par_height = code & 0xFF;
            }

            getbits = viddec_pm_get_bits(parent, &(code), 1);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->is_vol_control_parameters = code;
            if (vidObjLay->is_vol_control_parameters)
            {
                ret = mp4_pvt_VOL_volcontrolparameters(parent, parser);
                if (ret != MP4_STATUS_OK)
                {
                    break;
                }
            }

            getbits = viddec_pm_get_bits(parent, &(code), 2);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            vidObjLay->video_object_layer_shape = code;
            /* If shape is not rectangluar exit early without parsing */
            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
            {
                DEB("Error: mp4_Parse_VideoObject: shape not rectangluar(%d):%d\n",
                    MP4_SHAPE_TYPE_RECTANGULAR, vidObjLay->video_object_layer_shape);
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }

            if ((vidObjLay->video_object_layer_verid != 1) &&
                    (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE))
            {/* Grayscale not supported */
                DEB("Error: MP4_SHAPE_TYPE_GRAYSCALE not supported\n");
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }

            /* 19 bits: M + vop_time_increment_resolution(16) + M +
             * fixed_vop_rate(1); marker validity is not checked */
            getbits = viddec_pm_get_bits(parent, &(code), 19);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);
            /* TODO: check validity of marker */
            vidObjLay->vop_time_increment_resolution = (code >> 2) & 0xFFFF;
            vidObjLay->fixed_vop_rate = code & 0x1;

            if (vidObjLay->vop_time_increment_resolution == 0)
            {
                DEB("Error: 0 value for vop_time_increment_resolution\n");
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }
            /* calculate number bits in vop_time_increment_resolution */
            vidObjLay->vop_time_increment_resolution_bits = (uint8_t)mp4_pvt_count_number_of_bits(
                        (uint32_t)(vidObjLay->vop_time_increment_resolution -1));

            if (vidObjLay->fixed_vop_rate)
            {
                getbits = viddec_pm_get_bits(parent, &(code), vidObjLay->vop_time_increment_resolution_bits);
                BREAK_GETBITS_REQD_MISSING(getbits, ret);
                vidObjLay->fixed_vop_time_increment = code;
            }

            if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
            {
                ret = mp4_Parse_VOL_notbinaryonly(parent, parser);
                if (ret != MP4_STATUS_OK)
                {
                    break;
                }
            }
            else
            {
                DEB("Error: MP4_SHAPE_TYPE_BINARYONLY not supported\n");
                ret = (mp4_Status_t)(MP4_STATUS_NOTSUPPORT | MP4_STATUS_REQD_DATA_ERROR);
                break;
            }
        }

        vidObjLay->VideoObjectPlane.sprite_transmit_mode = MP4_SPRITE_TRANSMIT_MODE_PIECE;
        ret = MP4_STATUS_OK;
    } while (0);

    mp4_set_hdr_bitstream_error(parser, true, ret);
    if (ret != MP4_STATUS_OK) {
        parser->bitstream_error |= MP4_BS_ERROR_HDR_NONDEC;
        return ret;
    }
//DEB("before wkld mp4_Parse_VideoObjectLayer: bs_err: %d, ret: %d\n", parser->bitstream_error, ret);

    // POPULATE WORKLOAD ITEM
    /* NOTE(review): the legacy workload-item population below was disabled
     * in this restructure; kept for reference. */
/*
    {
        viddec_workload_item_t wi;
        viddec_workload_t *wl = viddec_pm_get_header(parent);

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VIDEO_OBJ;

        wi.mp4_vol.vol_aspect_ratio = 0;
        wi.mp4_vol.vol_bit_rate = 0;
        wi.mp4_vol.vol_frame_rate = 0;

        viddec_fw_mp4_vol_set_aspect_ratio_info(&wi.mp4_vol, vidObjLay->aspect_ratio_info);
        viddec_fw_mp4_vol_set_par_width(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_width);
        viddec_fw_mp4_vol_set_par_height(&wi.mp4_vol, vidObjLay->aspect_ratio_info_par_height);
        viddec_fw_mp4_vol_set_control_param(&wi.mp4_vol, vidObjLay->is_vol_control_parameters);
        viddec_fw_mp4_vol_set_chroma_format(&wi.mp4_vol, vidObjLay->VOLControlParameters.chroma_format);
        viddec_fw_mp4_vol_set_interlaced(&wi.mp4_vol, vidObjLay->interlaced);
        viddec_fw_mp4_vol_set_fixed_vop_rate(&wi.mp4_vol, vidObjLay->fixed_vop_rate);

        viddec_fw_mp4_vol_set_vbv_param(&wi.mp4_vol, vidObjLay->VOLControlParameters.vbv_parameters);
        viddec_fw_mp4_vol_set_bit_rate(&wi.mp4_vol, vidObjLay->VOLControlParameters.bit_rate);

        viddec_fw_mp4_vol_set_fixed_vop_time_increment(&wi.mp4_vol, vidObjLay->fixed_vop_time_increment);
        viddec_fw_mp4_vol_set_vop_time_increment_resolution(&wi.mp4_vol, vidObjLay->vop_time_increment_resolution);

        ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
        if (ret == 1)
            ret = MP4_STATUS_OK;

        memset(&(wl->attrs), 0, sizeof(viddec_frame_attributes_t));

        wl->attrs.cont_size.width = vidObjLay->video_object_layer_width;
        wl->attrs.cont_size.height = vidObjLay->video_object_layer_height;
    }
*/
    return ret;
}
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h
new file mode 100755
index 0000000..f093553
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectlayer.h
@@ -0,0 +1,16 @@
#ifndef VIDDEC_MP4_VIDEOOBJECTLAYER_H
#define VIDDEC_MP4_VIDEOOBJECTLAYER_H
#include "viddec_parser_ops.h"
#include "viddec_mp4_parse.h"

/* Lifecycle helpers for the VOL state (definitions live elsewhere in the
 * plugin — not in viddec_mp4_videoobjectlayer.c; confirm before use). */
void mp4_ResetVOL(mp4_Info_t *pInfo);

mp4_Status_t mp4_InitVOL(mp4_Info_t *pInfo);

mp4_Status_t mp4_FreeVOL(mp4_Info_t *pInfo);

/* Parses a Video Object Layer header from the stream owned by 'parent'. */
mp4_Status_t mp4_Parse_VideoObjectLayer(void *parent, viddec_mp4_parser_t *cxt);



#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c
new file mode 100755
index 0000000..97f36a3
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.c
@@ -0,0 +1,423 @@
+#include <vbp_common.h>
+#include "viddec_mp4_videoobjectplane.h"
+
/* Parses a Group of VOP (GOV) header: HH:MM:SS time code plus closed_gov and
 * broken_link flags, converts the time code to a tick-based time_base, and
 * appends a GVOP workload item. Returns an mp4_Status_t code. */
mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser)
{
    mp4_Info_t* pInfo = &(parser->info);
    uint32_t  code;
    int32_t getbits=0;
    mp4_Status_t ret = MP4_STATUS_REQD_DATA_ERROR;
    mp4_GroupOfVideoObjectPlane_t *data;
    uint32_t time_code = 0;

    data = &(pInfo->VisualObject.VideoObject.GroupOfVideoObjectPlane);

    do
    {
        /* 20 bits: hours(5) + minutes(6) + marker(1) + seconds(6) +
         * closed_gov(1) + broken_link(1), unpacked LSB-first below */
        getbits = viddec_pm_get_bits(parent, &code, 20);
        BREAK_GETBITS_FAIL(getbits, ret);
        ret = MP4_STATUS_OK;

        data->broken_link = ((code & 0x1) > 0);
        data->closed_gov = ((code & 0x2) > 0);
        time_code = code = code >> 2;
        data->time_code_seconds = code & 0x3F;
        code = code >> 6;
        if ((code & 1) == 0)
        {/* SGA:Should we ignore marker bit? */
            DEB("Error:mp4_Parse_GroupOfVideoObjectPlane: Invalid marker\n");
        }
        code = code >>1;
        data->time_code_minutes = code & 0x3F;
        code = code >> 6;
        data->time_code_hours = code & 0x1F;

        // This is the timebase in full second units
        data->time_base = data->time_code_seconds + (60*data->time_code_minutes) + (3600*data->time_code_hours);
        // Need to convert this into no. of ticks
        data->time_base *= pInfo->VisualObject.VideoObject.vop_time_increment_resolution;

    } while (0);

    mp4_set_hdr_bitstream_error(parser, true, ret);

    // POPULATE WORKLOAD ITEM
    {
        viddec_workload_item_t wi;

        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_GRP_VIDEO_OBJ;

        wi.mp4_gvop.gvop_info = 0;
        wi.mp4_gvop.pad1 = 0;
        wi.mp4_gvop.pad2 = 0;

        viddec_fw_mp4_gvop_set_broken_link(&wi.mp4_gvop, data->broken_link);
        viddec_fw_mp4_gvop_set_closed_gov(&wi.mp4_gvop, data->closed_gov);
        viddec_fw_mp4_gvop_set_time_code(&wi.mp4_gvop, time_code);

        /* append_workitem returns 1 on success here; normalize to OK */
        ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
        if (ret == 1)
            ret = MP4_STATUS_OK;
    }

    return ret;
}
+
/* Decodes the VLC-coded brightness_change_factor used by sprite brightness
 * change (prefix length selects the value range; longer prefixes encode
 * larger magnitudes). The decoded signed factor is stored in *b_change.
 * NOTE(review): only the final getbits result decides the return status —
 * earlier peek/skip/get failures are not individually checked. */
static inline mp4_Status_t mp4_brightness_change(void *parent, int32_t *b_change)
{
    uint32_t code;
    int32_t getbits=0;

    *b_change = 0;
    getbits = viddec_pm_peek_bits(parent, &code, 4);
    if (code == 15)
    {
        /* prefix 1111: 10-bit offset, range [625, 1648] */
        getbits = viddec_pm_skip_bits(parent, 4);
        getbits = viddec_pm_get_bits(parent, &code, 10);
        *b_change = 625 + code;
    }
    else if (code == 14)
    {
        /* prefix 1110: 9-bit offset, range [113, 624] */
        getbits = viddec_pm_skip_bits(parent, 4);
        getbits = viddec_pm_get_bits(parent, &code, 9);
        *b_change = 113 + code;
    }
    else if (code >= 12)
    {
        /* prefix 110: 7-bit value, negative or positive sub-range */
        getbits = viddec_pm_skip_bits(parent, 3);
        getbits = viddec_pm_get_bits(parent, &code, 7);
        *b_change = (code < 64) ? ((int32_t)code - 112) : ((int32_t)code - 15);
    }
    else if (code >= 8)
    {
        /* prefix 10: 6-bit value */
        getbits = viddec_pm_skip_bits(parent, 2);
        getbits = viddec_pm_get_bits(parent, &code, 6);
        *b_change = (code < 32) ? ((int32_t)code - 48) : ((int32_t)code - 15);
    }
    else
    {
        /* prefix 0: 5-bit value */
        getbits = viddec_pm_skip_bits(parent, 1);
        getbits = viddec_pm_get_bits(parent, &code, 5);
        *b_change = (code < 16) ? ((int32_t)code - 16) : ((int32_t)code - 15);
    }

    return ( (getbits == -1) ? MP4_STATUS_PARSE_ERROR: MP4_STATUS_OK);
}
/* Decodes the VLC that gives the bit-length of a sprite warping motion
 * vector component, storing it in *dmv_length. A 3-bit prefix of 7 escapes
 * to a unary count of further 1-bits (lengths >= 6); otherwise the length is
 * derived directly from the short prefix. Returns an mp4_Status_t (despite
 * the int32_t declared return type). */
static inline int32_t mp4_Sprite_dmv_length(void * parent, int32_t *dmv_length)
{
    uint32_t code, skip;
    int32_t getbits=0;
    mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;
    *dmv_length=0;
    skip=3;
    do {
        getbits = viddec_pm_peek_bits(parent, &code, skip);
        BREAK_GETBITS_REQD_MISSING(getbits, ret);

        if (code == 7)
        {
            /* Escape: consume the 3-bit prefix, then count leading 1s in
             * the next 9 bits; length = 5 + (ones + 1) */
            viddec_pm_skip_bits(parent, skip);
            getbits = viddec_pm_peek_bits(parent, &code, 9);
            BREAK_GETBITS_REQD_MISSING(getbits, ret);

            skip=1;
            while ((code & 256) != 0)
            {/* count number of 1 bits */
                code <<=1;
                skip++;
            }
            *dmv_length = 5 + skip;
        }
        else
        {
            /* Short codes: 2-bit prefix for lengths 0-1, 3-bit otherwise */
            skip=(code <= 1) ? 2 : 3;
            *dmv_length = code - 1;
        }
        viddec_pm_skip_bits(parent, skip);
        ret= MP4_STATUS_OK;

    } while (0);
    return ret;
}
+
+/*
+ * mp4_Sprite_Trajectory: parse the sprite_trajectory() syntax of a VOP
+ * header (ISO/IEC 14496-2).  For each of the no_of_sprite_warping_points,
+ * a (du, dv) pair of differential motion vectors is decoded: a VLC gives
+ * the field length (mp4_Sprite_dmv_length), the next dmv_length bits give
+ * the raw code, and each component is followed by a marker bit that must
+ * be '1'.  Results are stored in vidObjPlane->warping_mv_code_du/_dv[i].
+ *
+ * Returns MP4_STATUS_OK, MP4_STATUS_NOTSUPPORT on a missing marker bit,
+ * or an error status propagated from the bitstream reads.
+ */
+static inline mp4_Status_t
+mp4_Sprite_Trajectory(void *parent, mp4_VideoObjectLayer_t *vidObjLay, mp4_VideoObjectPlane_t *vidObjPlane)
+{
+    uint32_t code, i;
+    int32_t dmv_length=0, dmv_code=0, getbits=0;
+    mp4_Status_t ret = MP4_STATUS_OK;
+    for (i=0; i < (uint32_t)vidObjLay->sprite_info.no_of_sprite_warping_points; i++ )
+    {
+        /* --- du component --- */
+        ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length);
+        if (ret != MP4_STATUS_OK)
+        {
+            break;
+        }
+        if (dmv_length <= 0)
+        {
+            dmv_code = 0;
+        }
+        else
+        {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            dmv_code = (int32_t)code;
+            /* A clear MSB marks a negative value: map the raw code into
+               the negative half of the range. */
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+            {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        /* marker bit, must be '1' */
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        if (code != 1)
+        {
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+        vidObjPlane->warping_mv_code_du[i] = dmv_code;
+        /* --- dv component: identical decode to du --- */
+        /* TODO: create another inline function to avoid code duplication */
+        ret = (mp4_Status_t)mp4_Sprite_dmv_length(parent, &dmv_length);
+        if (ret != MP4_STATUS_OK)
+        {
+            break;
+        }
+        if (dmv_length <= 0)
+        {
+            dmv_code = 0;
+        }
+        else
+        {
+            getbits = viddec_pm_get_bits(parent, &code, (uint32_t)dmv_length);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            dmv_code = (int32_t)code;
+            if ((dmv_code & (1 << (dmv_length - 1))) == 0)
+            {
+                dmv_code -= (1 << dmv_length) - 1;
+            }
+        }
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        if (code != 1)
+        {
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+        vidObjPlane->warping_mv_code_dv[i] = dmv_code;
+
+    }
+    return ret;
+}
+
+/*
+ * Read modulo_time_base from a VOP header: a run of '1' bits terminated by
+ * a single '0'.  On return *base holds the number of '1' bits read (the
+ * terminating '0' contributes nothing to the sum).
+ */
+static inline mp4_Status_t mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(void *parent, uint32_t *base)
+{
+    mp4_Status_t ret= MP4_STATUS_OK;
+    int32_t getbits=0;
+    uint32_t  code = 0;
+
+    *base = 0;
+    do
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        *base += code;
+    } while (code != 0);
+    return ret;
+}
+
+/*
+ * mp4_Parse_VideoObjectPlane: parse a VOP header (ISO/IEC 14496-2 6.2.5)
+ * into parser->info.VisualObject.VideoObject.VideoObjectPlane.
+ *
+ * Only rectangular, non-scalable, non-binary-shape, non-newpred streams
+ * are accepted; any other configuration exits with MP4_STATUS_NOTSUPPORT.
+ * Frame-level bitstream error flags are updated on every exit path via
+ * mp4_set_hdr_bitstream_error().
+ */
+mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t  code;
+    mp4_Info_t               *pInfo = &(parser->info);
+    mp4_VideoObjectLayer_t   *vidObjLay  = &(pInfo->VisualObject.VideoObject);
+    mp4_VideoObjectPlane_t   *vidObjPlane = &(pInfo->VisualObject.VideoObject.VideoObjectPlane);
+    int32_t getbits=0;
+    mp4_Status_t ret= MP4_STATUS_PARSE_ERROR;
+
+    do
+    {
+        /* vop_coding_type (2 bits): I/P/B/S */
+        getbits = viddec_pm_get_bits(parent, &code, 2);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+        vidObjPlane->vop_coding_type = code & 0x3;
+        if ( mp4_pvt_extract_modulotimebase_from_VideoObjectPlane(parent,
+                &(vidObjPlane->modulo_time_base)) == MP4_STATUS_REQD_DATA_ERROR)
+        {
+            break;
+        }
+
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+        /* TODO: check for marker bit validity */
+        /* NOTE(review): the single-bit read above (presumably the marker bit
+           before vop_time_increment) does not check getbits, unlike every
+           other read in this function -- confirm this is intentional. */
+        {
+            uint32_t numbits=0;
+            numbits = vidObjLay->vop_time_increment_resolution_bits;
+            if (numbits == 0) numbits=1; /*TODO:check if its greater than 16 bits ?? */
+            getbits = viddec_pm_get_bits(parent, &code, numbits);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjPlane->vop_time_increment = code;
+        }
+
+        /* Two bits read at once; only the LSB (vop_coded) is kept -- the
+           high bit is presumably the intervening marker bit. */
+        getbits = viddec_pm_get_bits(parent, &code, 2);
+        BREAK_GETBITS_REQD_MISSING(getbits, ret);
+
+        vidObjPlane->vop_coded = code & 0x1;
+        if (vidObjPlane->vop_coded == 0)
+        {
+            ret = MP4_STATUS_OK;/* Exit point 1 */
+            break;
+        }
+
+        if (vidObjLay->newpred_enable)
+        {
+            /* New pred mode not supported in HW */
+            DEB("Error: mp4_Parse_VideoObjectPlane: New pred in vidObjPlane is not supported\n");
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+
+        /* vop_rounding_type: present for P-VOPs, and for S-VOPs under GMC */
+        if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
+                ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P) ||
+                 ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S) &&
+                  (vidObjLay->sprite_enable == MP4_SPRITE_GMC))))
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjPlane->vop_rounding_type = code;
+        }
+
+        if (vidObjLay->reduced_resolution_vop_enable &&
+                (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+                ((vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) ||
+                 (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_P)))
+        {
+            getbits = viddec_pm_get_bits(parent, &code, 1);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjPlane->vop_reduced_resolution = code;
+            if (vidObjPlane->vop_reduced_resolution)
+            {
+                DEB("Error: mp4_Parse_VideoObjectPlane: Reduced Resolution vidObjPlane is not supported\n");
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+        }
+
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR)
+        {
+            /* we support only rectangular shapes so the following logic is not required */
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+
+        if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) &&
+                (!vidObjLay->complexity_estimation_disable))
+        {
+            /* Not required according to DE team */
+            //read_vop_complexity_estimation_header();
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+        {
+            /* intra_dc_vlc_thr (3 bits), then interlaced-only field flags */
+            getbits = viddec_pm_get_bits(parent, &code, 3);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjPlane->intra_dc_vlc_thr = code;
+            if (vidObjLay->interlaced)
+            {
+                getbits = viddec_pm_get_bits(parent, &code, 2);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                vidObjPlane->top_field_first = ((code & 0x2) > 0);
+                vidObjPlane->alternate_vertical_scan_flag = code & 0x1;
+            }
+        }
+
+        /* Sprite data: only for S-VOPs of static/GMC sprite layers */
+        if (((vidObjLay->sprite_enable == MP4_SPRITE_STATIC) || (vidObjLay->sprite_enable == MP4_SPRITE_GMC)) &&
+                (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_S))
+        {
+            if (vidObjLay->sprite_info.no_of_sprite_warping_points > 0) {
+                if (mp4_Sprite_Trajectory(parent, vidObjLay, vidObjPlane) != MP4_STATUS_OK) {
+                    break;
+                }
+            }
+            vidObjPlane->brightness_change_factor = 0;
+            if (vidObjLay->sprite_info.sprite_brightness_change)
+            {
+                int32_t change=0;
+                if (mp4_brightness_change(parent, &change) == MP4_STATUS_PARSE_ERROR)
+                {
+                    break;
+                }
+                vidObjPlane->brightness_change_factor = change;
+            }
+
+            if (vidObjLay->sprite_enable == MP4_SPRITE_STATIC)
+            {
+                /* SGA: IS decode sprite not required. Is static even supported */
+                ret = MP4_STATUS_OK;/* Exit point 2 */
+                break;
+            }
+        }
+
+        if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY)
+        {
+            // Length of vop_quant is specified by quant_precision
+            getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision);
+            BREAK_GETBITS_REQD_MISSING(getbits, ret);
+            vidObjPlane->vop_quant = code;
+            if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_GRAYSCALE)
+            {
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+            /* vop_fcode_forward (3 bits) for non-I VOPs; zero is illegal */
+            if (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I)
+            {
+                vidObjPlane->vop_fcode_forward = 0;
+                getbits = viddec_pm_get_bits(parent, &code, 3);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                vidObjPlane->vop_fcode_forward = code & 0x7;
+                if (vidObjPlane->vop_fcode_forward == 0)
+                {
+                    DEB("Error: vop_fcode_forward == 0\n");
+                    break;
+                }
+            }
+            /* vop_fcode_backward (3 bits) for B-VOPs only; zero is illegal */
+            if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B)
+            {
+                vidObjPlane->vop_fcode_backward = 0;
+                getbits = viddec_pm_get_bits(parent, &code, 3);
+                BREAK_GETBITS_REQD_MISSING(getbits, ret);
+                vidObjPlane->vop_fcode_backward = code &0x7;
+                if (vidObjPlane->vop_fcode_backward == 0)
+                {
+                    DEB("Error: vop_fcode_backward == 0\n");
+                    break;
+                }
+            }
+            if (!vidObjLay->scalability)
+            {
+                if ((vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) &&
+                        (vidObjPlane->vop_coding_type != MP4_VOP_TYPE_I))
+                {
+                    ret = MP4_STATUS_NOTSUPPORT;
+                    break;
+                }
+                // The remaining data contains the macroblock information that is handled by the BSP
+                // The offsets to be sent to the BSP are obtained in the workload population
+            }
+            else
+            {
+                ret = MP4_STATUS_NOTSUPPORT;
+                break;
+            }
+        }
+        else
+        {/* Binary Not supported */
+            ret = MP4_STATUS_NOTSUPPORT;
+            break;
+        }
+        /* Since we made it all the way here it a success condition */
+        ret = MP4_STATUS_OK;  /* Exit point 3 */
+    } while (0);
+
+    mp4_set_hdr_bitstream_error(parser, false, ret);
+
+    return ret;
+} // mp4_Parse_VideoObjectPlane
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h
new file mode 100755
index 0000000..0262ab2
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_videoobjectplane.h
@@ -0,0 +1,10 @@
+#ifndef VIDDEC_MP4_VIDEOOBJECTPLANE_H
+#define VIDDEC_MP4_VIDEOOBJECTPLANE_H
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+/* Parse a Group of Video Object Plane (GOV) header into parser->info. */
+mp4_Status_t mp4_Parse_GroupOfVideoObjectPlane(void *parent, viddec_mp4_parser_t *parser);
+
+/* Parse a Video Object Plane (VOP) header into parser->info. */
+mp4_Status_t mp4_Parse_VideoObjectPlane(void *parent, viddec_mp4_parser_t *parser);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c
new file mode 100755
index 0000000..d1ec032
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.c
@@ -0,0 +1,290 @@
+#include <vbp_common.h>
+#include "viddec_mp4_visualobject.h"
+
+/*
+ * Check whether an MPEG-4 visual_object_verid value is one the parser
+ * accepts (1, 2, 4 or 5).  Returns 1 (true) when valid, 0 otherwise.
+ */
+static inline uint8_t mp4_pvt_isValid_verID(uint8_t id)
+{
+    return (id == 1) || (id == 2) || (id == 4) || (id == 5);
+} // mp4_pvt_isValid_verID
+
+/*
+ * Parse the video_signal_type() syntax (ISO/IEC 14496-2).  The defaults
+ * mandated by the spec are installed first so that absent optional fields
+ * keep well-defined values.  Returns MP4_STATUS_OK unless a bitstream
+ * read fails.
+ */
+static mp4_Status_t mp4_Parse_video_signal_type(void *parent, mp4_VideoSignalType_t *vidSignal)
+{
+    uint32_t data=0;
+    int32_t getbits=0;
+    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+
+    /* Set default values defined in spec first */
+    vidSignal->video_format = 5;
+    vidSignal->video_range = 0;
+    vidSignal->colour_primaries = 1;
+    vidSignal->transfer_characteristics = 1;
+    vidSignal->matrix_coefficients = 1;
+    do
+    {
+        getbits = viddec_pm_get_bits(parent, &data, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        vidSignal->is_video_signal_type = (data > 0);
+        if (vidSignal->is_video_signal_type)
+        {
+            /* 5 bits: video_format(3) | video_range(1) | colour_description(1),
+               unpacked below starting from the last-read (LSB) bit */
+            getbits = viddec_pm_get_bits(parent, &data, 5);
+            BREAK_GETBITS_FAIL(getbits, ret);
+            vidSignal->is_colour_description = data & 0x1;
+            vidSignal->video_range = ((data & 0x2) > 0);
+            data =  data >> 2;
+            vidSignal->video_format = data & 0x7;
+            if (vidSignal->is_colour_description)
+            {
+                /* 24 bits: colour_primaries, transfer_characteristics and
+                   matrix_coefficients, 8 bits each */
+                getbits = viddec_pm_get_bits(parent, &data, 24);
+                BREAK_GETBITS_FAIL(getbits, ret);
+                vidSignal->colour_primaries = (data >> 16) & 0xFF;
+                vidSignal->transfer_characteristics = (data >> 8) & 0xFF;
+                vidSignal->matrix_coefficients = data & 0xFF;
+            }
+        }
+        ret = MP4_STATUS_OK;
+    } while (0);
+
+    return ret;
+} // mp4_Parse_video_signal_type
+
+/*
+ * Fold a parse status into the parser's accumulated bitstream_error flags.
+ * Header-level problems (hdr_flag true) and frame-level problems are
+ * recorded with distinct flag sets; header errors are additionally masked
+ * down to the header-error bits.
+ */
+void mp4_set_hdr_bitstream_error(viddec_mp4_parser_t *parser, uint8_t hdr_flag, mp4_Status_t parse_status)
+{
+    /* Select the flag set for this scope (header vs. frame). */
+    uint32_t unsup_flag  = hdr_flag ? MP4_BS_ERROR_HDR_UNSUP  : MP4_BS_ERROR_FRM_UNSUP;
+    uint32_t parse_flag  = hdr_flag ? MP4_BS_ERROR_HDR_PARSE  : MP4_BS_ERROR_FRM_PARSE;
+    uint32_t nondec_flag = hdr_flag ? MP4_BS_ERROR_HDR_NONDEC : MP4_BS_ERROR_FRM_NONDEC;
+
+    if (parse_status & MP4_STATUS_NOTSUPPORT)
+        parser->bitstream_error |= unsup_flag;
+    if (parse_status & MP4_STATUS_PARSE_ERROR)
+        parser->bitstream_error |= parse_flag;
+    if (parse_status & MP4_STATUS_REQD_DATA_ERROR)
+        parser->bitstream_error |= nondec_flag;
+
+    /* Header errors keep only the bits covered by the header mask. */
+    if (hdr_flag)
+        parser->bitstream_error &= MP4_HDR_ERROR_MASK;
+} // mp4_set_hdr_bitstream_error
+
+/*
+ * Parse a VisualObjectSequence header: a single 8-bit
+ * profile_and_level_indication, validated against the Simple and Advanced
+ * Simple profile/level codes this decoder supports.  Unknown codes mark
+ * the header unsupported/non-decodable in parser->bitstream_error.
+ */
+mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser)
+{
+    uint32_t data=0;
+    int32_t getbits=0;
+    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+
+    getbits = viddec_pm_get_bits(parent, &data, 8);
+    if (getbits != -1)
+    {
+        parser->info.profile_and_level_indication = data & 0xFF;
+        // If present, check for validity
+        switch (parser->info.profile_and_level_indication)
+        {
+        case MP4_SIMPLE_PROFILE_LEVEL_0:
+        case MP4_SIMPLE_PROFILE_LEVEL_1:
+        case MP4_SIMPLE_PROFILE_LEVEL_2:
+        case MP4_SIMPLE_PROFILE_LEVEL_3:
+        case MP4_SIMPLE_PROFILE_LEVEL_4a:
+        case MP4_SIMPLE_PROFILE_LEVEL_5:
+        case MP4_SIMPLE_PROFILE_LEVEL_6:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_0:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_1:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_2:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_4:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_5:
+        case MP4_ADVANCED_SIMPLE_PROFILE_LEVEL_3B:
+            parser->bitstream_error = MP4_BS_ERROR_NONE;
+            ret = MP4_STATUS_OK;
+            break;
+        default:
+            parser->bitstream_error = MP4_BS_ERROR_HDR_UNSUP | MP4_BS_ERROR_HDR_NONDEC;
+            break;
+        }
+    }
+    else
+    {
+        parser->bitstream_error = MP4_BS_ERROR_HDR_PARSE | MP4_BS_ERROR_HDR_NONDEC;
+    }
+
+    return ret;
+} // mp4_Parse_VisualSequence
+
+/*
+ * Parse a VisualObject header (ISO/IEC 14496-2 visual_object()).  Only the
+ * VIDEO object type is supported.  After parsing, a
+ * VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ work item carrying the profile and
+ * video-signal information is appended to the workload.
+ */
+mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_Info_t *pInfo = &(parser->info);
+    mp4_VisualObject_t *visObj = &(pInfo->VisualObject);
+    uint32_t data=0;
+    int32_t getbits=0;
+    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+
+    do
+    {
+        getbits = viddec_pm_get_bits(parent, &data, 1);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        visObj->is_visual_object_identifier = (data > 0);
+
+        visObj->visual_object_verid = 1; /* Default value as per spec */
+        if (visObj->is_visual_object_identifier)
+        {
+            /* 7 bits: visual_object_verid(4) then visual_object_priority(3).
+               NOTE(review): this read's return value is not checked, unlike
+               the other reads here -- confirm this is intentional. */
+            viddec_pm_get_bits(parent, &data, 7);
+            visObj->visual_object_priority = data & 0x7;
+            data = data >> 3;
+            if (mp4_pvt_isValid_verID(data & 0xF))
+            {
+                visObj->visual_object_verid = data & 0xF;
+            }
+            else
+            {
+                DEB("Warning: Unsupported visual_object_verid\n");
+                parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+                // Continue parsing as it is not a required field for decoder
+            }
+        }
+
+        getbits = viddec_pm_get_bits(parent, &data, 4);
+        BREAK_GETBITS_FAIL(getbits, ret);
+        visObj->visual_object_type = data;
+        if (visObj->visual_object_type != MP4_VISUAL_OBJECT_TYPE_VIDEO)
+        {
+            /* VIDEO is the only supported type */
+            DEB("Error: Unsupported object: visual_object_type != video ID\n");
+            parser->bitstream_error |= MP4_BS_ERROR_HDR_UNSUP;
+            break;
+        }
+
+        /* Not required to check for visual_object_type as we already handle it above */
+        ret = mp4_Parse_video_signal_type(parent, &(visObj->VideoSignalType));
+
+        // No need to check for user data or visual object layer because they have a different start code
+        // and will not be part of this header
+
+    } while (0);
+
+    mp4_set_hdr_bitstream_error(parser, true, ret);
+
+    // POPULATE WORKLOAD ITEM
+    {
+        viddec_workload_item_t wi;
+        mp4_VideoSignalType_t *vst = &(visObj->VideoSignalType);
+
+        wi.vwi_type = VIDDEC_WORKLOAD_MPEG4_VISUAL_SEQ_OBJ;
+
+        wi.mp4_vs_vo.vs_item = 0;
+        wi.mp4_vs_vo.video_signal_type = 0;
+        wi.mp4_vs_vo.color_desc = 0;
+
+        viddec_fw_mp4_vs_set_profile_and_level_indication(&wi.mp4_vs_vo, pInfo->profile_and_level_indication);
+
+        viddec_fw_mp4_vo_set_video_signal_type(&wi.mp4_vs_vo, vst->is_video_signal_type);
+        if (vst->is_video_signal_type)
+        {
+            viddec_fw_mp4_vo_set_video_range(&wi.mp4_vs_vo, vst->video_range);
+            viddec_fw_mp4_vo_set_video_format(&wi.mp4_vs_vo, vst->video_format);
+            viddec_fw_mp4_vo_set_colour_description(&wi.mp4_vs_vo, vst->is_colour_description);
+            if (vst->is_colour_description)
+            {
+                viddec_fw_mp4_vo_set_transfer_char(&wi.mp4_vs_vo, vst->transfer_characteristics);
+                viddec_fw_mp4_vo_set_color_primaries(&wi.mp4_vs_vo, vst->colour_primaries);
+            }
+        }
+
+        int ret_val;
+        /* append returning 1 is treated as success throughout this file */
+        ret_val = viddec_pm_append_workitem(parent, &wi, false);
+        if (ret_val == 1)
+            ret = MP4_STATUS_OK;
+    }
+
+    return ret;
+} // mp4_Parse_VisualObject
+
+/*
+ * Capture user_data() bytes that follow a VS/VO/VOL/GVOP start code and
+ * forward them as workload items.  The work item type is chosen from the
+ * previous start code (parser->prev_sc); payload bytes are batched 11 at
+ * a time (the work item payload capacity) and any remainder is
+ * zero-padded before the final append.
+ */
+mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser)
+{
+    mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+    uint32_t user_data;
+    viddec_workload_item_t wi;
+
+    DEB("ParseUser-prev_sc: 0x%x\n", parser->prev_sc);
+
+    /* find the scope based on start code sc */
+    switch (parser->prev_sc) {
+    case MP4_SC_VISUAL_OBJECT_SEQUENCE:
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+        break;
+    case MP4_SC_VISUAL_OBJECT:
+        wi.vwi_type = VIDDEC_WORKLOAD_VISUAL_OBJ_USER_DATA;
+        break;
+    case MP4_SC_GROUP_OF_VOP:
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+        break;
+    case MP4_SC_VIDEO_OBJECT_LAYER_MIN:
+        wi.vwi_type = VIDDEC_WORKLOAD_VIDEO_OBJ_USER_DATA;
+        break;
+    default:
+        wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen
+        break;
+    }
+
+    /* Read 1 byte of user data and store it in workitem for the current stream level (VS/VO/VOL/GVOP).
+       Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size,
+       append the workitem. This loop is repeated till all user data is extracted and appended. */
+    wi.user_data.size = 0;
+    while (viddec_pm_get_bits(parent, &user_data, 8) != -1)
+    {
+        /* Store the valid byte in data payload */
+        wi.user_data.data_payload[wi.user_data.size] = user_data;
+        wi.user_data.size++;
+
+        /* When size exceeds payload size, append workitem and continue */
+        if (wi.user_data.size >= 11)
+        {
+            viddec_pm_setup_userdata(&wi);
+
+            ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
+            wi.user_data.size = 0;
+        }
+    }
+    /* If size is not 0, append remaining user data. */
+    if (wi.user_data.size > 0)
+    {
+        int i;
+        /* zero-pad the unused tail of the final payload */
+        for (i=wi.user_data.size; i<11; i++)
+        {
+            wi.user_data.data_payload[i] = 0;
+        }
+        viddec_pm_setup_userdata(&wi);
+        ret = (mp4_Status_t)viddec_pm_append_workitem(parent, &wi, false);
+        wi.user_data.size = 0;
+    }
+
+    /* NOTE(review): at this point ret holds a raw viddec_pm_append_workitem()
+       result cast to mp4_Status_t (1 apparently meaning success) -- confirm
+       the comparison below matches the append API's success value. */
+    if (ret == 1)
+        ret = MP4_STATUS_OK;
+
+    return ret;
+} // mp4_Parse_UserData
+
diff --git a/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h
new file mode 100755
index 0000000..d1f5a23
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_mp4_visualobject.h
@@ -0,0 +1,12 @@
+#ifndef VIDDEC_MP4_VISUALOBJECT_H
+#define VIDDEC_MP4_VISUALOBJECT_H
+#include "viddec_parser_ops.h"
+#include "viddec_mp4_parse.h"
+
+/* Parse a VisualObjectSequence header (profile/level validation). */
+mp4_Status_t mp4_Parse_VisualSequence(void *parent, viddec_mp4_parser_t *parser);
+
+/* Parse a VisualObject header and emit its workload item. */
+mp4_Status_t mp4_Parse_VisualObject(void *parent, viddec_mp4_parser_t *parser);
+
+/* Capture user_data() payload bytes into workload items. */
+mp4_Status_t mp4_Parse_UserData(void *parent, viddec_mp4_parser_t *parser);
+
+#endif
diff --git a/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c b/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c
new file mode 100755
index 0000000..3988169
--- /dev/null
+++ b/mixvbp/vbp_plugin/mp4/viddec_parse_sc_mp4.c
@@ -0,0 +1,151 @@
+#include "viddec_pm_parse.h"
+#include "viddec_mp4_parse.h"
+
+/* Parse for SC code of pattern 0x00 0x00 0xXX in the current buffer. Returns either sc found or success.
+   The context is updated with the current phase and the sc_code position in the buffer.
+
+   What is phase?: phase is a value between [0-4]; we keep track of consecutive '0's with this.
+   Any time a '0' is found it is incremented by 1 (up to 2) and reset to '0' if a zero is not found.
+   If a 0xXX code is found and the current phase is 2, it is changed to 3, which means we found the
+   pattern we are looking for. It is incremented to 4 once we see a byte after this pattern.
+
+   For MP4 there are two start-code patterns, LVH & SVH. LVH is the same as other codecs (00 00 01);
+   SVH, a.k.a. H263, is (00 00 8X). So we have to look for both kinds of start codes. The spec doesn't
+   explicitly say whether both of them can exist in a stream, so the current implementation assumes
+   that only one of them is present in a given stream to simplify the implementation. The reason it
+   can get complicated is that a resync marker in LVH can potentially be (00 00 8), which will cause
+   a false detect of an SVH start code.
+*/
+#ifndef VBP
+/*
+ * viddec_parse_sc_mp4: scan the cubby buffer for the next MP4 start code
+ * (00 00 01 for LVH, 00 00 8X for SVH).  Returns 1 when a full start code
+ * was found (phase reaches 4), 0 otherwise; cxt->phase and
+ * cxt->sc_end_pos carry the scan state across calls.
+ */
+uint32_t viddec_parse_sc_mp4(void *in, void *pcxt, void *sc_state)
+{
+    uint8_t *ptr;
+    uint32_t size;
+    uint32_t data_left=0, phase = 0, ret = 0;
+    viddec_sc_parse_cubby_cxt_t *cxt;
+    viddec_mp4_parser_t *p_info;
+
+    cxt = ( viddec_sc_parse_cubby_cxt_t *)in;
+    viddec_sc_prefix_state_t *state = (viddec_sc_prefix_state_t *)sc_state;
+    size = 0;
+    data_left = cxt->size;
+    ptr = cxt->buf;
+    phase = cxt->phase;
+    cxt->sc_end_pos = -1;
+    p_info = (viddec_mp4_parser_t *)pcxt;
+
+    /* parse until there is more data and start code not found */
+    while ((data_left > 0) &&(phase < 3))
+    {
+        /* Check if we are byte aligned & phase=0; if that's the case we can
+           check a word at a time instead of a byte.
+           NOTE(review): the (uint32_t)ptr casts below truncate the pointer
+           on LP64 targets; harmless for this low-bit alignment test, but
+           uintptr_t would be the portable spelling. */
+        if (((((uint32_t)ptr) & 0x3) == 0) && (phase == 0))
+        {
+            while (data_left > 3)
+            {
+                uint32_t data;
+                char mask1 = 0, mask2=0;
+
+                data = *((uint32_t *)ptr);
+#ifndef MFDBIGENDIAN
+                data = SWAP_WORD(data);
+#endif
+                mask1 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK0));
+                mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
+                /* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
+                   two consecutive zero bytes for a start code pattern */
+                if (mask1 && mask2)
+                {/* Success so skip 4 bytes and start over */
+                    ptr+=4;
+                    size+=4;
+                    data_left-=4;
+                    continue;
+                }
+                else
+                {
+                    break;
+                }
+            }
+        }
+
+        /* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
+           two zero bytes in the word so we look one byte at a time*/
+        if (data_left > 0)
+        {
+            if (*ptr == FIRST_STARTCODE_BYTE)
+            {/* Phase can be 3 only if third start code byte is found */
+                phase++;
+                ptr++;
+                size++;
+                data_left--;
+                if (phase > 2)
+                {
+                    /* Three or more zeros in a row behave like exactly two:
+                       clamp the phase and skip any further zero run a word
+                       at a time when aligned. */
+                    phase = 2;
+
+                    if ( (((uint32_t)ptr) & 0x3) == 0 )
+                    {
+                        while ( data_left > 3 )
+                        {
+                            if (*((uint32_t *)ptr) != 0)
+                            {
+                                break;
+                            }
+                            ptr+=4;
+                            size+=4;
+                            data_left-=4;
+                        }
+                    }
+                }
+            }
+            else
+            {
+                uint8_t normal_sc=0, short_sc=0;
+                if (phase == 2)
+                {
+                    normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
+                    short_sc  = (p_info->ignore_scs == 0) && (SHORT_THIRD_STARTCODE_BYTE == ( *ptr & 0xFC));
+                }
+
+                if (!(normal_sc | short_sc))
+                {
+                    phase = 0;
+                }
+                else
+                {/* Match for start code so update context with byte position */
+                    cxt->sc_end_pos = size;
+                    phase = 3;
+                    p_info->cur_sc_prefix = p_info->next_sc_prefix;
+                    p_info->next_sc_prefix = (normal_sc) ? 1: 0;
+                    if (normal_sc)
+                    {
+                        p_info->ignore_scs=1;
+                    }
+                    else
+                    {
+                        /* For short start code since start code is in one nibble just return at this point */
+                        phase += 1;
+                        state->next_sc = *ptr;
+                        state->second_scprfx_length = 2;
+                        ret=1;
+                        break;
+                    }
+                }
+                ptr++;
+                size++;
+                data_left--;
+            }
+        }
+    }
+    if ((data_left > 0) && (phase == 3))
+    {
+        /* Long start code fully matched: record the byte after the prefix */
+        cxt->sc_end_pos++;
+        state->next_sc = cxt->buf[cxt->sc_end_pos];
+        state->second_scprfx_length = 3;
+        phase++;
+        ret = 1;
+    }
+    cxt->phase = phase;
+    /* Return SC found only if phase is 4, else always success */
+    return ret;
+}
+#endif
diff --git a/mixvbp/vbp_plugin/vc1/Android.mk b/mixvbp/vbp_plugin/vc1/Android.mk
new file mode 100755
index 0000000..b0245fa
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/Android.mk
@@ -0,0 +1,36 @@
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+    mix_vbp_vc1_stubs.c \
+    vc1parse_bpic_adv.c \
+    vc1parse_huffman.c \
+    vc1parse_mv_com.c \
+    vc1parse_ppic_adv.c \
+    viddec_vc1_parse.c \
+    vc1parse_bpic.c \
+    vc1parse_common_tables.c \
+    vc1parse_ipic_adv.c \
+    vc1parse_pic_com_adv.c \
+    vc1parse_ppic.c \
+    vc1parse_bitplane.c \
+    vc1parse.c \
+    vc1parse_ipic.c \
+    vc1parse_pic_com.c \
+    vc1parse_vopdq.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES := \
+    $(MIXVBP_DIR)/include   \
+    $(MIXVBP_DIR)/vbp_manager/include   \
+    $(MIXVBP_DIR)/vbp_plugin/vc1/include
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_vc1
+
+LOCAL_SHARED_LIBRARIES := \
+    libmixvbp
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/mixvbp/vbp_plugin/vc1/include/vc1common.h b/mixvbp/vbp_plugin/vc1/include/vc1common.h
new file mode 100755
index 0000000..e4f1b3f
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/include/vc1common.h
@@ -0,0 +1,143 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    VC1 header.
+//
+*/
+
+#ifndef _VC1_COMMON_H_
+#define _VC1_COMMON_H_
+
+/* If the pixel data is left near an emulation prevention sequence, the decoder will be unaware
+   unless we send some previous bytes */
+//#define PADDING_FOR_EMUL 3
+#define PADDING_FOR_EMUL 0
+
+#define GET_BLSB( name, bitf )  BLSB_MFD_##name##_##bitf
+#define GET_BMSK( name, bitf )  BMSK_MFD_##name##_##bitf
+
+#define BF_READ( name, bitf, value )  ((value & GET_BMSK(name, bitf) ) >> GET_BLSB(name, bitf) )
+#define BF_WRITE( name, bitf, value, data ) value = ((value & ~GET_BMSK(name, bitf)) | ((data) << GET_BLSB(name, bitf)))
+
+enum vc1_workload_item_type
+{
+    VIDDEC_WORKLOAD_VC1_DMEM = VIDDEC_WORKLOAD_DECODER_SPECIFIC,
+    VIDDEC_WORKLOAD_VC1_BITOFFSET,
+    VIDDEC_WORKLOAD_VC1_BITPLANE0,
+    VIDDEC_WORKLOAD_VC1_BITPLANE1,
+    VIDDEC_WORKLOAD_VC1_BITPLANE2,
+    VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY,
+    VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP,
+    VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW,
+    VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW,
+    VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO,
+    VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE,
+    VIDDEC_WORKLOAD_VC1_PAST_FRAME   = VIDDEC_WORKLOAD_REF_FRAME_SOURCE_0,
+    VIDDEC_WORKLOAD_VC1_FUTURE_FRAME,
+};
+
+typedef enum
+{
+    vc1_ProgressiveFrame = 0,
+    vc1_InterlacedFrame  = 2,
+    vc1_InterlacedField  = 3,
+    vc1_PictureFormatNone
+} vc1_fcm;
+
+/** This enumeration defines the various frame types as defined in PTYPE syntax
+element.
+PTYPE interpretation depends on bitstream profile. The value that needs to get
+programmed in the frame_type register 0x2218 is this generic enum obtained
+from Canmore code.
+Changing this enum to match the spec for each profile caused md5 mismatches.
+TODO: Why are these the values to program - is this the case with reference decoder?
+*/
+enum
+{
+    VC1_I_FRAME       = (1 << 0),
+    VC1_P_FRAME       = (1 << 1),
+    VC1_B_FRAME       = (1 << 2),
+    VC1_BI_FRAME      = VC1_I_FRAME | VC1_B_FRAME,
+    VC1_SKIPPED_FRAME = (1 << 3) | VC1_P_FRAME
+};
+
+enum {
+    vc1_FrameDone   = 1 << 0,
+    vc1_FieldDone   = 1 << 1,
+    vc1_SliceDone   = 1 << 2,
+    vc1_Field1Done  = 1 << 3,
+    vc1_Field2Done  = 1 << 4,
+    vc1_FrameError  = 1 << 8,
+};
+
+/* Shadow copy of the VC-1 stream-parameter register block; each comment
+   gives the field's byte offset within the block. */
+typedef struct {
+    /* 0x00 */
+    uint32_t general;
+    /* 0x04 */
+    uint32_t stream_format1;
+    /* 0x08 */
+    uint32_t coded_size;
+    /* 0x0c */
+    uint32_t stream_format2;
+    /* 0x10 */
+    uint32_t entrypoint1;
+    /* 0x14 */
+    uint32_t ap_range_map;
+    /* 0x18 */
+    uint32_t frame_type;
+    /* 0x1c */
+    uint32_t recon_control;
+    /* 0x20 */
+    uint32_t mv_control;
+    /* 0x24 */
+    uint32_t intcomp_fwd_top;
+    /* 0x28 */
+    uint32_t ref_bfraction;
+    /* 0x2c */
+    uint32_t blk_control;
+    /* 0x30 */
+    uint32_t trans_data;
+    /* 0x34 */
+    uint32_t vop_dquant;
+#define NUM_REF_ID 4
+    /* 0x38-0x48 */ uint32_t ref_frm_id[NUM_REF_ID];
+    /* 0x48 */
+    uint32_t fieldref_ctrl_id;
+    /* 0x4c */
+    uint32_t auxfrmctrl;
+    /* 0x50 */
+    uint32_t imgstruct;
+    /* 0x54 */
+    uint32_t alt_frame_type;
+    /* 0x58 */
+    uint32_t intcomp_fwd_bot;
+    /* 0x5c */
+    uint32_t intcomp_bwd_top;
+    /* 0x60 */
+    uint32_t intcomp_bwd_bot;
+    /* 0x64 (was mislabeled 0x14) */
+    uint32_t smp_range_map;
+} VC1D_SPR_REGS;
+
+/*
+In VC1, past reference is the fwd reference and future reference is the backward reference
+i.e. P frame has only a forward reference and B frame has both a forward and a backward reference.
+*/
+enum {
+    VC1_FRAME_CURRENT_REF = 0,
+    VC1_FRAME_CURRENT_DIS,
+    VC1_FRAME_PAST,
+    VC1_FRAME_FUTURE,
+    VC1_FRAME_ALT
+};
+
+#endif  //_VC1_COMMON_H_
+
diff --git a/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c b/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c
new file mode 100755
index 0000000..cffa7b8
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/mix_vbp_vc1_stubs.c
@@ -0,0 +1,30 @@
+#include "vc1.h"
+
+void vc1_start_new_frame (void *parent, vc1_viddec_parser_t   *parser )
+{
+    return;
+}
+
+void vc1_end_frame (vc1_viddec_parser_t *parser)
+{
+    return;
+}
+
+
+int32_t vc1_parse_emit_current_frame( void *parent,  vc1_viddec_parser_t   *parser )
+{
+    return(0);
+}
+
+
+void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser)
+{
+}
+
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser)
+{
+}
+
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser)
+{
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1.h b/mixvbp/vbp_plugin/vc1/vc1.h
new file mode 100755
index 0000000..ca92d17
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1.h
@@ -0,0 +1,236 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2001-2006 Intel Corporation. All Rights Reserved.
+//
+//  Description:    VC1 header.
+//
+*/
+
+#ifndef _VC1_H_
+#define _VC1_H_
+
+#ifdef MFD_FIRMWARE
+typedef unsigned int size_t;
+#define LOG(...)
+#else
+
+#include <stdio.h>
+#include <unistd.h>
+#include <stdint.h>
+#ifndef VBP
+enum {
+    NONE = 0,
+    CRITICAL,
+    WARNING,
+    INFO,
+    DEBUG,
+} ;
+
+#define vc1_log_level DEBUG
+
+#define LOG( log_lev, format, args ... ) \
+      if (vc1_log_level >= log_lev) { OS_INFO("%s[%d]:: " format "\n", __FUNCTION__ , __LINE__ ,  ## args ); }
+#endif
+#endif
+
+#include "viddec_fw_workload.h"
+#include "vc1parse_common_defs.h"
+#include "vc1common.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifndef VBP
+#define LOG_CRIT(format, args ... )  LOG( CRITICAL, format, ## args)
+#define LOG_WARN(format, args ... )  LOG( WARNING,  format, ## args)
+#define LOG_INFO(format, args ... )  LOG( INFO,     format, ## args)
+#define LOG_DEBUG(format, args ... ) LOG( DEBUG,    format, ## args)
+#else
+#define LOG_CRIT(format, args ... )
+#define LOG_WARN(format, args ... )
+#define LOG_INFO(format, args ... )
+#define LOG_DEBUG(format, args ... )
+#endif
+
+// Seems to be hardware bug: DO NOT TRY TO SWAP BITPLANE0 and BITPLANE2
+// Block Control Register at offset 222C uses Bitplane_raw_ID0 to indicate directmb/fieldtx
+// and Bitplane_raw_ID2 for acpred/mvtypemb/forwardmb
+// but when we send bitplane index 0 for directmb/fieldtx and bitplane index 2 for acpred/mvtypemb/forwardmb
+// md5 mismatches are seen
+    typedef enum
+    {
+        BPP_FORWARDMB  =  VIDDEC_WORKLOAD_VC1_BITPLANE0,
+        BPP_ACPRED     =  VIDDEC_WORKLOAD_VC1_BITPLANE0,
+        BPP_MVTYPEMB   =  VIDDEC_WORKLOAD_VC1_BITPLANE0,
+        BPP_OVERFLAGS  =  VIDDEC_WORKLOAD_VC1_BITPLANE1,
+        BPP_SKIPMB     =  VIDDEC_WORKLOAD_VC1_BITPLANE1,
+        BPP_DIRECTMB   =  VIDDEC_WORKLOAD_VC1_BITPLANE2,
+        BPP_FIELDTX    =  VIDDEC_WORKLOAD_VC1_BITPLANE2,
+    } vc1_bpp_type_t;
+
+    /* status codes */
+    typedef enum {
+        VC1_STATUS_EOF          =  1,   // end of file
+        VC1_STATUS_OK           =  0,   // no error
+        VC1_STATUS_NO_MEM       =  2,   // out of memory
+        VC1_STATUS_FILE_ERROR   =  2,   // file error
+        VC1_STATUS_NOTSUPPORT   =  2,   // not supported mode
+        VC1_STATUS_PARSE_ERROR  =  2,   // fail in parse MPEG-4 stream
+        VC1_STATUS_ERROR        =  2    // unknown/unspecified error
+    } vc1_Status;
+
+    /* VC1 start code values */
+    typedef enum {
+        vc1_Forbidden           = 0x80,/*0x80-0xFF*/
+        vc1_Reserved1           = 0x09,/*0x00-0x09*/
+        vc1_Reserved2           = 0x10,
+        vc1_Reserved3           = 0x1A,
+        vc1_Reserved4           = 0x20,/*0x20-0x7F*/
+        vc1_SCEndOfSequence     = 0x0A,
+        vc1_SCSlice             = 0x0B,
+        vc1_SCField             = 0x0C,
+        vc1_SCFrameHeader       = 0x0D,
+        vc1_SCEntryPointHeader  = 0x0E,
+        vc1_SCSequenceHeader    = 0x0F,
+        vc1_SCSliceUser         = 0x1B,
+        vc1_SCFieldUser         = 0x1C,
+        vc1_SCFrameUser         = 0x1D,
+        vc1_SCEntryPointUser    = 0x1E,
+        vc1_SCSequenceUser      = 0x1F
+    } vc1_sc;
+
+#if 0
+    typedef enum
+    {
+        vc1_ProfileSimple = 0,  /** Simple profile */
+        vc1_ProfileMain,        /** Main profile */
+        vc1_ProfileReserved,    /** Reserved */
+        vc1_ProfileAdvanced     /** Advanced profile */
+    } vc1_Profile;
+#endif
+
+    typedef enum
+    {
+        vc1_PtypeI  = 1,
+        vc1_PtypeP  = 2,
+        vc1_PtypeB  = 4,
+        vc1_PtypeBI = 5,
+        vc1_PtypeSkipped = 8|2,
+    } vc1_ptype;
+
+    typedef enum
+    {
+        vc1_PtypeII = 0,
+        vc1_PtypeIP = 1,
+        vc1_PtypePI = 2,
+        vc1_PtypePP = 3,
+        vc1_PtypeBB = 4,
+        vc1_PtypeBBI = 5,
+        vc1_PtypeBIB = 6,
+        vc1_PtypeBIBI = 7
+    } vc1_fptype;
+
+    typedef enum
+    {
+        vc1_Imode_Raw  = 0,         //0x0000
+        vc1_Imode_Norm2,        //0x10
+        vc1_Imode_Diff2,        //0x001
+        vc1_Imode_Norm6,        //0x11
+        vc1_Imode_Diff6,        //0x0001
+        vc1_Imode_Rowskip,      //0x010
+        vc1_Imode_Colskip,      //0x011
+    } vc1_Imode;
+
+    /* calculation of MAX_BITPLANE_SZ 2048/16x1088/16 pel= 128x68 bit used for bitplane
+     * as rows are packed in DWORDS
+     * we have (128)/32 * 68 Dwords needed for bitplane storage
+     */
+#define MAX_BITPLANE_SZ 272
+
+    /* Full Info */
+    typedef struct {
+        unsigned char*       bufptr;         /* current frame, point to header or data */
+        int                  bitoff;         /* mostly point to next frame header or PSC */
+        int                  picture_info_has_changed;
+        vc1_metadata_t       metadata;
+        vc1_PictureLayerHeader picLayerHeader;
+        uint32_t             bitplane[MAX_BITPLANE_SZ];
+    } vc1_Info;
+
+#ifdef __cplusplus
+}
+#endif
+
+enum {
+    VC1_REF_FRAME_T_MINUS_1 = 0,
+    VC1_REF_FRAME_T_MINUS_2,
+    VC1_REF_FRAME_T_MINUS_0,
+    VC1_NUM_REFERENCE_FRAMES,
+};
+
+enum vc1_sc_seen_flags
+{
+    VC1_SC_INVALID = 0 << 0,
+    VC1_SC_SEQ     = 1 << 0,
+    VC1_SC_EP      = 1 << 1,
+    VC1_SC_FRM     = 1 << 2,
+    VC1_SC_FLD     = 1 << 3,
+    VC1_SC_SLC     = 1 << 4,
+    VC1_SC_UD      = 1 << 5,
+};
+#define VC1_SEQ_MASK VC1_SC_SEQ
+#define VC1_EP_MASK VC1_SC_SEQ | VC1_SC_EP
+#define VC1_FRM_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM
+#define VC1_FLD_MASK VC1_SC_SEQ | VC1_SC_EP | VC1_SC_FRM | VC1_SC_FLD
+
+typedef struct {
+    int id;
+    uint32_t intcomp_top;
+    uint32_t intcomp_bot;
+    int fcm;         /* frame coding mode */
+    int type;
+    int anchor[2];   /* one per field */
+    int rr_en;       /* range reduction enable flag at sequence layer */
+    int rr_frm;      /* range reduction flag at picture layer */
+    int tff;
+} ref_frame_t;
+
+typedef struct
+{
+    uint32_t      sc_seen_since_last_wkld;
+    uint32_t      sc_seen;
+    uint32_t      is_frame_start;
+    uint32_t      is_second_start;
+    uint32_t      is_reference_picture;
+    uint32_t      intcomp_last[4]; /* for B frames */
+    uint32_t      intcomp_top[2];
+    uint32_t      intcomp_bot[2];
+    vc1_Info      info;
+    VC1D_SPR_REGS spr;
+    ref_frame_t   ref_frame[VC1_NUM_REFERENCE_FRAMES];
+#ifdef VBP
+    /* A storage area is provided for each type of bit plane.  Only one of */
+    /* each type will ever be used for a picture and never more than three */
+    /* bit-planes per picture, and often only one is used.  We never clear */
+    /* this data and write into it when we need to.  vc1parse_bitplane.c    */
+    /* makes use of these to set them to one of the bitplane types included */
+    /* in the picture header structure.  Those structures are set every     */
+    /* time a picture parse begins. */
+    uint32_t      bp_forwardmb[4096];
+    uint32_t      bp_acpred[4096];
+    uint32_t      bp_mvtypemb[4096];
+    uint32_t      bp_overflags[4096];
+    uint32_t      bp_skipmb[4096];
+    uint32_t      bp_directmb[4096];
+    uint32_t      bp_fieldtx[4096];
+    uint32_t	  start_code;
+#endif
+} vc1_viddec_parser_t;
+
+#endif  //_VC1_H_
+
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.c b/mixvbp/vbp_plugin/vc1/vc1parse.c
new file mode 100755
index 0000000..06ac094
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse.c
@@ -0,0 +1,604 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 bitstream layers down to but not including
+//  macroblock layer.
+//
+*/
+
+#include "vc1parse.h"
+
+#define VC1_PIXEL_IN_LUMA 16
+
+/*------------------------------------------------------------------------------
+ * Parse modified rcv file, start codes are inserted using rcv2vc1.c.
+ * source is in
+ * http://svn.jf.intel.com/svn/DHG_Src/CESWE_Src/DEV/trunk/sv/mfd/tools/utils.
+ * Assume rcv file width < 90,112 pixel to differentiate from real VC1
+ * advanced profile header.
+ * Original rcv description is in annex L
+ * Table 263 of SMPTE 421M.
+ */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t result;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_RcvSequenceHeader rcv;
+
+    memset(&rcv, 0, sizeof(vc1_RcvSequenceHeader));
+
+    result = viddec_pm_get_bits(ctxt, &rcv.struct_a_rcv, 32);
+    md->width = rcv.struct_a.HORIZ_SIZE;
+    md->height = rcv.struct_a.VERT_SIZE;
+#ifdef VBP
+    //The HRD rate and HRD buffer size may be encoded according to a 64 bit sequence header data structure B
+    //if there is no data structure B metadata contained in the bitstream, we will not be able to get the
+    //bitrate data, hence we set it to 0 for now
+    md->HRD_NUM_LEAKY_BUCKETS = 0;
+    md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0;
+#endif
+
+    result = viddec_pm_get_bits(ctxt, &rcv.struct_c_rcv, 32);
+    md->PROFILE = rcv.struct_c.PROFILE >> 2;
+    md->LOOPFILTER = rcv.struct_c.LOOPFILTER;
+    md->MULTIRES = rcv.struct_c.MULTIRES;
+    md->FASTUVMC = rcv.struct_c.FASTUVMC;
+    md->EXTENDED_MV = rcv.struct_c.EXTENDED_MV;
+    md->DQUANT = rcv.struct_c.DQUANT;
+    md->VSTRANSFORM = rcv.struct_c.VSTRANSFORM;
+    md->OVERLAP = rcv.struct_c.OVERLAP;
+    md->RANGERED = rcv.struct_c.RANGERED;
+    md->MAXBFRAMES = rcv.struct_c.MAXBFRAMES;
+    md->QUANTIZER = rcv.struct_c.QUANTIZER;
+    md->FINTERPFLAG = rcv.struct_c.FINTERPFLAG;
+#ifdef VBP
+    md->SYNCMARKER = rcv.struct_c.SYNCMARKER;
+#endif
+
+    if ((md->PROFILE == VC1_PROFILE_SIMPLE) ||
+            (md->MULTIRES && md->PROFILE == VC1_PROFILE_MAIN))
+    {
+        md->DQUANT = 0;
+    }
+    // TODO: NEED TO CHECK RESERVED BITS ARE 0
+
+    md->widthMB = (md->width + 15 )  / VC1_PIXEL_IN_LUMA;
+    md->heightMB = (md->height + 15) / VC1_PIXEL_IN_LUMA;
+
+    DEB("rcv: beforemod: res: %dx%d\n", md->width, md->height);
+
+    /* WL takes resolution in unit of 2 pel - sec. 6.2.13.1 */
+    md->width = md->width/2 -1;
+    md->height = md->height/2 -1;
+
+    DEB("rcv: res: %dx%d\n", md->width, md->height);
+
+    // POPULATE WORKLOAD ITEM
+    {
+        viddec_workload_item_t wi;
+
+        wi.vwi_type = VIDDEC_WORKLOAD_VC1_SEQ_HDR_STRUCT_A_C;
+
+        wi.vc1_sh_struct_a_c.size = 0;
+        wi.vc1_sh_struct_a_c.flags = 0;
+        wi.vc1_sh_struct_a_c.pad = 0;
+
+        viddec_fw_vc1_set_rcv_horiz_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.HORIZ_SIZE);
+        viddec_fw_vc1_set_rcv_vert_size(&wi.vc1_sh_struct_a_c, rcv.struct_a.VERT_SIZE);
+
+        viddec_fw_vc1_set_rcv_bitrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.BITRTQ_POSTPROC);
+        viddec_fw_vc1_set_rcv_frmrtq_postproc(&wi.vc1_sh_struct_a_c, rcv.struct_c.FRMRTQ_POSTPROC);
+        viddec_fw_vc1_set_rcv_profile(&wi.vc1_sh_struct_a_c, rcv.struct_c.PROFILE);
+        viddec_fw_vc1_set_rcv_level(&wi.vc1_sh_struct_a_c, 0);
+        viddec_fw_vc1_set_rcv_cbr(&wi.vc1_sh_struct_a_c, 0);
+        viddec_fw_vc1_set_rcv_rangered(&wi.vc1_sh_struct_a_c, rcv.struct_c.RANGERED);
+        viddec_fw_vc1_set_rcv_maxbframes(&wi.vc1_sh_struct_a_c, rcv.struct_c.MAXBFRAMES);
+        viddec_fw_vc1_set_rcv_finterpflag(&wi.vc1_sh_struct_a_c, rcv.struct_c.FINTERPFLAG);
+
+        result = viddec_pm_append_workitem(ctxt, &wi, false);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse sequence layer.  This function is only applicable to advanced profile
+ * as simple and main profiles use other mechanisms to communicate these
+ * metadata.
+ * Table 3 of SMPTE 421M.
+ * Table 13 of SMPTE 421M for HRD_PARAM().
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_SequenceLayerHeader sh;
+    uint32_t result;
+
+    memset(&sh, 0, sizeof(vc1_SequenceLayerHeader));
+
+    // PARSE SEQUENCE HEADER
+    result = viddec_pm_get_bits(ctxt, &sh.flags, 15);
+    if (result == 1)
+    {
+        md->PROFILE = sh.seq_flags.PROFILE;
+        md->LEVEL = sh.seq_flags.LEVEL;
+        md->CHROMAFORMAT = sh.seq_flags.COLORDIFF_FORMAT;
+        md->FRMRTQ = sh.seq_flags.FRMRTQ_POSTPROC;
+        md->BITRTQ = sh.seq_flags.BITRTQ_POSTPROC;
+    }
+
+    result = viddec_pm_get_bits(ctxt, &sh.max_size, 32);
+    if (result == 1)
+    {
+        md->POSTPROCFLAG = sh.seq_max_size.POSTPROCFLAG;
+        md->width = sh.seq_max_size.MAX_CODED_WIDTH;
+        md->height = sh.seq_max_size.MAX_CODED_HEIGHT;
+        md->PULLDOWN = sh.seq_max_size.PULLDOWN;
+        md->INTERLACE = sh.seq_max_size.INTERLACE;
+        md->TFCNTRFLAG = sh.seq_max_size.TFCNTRFLAG;
+        md->FINTERPFLAG = sh.seq_max_size.FINTERPFLAG;
+        md->PSF = sh.seq_max_size.PSF;
+    }
+
+    if (sh.seq_max_size.DISPLAY_EXT == 1)
+    {
+        result = viddec_pm_get_bits(ctxt, &sh.disp_size, 29);
+        if (result == 1)
+        {
+            if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1)
+            {
+                result = viddec_pm_get_bits(ctxt, &tempValue, 4);
+                sh.ASPECT_RATIO = tempValue;
+                if (sh.ASPECT_RATIO == 15)
+                {
+                    result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16);
+                }
+#ifdef VBP
+                md->ASPECT_RATIO_FLAG = 1;
+                md->ASPECT_RATIO = sh.ASPECT_RATIO;
+                md->ASPECT_HORIZ_SIZE = sh.seq_aspect_size.ASPECT_HORIZ_SIZE;
+                md->ASPECT_VERT_SIZE = sh.seq_aspect_size.ASPECT_VERT_SIZE;
+#endif
+            }
+
+            result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+            sh.FRAMERATE_FLAG = tempValue;
+            if (sh.FRAMERATE_FLAG == 1)
+            {
+                result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+                sh.FRAMERATEIND = tempValue;
+                if (sh.FRAMERATEIND == 0)
+                {
+                    result = viddec_pm_get_bits(ctxt, &sh.framerate_fraction, 12);
+                }
+                else
+                {
+                    result = viddec_pm_get_bits(ctxt, &tempValue, 16);
+                    sh.FRAMERATEEXP = tempValue;
+                }
+            }
+
+            result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+            sh.COLOR_FORMAT_FLAG = tempValue;
+            if (sh.COLOR_FORMAT_FLAG == 1)
+            {
+                result = viddec_pm_get_bits(ctxt, &sh.color_format, 24);
+            }
+#ifdef VBP
+            md->COLOR_FORMAT_FLAG = sh.COLOR_FORMAT_FLAG;
+            md->MATRIX_COEF = sh.seq_color_format.MATRIX_COEF;
+#endif
+        } // Successful get of display size
+    } // DISPLAY_EXT is 1
+
+    result = viddec_pm_get_bits(ctxt, &tempValue, 1);
+    sh.HRD_PARAM_FLAG = tempValue;
+    if (sh.HRD_PARAM_FLAG == 1)
+    {
+        /* HRD_PARAM(). */
+        result = viddec_pm_get_bits(ctxt, &tempValue, 5);
+        sh.HRD_NUM_LEAKY_BUCKETS = tempValue;
+        md->HRD_NUM_LEAKY_BUCKETS = sh.HRD_NUM_LEAKY_BUCKETS;
+#ifndef VBP
+        // Skip the rest of the parsing - hrdinfo is not required for decode or for attributes
+#else
+        {
+            uint8_t count;
+            uint8_t bitRateExponent;
+            uint8_t bufferSizeExponent;
+
+            /* bit_rate_exponent */
+            result = viddec_pm_get_bits(ctxt, &tempValue, 4);
+            bitRateExponent = (uint8_t)(tempValue + 6);
+
+            /* buffer_size_exponent */
+            result = viddec_pm_get_bits(ctxt, &tempValue, 4);
+            bufferSizeExponent = (uint8_t)(tempValue + 4);
+            md->hrd_initial_state.BUFFER_SIZE_EXPONENT = bufferSizeExponent;
+
+            for(count = 0; count < sh.HRD_NUM_LEAKY_BUCKETS; count++)
+            {
+                /* hrd_rate */
+                result = viddec_pm_get_bits(ctxt, &tempValue, 16);
+                md->hrd_initial_state.sLeakyBucket[count].HRD_RATE =
+                    (uint32_t)(tempValue + 1) << bitRateExponent;
+
+                /* hrd_buffer */
+                result = viddec_pm_get_bits(ctxt, &tempValue, 16);
+                md->hrd_initial_state.sLeakyBucket[count].HRD_BUFFER =
+                    (uint32_t)(tempValue + 1) << bufferSizeExponent;
+            }
+        }
+#endif
+    }
+    else
+    {
+        md->HRD_NUM_LEAKY_BUCKETS = 0;
+#ifdef VBP
+        md->hrd_initial_state.sLeakyBucket[0].HRD_RATE = 0;
+#endif
+    }
+
+    md->widthMB = (((md->width + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA;
+    md->heightMB = (((md->height + 1) * 2) + 15) / VC1_PIXEL_IN_LUMA;
+
+    DEB("md: res: %dx%d\n", md->width, md->height);
+    DEB("sh: dispres: %dx%d\n", sh.seq_disp_size.DISP_HORIZ_SIZE, sh.seq_disp_size.DISP_VERT_SIZE);
+
+    // POPULATE WORKLOAD ITEM
+    {
+        viddec_workload_item_t wi_sl, wi_de;
+
+        wi_sl.vwi_type = VIDDEC_WORKLOAD_SEQUENCE_INFO;
+
+        wi_sl.vc1_sl.size = 0;
+        wi_sl.vc1_sl.flags = 0;
+        wi_sl.vc1_sl.pad = 0;
+
+        viddec_fw_vc1_set_profile(&wi_sl.vc1_sl, sh.seq_flags.PROFILE);
+        viddec_fw_vc1_set_level(&wi_sl.vc1_sl, sh.seq_flags.LEVEL);
+        viddec_fw_vc1_set_colordiff_format(&wi_sl.vc1_sl, sh.seq_flags.COLORDIFF_FORMAT);
+        viddec_fw_vc1_set_pulldown(&wi_sl.vc1_sl, sh.seq_max_size.PULLDOWN);
+        viddec_fw_vc1_set_max_coded_width(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_WIDTH);
+        viddec_fw_vc1_set_max_coded_height(&wi_sl.vc1_sl, sh.seq_max_size.MAX_CODED_HEIGHT);
+
+        viddec_fw_vc1_set_bitrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.BITRTQ_POSTPROC);
+        viddec_fw_vc1_set_frmrtq_postproc(&wi_sl.vc1_sl, sh.seq_flags.FRMRTQ_POSTPROC);
+        viddec_fw_vc1_set_interlace(&wi_sl.vc1_sl, sh.seq_max_size.INTERLACE);
+        viddec_fw_vc1_set_tfcntrflag(&wi_sl.vc1_sl, sh.seq_max_size.TFCNTRFLAG);
+        viddec_fw_vc1_set_finterpflag(&wi_sl.vc1_sl, sh.seq_max_size.FINTERPFLAG);
+        viddec_fw_vc1_set_psf(&wi_sl.vc1_sl, sh.seq_max_size.PSF);
+        viddec_fw_vc1_set_display_ext(&wi_sl.vc1_sl, sh.seq_max_size.DISPLAY_EXT);
+
+        result = viddec_pm_append_workitem(ctxt, &wi_sl, false);
+
+        // send DISPLAY EXTENSION metadata if present
+        if (sh.seq_max_size.DISPLAY_EXT)
+        {
+            wi_de.vwi_type = VIDDEC_WORKLOAD_DISPLAY_INFO;
+
+            wi_de.vc1_sl_de.size = 0;
+            wi_de.vc1_sl_de.framerate = 0;
+            wi_de.vc1_sl_de.aspectsize = 0;
+
+            viddec_fw_vc1_set_disp_horiz_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_HORIZ_SIZE);
+            viddec_fw_vc1_set_disp_vert_size(&wi_de.vc1_sl_de, sh.seq_disp_size.DISP_VERT_SIZE);
+            viddec_fw_vc1_set_disp_aspect_ratio_flag(&wi_de.vc1_sl_de, sh.seq_disp_size.ASPECT_RATIO_FLAG);
+            viddec_fw_vc1_set_disp_color_format_flag(&wi_de.vc1_sl_de, sh.COLOR_FORMAT_FLAG);
+            viddec_fw_vc1_set_disp_framerate_flag(&wi_de.vc1_sl_de, sh.FRAMERATE_FLAG);
+            viddec_fw_vc1_set_disp_framerateind(&wi_de.vc1_sl_de, sh.FRAMERATEIND);
+
+            viddec_fw_vc1_set_disp_aspect_ratio(&wi_de.vc1_sl_de, sh.ASPECT_RATIO);
+            viddec_fw_vc1_set_disp_frameratenr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATENR);
+            viddec_fw_vc1_set_disp_frameratedr(&wi_de.vc1_sl_de, sh.seq_framerate_fraction.FRAMERATEDR);
+            viddec_fw_vc1_set_disp_framerateexp(&wi_de.vc1_sl_de, sh.FRAMERATEEXP);
+
+            viddec_fw_vc1_set_disp_aspect_ratio_horiz_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_HORIZ_SIZE);
+            viddec_fw_vc1_set_disp_aspect_ratio_vert_size(&wi_de.vc1_sl_de, sh.seq_aspect_size.ASPECT_VERT_SIZE);
+            viddec_fw_vc1_set_disp_color_prim(&wi_de.vc1_sl_de, sh.seq_color_format.COLOR_PRIM);
+            viddec_fw_vc1_set_disp_transfer_char(&wi_de.vc1_sl_de, sh.seq_color_format.TRANSFER_CHAR);
+
+            result = viddec_pm_append_workitem(ctxt, &wi_de, false);
+        }
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse entry point layer.  This function is only applicable for advanced
+ * profile and is used to signal a random access point and changes in coding
+ * control parameters.
+ * Table 14 of SMPTE 421M.
+ * Table 15 of SMPTE 421M for HRD_FULLNESS().
+ *------------------------------------------------------------------------------
+ */
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_EntryPointHeader ep;
+    uint32_t result;
+    uint32_t temp;
+
+    memset(&ep, 0, sizeof(vc1_EntryPointHeader));
+
+    // PARSE ENTRYPOINT HEADER
+    result = viddec_pm_get_bits(ctxt, &ep.flags, 13);
+    if (result == 1)
+    {
+        // Skip the flags already peeked at (13) and the unneeded hrd_full data
+        // NOTE: HRD_NUM_LEAKY_BUCKETS is initialized to 0 when HRD_PARAM_FLAG is not present
+        int hrd_bits = md->HRD_NUM_LEAKY_BUCKETS * 8;
+        while (hrd_bits >= 32)
+        {
+            result = viddec_pm_skip_bits(ctxt, 32);
+            hrd_bits -= 32;
+        }
+        result = viddec_pm_skip_bits(ctxt, hrd_bits);
+
+        md->REFDIST = 0;
+        md->BROKEN_LINK = ep.ep_flags.BROKEN_LINK;
+        md->CLOSED_ENTRY = ep.ep_flags.CLOSED_ENTRY;
+        md->PANSCAN_FLAG = ep.ep_flags.PANSCAN_FLAG;
+        md->REFDIST_FLAG = ep.ep_flags.REFDIST_FLAG;
+        md->LOOPFILTER = ep.ep_flags.LOOPFILTER;
+        md->FASTUVMC = ep.ep_flags.FASTUVMC;
+        md->EXTENDED_MV = ep.ep_flags.EXTENDED_MV;
+        md->DQUANT = ep.ep_flags.DQUANT;
+        md->VSTRANSFORM = ep.ep_flags.VSTRANSFORM;
+        md->OVERLAP = ep.ep_flags.OVERLAP;
+        md->QUANTIZER = ep.ep_flags.QUANTIZER;
+
+        result = viddec_pm_get_bits(ctxt, &temp, 1);
+        if (result == 1)
+        {
+            ep.CODED_SIZE_FLAG = temp;
+            if (ep.CODED_SIZE_FLAG)
+            {
+                result = viddec_pm_get_bits(ctxt, &ep.size, 24);
+                md->width = ep.ep_size.CODED_WIDTH;
+                md->height = ep.ep_size.CODED_HEIGHT;
+            }
+        }
+        if (ep.ep_flags.EXTENDED_MV)
+        {
+            result = viddec_pm_get_bits(ctxt, &temp, 1);
+            md->EXTENDED_DMV = ep.EXTENDED_DMV = temp;
+        }
+
+        result = viddec_pm_get_bits(ctxt, &temp, 1);
+        if (result == 1)
+        {
+            md->RANGE_MAPY_FLAG = ep.RANGE_MAPY_FLAG = temp;
+            if (ep.RANGE_MAPY_FLAG)
+            {
+                result = viddec_pm_get_bits(ctxt, &temp, 3);
+                md->RANGE_MAPY = ep.RANGE_MAPY = temp;
+            }
+        }
+
+        result = viddec_pm_get_bits(ctxt, &temp, 1);
+        if (result == 1)
+        {
+            md->RANGE_MAPUV_FLAG = ep.RANGE_MAPUV_FLAG = temp;
+            if (ep.RANGE_MAPUV_FLAG)
+            {
+                result = viddec_pm_get_bits(ctxt, &temp, 3);
+                md->RANGE_MAPUV = ep.RANGE_MAPUV = temp;
+            }
+        }
+    }
+
+    // POPULATE WORKLOAD ITEM
+    {
+        viddec_workload_item_t wi;
+
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_INFO;
+
+        wi.vc1_ep.size = 0;
+        wi.vc1_ep.flags = 0;
+        wi.vc1_ep.pad = 0;
+
+        viddec_fw_vc1_set_ep_size_flag(&wi.vc1_ep, ep.CODED_SIZE_FLAG);
+        viddec_fw_vc1_set_ep_horiz_size(&wi.vc1_ep, ep.ep_size.CODED_WIDTH);
+        viddec_fw_vc1_set_ep_vert_size(&wi.vc1_ep, ep.ep_size.CODED_HEIGHT);
+
+        viddec_fw_vc1_set_ep_broken_link(&wi.vc1_ep, ep.ep_flags.BROKEN_LINK);
+        viddec_fw_vc1_set_ep_closed_entry(&wi.vc1_ep, ep.ep_flags.CLOSED_ENTRY);
+        viddec_fw_vc1_set_ep_panscan_flag(&wi.vc1_ep, ep.ep_flags.PANSCAN_FLAG);
+        viddec_fw_vc1_set_ep_range_mapy_flag(&wi.vc1_ep, ep.RANGE_MAPY_FLAG);
+        viddec_fw_vc1_set_ep_range_mapy(&wi.vc1_ep, ep.RANGE_MAPY);
+        viddec_fw_vc1_set_ep_range_mapuv_flag(&wi.vc1_ep, ep.RANGE_MAPUV_FLAG);
+        viddec_fw_vc1_set_ep_range_mapuv(&wi.vc1_ep, ep.RANGE_MAPUV);
+
+        result = viddec_pm_append_workitem(ctxt, &wi, false);
+    }
+
+    DEB("ep: res: %dx%d\n", ep.ep_size.CODED_WIDTH, ep.ep_size.CODED_HEIGHT);
+    DEB("md: after ep: res: %dx%d\n", md->width, md->height);
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses the picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t temp;
+    int i;
+
+    for (i=0; i<VC1_MAX_BITPLANE_CHUNKS; i++)
+    {
+        pInfo->metadata.bp_raw[i] = true;
+    }
+
+    if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED)
+    {
+        VC1_PEEK_BITS(2, temp); /* fcm */
+        if ( (pInfo->metadata.INTERLACE == 1) && (temp == VC1_FCM_FIELD_INTERLACE))
+        {
+            status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+        }
+        else
+        {
+            status = vc1_ParsePictureHeader_Adv(ctxt, pInfo);
+        }
+    }
+    else
+    {
+        status = vc1_ParsePictureHeader(ctxt, pInfo);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse field picture layer.  This function parses the field picture layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_PARSE_ERROR;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (pInfo->metadata.PROFILE == VC1_PROFILE_ADVANCED) {
+        if (picLayerHeader->CurrField == 0)
+        {
+            picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+            picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+        }
+        else
+        {
+            picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+            picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+        }
+        status = vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+    }
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse slice layer.  This function parses the slice layer, which is only
+ * supported by advanced profile.
+ * Table 26 of SMPTE 421M but skipping parsing of macroblock layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    uint32_t SLICE_ADDR;
+    vc1_Status status = VC1_STATUS_OK;
+
+    VC1_GET_BITS9(9, SLICE_ADDR);
+    VC1_GET_BITS9(1, tempValue); /* PIC_HEADER_FLAG. */
+    if (tempValue == 1) {
+        uint8_t *last_bufptr = pInfo->bufptr;
+        uint32_t last_bitoff = pInfo->bitoff;
+        status = vc1_ParsePictureLayer(ctxt, pInfo);
+        pInfo->picture_info_has_changed = 1;
+        if ( status ) {
+            /* FIXME - is this a good way of handling this? Failed, see if it's for fields */
+            pInfo->bufptr = last_bufptr;
+            pInfo->bitoff = last_bitoff;
+            status = vc1_ParseFieldHeader_Adv(ctxt, pInfo);
+        }
+    } else
+        pInfo->picture_info_has_changed = 0;
+
+    pInfo->picLayerHeader.SLICE_ADDR = SLICE_ADDR;
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * This function parses the user data information as defined in SMPTE 421M annex F.
+ * It then appends that data to the workload.
+ * Assume the flush byte 0x80 is within the 3 bytes before next start code.
+ * let's put 1 byte per item first
+ *------------------------------------------------------------------------------
+ */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t user_data;
+    viddec_workload_item_t wi;
+    uint32_t ud_id;
+
+    /* find the scope based on start code sc */
+    switch (sc) {
+    case vc1_SCSequenceUser:
+        wi.vwi_type = VIDDEC_WORKLOAD_SEQ_USER_DATA;
+        break;
+    case vc1_SCEntryPointUser:
+        wi.vwi_type = VIDDEC_WORKLOAD_GOP_USER_DATA;
+        break;
+    case vc1_SCFrameUser:
+        wi.vwi_type = VIDDEC_WORKLOAD_FRM_USER_DATA;
+        break;
+    case vc1_SCFieldUser:
+        wi.vwi_type = VIDDEC_WORKLOAD_FLD_USER_DATA;
+        break;
+    case vc1_SCSliceUser:
+        wi.vwi_type = VIDDEC_WORKLOAD_SLC_USER_DATA;
+        break;
+    default:
+        wi.vwi_type = VIDDEC_WORKLOAD_INVALID; //ERROR - should not happen
+        break;
+    }
+
+    /* get identifier - 4 bytes*/
+    // Extract this information but discard it for now
+    VC1_GET_BITS(32, ud_id);
+
+    /* Read 1 byte of user data and store it in workitem for the current stream level (SEQ/GOP/PIC).
+       Keep adding data payloads till it reaches size 11. When it is 11, the maximum user data payload size,
+       append the workitem. This loop is repeated till all user data is extracted and appended. */
+    wi.user_data.size = 0;
+    while (viddec_pm_get_bits(ctxt, &user_data, 8) != -1)
+    {
+        /* Store the valid byte in data payload */
+        wi.user_data.data_payload[wi.user_data.size] = user_data;
+        wi.user_data.size++;
+
+        /* When size exceeds payload size, append workitem and continue */
+        if (wi.user_data.size >= 11)
+        {
+            viddec_pm_setup_userdata(&wi);
+            viddec_pm_append_workitem(ctxt, &wi,false);
+            wi.user_data.size = 0;
+        }
+        if (user_data == 0x80) // flushing byte
+            break;
+    }
+    /* If size is not 0, append remaining user data. */
+    if (wi.user_data.size > 0)
+    {
+        int i;
+        for (i=wi.user_data.size; i<11; i++)
+        {
+            wi.user_data.data_payload[i] = 0;
+        }
+        viddec_pm_setup_userdata(&wi);
+        viddec_pm_append_workitem(ctxt, &wi,false);
+        wi.user_data.size = 0;
+    }
+
+    return(status);
+} // vc1_ParseAndAppendUserData
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse.h b/mixvbp/vbp_plugin/vc1/vc1parse.h
new file mode 100755
index 0000000..e190fe1
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse.h
@@ -0,0 +1,140 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Common functions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_H_
+#define _VC1PARSE_H_
+#include <vbp_common.h>
+#include "viddec_parser_ops.h"
+#include "vc1.h"
+
+/** @weakgroup vc1parse_defs VC-1 Parse Definitions */
+/** @ingroup vc1parse_defs */
+/*@{*/
+
+/* Debug tracing: when VC1_VERBOSE is defined, AUTO_TRACE logs every function
+   entry and DEBUGBITS logs each bitstream access; otherwise both expand to
+   nothing so release builds carry no trace overhead. */
+#ifdef VC1_VERBOSE
+#include <stdio.h>
+#define AUTO_TRACE OS_INFO("trace: %s\n", __FUNCTION__)
+#define DEBUGBITS(arg1, args ...) OS_INFO( arg1, ## args)
+#else
+#define AUTO_TRACE
+#define DEBUGBITS(...)
+#endif
+
+/* NOTE(review): non-standard memset prototype (int32_t/uint32_t instead of
+   the standard int/size_t); prefer #include <string.h>. Confirm this does
+   not conflict with the toolchain's own declaration. */
+extern void *memset(void *s, int32_t c, uint32_t n);
+
+/* This macro gets the next numBits from the bitstream and stores them in
+   'value'.  A variable named 'ctxt' (the parser context) must be in scope
+   at every call site. */
+#define VC1_GET_BITS VC1_GET_BITS9
+#define VC1_GET_BITS9(numBits, value) \
+{   uint32_t __tmp__; \
+    viddec_pm_get_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+    value = __tmp__;\
+    DEBUGBITS("BIT:%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+/* Same as VC1_GET_BITS but does not advance the bitstream position. */
+#define VC1_PEEK_BITS(numBits, value) \
+{   uint32_t __tmp__; \
+    viddec_pm_peek_bits(ctxt, (uint32_t*)&__tmp__, numBits ); \
+    value = __tmp__;\
+    DEBUGBITS("PEEK%40s= 0x%x\tNo. of bits=%d\tbyte = %02x\t%s[%d]\n", #value, value, numBits, 0, __FILE__, __LINE__); \
+}
+
+/* Soft assert: logs (but does not abort) when the condition is false;
+   compiles away entirely unless VC1_VERBOSE is defined. */
+#ifdef VC1_VERBOSE
+#define VC1_ASSERT(condition) \
+{ \
+    if (! (condition)) \
+        OS_INFO("Failed " #condition "!\n"); \
+}
+#else
+#define VC1_ASSERT(condition)
+#endif
+
+/*@}*/
+
+/** @weakgroup vc1parse VC-1 Parse Functions */
+/** @ingroup vc1parse */
+/*@{*/
+
+/* Shared VLC/lookup tables defined in the parser translation units. */
+extern const uint8_t VC1_MVMODE_LOW_TBL[];
+extern const uint8_t VC1_MVMODE_HIGH_TBL[];
+extern const int32_t VC1_BITPLANE_IMODE_TBL[];
+extern const int32_t VC1_BITPLANE_K_TBL[];
+extern const int32_t VC1_BFRACTION_TBL[];
+extern const int32_t VC1_REFDIST_TBL[];
+
+void vc1_end_frame(vc1_viddec_parser_t *parser);
+
+/* Top-level functions to parse bitstream layers for rcv format. */
+vc1_Status vc1_ParseRCVSequenceLayer (void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse bitstream layers for the various profiles. */
+vc1_Status vc1_ParseSequenceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseEntryPointLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseSliceLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureLayer(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldLayer(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse headers for various picture layers for the
+simple and main profiles. */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo);
+
+/* Top-level functions to parse common part of the headers for various picture
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_Adv (void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various progressive
+picture layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various interlace frame
+layers for the advanced profile. */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse remainder part of the headers for various interlace field
+layers for the advanced profile. */
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo);
+
+/* Functions to parse syntax element in bitstream. */
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo);
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo);
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo, uint32_t width, uint32_t height, vc1_bpp_type_t bptype);
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable);
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable, int8_t *pFirst, int16_t *pSecond);
+
+/* Workload emission helpers (frame/field/slice boundaries). */
+void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser);
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser);
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser);
+
+
+
+
+/* function to handle user data */
+vc1_Status vc1_ParseAndAppendUserData(void* ctxt, uint32_t sc);
+
+/*@}*/
+
+#endif /* _VC1PARSE_H_. */
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c
new file mode 100755
index 0000000..4996e28
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_bitplane.c
@@ -0,0 +1,716 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 bitstreams.
+//
+*/
+
+#include "vc1parse.h"
+
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+
+/*----------------------------------------------------------------------------*/
+
+
+/* put one bit into a buffer
+ * used for bitplane decoding, each bit correspond to a MB
+ * HW requires row to start at DW (32 bits) boundary
+ * input: value - bit value
+ *        mbx - image width in MB
+ *        mby - image height in MB
+ *        x   - x location (column) of MB in MB unit
+ *        y   - y location (row) of MB in MB unit
+ * output: outp - buffer to fill
+ */
+//#define put_bit(value,x,y,mbx,mby,invert,outp)
/* Write one bit into a bitplane buffer.
 * Each bit corresponds to one macroblock; HW requires every row to start
 * on a DW (32-bit) boundary, so rows are padded to whole dwords.
 * input: value  - bit value
 *        x      - x location (column) of MB in MB unit
 *        y      - y location (row) of MB in MB unit
 *        mbx    - image width in MB
 *        mby    - image height in MB (unused; kept for interface
 *                 compatibility -- row padding depends on mbx only)
 *        invert - XORed with value before storing
 * output: outp  - buffer to fill (assumed pre-zeroed; 0 bits are skipped)
 */
static inline void put_bit( uint32_t value, int x, int y, int mbx, int mby, uint8_t invert, uint32_t* outp)
{
    int bit;
    uint32_t *out;

    (void) mby; /* was a dead store (bit = mby) in the original; parameter
                   retained so all call sites remain unchanged */

    value ^= invert;
    if (!value) return; /* assume buffer is initialized with zeros */

    out = outp;
    /* go to corresponding row location in DW unit */
    out += (( mbx + 31 ) >> 5) * y;
    out +=  x >> 5; /* go to corresponding column location in DW unit */
    bit = x & 0x1f; /* compute remaining bits */
    *out |= 1 << bit; /* put bit */
}
+
+/* if b is the bit at location (x,y)
+ * b = b^invert
+ * used for bitplane decoding, each bit correspond to a MB
+ * HW requires row to start at DW (32 bits) boundary
+ * input: value - bit value
+ *        x   - x location (column) of MB in MB unit
+ *        y   - y location (row) of MB in MB unit
+ *        mbx - image width in MB
+ * output: outp - buffer to fill
+ * returns bit value
+ */
/* XOR the bit at MB location (x,y) with 'invert' and return the resulting
 * bit value.  Each bit corresponds to one macroblock; HW requires every
 * row to start on a DW (32-bit) boundary.
 * input:  x      - x location (column) of MB in MB unit
 *         y      - y location (row) of MB in MB unit
 *         mbx    - image width in MB
 *         invert - bit to XOR with (only the exact value 1 toggles;
 *                  any other value leaves the stored bit unchanged)
 * in/out: outp   - bitplane buffer in dwords
 * returns the (possibly updated) bit value at (x,y)
 */
static inline int xor_bit(  int x, int y, int mbx, uint32_t invert, uint32_t* outp)
{
    int bit;
    uint32_t *out;

    out = outp;
    out += (( mbx + 31 ) >> 5) * y; /* go to corresponding row location in DW unit */
    out +=  x >> 5; /* go to corresponding column location in DW unit */
    bit = x & 0x1f; /* compute remaining bits */

    if (invert == 1)
        *out ^= (1 << bit); /* toggle the stored bit */

    return (int)((*out >> bit) & 1); /* return current bit value */
}
+
+/* get bit at location (x,y)
+ * used for bitplane decoding, each bit correspond to a MB
+ * HW requires row to start at DW (32 bits) boundary
+ * input: value - bit value
+ *        x   - x location (column) of MB in MB unit
+ *        y   - y location (row) of MB in MB unit
+ *        mbx - image width in MB
+ *        outp - bit buffer in dwords
+ * returns bit value
+ */
/* Read the bit at MB location (x,y) from a bitplane buffer.
 * Each bit corresponds to one macroblock; rows are padded so that every
 * row starts on a DW (32-bit) boundary, as required by HW.
 * input: x    - x location (column) of MB in MB unit
 *        y    - y location (row) of MB in MB unit
 *        mbx  - image width in MB
 *        outp - bit buffer in dwords
 * returns bit value at (x,y)
 */
static inline int get_bit(  int x, int y, int mbx, uint32_t* outp)
{
    const int dwords_per_row = (mbx + 31) >> 5;
    const uint32_t *slot = outp + dwords_per_row * y + (x >> 5);
    const int shift = x & 0x1f;

    return (int)((*slot >> shift) & 1u);
}
+
+/* Undo differential bitplane coding in place (used after Norm-2/Norm-6
+ * decode in the DIFF2/DIFF6 modes).  For each MB position the predictor is
+ * the previously reconstructed bit to the left, except: the very first bit
+ * is predicted by pBitplane->invert; the first bit of each row is predicted
+ * by the bit directly above; and when the left and top predictions disagree,
+ * pBitplane->invert is used instead.  Presumably follows SMPTE 421M
+ * differential-mode reconstruction -- verify against the spec. */
+static void vc1_InverseDiff(vc1_Bitplane *pBitplane, int32_t widthMB, int32_t heightMB)
+{
+    int32_t i, j, previousBit=0, temp;
+
+    /* raster scan; previousBit carries the reconstructed value of the bit
+       just processed, so the statement order in each branch is load-bearing */
+    for (i = 0; i < heightMB; i++)
+    {
+        for (j = 0; j < widthMB; j++)
+        {
+            if ((i == 0 && j == 0))
+            {
+                previousBit=xor_bit(j, i, widthMB, pBitplane->invert,
+                                    pBitplane->databits);
+            }
+            else if (j == 0) /* XOR with TOP */
+            {
+                previousBit = get_bit(0, i-1, widthMB, pBitplane->databits);
+                temp=xor_bit(j, i, widthMB, previousBit,
+                             pBitplane->databits);
+                previousBit = temp;
+            }
+            //TODO isSameAsTop can be optimized
+            else if (((i > 0) && (previousBit !=
+                                  get_bit(j, i-1, widthMB, pBitplane->databits))))
+            {
+                /* left and top predictors disagree: fall back to invert */
+                temp=xor_bit(j, i, widthMB, pBitplane->invert,
+                             pBitplane->databits);
+                previousBit = temp;
+            }
+            else
+            {
+                /* predict from the bit to the left */
+                temp=xor_bit(j, i, widthMB, previousBit,
+                             pBitplane->databits);
+                previousBit = temp;
+            }
+        }
+    }
+}
+
+
+/*----------------------------------------------------------------------------*/
+/* implement normal 2 mode bitplane decoding, SMPTE 421M 8.7.3.2
+ * width, height are in MB unit.
+ */
+/* Decode a bitplane in Normal-2 mode (SMPTE 421M 8.7.3.2): bits are coded
+ * in pairs with a short VLC (0 -> 00, 11 -> 11, 100 -> 10, 101 -> 01).
+ * width/height are in MB units; results are written via put_bit into
+ * pBitplane->databits.  Also used as the first stage of DIFF2 mode, in
+ * which case the invert flag is suppressed during decode. */
+static void vc1_Norm2ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+                                int32_t width, int32_t height)
+{
+    int32_t i;
+    int32_t tmp_databits = 0;
+
+    int32_t row[2], col[2];
+    /* saves the caller's invert flag while DIFF2 decoding runs */
+    int8_t tmp=0;
+
+    /* disable pBitplane->invert in the Norm2 decode stage of
+       VC1_BITPLANE_DIFF2_MODE */
+    if (pBitplane->imode == VC1_BITPLANE_DIFF2_MODE)
+    {
+        tmp = pBitplane->invert;
+        pBitplane->invert=0;
+    }
+
+    // By default, initialize the values for the even case
+    col[0] = 0;   /* i%width; */
+    row[0] = 0;   /* i/width; */
+    col[1] = 1;   /* (i+1)%width; */
+    row[1] = 0;   /* (i+1)/width; */
+
+    // If width*height is odd, the first bit is the value of the bitplane
+    // for the first macroblock
+    if ((width*height) & 1) /* first bit if size is odd */
+    {
+        VC1_GET_BITS(1, tmp_databits);
+        put_bit(tmp_databits, 0, 0, width, height, pBitplane->invert,
+                pBitplane->databits);
+
+        // Modify initialization for odd sizes
+        col[0] = 1;   /* i%width; */
+        col[1] = 2;   /* (i+1)%width; */
+
+        // Consider special case where width is 1
+        if (width == 1)
+        {
+            col[0] = 0;   /* i%width; */
+            row[0] = 1;   /* i/width; */
+            col[1] = 0;   /* (i+1)%width; */
+            row[1] = 2;   /* (i+1)/width; */
+        }
+    }
+
+    /* decode every pair of bits in natural scan order */
+    for (i = (width*height) & 1; i < (width*height/2)*2; i += 2)
+    {
+        /* NOTE(review): this inner tmp shadows the outer int8_t tmp that
+           holds the saved invert flag; harmless here, but rename if touched */
+        int32_t tmp = 0;
+
+        //col[0]=i%width;
+        //row[0]=i/width;
+        //col[1]=(i+1)%width;
+        //row[1]=(i+1)/width;
+
+        /* VLC for the pair: 0 -> (0,0); 11 -> (1,1); 100 -> (1,0); 101 -> (0,1) */
+        VC1_GET_BITS(1, tmp);
+        if (tmp == 0)
+        {
+            put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+                    pBitplane->databits);
+            put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+                    pBitplane->databits);
+        }
+        else
+        {
+            VC1_GET_BITS(1, tmp);
+            if (tmp == 1)
+            {
+                put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+                        pBitplane->databits);
+                put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+                        pBitplane->databits);
+            }
+            else
+            {
+                VC1_GET_BITS(1, tmp);
+                if (tmp == 0)
+                {
+                    put_bit(1, col[0],row[0], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                    put_bit(0, col[1],row[1], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                }
+                else
+                {
+                    put_bit(0, col[0],row[0], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                    put_bit(1, col[1],row[1], width, height, pBitplane->invert,
+                            pBitplane->databits);
+                }
+            }
+        }
+
+        // Consider special case where width is 1
+        if (width == 1)
+        {
+            row[0] += 2;
+            row[1] += 2;
+        }
+        else
+        {
+            col[0] += 2;   /* i%width; */
+            if ( col[0] >= width )
+            {
+                // For odd sizes, col[0] can alternatively start at 0 and 1
+                col[0] -= width;
+                row[0]++;
+            }
+
+            col[1] += 2;   /* (i+1)%width; */
+            if ( col[1] >= width )
+            {
+                // For odd sizes, col[1] can alternatively start at 0 and 1
+                col[1] -= width;
+                row[1]++;
+            }
+        }
+    }
+
+    /* restore value */
+    /* NOTE(review): this restore runs unconditionally, so in plain NORM2
+       mode (where tmp was never assigned) invert is reset to 0 after the
+       bits have already been written with inversion applied.  Confirm the
+       downstream consumers of bp.invert expect this. */
+    pBitplane->invert=tmp;
+}
+
+/*----------------------------------------------------------------------------*/
+/* compute Normal-6 mode bitplane decoding
+ * algorithm is described in SMPTE 421M 8.7.3.4
+ * width, height are in MB unit.
+ */
+/* Decode a bitplane in Normal-6 mode (SMPTE 421M 8.7.3.4): the plane is
+ * tiled with 2x3 or 3x2 groups of MBs, each tile coded as one Huffman code
+ * k whose six bits are the tile's bit values; leftover columns/rows that do
+ * not fit a tile are sent with per-column/per-row skip flags.  Also used as
+ * the first stage of DIFF6 mode, with the invert flag suppressed. */
+static void vc1_Norm6ModeDecode(void* ctxt, vc1_Bitplane *pBitplane,
+                                int32_t width, int32_t height)
+{
+    /* NOTE(review): status is only consumed by VC1_ASSERT, which compiles
+       away unless VC1_VERBOSE is defined (unused-variable warning risk) */
+    vc1_Status status;
+    int32_t i, j, k;
+    int32_t ResidualX = 0;
+    int32_t ResidualY = 0;
+    /* use 2x3 tiles when width is not a multiple of 3 but height is */
+    uint8_t _2x3tiled = (((width%3)!=0)&&((height%3)==0));
+
+    int32_t row, col;
+    /* saves the caller's invert flag while DIFF6 decoding runs */
+    int8_t tmp=0;
+
+    /* disable pBitplane->invert in the Norm6 decode stage of
+       VC1_BITPLANE_DIFF6_MODE */
+    if (pBitplane->imode == VC1_BITPLANE_DIFF6_MODE)
+    {
+        tmp = pBitplane->invert;
+        pBitplane->invert=0;
+    }
+
+    if (_2x3tiled)
+    {
+        int32_t sizeW = width/2;
+        int32_t sizeH = height/3;
+
+        for (i = 0; i < sizeH; i++)
+        {
+            row = 3*i; /* compute row location for tile */
+
+            for (j = 0; j < sizeW; j++)
+            {
+                col = 2*j + (width & 1); /* compute column location for tile */
+
+                /* get k=sum(bi2^i) were i is the ith bit of the tile */
+                status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+                VC1_ASSERT(status == VC1_STATUS_OK);
+
+                /* put bits in tile */
+                put_bit(k&1, col, row, width, height, pBitplane->invert,
+                        pBitplane->databits);
+                put_bit(((k&2)>>1), col+1, row, width, height,
+                        pBitplane->invert,pBitplane->databits);
+
+                put_bit(((k&4)>>2), col, row+1, width, height,
+                        pBitplane->invert,pBitplane->databits);
+                put_bit(((k&8)>>3), col+1, row+1, width, height,
+                        pBitplane->invert,pBitplane->databits);
+
+                put_bit(((k&16)>>4), col, row+2, width, height,
+                        pBitplane->invert,pBitplane->databits);
+                put_bit(((k&32)>>5), col+1, row+2, width,
+                        height,pBitplane->invert, pBitplane->databits);
+            }
+        }
+        ResidualX = width & 1;
+        ResidualY = 0;
+    }
+    else /* 3x2 tile */
+    {
+        int32_t sizeW = width/3;
+        int32_t sizeH = height/2;
+
+        for (i = 0; i < sizeH; i++)
+        {
+            row = 2*i + (height&1) ; /* compute row location for tile */
+
+            for (j = 0; j < sizeW; j++)
+            {
+                col = 3*j + (width%3); /* compute column location for tile */
+
+                /* get k=sum(bi2^i) were i is the ith bit of the tile */
+                status = vc1_DecodeHuffmanOne(ctxt, &k, VC1_BITPLANE_K_TBL);
+                VC1_ASSERT(status == VC1_STATUS_OK);
+
+                put_bit(k&1, col, row, width, height,pBitplane->invert,
+                        pBitplane->databits);
+                put_bit((k&2)>>1, col+1, row, width, height, pBitplane->invert,
+                        pBitplane->databits);
+                put_bit((k&4)>>2, col+2, row, width, height, pBitplane->invert,
+                        pBitplane->databits);
+
+                put_bit((k&8)>>3, col, row+1, width, height,pBitplane->invert,
+                        pBitplane->databits);
+                put_bit((k&16)>>4, col+1, row+1, width,
+                        height,pBitplane->invert, pBitplane->databits);
+                put_bit((k&32)>>5, col+2, row+1, width,
+                        height,pBitplane->invert, pBitplane->databits);
+            }
+        }
+        ResidualX = width % 3;
+        ResidualY = height & 1;
+    }
+
+    /* residual columns on the left edge: one skip flag per column, then
+       (when the flag is 1) one raw bit per MB in the column */
+    for (i = 0; i < ResidualX; i++)
+    {
+        int32_t ColSkip;
+        VC1_GET_BITS(1, ColSkip);
+
+        //if (1 == ColSkip)
+        {
+            for (j = 0; j < height; j++)
+            {
+                int32_t Value = 0;
+                if (1 == ColSkip) VC1_GET_BITS(1, Value);
+
+                put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits);
+            }
+        }
+    }
+
+    /* residual rows on the top edge, same skip-flag scheme */
+    for (j = 0; j < ResidualY; j++)
+    {
+        int32_t RowSkip;
+        VC1_GET_BITS(1, RowSkip);
+        //if (1 == RowSkip)
+        {
+            for (i = ResidualX; i < width; i++)
+            {
+                int32_t Value = 0;
+                if (1 == RowSkip) VC1_GET_BITS(1, Value);
+
+                put_bit(Value, i, j, width, height,pBitplane->invert,pBitplane->databits);
+            }
+        }
+    }
+
+    /* restore value */
+    /* NOTE(review): unconditional restore resets invert to 0 in plain NORM6
+       mode (tmp never assigned there); same caveat as vc1_Norm2ModeDecode */
+    pBitplane->invert=tmp;
+
+}
+
+/*----------------------------------------------------------------------------*/
+/* initialize bitplane to array of zeros
+ * each row begins with a dword
+ * input:
+ *    width: width in MB unit
+ *    height: height in MB unit
+ * returns even bitplane size in dwords
+ */
+int initBitplane(vc1_Bitplane *pBitplane,uint32_t width, uint32_t height)
+{
+    int i;
+    int numDword = 0;
+
+    numDword = ((width + 31)>>5) *  height;
+    numDword += numDword & 1; /* add 1 in case numDword is odd */
+
+    for (i=0; i<numDword; i++) pBitplane->databits[i] = 0;
+    return(numDword);
+}
+
+/*----------------------------------------------------------------------------*/
+/* modified IPP code for bitplane decoding
+ *    width: width in MB unit
+ *    height: height in MB unit
+ */
+/* Parse one coded bitplane from the picture layer.
+ * Reads INVERT and IMODE, dispatches to the per-mode decoder
+ * (Norm2/Diff2/Norm6/Diff6/rowskip/colskip), emits the decoded plane as
+ * workload items, and -- under VBP -- copies it into the parser context
+ * field selected by bpnum and the current picture type.  In RAW mode no
+ * plane data follows in the picture layer (bits live in the MB layer), so
+ * only the bp_raw flag is left set.
+ * width/height are in MB units; bpnum selects which bitplane this is
+ * (VIDDEC_WORKLOAD_VC1_BITPLANE0/1/2).  Returns VC1_STATUS_OK on success. */
+vc1_Status vc1_DecodeBitplane(void* ctxt, vc1_Info *pInfo,
+                              uint32_t width, uint32_t height, vc1_bpp_type_t bpnum)
+{
+    uint32_t i, j;
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t biplaneSz; /* bitplane sz in dwords */
+    vc1_Bitplane bp;
+    vc1_Bitplane *bpp = &bp;
+
+    // By default, set imode to raw
+    pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = true;
+
+    // bitplane data would be temporarily stored in the vc1 context
+    bpp->databits = pInfo->bitplane;
+
+    /* init bitplane to zero; function returns bitplane buffer size in dwords */
+    biplaneSz = initBitplane(bpp, width, height);
+
+    VC1_GET_BITS(1, tempValue);
+    bpp->invert = (uint8_t) tempValue;
+
+    bpp->imode = -1;
+
+    if ((status = vc1_DecodeHuffmanOne(ctxt, &bpp->imode,VC1_BITPLANE_IMODE_TBL)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    // If the imode is VC1_BITPLANE_RAW_MODE: bitplane information is in the MB layer
+    // there is no need to parse for bitplane information in the picture layer
+    // Only bits need to be appropriately set in the block control register
+    // In all other modes, bitplane information follows and needs to be parsed and sent to the decoder
+
+    if (bpp->imode == VC1_BITPLANE_NORM2_MODE)
+    {
+        vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_DIFF2_MODE)
+    {
+        vc1_Norm2ModeDecode(ctxt, bpp, width, height);
+        vc1_InverseDiff(bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_NORM6_MODE)
+    {
+        vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+
+    }
+    else if (bpp->imode == VC1_BITPLANE_DIFF6_MODE)
+    {
+        vc1_Norm6ModeDecode(ctxt, bpp, width, height);
+        vc1_InverseDiff(bpp, width, height);
+    }
+    else if (bpp->imode == VC1_BITPLANE_ROWSKIP_MODE)
+    {
+        /* one skip flag per row; flag==1 means a raw bit per MB follows */
+        for (i = 0; i < height; i++)
+        {
+            VC1_GET_BITS(1, tempValue);
+            /* if tempValue==0,  put row of zeros Dwords*/
+            if (tempValue == 1)
+            {
+                for (j = 0; j < width; j++)
+                {
+                    VC1_GET_BITS(1, tempValue);
+                    put_bit( tempValue, j, i, width, height, bpp->invert,bpp->databits);
+                }
+            }
+            else if (bpp->invert) { //TO TEST
+                for (j = 0; j < width; j++) {
+                    put_bit( 0, j, i, width, height, bpp->invert, bpp->databits);
+                }
+            }
+        }
+
+    }
+    else if (bpp->imode == VC1_BITPLANE_COLSKIP_MODE)
+    {
+        /* one skip flag per column; flag==1 means a raw bit per MB follows */
+        for (i = 0; i < width; i++)
+        {
+            VC1_GET_BITS(1, tempValue);
+            /* if tempValue==0, and invert == 0, fill column with zeros */
+            if (tempValue == 1)
+            {
+                for (j = 0; j < height; j++)
+                {
+                    VC1_GET_BITS(1, tempValue);
+                    put_bit( tempValue, i, j, width, height, bpp->invert, bpp->databits);
+                }
+            }
+            else if (bpp->invert) { // fill column with ones
+                for (j = 0; j < height; j++) {
+                    put_bit( 0, i, j, width, height, bpp->invert, bpp->databits);
+                }
+            }//end for else
+        }
+    }
+
+    /* non-RAW plane: ship the decoded dwords to the workload as a series of
+       fixed-size data payloads */
+    if (bpp->imode != VC1_BITPLANE_RAW_MODE)
+    {
+        uint32_t* pl;
+        /* NOTE(review): this 'i' shadows the outer loop counter 'i' */
+        int sizeinbytes,nitems,i;
+        viddec_workload_item_t    wi;
+        uint32_t *bit_dw;
+
+        pInfo->metadata.bp_raw[bpnum - VIDDEC_WORKLOAD_VC1_BITPLANE0] = false;
+
+        sizeinbytes = ((( width + 31 ) / 32)) * (height) * 4;
+
+        pl = bpp->databits;
+        bit_dw = bpp->databits;
+
+        // How many payloads must be generated
+        nitems = (sizeinbytes + (sizeof(wi.data.data_payload) - 1)) /
+                 sizeof(wi.data.data_payload);
+
+        // Dump DMEM to an array of workitems
+        for ( i = 0; i < nitems; i++ )
+        {
+            wi.vwi_type           =  bpnum;
+            wi.data.data_offset   = (char *)pl - (char *)bit_dw; // offset within struct
+
+            wi.data.data_payload[0] = pl[0];
+            wi.data.data_payload[1] = pl[1];
+            pl += 2;
+
+            viddec_pm_append_workitem( ctxt, &wi, false);
+        }
+    }
+
+#ifdef VBP
+    {
+        viddec_pm_cxt_t     *cxt    = (viddec_pm_cxt_t *)ctxt;
+        vc1_viddec_parser_t *parser = (vc1_viddec_parser_t *)(cxt->codec_data);
+
+        if (biplaneSz > 4096)
+        {
+            /* bigger than we got, so let's bail with a non meaningful error. */
+            return VC1_STATUS_ERROR;
+        }
+
+        /* At this point bp contains the information we need for the bit-plane */
+        /* bpnum is the enumeration that tells us which bitplane this is for.  */
+        /* pInfo->picLayerHeader.ACPRED is one of the bitplanes I need to fill.*/
+        /* Which header field a bitplane fills depends on both bpnum and the
+           current picture type; RAW mode only sets the corresponding raw_*
+           flag instead of copying data. */
+        switch (bpnum)
+        {
+        case VIDDEC_WORKLOAD_VC1_BITPLANE0:
+            if (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME)
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.FORWARDMB.invert = bp.invert;
+                    pInfo->picLayerHeader.FORWARDMB.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_forwardmb[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.FORWARDMB.databits = parser->bp_forwardmb;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_FORWARDMB = 1;
+                }
+            }
+            if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+                    || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.ACPRED.invert = bp.invert;
+                    pInfo->picLayerHeader.ACPRED.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_acpred[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.ACPRED.databits = parser->bp_acpred;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_ACPRED = 1;
+                }
+            }
+            if (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.MVTYPEMB.invert = bp.invert;
+                    pInfo->picLayerHeader.MVTYPEMB.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_mvtypemb[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.MVTYPEMB.databits = parser->bp_mvtypemb;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_MVTYPEMB = 1;
+                }
+            }
+            break;
+        case VIDDEC_WORKLOAD_VC1_BITPLANE1:
+            if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+                    || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.OVERFLAGS.invert = bp.invert;
+                    pInfo->picLayerHeader.OVERFLAGS.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_overflags[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.OVERFLAGS.databits = parser->bp_overflags;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_OVERFLAGS = 1;
+                }
+            }
+            if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+                    || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) )
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.SKIPMB.invert = bp.invert;
+                    pInfo->picLayerHeader.SKIPMB.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_skipmb[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.SKIPMB.databits = parser->bp_skipmb;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_SKIPMB = 1;
+                }
+            }
+            break;
+        case VIDDEC_WORKLOAD_VC1_BITPLANE2:
+            if ( (pInfo->picLayerHeader.PTYPE == VC1_P_FRAME)
+                    || (pInfo->picLayerHeader.PTYPE == VC1_B_FRAME) )
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.DIRECTMB.invert = bp.invert;
+                    pInfo->picLayerHeader.DIRECTMB.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_directmb[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.DIRECTMB.databits = parser->bp_directmb;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_DIRECTMB = 1;
+                }
+            }
+            if ( (pInfo->picLayerHeader.PTYPE == VC1_I_FRAME)
+                    || (pInfo->picLayerHeader.PTYPE == VC1_BI_FRAME) )
+            {
+                if (bp.imode != VC1_BITPLANE_RAW_MODE)
+                {
+                    pInfo->picLayerHeader.FIELDTX.invert = bp.invert;
+                    pInfo->picLayerHeader.FIELDTX.imode = bp.imode;
+                    for (i = 0; i < biplaneSz; i++)
+                    {
+                        parser->bp_fieldtx[i] = bp.databits[i];
+                    }
+                    pInfo->picLayerHeader.FIELDTX.databits = parser->bp_fieldtx;
+                }
+                else
+                {
+                    pInfo->picLayerHeader.raw_FIELDTX = 1;
+                }
+            }
+            break;
+        /* no default: bpnum is always one of BITPLANE0/1/2 at the call sites
+           visible in this file -- confirm if new callers are added */
+        }
+    }
+#endif
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bpic.c b/mixvbp/vbp_plugin/vc1/vc1parse_bpic.c
new file mode 100755
index 0000000..15c9f53
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_bpic.c
@@ -0,0 +1,99 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for progressive B picture in simple
+//  or main profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses progressive B picture for main
+ * profile bitstream.  This parser starts after PTYPE was parsed but stops
+ * before parsing of macroblock layer.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for B picture.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* BFRACTION: fractional temporal position of the B picture between its
+       two reference pictures (also distinguishes BI pictures upstream). */
+    if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                                        &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+            VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* HALFQP is present only for low PQINDEX values; otherwise it is 0. */
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else
+    {
+        picLayerHeader->HALFQP = 0;
+    }
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+    }
+
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* MVMODE is a single bit for B pictures: 1 => 1MV, 0 => half-pel bilinear 1MV. */
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ?
+                             VC1_MVMODE_1MV : VC1_MVMODE_HPELBI_1MV;
+
+    /* Fix: propagate the actual error code on a DIRECTMB bitplane decode
+       failure.  The previous code returned VC1_STATUS_OK here, silently
+       masking the failure (note the SKIPMB call below correctly returns
+       'status'). */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+    VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* Variable-size transform: TTFRM is present only when TTMBF is set. */
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM: 1-2 bit VLC; codes decode to 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c
new file mode 100755
index 0000000..d7a5ccd
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_bpic_adv.c
@@ -0,0 +1,256 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for progressive B picture in advanced
+//  profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses progressive B picture for advanced
+ * profile bitstream.
+ * Table 22 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressiveBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status ret = VC1_STATUS_OK;
+    vc1_metadata_t *meta = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* Extended motion vector range (MVRANGE). */
+    ret = vc1_MVRangeDecode(ctxt, pInfo);
+    if (ret != VC1_STATUS_OK)
+        return ret;
+
+    /* MVMODE is a single bit for B pictures: 1 => 1MV, 0 => half-pel bilinear 1MV. */
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    picLayerHeader->MVMODE = (picLayerHeader->MVMODE == 1) ? VC1_MVMODE_1MV
+                                                           : VC1_MVMODE_HPELBI_1MV;
+
+    /* DIRECTMB and SKIPMB bitplanes, one bit per macroblock each. */
+    ret = vc1_DecodeBitplane(ctxt, pInfo, meta->widthMB, meta->heightMB, BPP_DIRECTMB);
+    if (ret != VC1_STATUS_OK)
+        return ret;
+
+    ret = vc1_DecodeBitplane(ctxt, pInfo, meta->widthMB, meta->heightMB, BPP_SKIPMB);
+    if (ret != VC1_STATUS_OK)
+        return ret;
+
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+    VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+    ret = vc1_VOPDQuant(ctxt, pInfo);
+    if (ret != VC1_STATUS_OK)
+        return ret;
+
+    /* Variable-size transform: TTFRM is present only when TTMBF is set. */
+    if (meta->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM: 1-2 bit VLC; codes decode to 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Macroblock-layer parsing intentionally not performed here. */
+
+    return ret;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses interlace B frame for advanced
+ * profile bitstream.
+ * Table 84 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* BFRACTION: fractional temporal position of the B frame between its
+       two reference pictures. */
+    if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                                        &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+            VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* Extended MV range, then extended differential MV range. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* INTCOMP: intensity-compensation flag for the interlaced frame. */
+    VC1_GET_BITS9(1, picLayerHeader->INTCOMP);
+
+    /* DIRECTMB and SKIPMB bitplanes, one bit per macroblock each. */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_DIRECTMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    // EPC picLayerHeader->MVMODE = VC1_MVMODE_1MV;
+    /* Coding table selectors for the interlaced frame (fixed-length codes). */
+    VC1_GET_BITS9(2, picLayerHeader->MBMODETAB);
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+    VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+    VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */
+    VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* Variable-size transform: TTFRM is present only when TTMBF is set. */
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM: 1-2 bit VLC; codes decode to 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses interlace B field for advanced
+ * profile bitstream.
+ * Table 89 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlaceBpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint8_t bit_count;
+    const uint8_t *table;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader* picLayerHeader = &pInfo->picLayerHeader;
+
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* HALFQP is present only for low PQINDEX values; otherwise it is 0. */
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else
+        picLayerHeader->HALFQP = 0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    if (md->POSTPROCFLAG == 1)
+    {
+        VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+    }
+
+    /* Extended MV range, then extended differential MV range. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* MVMODE VLC uses one of two mapping tables selected by PQUANT. */
+    if (picLayerHeader->PQUANT > 12)
+        table = VC1_MVMODE_LOW_TBL;
+    else
+        table = VC1_MVMODE_HIGH_TBL;
+
+    /* MVMODE is a unary-style VLC of up to 3 bits: count leading zero bits
+       (at most 2 read inside the loop), then 'bit_count' indexes the
+       selected mapping table.  Three zero bits yield index 3. */
+    bit_count = 0;
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+        bit_count++;
+    }
+    if ((bit_count == 2) && (picLayerHeader->MVMODE == 0))
+        bit_count++;
+    picLayerHeader->MVMODE = table[bit_count];
+
+    /* FORWARDMB bitplane covers a single field, hence half the frame
+       height (rounded up). */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, (md->heightMB+1)/2, BPP_FORWARDMB)) !=
+            VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* Coding table selectors for the interlaced field (fixed-length codes). */
+    VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+    VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */
+    VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+    /* 4MVBPTAB is present only in mixed-MV mode. */
+    if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+    {
+        VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+    }
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* Variable-size transform: TTFRM is present only when TTMBF is set. */
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM: 1-2 bit VLC; codes decode to 0, 2 or 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h b/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h
new file mode 100755
index 0000000..e474800
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_common_defs.h
@@ -0,0 +1,645 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Common definitions for parsing VC-1 bitstreams.
+//
+*/
+
+#ifndef _VC1PARSE_COMMON_DEFS_H_
+#define _VC1PARSE_COMMON_DEFS_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdint.h>
+
+    /** @weakgroup vc1parse_common_defs VC-1 Common Definitions */
+    /** @ingroup vc1parse_common_defs */
+    /*@{*/
+
+    /** This defines the maximum number of horizontal macroblocks in a picture. */
+#define VC1_WIDTH_MB_MAX         ((2048+15)/16)
+
+    /** This defines the maximum number of vertical macroblocks in a picture. */
+#define VC1_HEIGHT_MB_MAX        ((1088+15)/16)
+
+    /** This defines the maximum number of bitplane storage per picture. */
+#define VC1_MAX_BITPLANE_CHUNKS   3
+
+    /** This defines the value for an invalid BFRACTION syntax element. */
+#define VC1_BFRACTION_INVALID    0
+
+    /** This defines the value for BFRACTION syntax element that defines a BI
+    picture. */
+#define VC1_BFRACTION_BI         9
+
+    /** This enumeration defines the various supported profiles as defined in
+    PROFILE syntax element. */
+    enum
+    {
+        VC1_PROFILE_SIMPLE,
+        VC1_PROFILE_MAIN,
+        VC1_PROFILE_RESERVED,
+        VC1_PROFILE_ADVANCED
+    };
+
+    /** This enumeration defines the frame coding mode as defined in FCM syntax
+    element. */
+    enum
+    {
+        VC1_FCM_PROGRESSIVE,
+        VC1_FCM_FRAME_INTERLACE = 2,
+        VC1_FCM_FIELD_INTERLACE = 3
+    };
+
+    /** This enumeration defines the various bitplane types as defined in IMODE
+    syntax element. */
+    enum
+    {
+        VC1_BITPLANE_RAW_MODE,
+        VC1_BITPLANE_NORM2_MODE,
+        VC1_BITPLANE_DIFF2_MODE,
+        VC1_BITPLANE_NORM6_MODE,
+        VC1_BITPLANE_DIFF6_MODE,
+        VC1_BITPLANE_ROWSKIP_MODE,
+        VC1_BITPLANE_COLSKIP_MODE
+    };
+
+    /** This enumeration defines the various motion vector modes as defined in
+    MVMODE or MVMODE2 syntax element. */
+    enum
+    {
+        VC1_MVMODE_1MV,
+#ifdef VBP
+        VC1_MVMODE_HPELBI_1MV,
+        VC1_MVMODE_HPEL_1MV,
+#else
+        VC1_MVMODE_HPEL_1MV,
+        VC1_MVMODE_HPELBI_1MV,
+#endif
+        VC1_MVMODE_MIXED_MV,
+        VC1_MVMODE_INTENSCOMP
+    };
+
+    /** This enumeration defines the extended differential motion vector range flag
+    as defined in DMVRANGE syntax element. */
+    enum
+    {
+        VC1_DMVRANGE_NONE,
+        VC1_DMVRANGE_HORIZONTAL_RANGE,
+        VC1_DMVRANGE_VERTICAL_RANGE,
+        VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE
+    };
+
+    /** This enumeration defines the intensity compensation field as defined in
+    INTCOMPFIELD syntax element. */
+    enum
+    {
+        VC1_INTCOMP_TOP_FIELD    = 1,
+        VC1_INTCOMP_BOTTOM_FIELD = 2,
+        VC1_INTCOMP_BOTH_FIELD   = 3
+    };
+
+    /** This enumeration defines the differential quantizer profiles as defined in
+    DQPROFILE syntax element. */
+    enum
+    {
+        VC1_DQPROFILE_ALL4EDGES,
+        VC1_DQPROFILE_DBLEDGES,
+        VC1_DQPROFILE_SNGLEDGES,
+        VC1_DQPROFILE_ALLMBLKS
+    };
+
+    /** This enumeration defines the conditional overlap flag as defined in CONDOVER
+    syntax element. */
+    enum
+    {
+        VC1_CONDOVER_FLAG_NONE = 0,
+        VC1_CONDOVER_FLAG_ALL  = 2,
+        VC1_CONDOVER_FLAG_SOME = 3
+    };
+
+    /** This enumeration defines the type of quantizer to be used and is derived
+    from bitstream syntax. */
+    enum
+    {
+        VC1_QUANTIZER_NONUNIFORM,
+        VC1_QUANTIZER_UNIFORM
+    };
+
+    /** This structure represents the various bitplanes within VC-1 bitstream. */
+    typedef struct
+    {
+        uint8_t invert;      /**< INVERT flag decoded with the bitplane. */
+        int32_t imode;       /**< Coding mode: one of the VC1_BITPLANE_*_MODE values. */
+        uint32_t *databits;  /**< Decoded per-macroblock bits; points at storage owned
+                                  elsewhere (e.g. the parser's bp_* arrays) — not owned here. */
+    } vc1_Bitplane;
+
+#ifdef VBP
+#define VC1_MAX_HRD_NUM_LEAKY_BUCKETS   32
+
+    typedef struct
+    {
+        uint32_t	 HRD_RATE;				 /** Maximum bit rate in bits per second */
+        uint32_t	 HRD_BUFFER;			 /** Buffer size in bits */
+        uint32_t	 HRD_FULLNESS;			 /** Buffer fullness in complete bits */
+        uint32_t	 HRD_FULLFRACTION;		 /** Numerator of fractional bit buffer fullness count */
+        uint32_t	 HRD_FULLDENOMINATOR;	 /** Denominator of fractional bit buffer fullness count */
+    } vc1_leaky_bucket;
+
+    typedef struct _vc1_hrd_state
+    {
+        /* NOTE(review): the original comment here was garbled; presumably these
+           are the shared HRD exponents from the sequence-layer HRD parameters
+           (0 if none specified) — confirm against SMPTE 421M Annex C. */
+        uint8_t 		 BIT_RATE_EXPONENT;
+        uint8_t 		 BUFFER_SIZE_EXPONENT;
+        vc1_leaky_bucket sLeakyBucket[VC1_MAX_HRD_NUM_LEAKY_BUCKETS];	/** Per-bucket information */
+    } vc1_hrd_state, *vc1_hrd_state_ptr;
+#endif
+
+    /** This structure represents all bitstream metadata needed for register programming. */
+    typedef struct
+    {
+        // From Sequence Layer for Advanced Profile
+        uint8_t  PROFILE;                   /**  2 bit(s). */
+        uint8_t  LEVEL;                     /**  3 bit(s). */
+        uint8_t  CHROMAFORMAT;              /**  2 bit(s). */
+        uint8_t  FRMRTQ;                    /**  3 bit(s). */
+
+        uint8_t  BITRTQ;                    /**  5 bit(s). */
+        uint8_t  POSTPROCFLAG;              /**  1 bit(s). */
+        uint8_t  PULLDOWN;                  /**  1 bit(s). */
+        uint8_t  INTERLACE;                 /**  1 bit(s). */
+
+        uint8_t  TFCNTRFLAG;                /**  1 bit(s). */
+        uint8_t  FINTERPFLAG;               /**  1 bit(s). */
+        uint8_t  PSF;                       /**  1 bit(s). */
+        uint8_t  HRD_NUM_LEAKY_BUCKETS;     /**  5 bit(s). */
+
+        // From STRUCT_C
+        uint8_t  MAXBFRAMES;                /**  3 bit(s). */
+        uint8_t  MULTIRES;                  /**  1 bit(s). */
+
+        // From EntryPoint Layer for Advanced Profile
+        uint8_t BROKEN_LINK;
+        uint8_t CLOSED_ENTRY;
+
+        uint8_t PANSCAN_FLAG;
+        uint8_t REFDIST_FLAG;
+        uint8_t LOOPFILTER;
+        uint8_t FASTUVMC;
+
+        uint8_t EXTENDED_MV;
+        uint8_t DQUANT;
+        uint8_t VSTRANSFORM;
+        uint8_t OVERLAP;
+
+        uint8_t QUANTIZER;
+        uint8_t EXTENDED_DMV;
+        uint8_t RANGE_MAPY_FLAG;
+        uint8_t RANGE_MAPY;
+
+        uint8_t RANGE_MAPUV_FLAG;
+        uint8_t RANGE_MAPUV;
+
+        // From Picture Header
+        uint8_t  RANGERED;                  /**  1 bit(s). */
+        uint8_t  RNDCTRL;                   /**  1 bit(s), rcv specific. */
+
+        // REFDIST is present only in field-interlaced mode on I/I, I/P, P/I, P/P frames
+        // From Canmore, looks like this needs to be propagated to following B frames
+        uint8_t  REFDIST;
+        uint8_t  INTCOMPFIELD;              /**  ? bit(s)? */
+        uint8_t  LUMSCALE2;                 /**  6 bit(s). */
+        uint8_t  LUMSHIFT2;                 /**  6 bit(s). */
+
+        uint8_t bp_raw[VC1_MAX_BITPLANE_CHUNKS];
+        uint8_t res_1;
+
+        // From SequenceLayerHeader, EntryPointHeader or Struct_A
+        uint16_t width;
+        uint16_t height;
+        uint16_t widthMB;
+        uint16_t heightMB;
+
+#ifdef VBP
+        uint8_t COLOR_FORMAT_FLAG;
+        uint8_t MATRIX_COEF;
+        uint8_t SYNCMARKER;
+        uint8_t ASPECT_RATIO_FLAG;
+        uint8_t ASPECT_RATIO;
+        uint8_t ASPECT_HORIZ_SIZE;
+        uint8_t ASPECT_VERT_SIZE;
+        vc1_hrd_state hrd_initial_state;
+#endif
+
+    } vc1_metadata_t;
+
+    /** This structure represents the sequence header for advanced profile. */
+    typedef struct
+    {
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned BITRTQ_POSTPROC:5;
+                unsigned FRMRTQ_POSTPROC:3;
+                unsigned COLORDIFF_FORMAT:2;
+                unsigned LEVEL:3;
+                unsigned PROFILE:2;
+                unsigned pad:17;
+            } seq_flags;
+#else
+            struct
+            {
+                unsigned pad:17;
+                unsigned PROFILE:2;
+                unsigned LEVEL:3;
+                unsigned COLORDIFF_FORMAT:2;
+                unsigned FRMRTQ_POSTPROC:3;
+                unsigned BITRTQ_POSTPROC:5;
+            } seq_flags;
+#endif
+            uint32_t flags;
+        };
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned DISPLAY_EXT:1;
+                unsigned PSF:1;
+                unsigned RESERVED:1;
+                unsigned FINTERPFLAG:1;
+                unsigned TFCNTRFLAG:1;
+                unsigned INTERLACE:1;
+                unsigned PULLDOWN:1;
+                unsigned MAX_CODED_HEIGHT:12;
+                unsigned MAX_CODED_WIDTH:12;
+                unsigned POSTPROCFLAG:1;
+            } seq_max_size;
+#else
+            struct
+            {
+                unsigned POSTPROCFLAG:1;
+                unsigned MAX_CODED_WIDTH:12;
+                unsigned MAX_CODED_HEIGHT:12;
+                unsigned PULLDOWN:1;
+                unsigned INTERLACE:1;
+                unsigned TFCNTRFLAG:1;
+                unsigned FINTERPFLAG:1;
+                unsigned RESERVED:1;
+                unsigned PSF:1;
+                unsigned DISPLAY_EXT:1;
+            } seq_max_size;
+#endif
+            uint32_t max_size;
+        };
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned ASPECT_RATIO_FLAG:1;
+                unsigned DISP_VERT_SIZE:14;
+                unsigned DISP_HORIZ_SIZE:14;
+                unsigned pad:3;
+            } seq_disp_size;
+#else
+            struct
+            {
+                unsigned pad:3;
+                unsigned DISP_HORIZ_SIZE:14;
+                unsigned DISP_VERT_SIZE:14;
+                unsigned ASPECT_RATIO_FLAG:1;
+            } seq_disp_size;
+#endif
+            uint32_t disp_size;
+        };
+
+        uint8_t ASPECT_RATIO;   // 4 bits
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned ASPECT_VERT_SIZE:8;
+                unsigned ASPECT_HORIZ_SIZE:8;
+                unsigned pad:16;
+            } seq_aspect_size;
+#else
+            struct
+            {
+                unsigned pad:16;
+                unsigned ASPECT_HORIZ_SIZE:8;
+                unsigned ASPECT_VERT_SIZE:8;
+            } seq_aspect_size;
+#endif
+            uint32_t aspect_size;
+        };
+
+        uint8_t FRAMERATE_FLAG; // 1b
+        uint8_t FRAMERATEIND;   // 1b
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned FRAMERATEDR:4;
+                unsigned FRAMERATENR:8;
+                unsigned pad:20;
+            } seq_framerate_fraction;
+#else
+            struct
+            {
+                unsigned pad:20;
+                unsigned FRAMERATENR:8;
+                unsigned FRAMERATEDR:4;
+            } seq_framerate_fraction;
+#endif
+            uint32_t framerate_fraction;
+        };
+
+        uint16_t FRAMERATEEXP;      // 16b
+        uint8_t COLOR_FORMAT_FLAG; // 1b
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned MATRIX_COEF:8;
+                unsigned TRANSFER_CHAR:8;
+                unsigned COLOR_PRIM:8;
+                unsigned pad:8;
+            } seq_color_format;
+#else
+            struct
+            {
+                unsigned pad:8;
+                unsigned COLOR_PRIM:8;
+                unsigned TRANSFER_CHAR:8;
+                unsigned MATRIX_COEF:8;
+            } seq_color_format;
+#endif
+            uint32_t color_format;
+        };
+
+        uint8_t HRD_PARAM_FLAG;         // 1b
+        uint8_t HRD_NUM_LEAKY_BUCKETS;  // 5b
+        // No need to parse remaining items - not needed so far
+    } vc1_SequenceLayerHeader;
+
+    /** This structure represents metadata for struct c. */
+    typedef struct
+    {
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned res6:1;
+                unsigned FINTERPFLAG:1;
+                unsigned QUANTIZER:2;
+                unsigned MAXBFRAMES:3;
+                unsigned RANGERED:1;
+                unsigned SYNCMARKER:1;
+                unsigned OVERLAP:1;
+                unsigned res5:1;
+                unsigned VSTRANSFORM:1;
+                unsigned DQUANT:2;
+                unsigned EXTENDED_MV:1;
+                unsigned FASTUVMC:1;
+                unsigned res4:1;
+                unsigned MULTIRES:1;
+                unsigned res3:1;
+                unsigned LOOPFILTER:1;
+                unsigned BITRTQ_POSTPROC:5;
+                unsigned FRMRTQ_POSTPROC:3;
+                unsigned PROFILE:4;
+            } struct_c;
+#else
+            struct
+            {
+                unsigned PROFILE:4;
+                unsigned FRMRTQ_POSTPROC:3;
+                unsigned BITRTQ_POSTPROC:5;
+                unsigned LOOPFILTER:1;
+                unsigned res3:1;
+                unsigned MULTIRES:1;
+                unsigned res4:1;
+                unsigned FASTUVMC:1;
+                unsigned EXTENDED_MV:1;
+                unsigned DQUANT:2;
+                unsigned VSTRANSFORM:1;
+                unsigned res5:1;
+                unsigned OVERLAP:1;
+                unsigned SYNCMARKER:1;
+                unsigned RANGERED:1;
+                unsigned MAXBFRAMES:3;
+                unsigned QUANTIZER:2;
+                unsigned FINTERPFLAG:1;
+                unsigned res6:1;
+            } struct_c;
+#endif
+            uint32_t struct_c_rcv;
+        };
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned VERT_SIZE:16;
+                unsigned HORIZ_SIZE:16;
+            } struct_a;
+#else
+            struct
+            {
+                unsigned HORIZ_SIZE:16;
+                unsigned VERT_SIZE:16;
+            } struct_a;
+#endif
+            uint32_t struct_a_rcv;
+        };
+
+    } vc1_RcvSequenceHeader;
+
+    /** This structure represents metadata for entry point layers. */
+    typedef struct
+    {
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned QUANTIZER:2;
+                unsigned OVERLAP:1;
+                unsigned VSTRANSFORM:1;
+                unsigned DQUANT:2;
+                unsigned EXTENDED_MV:1;
+                unsigned FASTUVMC:1;
+                unsigned LOOPFILTER:1;
+                unsigned REFDIST_FLAG:1;
+                unsigned PANSCAN_FLAG:1;
+                unsigned CLOSED_ENTRY:1;
+                unsigned BROKEN_LINK:1;
+                unsigned pad1:19;
+            } ep_flags;
+#else
+            struct
+            {
+                unsigned pad1:19;
+                unsigned BROKEN_LINK:1;
+                unsigned CLOSED_ENTRY:1;
+                unsigned PANSCAN_FLAG:1;
+                unsigned REFDIST_FLAG:1;
+                unsigned LOOPFILTER:1;
+                unsigned FASTUVMC:1;
+                unsigned EXTENDED_MV:1;
+                unsigned DQUANT:2;
+                unsigned VSTRANSFORM:1;
+                unsigned OVERLAP:1;
+                unsigned QUANTIZER:2;
+            } ep_flags;
+#endif
+            uint32_t flags;
+        };
+
+        // Skipping HRD data because it is not needed for our processing
+
+        union
+        {
+#ifndef MFDBIGENDIAN
+            struct
+            {
+                unsigned CODED_HEIGHT:12;
+                unsigned CODED_WIDTH:12;
+                unsigned pad2:8;
+            } ep_size;
+#else
+            struct
+            {
+                unsigned pad2:8;
+                unsigned CODED_WIDTH:12;
+                unsigned CODED_HEIGHT:12;
+            } ep_size;
+#endif
+            uint32_t size;
+        };
+
+        uint8_t  CODED_SIZE_FLAG;           /**  1 bit(s). */
+        uint8_t  EXTENDED_DMV;              /**  1 bit(s). */
+        uint8_t  RANGE_MAPY_FLAG;           /**  1 bit(s). */
+        uint8_t  RANGE_MAPY;                /**  3 bit(s). */
+        uint8_t  RANGE_MAPUV_FLAG;          /**  1 bit(s). */
+        uint8_t  RANGE_MAPUV;               /**  3 bit(s). */
+    } vc1_EntryPointHeader;
+
+    /** This structure represents metadata for slice and picture layers. */
+    typedef struct
+    {
+        /* Slice layer. */
+        uint16_t SLICE_ADDR;                /**  9 bit(s). */
+
+        /* Picture layer for simple or main profile. */
+        uint8_t  RANGEREDFRM;               /**  1 bit(s). */
+        uint8_t  PTYPE;                     /**  4 bit(s)? */
+        int8_t   BFRACTION_NUM;             /**  ? bit(s). */
+        int16_t  BFRACTION_DEN;             /**  ? bit(s). */
+        uint8_t  PQINDEX;                   /**  5 bit(s). */
+        uint8_t  HALFQP;                    /**  1 bit(s). */
+        uint8_t  PQUANTIZER;                /**  1 bit(s). */
+        uint8_t  MVRANGE;                   /**  3 bit(s)? */
+        uint8_t  MVMODE;                    /**  4 bit(s)? */
+        uint8_t  MVMODE2;                   /**  3 bit(s)? */
+        uint8_t  LUMSCALE;                  /**  6 bit(s). */
+        uint8_t  LUMSHIFT;                  /**  6 bit(s). */
+        uint8_t  MVTAB;                     /**  2 bit(s). */
+        uint8_t  CBPTAB;                    /**  2 bit(s). */
+        uint8_t  TTMBF;                     /**  1 bit(s). */
+        uint8_t  TTFRM;                     /**  2 bit(s). */
+        uint8_t  TRANSACFRM;                /**  2 bit(s)? */
+        uint8_t  TRANSACFRM2;               /**  2 bit(s)? */
+        uint8_t  TRANSDCTAB;                /**  1 bit(s). */
+
+        /* Picture layer for advanced profile. */
+        uint8_t  FCM;                       /**  2 bit(s)? */
+        uint8_t  FPTYPE;                    /**  3 bit(s). */
+        uint8_t  TFCNTR;                    /**  8 bit(s) */
+        uint8_t  RPTFRM;                    /**  2 bit(s) */
+        uint8_t  TFF;                       /**  1 bit(s). */
+        uint8_t  RFF;                    	/**  1 bit(s) */
+        uint8_t  RNDCTRL;                   /**  1 bit(s). */
+        uint8_t  UVSAMP;                    /**  1 bit(s). */
+        uint8_t  POSTPROC;                  /**  2 bit(s). */
+        uint8_t  CONDOVER;                  /**  2 bit(s)? */
+        uint8_t  DMVRANGE;                  /**  ? bit(s)? */
+        uint8_t  MV4SWITCH;                 /**  1 bit(s). */
+        uint8_t  INTCOMP;                   /**  1 bit(s). */
+        uint8_t  MBMODETAB;                 /**  2 bit(s). */
+        uint8_t  MV2BPTAB;                  /**  2 bit(s). */
+        uint8_t  MV4BPTAB;                  /**  2 bit(s). */
+        uint8_t  NUMREF;                    /**  1 bit(s). */
+        uint8_t  REFFIELD;                  /**  1 bit(s). */
+
+        /* PAN SCAN */
+        uint8_t  PS_PRESENT;                /**  1 bit(s). */
+        uint8_t number_of_pan_scan_window;	/** 4 max. */
+        viddec_vc1_pan_scan_window_t PAN_SCAN_WINDOW[VIDDEC_PANSCAN_MAX_OFFSETS];
+
+        /* VOPDQUANT. */
+        uint8_t  PQDIFF;                    /**  3 bit(s). */
+        uint8_t  ABSPQ;                     /**  5 bit(s). */
+        uint8_t  DQUANTFRM;                 /**  1 bit(s). */
+        uint8_t  DQPROFILE;                 /**  2 bit(s). */
+        uint8_t  DQSBEDGE;                  /**  2 bit(s). */
+        uint8_t  DQBILEVEL;                 /**  1 bit(s). */
+
+        /* Others. */
+        uint8_t  PTypeField1;
+        uint8_t  PTypeField2;
+        uint32_t PQUANT;
+        uint8_t  CurrField;
+        uint8_t  BottomField;
+        uint32_t UniformQuant;
+
+#ifdef VBP
+        uint8_t  raw_MVTYPEMB;
+        uint8_t  raw_DIRECTMB;
+        uint8_t  raw_SKIPMB;
+        uint8_t  raw_ACPRED;
+        uint8_t  raw_FIELDTX;
+        uint8_t  raw_OVERFLAGS;
+        uint8_t  raw_FORWARDMB;
+
+        vc1_Bitplane MVTYPEMB;
+        vc1_Bitplane DIRECTMB;
+        vc1_Bitplane SKIPMB;
+        vc1_Bitplane ACPRED;
+        vc1_Bitplane FIELDTX;
+        vc1_Bitplane OVERFLAGS;
+        vc1_Bitplane FORWARDMB;
+        uint32_t  ALTPQUANT;
+        uint8_t		DQDBEDGE;
+#endif
+
+    } vc1_PictureLayerHeader;
+
+    /*@}*/
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus. */
+
+#endif /* _VC1PARSE_COMMON_DEFS_H_. */
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c b/mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c
new file mode 100755
index 0000000..896e18c
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_common_tables.c
@@ -0,0 +1,198 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Contains tables for VLC decoding of syntax elements in simple
+//  or main profile of VC-1 bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/* Lookup from a decoded MVMODE code index to the VC1_MVMODE_* constant,
+ * low-quantizer variant.  NOTE(review): same five entries as
+ * VC1_MVMODE_HIGH_TBL in a different order; the index derivation is done
+ * by the caller - confirm against the MVMODE parsing code. */
+const uint8_t VC1_MVMODE_LOW_TBL[] =
+{
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_1MV,
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+/* Lookup from a decoded MVMODE code index to the VC1_MVMODE_* constant,
+ * high-quantizer variant (see VC1_MVMODE_LOW_TBL for the counterpart). */
+const uint8_t VC1_MVMODE_HIGH_TBL[] =
+{
+    VC1_MVMODE_1MV,
+    VC1_MVMODE_MIXED_MV,
+    VC1_MVMODE_HPEL_1MV,
+    VC1_MVMODE_HPELBI_1MV,
+    VC1_MVMODE_INTENSCOMP
+};
+
+/* Bitplane-coding-mode (IMODE) VLC table, consumed by
+ * vc1_DecodeHuffmanOne().  Layout: [0] max code length in bits,
+ * [1] number of sub-tables, then the sub-table sizes; after that, for
+ * each code length, a count of codes of that length followed by
+ * (codeword, decoded value) pairs; a count of -1 terminates the table. */
+const int32_t VC1_BITPLANE_IMODE_TBL[] =
+{
+    4, /* max bits */
+    1, /* total subtables */
+    4, /* subtable sizes */
+
+    0, /* 1-bit codes */
+    2, /* 2-bit codes */
+    2, VC1_BITPLANE_NORM2_MODE,
+    3, VC1_BITPLANE_NORM6_MODE,
+    3, /* 3-bit codes */
+    1, VC1_BITPLANE_DIFF2_MODE,
+    2, VC1_BITPLANE_ROWSKIP_MODE,
+    3, VC1_BITPLANE_COLSKIP_MODE,
+    2, /* 4-bit codes */
+    0, VC1_BITPLANE_RAW_MODE,
+    1, VC1_BITPLANE_DIFF6_MODE,
+    -1
+};
+
+/* This VLC table is used for decoding of k in bitplane. */
+/* This VLC table is used for decoding of k in bitplane.
+ * Layout matches vc1_DecodeHuffmanOne(): [max bits][subtable count]
+ * [subtable sizes...] then, per code length, a count followed by
+ * (codeword, value) pairs; -1 terminates. */
+const int32_t VC1_BITPLANE_K_TBL[] =
+{
+    13, /* max bits */
+    2,  /* total subtables */
+    6,7,/* subtable sizes */
+
+    1, /* 1-bit codes */
+    1,       0 ,
+    0, /* 2-bit codes */
+    0, /* 3-bit codes */
+    6, /* 4-bit codes */
+    2, 1,    3, 2,     4, 4,    5, 8,
+    6, 16,   7, 32,
+    0, /* 5-bit codes */
+    1, /* 6-bit codes */
+    (3 << 1)| 1,     63,
+    0, /* 7-bit codes */
+    15, /* 8-bit codes */
+    0, 3,    1, 5,    2, 6,    3, 9,
+    4, 10,   5, 12,   6, 17,   7, 18,
+    8, 20,   9, 24,   10, 33,  11, 34,
+    12, 36,  13, 40,  14, 48,
+    6, /* 9-bit codes */
+    (3 << 4)| 7,    31,
+    (3 << 4)| 6,    47,
+    (3 << 4)| 5,    55,
+    (3 << 4)| 4,    59,
+
+    (3 << 4)| 3,    61,
+    (3 << 4)| 2,    62,
+    20, /* 10-bit codes */
+    (1 << 6)| 11,  11,
+    (1 << 6)|  7,  7 ,
+    (1 << 6)| 13,  13,
+    (1 << 6)| 14,  14,
+
+    (1 << 6)| 19,  19,
+    (1 << 6)| 21,  21,
+    (1 << 6)| 22,  22,
+    (1 << 6)| 25,  25,
+
+    (1 << 6)| 26,  26,
+    (1 << 6)| 28,  28,
+    (1 << 6)|  3,  35,
+    (1 << 6)|  5,  37,
+
+    (1 << 6)|  6,  38,
+    (1 << 6)|  9,  41,
+    (1 << 6)| 10,  42,
+    (1 << 6)| 12,  44,
+
+    (1 << 6)| 17,  49,
+    (1 << 6)| 18,  50,
+    (1 << 6)| 20,  52,
+    (1 << 6)| 24,  56,
+    0,  /* 11-bit codes */
+    0,  /* 12-bit codes */
+    15, /* 13-bit codes */
+    (3 << 8)| 14,  15,
+    (3 << 8)| 13,  23,
+    (3 << 8)| 12,  27,
+    (3 << 8)| 11,  29,
+
+    (3 << 8)| 10,  30,
+    (3 << 8)|  9,  39,
+    (3 << 8)|  8,  43,
+    (3 << 8)|  7,  45,
+
+    (3 << 8)|  6,  46,
+    (3 << 8)|  5,  51,
+    (3 << 8)|  4,  53,
+    (3 << 8)|  3,  54,
+
+    (3 << 8)|  2,  57,
+    (3 << 8)|  1,  58,
+    (3 << 8)|  0,  60,
+    -1
+};
+
+/* This VLC table is used for decoding of BFRACTION. */
+/* This VLC table is used for decoding of BFRACTION.
+ * Layout matches vc1_DecodeHuffmanPair(): [max bits][subtable count]
+ * [subtable sizes...] then, per code length, a count followed by
+ * (codeword, numerator, denominator) triples; -1 terminates.  The two
+ * escape codes map to VC1_BFRACTION_INVALID and VC1_BFRACTION_BI. */
+const int32_t VC1_BFRACTION_TBL[] =
+{
+    7,        /* max bits */
+    2,        /* total subtables */
+    3,4,    /* subtable sizes */
+    0,        /* 1-bit codes */
+    0,        /* 2-bit codes */
+    7,        /* 3-bit codes */
+    0x00,1,2,    0x01,1,3,    0x02,2,3,    0x03,1,4,
+    0x04,3,4,    0x05,1,5,    0x06,2,5,
+    0,        /* 4-bit codes */
+    0,        /* 5-bit codes */
+    0,        /* 6-bit codes */
+    16,    /* 7-bit codes */
+    0x70, 3,5,    0x71, 4,5,    0x72, 1,6,    0x73, 5,6,
+    0x74, 1,7,    0x75, 2,7,    0x76, 3,7,    0x77, 4,7,
+    0x78, 5,7,    0x79, 6,7,    0x7A, 1,8,    0x7B, 3,8,
+    0x7C, 5,8,    0x7D, 7,8,
+    0x7E, VC1_BFRACTION_INVALID,VC1_BFRACTION_INVALID,
+    0x7F, VC1_BFRACTION_BI, VC1_BFRACTION_BI,
+
+    -1
+};
+
+/* This table is used for VLC decoding of REFDIST. */
+/* This table is used for VLC decoding of REFDIST.
+ * Layout matches vc1_DecodeHuffmanOne(); the codewords form a unary-style
+ * prefix code (0b0x, 0b110, 0b1110, ...) mapping to values 0..16. */
+const int32_t VC1_REFDIST_TBL[] =
+{
+    16, /* Max bits. */
+    3, /* Total sub-tables. */
+    5, 6, 5, /* Sub-table sizes. */
+
+    0, /* 1-bit codes. */
+    3, /* 2-bit codes. */
+    0,  0,       1,  1,       2,  2,
+    1, /* 3-bit codes. */
+    6,  3,
+    1, /* 4-bit codes. */
+    14,  4,
+    1, /* 5-bit codes. */
+    30,  5,
+    1, /* 6-bit codes. */
+    62,  6,
+    1, /* 7-bit codes. */
+    126,  7,
+    1, /* 8-bit codes. */
+    254,  8,
+    1, /* 9-bit codes. */
+    510,  9,
+    1, /* 10-bit codes. */
+    1022, 10,
+    1, /* 11-bit codes. */
+    2046, 11,
+    1, /* 12-bit codes. */
+    4094, 12,
+    1, /* 13-bit codes. */
+    8190, 13,
+    1, /* 14-bit codes. */
+    16382, 14,
+    1, /* 15-bit codes. */
+    32766, 15,
+    1, /* 16-bit codes. */
+    65534, 16,
+    -1  /* end of table. */
+};
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_huffman.c b/mixvbp/vbp_plugin/vc1/vc1parse_huffman.c
new file mode 100755
index 0000000..5dc9b4d
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_huffman.c
@@ -0,0 +1,97 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VLC syntax elements within VC-1 bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*----------------------------------------------------------------------------*/
+
+/* Decode one VLC syntax element from the bitstream.
+ * Table layout: [0] max code length in bits, [1] sub-table count, then
+ * the sub-table sizes (their sum must equal max bits); after that, for
+ * each code length, a count of codes of that length followed by
+ * (codeword, value) pairs, with a count of -1 terminating the table.
+ *
+ * ctxt         - bitstream parser context consumed by VC1_GET_BITS9.
+ * pDst         - receives the decoded value when a codeword matches.
+ * pDecodeTable - VLC table in the layout above.
+ * Returns VC1_STATUS_OK, or VC1_STATUS_PARSE_ERROR when the sub-table
+ * sizes do not sum to the declared max bits (malformed table).
+ *
+ * NOTE(review): if no codeword matches within maxBits, the function
+ * still returns VC1_STATUS_OK and leaves *pDst unwritten - confirm the
+ * callers tolerate that. */
+vc1_Status vc1_DecodeHuffmanOne(void* ctxt, int32_t *pDst, const int32_t *pDecodeTable)
+{
+    uint32_t tempValue;
+    const int32_t *pTable = pDecodeTable;
+    vc1_Status status = VC1_STATUS_OK;
+    int32_t i, j, maxBits, loopCount, totalBits, value;
+
+    maxBits = *pTable++;
+    loopCount = *pTable++;
+    totalBits = 0;
+    for (i = 0; i < loopCount; i++)
+        totalBits += *pTable++;
+
+    /* Sanity check: sub-table sizes must cover every code length. */
+    if (totalBits != maxBits)
+        return VC1_STATUS_PARSE_ERROR;
+
+    /* Extend the candidate codeword one bit at a time and compare it
+     * against all codewords of the current length. */
+    value = 0;
+    for (i = 0; i < maxBits; i++)
+    {
+        VC1_GET_BITS9(1, tempValue);
+        value = (value << 1) | tempValue;
+        loopCount = *pTable++;
+        if (loopCount == -1)  /* End-of-table sentinel. */
+            break;
+        for (j = 0; j < loopCount; j++)
+        {
+            if (value == *pTable++)
+            {
+                *pDst = *pTable;  /* Matched: pTable now points at the value. */
+                return status;
+            }
+            else
+                pTable++;  /* Skip the value half of the non-matching pair. */
+        }
+    }
+
+    return status;
+}
+
+/*----------------------------------------------------------------------------*/
+
+/* Decode one VLC syntax element whose decoded result is a pair (e.g.
+ * BFRACTION numerator/denominator).  Same table layout as
+ * vc1_DecodeHuffmanOne(), except each entry is a (codeword, first,
+ * second) triple instead of a pair.
+ *
+ * ctxt         - bitstream parser context consumed by VC1_GET_BITS9.
+ * pFirst       - receives the first decoded value on a match.
+ * pSecond      - receives the second decoded value on a match.
+ * pDecodeTable - VLC table in triple layout.
+ * Returns VC1_STATUS_OK, or VC1_STATUS_PARSE_ERROR when the sub-table
+ * sizes do not sum to the declared max bits (malformed table).
+ *
+ * NOTE(review): as with vc1_DecodeHuffmanOne(), a failure to match any
+ * codeword still returns VC1_STATUS_OK with the outputs unwritten. */
+vc1_Status vc1_DecodeHuffmanPair(void* ctxt, const int32_t *pDecodeTable,
+                                 int8_t *pFirst, int16_t *pSecond)
+{
+    uint32_t tempValue;
+    const int32_t *pTable = pDecodeTable;
+    vc1_Status status = VC1_STATUS_OK;
+    int32_t i, j, maxBits, loopCount, totalBits, value;
+
+    maxBits = *pTable++;
+    loopCount = *pTable++;
+    totalBits = 0;
+    for (i = 0; i < loopCount; i++)
+        totalBits += *pTable++;
+
+    /* Sanity check: sub-table sizes must cover every code length. */
+    if (totalBits != maxBits)
+        return VC1_STATUS_PARSE_ERROR;
+
+    /* Extend the candidate codeword one bit at a time and compare it
+     * against all codewords of the current length. */
+    value = 0;
+    for (i = 0; i < maxBits; i++)
+    {
+        VC1_GET_BITS9(1, tempValue);
+        value = (value << 1) | tempValue;
+        loopCount = *pTable++;
+        if (loopCount == -1)  /* End-of-table sentinel. */
+            break;
+        for (j = 0; j < loopCount; j++)
+        {
+            if (value == *pTable++)
+            {
+                *pFirst = *pTable++;   /* Entry is (code, first, second). */
+                *pSecond = *pTable;
+                return status;
+            }
+            else
+                pTable += 2;  /* Skip both values of the non-matching triple. */
+        }
+    }
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ipic.c b/mixvbp/vbp_plugin/vc1/vc1parse_ipic.c
new file mode 100755
index 0000000..3d8192a
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_ipic.c
@@ -0,0 +1,101 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for progressive I picture in simple
+//  or main profile bitstream or progressive BI picture in main profile
+//  bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses progressive I picture for simple
+ * or main profile bitstream or progressive BI picture in main profile
+ * bitstream.  This parser starts after PTYPE was parsed but stops before
+ * parsing of macroblock layer.
+ * Table 16 of SMPTE 421M after processing up to PTYPE for I picture.
+ * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse a progressive I (simple/main profile) or BI (main profile)
+ * picture header, starting after PTYPE and stopping before the
+ * macroblock layer.  Returns VC1_STATUS_OK or a parse-error status
+ * propagated from the helpers. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7.
+    For each I or BI frame, RND shall be set to 1 */
+    if (md->PROFILE != VC1_PROFILE_ADVANCED)
+    {
+        picLayerHeader->RNDCTRL = md->RNDCTRL | 1 ;
+        md->RNDCTRL = picLayerHeader->RNDCTRL;
+    }
+
+
+    /* A BI frame carries a BFRACTION field that must decode to the BI
+     * escape code; any other value is a bitstream error. */
+    if (picLayerHeader->PTYPE == VC1_BI_FRAME)
+    {
+        if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                                            &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN))
+                != VC1_STATUS_OK)
+        {
+            return status;
+        }
+        if (picLayerHeader->BFRACTION_DEN != VC1_BFRACTION_BI)
+            return VC1_STATUS_PARSE_ERROR;
+    }
+
+    VC1_GET_BITS9(7, tempValue); /* BF. */
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* HALFQP is only present for low quantizer indices. */
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else picLayerHeader->HALFQP=0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    /* MVRANGE but only for main profile. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->MULTIRES == 1 && picLayerHeader->PTYPE != VC1_BI_FRAME)
+    {
+        VC1_GET_BITS9(2, tempValue); /* RESPIC. */
+    }
+
+    /* TRANSACFRM is a 1/2-bit VLC: 0 -> 0; 1x -> x + 2. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    /* TRANSACFRM2 uses the same 1/2-bit VLC shape. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c
new file mode 100755
index 0000000..4e98d96
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_ipic_adv.c
@@ -0,0 +1,256 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for progressive I or BI picture in
+//  advanced profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses progressive I or BI picture for
+ * advanced profile bitstream.
+ * Table 18 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse an advanced-profile progressive I/BI picture header from after
+ * POSTPROC to just before the macroblock layer: ACPRED bitplane,
+ * optional CONDOVER (+ OVERFLAGS bitplane), AC/DC transform tables and
+ * VOPDQUANT.  Returns VC1_STATUS_OK or a propagated error. */
+vc1_Status vc1_ParsePictureHeader_ProgressiveIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* CONDOVER is a 1/2-bit VLC mapped to a tri-state flag:
+     * 0 -> NONE, 10 -> ALL, 11 -> SOME (SOME carries an extra
+     * OVERFLAGS bitplane). */
+    if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+        if (picLayerHeader->CONDOVER)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+            if (! picLayerHeader->CONDOVER)
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+            else
+            {
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+                if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                                 md->widthMB,
+                                                 md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK)
+                {
+                    return status;
+                }
+            }
+        }
+        else
+            picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+    }
+
+    /* TRANSACFRM is a 1/2-bit VLC: 0 -> 0; 1x -> x + 2. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    status = vc1_VOPDQuant(ctxt, pInfo);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses interlace I or BI frame for
+ * advanced profile bitstream.
+ * Table 82 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse an advanced-profile interlace I/BI frame header from after
+ * POSTPROC to just before the macroblock layer.  Identical to the
+ * progressive variant except for the additional FIELDTX bitplane. */
+vc1_Status vc1_ParsePictureHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* FIELDTX bitplane is specific to interlace frames. */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_FIELDTX)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_ACPRED)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* CONDOVER: 0 -> NONE, 10 -> ALL, 11 -> SOME (+ OVERFLAGS bitplane). */
+    if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+        if (picLayerHeader->CONDOVER)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+            if (! picLayerHeader->CONDOVER)
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+            else
+            {
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+                if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                                 md->widthMB,
+                                                 md->heightMB, BPP_OVERFLAGS)) != VC1_STATUS_OK)
+                {
+                    return status;
+                }
+            }
+        }
+        else
+            picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+    }
+
+    /* TRANSACFRM is a 1/2-bit VLC: 0 -> 0; 1x -> x + 2. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    status = vc1_VOPDQuant(ctxt, pInfo);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses interlace I or BI field for
+ * advanced profile bitstream.
+ * Table 87 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse an advanced-profile interlace I/BI FIELD header from after
+ * BFRACTION to just before the macroblock layer.  Bitplanes here span
+ * only half the frame height ((heightMB+1)/2) since they cover a single
+ * field.  Returns VC1_STATUS_OK or a propagated error. */
+vc1_Status vc1_ParseFieldHeader_InterlaceIpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    // Reset MVMODE when the second field is an I picture
+    // to avoid carrying forward the mvmode values from previous field
+    // especially the intensity compensation value
+    picLayerHeader->MVMODE = 0;
+
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK) {
+        DEB("Error parsing I field \n");
+        return status;
+    }
+
+    /* HALFQP is only present for low quantizer indices. */
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else
+        picLayerHeader->HALFQP = 0;
+
+    if (md->QUANTIZER == 1) {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    if (md->POSTPROCFLAG == 1)
+        VC1_GET_BITS9(2, tempValue); /* POSTPROC. */
+
+    /* ACPRED bitplane covers one field: half the frame height. */
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, (md->heightMB+1)/2, BPP_ACPRED)) !=
+            VC1_STATUS_OK)
+    {
+        DEB("Error parsing I field \n");
+        return status;
+    }
+
+    /* CONDOVER: 0 -> NONE, 10 -> ALL, 11 -> SOME (+ OVERFLAGS bitplane). */
+    if ((md->OVERLAP == 1) && (picLayerHeader->PQUANT <= 8))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+        if (picLayerHeader->CONDOVER)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->CONDOVER);
+            if (! picLayerHeader->CONDOVER)
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_ALL;
+            else
+            {
+                picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_SOME;
+
+                if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                                 md->widthMB,
+                                                 (md->heightMB+1)/2, BPP_OVERFLAGS)) !=
+                        VC1_STATUS_OK)
+                {
+                    DEB("Error parsing I field \n");
+                    return status;
+                }
+            }
+        }
+        else
+            picLayerHeader->CONDOVER = VC1_CONDOVER_FLAG_NONE;
+    }
+
+    /* TRANSACFRM is a 1/2-bit VLC: 0 -> 0; 1x -> x + 2. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+    if (picLayerHeader->TRANSACFRM2)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM2);
+        picLayerHeader->TRANSACFRM2 += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    status = vc1_VOPDQuant(ctxt, pInfo);
+    if (status != VC1_STATUS_OK) {
+        DEB("Error parsing I field \n");
+        return status;
+    }
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c b/mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c
new file mode 100755
index 0000000..55373b4
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_mv_com.c
@@ -0,0 +1,82 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 syntax elements MVRANGE and DMVRANGE.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element MVRANGE, which exists for main and advanced profiles.
+ *------------------------------------------------------------------------------
+ */
+
+/* Decode the MVRANGE unary-style VLC when EXTENDED_MV is set:
+ * 0 -> 0, 10 -> 1, 110 -> 2, 111 -> 3.  When EXTENDED_MV is not set,
+ * MVRANGE is forced to 0.  Always returns VC1_STATUS_OK. */
+vc1_Status vc1_MVRangeDecode(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (md->EXTENDED_MV == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+        if (picLayerHeader->MVRANGE)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+            if (picLayerHeader->MVRANGE)
+            {
+                VC1_GET_BITS9(1, picLayerHeader->MVRANGE);
+                picLayerHeader->MVRANGE += 1;
+            }
+            picLayerHeader->MVRANGE += 1;
+        }
+    }
+    else
+        picLayerHeader->MVRANGE = 0;
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element DMVRANGE.
+ *------------------------------------------------------------------------------
+ */
+
+/* Decode the DMVRANGE unary-style VLC when EXTENDED_DMV is set:
+ * 0 -> NONE, 10 -> HORIZONTAL, 110 -> VERTICAL,
+ * 111 -> HORIZONTAL_VERTICAL.  When EXTENDED_DMV is not set, DMVRANGE
+ * is left untouched.  Always returns VC1_STATUS_OK. */
+vc1_Status vc1_DMVRangeDecode(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    if (md->EXTENDED_DMV == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+        if (picLayerHeader->DMVRANGE == 0)
+            picLayerHeader->DMVRANGE = VC1_DMVRANGE_NONE;
+        else
+        {
+            VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+            if (picLayerHeader->DMVRANGE == 0)
+                picLayerHeader->DMVRANGE = VC1_DMVRANGE_HORIZONTAL_RANGE;
+            else
+            {
+                VC1_GET_BITS9(1, picLayerHeader->DMVRANGE);
+                if (picLayerHeader->DMVRANGE == 0)
+                    picLayerHeader->DMVRANGE = VC1_DMVRANGE_VERTICAL_RANGE;
+                else
+                {
+                    picLayerHeader->DMVRANGE =
+                        VC1_DMVRANGE_HORIZONTAL_VERTICAL_RANGE;
+                }
+            }
+        }
+    }
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c
new file mode 100755
index 0000000..87f8426
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com.c
@@ -0,0 +1,101 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for simple and main profiles.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses the picture header for simple or
+ * main profile down to macroblock layer.
+ * Table 16 of SMPTE 421M after processing up to PTYPE for I picture.
+ * Table 17 of SMPTE 421M after processing up to PTYPE for BI picture.
+ * Table 19 of SMPTE 421M after processing up to PTYPE for P picture.
+ * Table 21 of SMPTE 421M after processing up to PTYPE for B picture.
+ *------------------------------------------------------------------------------
+ */
+
+/* Parse a simple/main-profile picture header down to the macroblock
+ * layer: detects skipped frames, reads INTERPFRM/FRMCNT/RANGEREDFRM,
+ * decodes PTYPE and dispatches to the per-type progressive parsers.
+ * Returns VC1_STATUS_OK, or VC1_STATUS_PARSE_ERROR for an unknown
+ * PTYPE, or an error propagated from the per-type parser. */
+vc1_Status vc1_ParsePictureHeader(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t tempValue;
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+    int32_t result;
+
+    if (md->PROFILE != VC1_PROFILE_ADVANCED)
+    {
+        // As per spec, for main/simple profile, if the size of the coded picture is <= 1B,
+        // it shall be treated as a skipped frame.
+        // In content with skipped frames, the data is "00".
+        // rcv to vc1 conversion process adds an additional byte (0x80) to the picture, hence
+        // the data looks like "00 80"
+        // Hence if data is <= 2B, we will consider it skipped (check for 16+1b, if it fails, the frame is skipped).
+        result = viddec_pm_peek_bits(ctxt, &tempValue, 17);
+        if (result == -1)
+        {
+            picLayerHeader->PTYPE = VC1_SKIPPED_FRAME;
+            return status;
+        }
+    }
+
+    if (md->FINTERPFLAG == 1)
+    {
+        VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */
+    }
+
+    VC1_GET_BITS9(2, tempValue); /* FRMCNT. */
+
+    if (md->RANGERED == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->RANGEREDFRM);
+    }
+
+    /* PTYPE: with no B frames it is a single bit (0 = I, 1 = P); with
+     * B frames it is a VLC: 1 -> P, 01 -> I, 00 -> B (or BI when the
+     * next 7 bits peek as all ones). */
+    if (md->MAXBFRAMES == 0)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+        if (picLayerHeader->PTYPE == 0)
+            picLayerHeader->PTYPE = VC1_I_FRAME;
+        else
+            picLayerHeader->PTYPE = VC1_P_FRAME;
+    }
+    else
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+        if (picLayerHeader->PTYPE == 0)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+            if (picLayerHeader->PTYPE == 0) {
+                picLayerHeader->PTYPE = VC1_B_FRAME; /* Or VC1_BI_FRAME. */
+                /* if peek(7) = 0b1111111 then ptype = bi */
+                VC1_PEEK_BITS( 7, tempValue );
+                if ( tempValue == 0x7f )
+                    picLayerHeader->PTYPE = VC1_BI_FRAME;
+            } else
+                picLayerHeader->PTYPE = VC1_I_FRAME;
+        }
+        else
+            picLayerHeader->PTYPE = VC1_P_FRAME;
+    }
+
+    /* Dispatch to the per-picture-type header parser. */
+    if (picLayerHeader->PTYPE == VC1_I_FRAME ||
+            picLayerHeader->PTYPE == VC1_BI_FRAME)
+    {
+        status = vc1_ParsePictureHeader_ProgressiveIpicture(ctxt, pInfo);
+    }
+    else if (picLayerHeader->PTYPE == VC1_P_FRAME)
+        status = vc1_ParsePictureHeader_ProgressivePpicture(ctxt, pInfo);
+    else if (picLayerHeader->PTYPE == VC1_B_FRAME)
+        status = vc1_ParsePictureHeader_ProgressiveBpicture(ctxt, pInfo);
+    else
+        status = VC1_STATUS_PARSE_ERROR;
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c
new file mode 100755
index 0000000..dcfdb26
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_pic_com_adv.c
@@ -0,0 +1,404 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for advanced profile.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses the picture header for advanced
+ * profile down to POSTPROC syntax element.
+ * Table 18 of SMPTE 421M for progressive I or BI picture.
+ * Table 20 of SMPTE 421M for progressive P picture.
+ * Table 22 of SMPTE 421M for progressive B picture.
+ * Table 23 of SMPTE 421M for skipped picture.
+ * Table 82 of SMPTE 421M for interlace I or BI frame.
+ * Table 83 of SMPTE 421M for interlace P frame.
+ * Table 84 of SMPTE 421M for interlace B frame.
+ *
+ * ctxt:  opaque bitstream-reader handle consumed by the VC1_GET_BITS* macros
+ *        and by the sub-parsers.
+ * pInfo: parser state; pInfo->metadata supplies the sequence/entry-point
+ *        flags that gate optional syntax elements, pInfo->picLayerHeader
+ *        receives the parsed values.
+ * Returns VC1_STATUS_OK on success, VC1_STATUS_PARSE_ERROR when FCM codes a
+ * field-interlaced picture (not handled by this path), or any error status
+ * propagated from a sub-parser.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t i = 0;
+    uint32_t tempValue;     /* scratch for elements that are read but ignored */
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t number_of_pan_scan_window;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* FCM (frame coding mode) is only coded for interlace-capable sequences.
+       VLC: 0 = progressive, 10 = frame interlace, 11 = field interlace. */
+    if (md->INTERLACE == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->FCM);
+        if (picLayerHeader->FCM)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->FCM);
+            if (picLayerHeader->FCM)
+            {
+                /* Field-interlaced pictures are expected to go through
+                   vc1_ParseFieldHeader_Adv() instead of this function. */
+                picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE;
+                return VC1_STATUS_PARSE_ERROR;
+            }
+            else
+                picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE;
+        }
+        else
+            picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+    }
+    else
+        picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+
+
+    /* PTYPE VLC: 0 = P, 10 = B, 110 = I, 1110 = BI, 1111 = skipped. */
+    VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+    if (picLayerHeader->PTYPE)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+        if (picLayerHeader->PTYPE)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+            if (picLayerHeader->PTYPE)
+            {
+                VC1_GET_BITS9(1, picLayerHeader->PTYPE);
+                if (picLayerHeader->PTYPE)
+                    picLayerHeader->PTYPE = VC1_SKIPPED_FRAME;
+                else
+                    picLayerHeader->PTYPE = VC1_BI_FRAME;
+            }
+            else
+                picLayerHeader->PTYPE = VC1_I_FRAME;
+        }
+        else
+            picLayerHeader->PTYPE = VC1_B_FRAME;
+    }
+    else
+        picLayerHeader->PTYPE = VC1_P_FRAME;
+
+    /* TFCNTR is only coded for non-skipped frames when TFCNTRFLAG is set. */
+    if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME)
+    {
+        if (md->TFCNTRFLAG)
+        {
+            VC1_GET_BITS9(8, picLayerHeader->TFCNTR); /* TFCNTR. */
+        }
+    }
+
+    /* Pulldown: RPTFRM for progressive/PSF streams, TFF+RFF otherwise. */
+    if (md->PULLDOWN)
+    {
+        if ((md->INTERLACE == 0) || (md->PSF == 1))
+        {
+            VC1_GET_BITS9(2, picLayerHeader->RPTFRM);
+        }
+        else
+        {
+            VC1_GET_BITS9(1, picLayerHeader->TFF);
+            VC1_GET_BITS9(1, picLayerHeader->RFF);
+        }
+    }
+
+    if (md->PANSCAN_FLAG == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT); /* PS_PRESENT. */
+        if (picLayerHeader->PS_PRESENT == 1)
+        {
+            /* Window count depends on interlacing and on the repeat-field /
+               repeat-frame values parsed above. */
+            if ((md->INTERLACE == 1) &&
+                    (md->PSF == 0))
+            {
+                if (md->PULLDOWN == 1)
+                    number_of_pan_scan_window = 2 + picLayerHeader->RFF;
+                else
+                    number_of_pan_scan_window = 2;
+            }
+            else
+            {
+                if (md->PULLDOWN == 1)
+                    number_of_pan_scan_window = 1 + picLayerHeader->RPTFRM;
+                else
+                    number_of_pan_scan_window = 1;
+            }
+            picLayerHeader->number_of_pan_scan_window = number_of_pan_scan_window;
+
+            for (i = 0; i < number_of_pan_scan_window; i++)
+            {
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */
+            }
+        }
+    }
+
+    /* The remaining elements (RNDCTRL .. POSTPROC) are absent for skipped
+       frames. */
+    if (picLayerHeader->PTYPE != VC1_SKIPPED_FRAME)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->RNDCTRL);
+        md->RNDCTRL =  picLayerHeader->RNDCTRL;
+
+        if ((md->INTERLACE == 1) ||
+                (picLayerHeader->FCM != VC1_FCM_PROGRESSIVE))
+        {
+            VC1_GET_BITS9(1, picLayerHeader->UVSAMP);
+        }
+
+        if ((md->FINTERPFLAG == 1) &&
+                (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE))
+        {
+            VC1_GET_BITS9(1, tempValue); /* INTERPFRM. */
+        }
+
+        /* Progressive B frames carry their BFRACTION here. */
+        if ((picLayerHeader->PTYPE == VC1_B_FRAME) &&
+                (picLayerHeader->FCM == VC1_FCM_PROGRESSIVE))
+        {
+            if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                                                &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN))
+                    != VC1_STATUS_OK)
+            {
+                return status;
+            }
+        }
+
+        /* PQINDEX drives PQUANT via vc1_CalculatePQuant(); HALFQP is only
+           coded for PQINDEX <= 8. */
+        VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+        if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+            return status;
+
+        if (picLayerHeader->PQINDEX <= 8)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+        }
+        else
+            picLayerHeader->HALFQP = 0;
+
+        if (md->QUANTIZER == 1)
+        {
+            VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+            picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+        }
+
+        if (md->POSTPROCFLAG == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+        }
+    }
+
+    /* Hand off to the picture-type-specific remainder of the header. */
+    return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses the picture header for advanced
+ * profile down to BFRACTION syntax element.
+ * Table 85 of SMPTE 421M.
+ *
+ * Parses the field picture layer header of a field-interlaced frame: FCM
+ * (which must code field interlace here), the field-pair type FPTYPE,
+ * pulldown/pan-scan data, REFDIST and BFRACTION, then selects the current
+ * field's PTYPE/BottomField from CurrField and dispatches to the
+ * field-type-specific parser.
+ * Returns VC1_STATUS_OK on success or an error status.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint32_t i = 0;
+    vc1_Status status = VC1_STATUS_OK;
+    uint32_t number_of_pan_scan_window;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* FCM VLC: 0 = progressive, 10 = frame interlace, 11 = field interlace.
+       Only field interlace is valid for a field header. */
+    VC1_GET_BITS9(1, picLayerHeader->FCM);
+    if (picLayerHeader->FCM)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->FCM);
+        if (picLayerHeader->FCM)
+            picLayerHeader->FCM = VC1_FCM_FIELD_INTERLACE;
+        else
+            picLayerHeader->FCM = VC1_FCM_FRAME_INTERLACE;
+    }
+    else
+        picLayerHeader->FCM = VC1_FCM_PROGRESSIVE;
+    if (picLayerHeader->FCM != VC1_FCM_FIELD_INTERLACE)
+        return VC1_STATUS_PARSE_ERROR;
+
+    /* FPTYPE (3 bits) codes the picture types of the field pair:
+       0..7 -> {I,I} {I,P} {P,I} {P,P} {B,B} {B,BI} {BI,B} {BI,BI}. */
+    VC1_GET_BITS9(3, picLayerHeader->FPTYPE);
+    if (picLayerHeader->FPTYPE == 0)
+    {
+        picLayerHeader->PTypeField1 = VC1_I_FRAME;
+        picLayerHeader->PTypeField2 = VC1_I_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 1)
+    {
+        picLayerHeader->PTypeField1 = VC1_I_FRAME;
+        picLayerHeader->PTypeField2 = VC1_P_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 2)
+    {
+        picLayerHeader->PTypeField1 = VC1_P_FRAME;
+        picLayerHeader->PTypeField2 = VC1_I_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 3)
+    {
+        picLayerHeader->PTypeField1 = VC1_P_FRAME;
+        picLayerHeader->PTypeField2 = VC1_P_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 4)
+    {
+        picLayerHeader->PTypeField1 = VC1_B_FRAME;
+        picLayerHeader->PTypeField2 = VC1_B_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 5)
+    {
+        picLayerHeader->PTypeField1 = VC1_B_FRAME;
+        picLayerHeader->PTypeField2 = VC1_BI_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 6)
+    {
+        picLayerHeader->PTypeField1 = VC1_BI_FRAME;
+        picLayerHeader->PTypeField2 = VC1_B_FRAME;
+    }
+    else if (picLayerHeader->FPTYPE == 7)
+    {
+        picLayerHeader->PTypeField1 = VC1_BI_FRAME;
+        picLayerHeader->PTypeField2 = VC1_BI_FRAME;
+    }
+
+    if (md->TFCNTRFLAG)
+    {
+        VC1_GET_BITS9(8, picLayerHeader->TFCNTR);
+    }
+
+    if (md->PULLDOWN == 1)
+    {
+        if (md->PSF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->RPTFRM);
+        }
+        else
+        {
+            VC1_GET_BITS9(1, picLayerHeader->TFF);
+            VC1_GET_BITS9(1, picLayerHeader->RFF);
+        }
+    } else
+        picLayerHeader->TFF = 1; /* TFF defaults to 1 when no pulldown info is coded */
+
+    if (md->PANSCAN_FLAG == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PS_PRESENT);
+        if (picLayerHeader->PS_PRESENT)
+        {
+            /* 2 windows per field pair, plus RFF more when pulldown is coded. */
+            if (md->PULLDOWN)
+                number_of_pan_scan_window = 2 + picLayerHeader->RFF;
+            else
+                number_of_pan_scan_window = 2;
+            picLayerHeader->number_of_pan_scan_window =number_of_pan_scan_window;
+
+            for (i = 0; i < number_of_pan_scan_window; i++)
+            {
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].hoffset); /* PS_HOFFSET. */
+                VC1_GET_BITS(18, picLayerHeader->PAN_SCAN_WINDOW[i].voffset); /* PS_VOFFSET. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].width); /* PS_WIDTH. */
+                VC1_GET_BITS(14, picLayerHeader->PAN_SCAN_WINDOW[i].height); /* PS_HEIGHT. */
+            }
+        }
+    }
+    VC1_GET_BITS9(1, md->RNDCTRL);
+
+#ifdef VBP
+    picLayerHeader->RNDCTRL = md->RNDCTRL;
+#endif
+
+    VC1_GET_BITS9(1, picLayerHeader->UVSAMP);
+
+    /* REFDIST is only coded for I/P field pairs (FPTYPE <= 3). */
+    if ((md->REFDIST_FLAG == 1) && (picLayerHeader->FPTYPE <= 3))
+    {
+        int32_t tmp;
+        if ((status = vc1_DecodeHuffmanOne(ctxt, &tmp,
+                                           VC1_REFDIST_TBL)) != VC1_STATUS_OK)
+        {
+            return status;
+        }
+        md->REFDIST = tmp;
+    } else if (md->REFDIST_FLAG == 0) {
+        md->REFDIST = 0;
+    }
+
+    /* B/BI field pairs (FPTYPE >= 4) carry BFRACTION. */
+    if ((picLayerHeader->FPTYPE >= 4) && (picLayerHeader->FPTYPE <= 7))
+    {
+        if ((status = vc1_DecodeHuffmanPair(ctxt, VC1_BFRACTION_TBL,
+                                            &picLayerHeader->BFRACTION_NUM, &picLayerHeader->BFRACTION_DEN)) !=
+                VC1_STATUS_OK)
+        {
+            return status;
+        }
+    }
+
+    /* Select the type and parity of the field being decoded now. */
+    if (picLayerHeader->CurrField == 0)
+    {
+        picLayerHeader->PTYPE = picLayerHeader->PTypeField1;
+        picLayerHeader->BottomField = (uint8_t) (1 - picLayerHeader->TFF);
+    }
+    else
+    {
+        picLayerHeader->BottomField = (uint8_t) (picLayerHeader->TFF);
+        picLayerHeader->PTYPE = picLayerHeader->PTypeField2;
+    }
+
+    return vc1_ParsePictureFieldHeader_Adv(ctxt, pInfo);
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  Dispatches to the parser that handles the remainder
+ * of the advanced-profile picture header (down to the macroblock layer),
+ * chosen by frame coding mode (FCM) and picture type.  Skipped pictures
+ * need no further parsing and yield VC1_STATUS_OK immediately; any
+ * unrecognized FCM/type combination yields VC1_STATUS_PARSE_ERROR.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureFieldHeader_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_PictureLayerHeader *hdr = &pInfo->picLayerHeader;
+    int picture_type;
+
+    /* For field-interlaced frames the effective type is the per-field type
+       of whichever field is currently being decoded. */
+    if (hdr->FCM == VC1_FCM_FIELD_INTERLACE)
+        picture_type = (hdr->CurrField == 0) ? hdr->PTypeField1
+                                             : hdr->PTypeField2;
+    else
+        picture_type = hdr->PTYPE;
+
+    switch (hdr->FCM)
+    {
+    case VC1_FCM_PROGRESSIVE:
+        if ((picture_type == VC1_I_FRAME) || (picture_type == VC1_BI_FRAME))
+            return vc1_ParsePictureHeader_ProgressiveIpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_P_FRAME)
+            return vc1_ParsePictureHeader_ProgressivePpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_B_FRAME)
+            return vc1_ParsePictureHeader_ProgressiveBpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_SKIPPED_FRAME)
+            return VC1_STATUS_OK;
+        break;
+
+    case VC1_FCM_FRAME_INTERLACE:
+        if ((picture_type == VC1_I_FRAME) || (picture_type == VC1_BI_FRAME))
+            return vc1_ParsePictureHeader_InterlaceIpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_P_FRAME)
+            return vc1_ParsePictureHeader_InterlacePpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_B_FRAME)
+            return vc1_ParsePictureHeader_InterlaceBpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_SKIPPED_FRAME)
+            return VC1_STATUS_OK;
+        break;
+
+    case VC1_FCM_FIELD_INTERLACE:
+        if ((picture_type == VC1_I_FRAME) || (picture_type == VC1_BI_FRAME))
+            return vc1_ParseFieldHeader_InterlaceIpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_P_FRAME)
+            return vc1_ParseFieldHeader_InterlacePpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_B_FRAME)
+            return vc1_ParseFieldHeader_InterlaceBpicture_Adv(ctxt, pInfo);
+        if (picture_type == VC1_SKIPPED_FRAME)
+            return VC1_STATUS_OK;
+        break;
+
+    default:
+        break;
+    }
+
+    return VC1_STATUS_PARSE_ERROR;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c b/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c
new file mode 100755
index 0000000..b921af0
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_ppic.c
@@ -0,0 +1,148 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for progressive P picture in simple
+//  or main profile bitstream.
+//
+*/
+
+#include "vc1parse.h"
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses progressive P picture for simple
+ * or main profile bitstream.  This parser starts after PTYPE was parsed but
+ * stops before parsing of macroblock layer.
+ * Table 19 of SMPTE 421M after processing up to PTYPE for P picture.
+ *
+ * ctxt:  opaque bitstream-reader handle used by the VC1_GET_BITS* macros.
+ * pInfo: parser state (metadata flags in, picture-layer fields out).
+ * Returns VC1_STATUS_OK or an error status from a sub-parser.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture(void* ctxt, vc1_Info *pInfo)
+{
+    uint8_t bit_count;
+    const uint8_t *table;   /* MVMODE VLC lookup, chosen by PQUANT */
+    uint32_t tempValue;     /* scratch for elements that are read but ignored */
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* rounding control is implied for simple and main profile, SMPTE 421M 8.3.7.
+       It toggles back and forth between 0 and 1 for P frames */
+    if (md->PROFILE != VC1_PROFILE_ADVANCED)
+    {
+        picLayerHeader->RNDCTRL = md->RNDCTRL ^ 1 ;
+        md->RNDCTRL = picLayerHeader->RNDCTRL;
+    }
+
+    /* PQINDEX drives PQUANT via vc1_CalculatePQuant(); HALFQP is only
+       coded for PQINDEX <= 8. */
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else picLayerHeader->HALFQP=0;
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    /* MVRANGE. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->MULTIRES == 1)
+        VC1_GET_BITS9(2, tempValue); /* RESPIC. */
+
+    /* MVMODE VLC (1 to 4 bits); the code-to-mode table depends on PQUANT. */
+    if (picLayerHeader->PQUANT > 12)
+        table = VC1_MVMODE_LOW_TBL;
+    else
+        table = VC1_MVMODE_HIGH_TBL;
+
+    bit_count = 0;
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    while ((picLayerHeader->MVMODE == 0) && (bit_count < 3))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+        bit_count++;
+    }
+    if (bit_count == 3)
+        bit_count += picLayerHeader->MVMODE;
+    picLayerHeader->MVMODE = table[bit_count];
+
+    /* Intensity compensation: a second MVMODE VLC plus luma scale/shift. */
+    if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+    {
+        bit_count = 0;
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+        while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+        {
+            VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+            bit_count++;
+        }
+        if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+            bit_count++;
+        picLayerHeader->MVMODE2 = table[bit_count];
+        VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+        VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+    }
+    else
+#ifdef VBP
+        picLayerHeader->MVMODE2 = 0;
+#else
+        picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+    /* MVTYPEMB bitplane is only present when mixed-MV mode is in effect. */
+    if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) ||
+            ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) &&
+             (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)))
+    {
+        if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                         md->widthMB, md->heightMB, BPP_MVTYPEMB))
+                != VC1_STATUS_OK)
+        {
+            return status;
+        }
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+    VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM VLC: 0 -> 0, 10 -> 2, 11 -> 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c b/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c
new file mode 100755
index 0000000..99edf6f
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_ppic_adv.c
@@ -0,0 +1,367 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 picture layer for progressive P picture in advanced
+//  profile bitstream.
+//
+*/
+#include <vbp_common.h>
+#include "vc1parse.h"
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses progressive P picture for advanced
+ * profile bitstream.
+ * Table 20 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *
+ * ctxt:  opaque bitstream-reader handle used by the VC1_GET_BITS* macros.
+ * pInfo: parser state (metadata flags in, picture-layer fields out).
+ * Returns VC1_STATUS_OK or an error status from a sub-parser.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_ProgressivePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint8_t bit_count;
+    const uint8_t *table;   /* MVMODE VLC lookup, chosen by PQUANT */
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* MVRANGE. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* MVMODE VLC (1 to 4 bits); the code-to-mode table depends on PQUANT. */
+    if (picLayerHeader->PQUANT > 12)
+        table = VC1_MVMODE_LOW_TBL;
+    else
+        table = VC1_MVMODE_HIGH_TBL;
+
+    bit_count = 0;
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    while ((picLayerHeader->MVMODE == 0) && (bit_count < 3))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+        bit_count++;
+    }
+    if (bit_count == 3)
+        bit_count += picLayerHeader->MVMODE;
+    picLayerHeader->MVMODE = table[bit_count];
+
+    /* Intensity compensation: a second MVMODE VLC plus luma scale/shift,
+       also stored in the metadata (LUMSCALE2/LUMSHIFT2). */
+    if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+    {
+        bit_count = 0;
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+        while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+        {
+            VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+            bit_count++;
+        }
+        if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+            bit_count++;
+        picLayerHeader->MVMODE2 = table[bit_count];
+        VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+        VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+        md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+        md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+    }
+    else
+#ifdef VBP
+        picLayerHeader->MVMODE2 = 0;
+#else
+        picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+    /* MVTYPEMB bitplane is only present when mixed-MV mode is in effect. */
+    if ((picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV) ||
+            ((picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP) &&
+             (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)))
+    {
+        if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                         md->widthMB, md->heightMB, BPP_MVTYPEMB)) !=
+                VC1_STATUS_OK)
+        {
+            return status;
+        }
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB);
+    VC1_GET_BITS9(2, picLayerHeader->CBPTAB);
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM VLC: 0 -> 0, 10 -> 2, 11 -> 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses interlace P frame for advanced
+ * profile bitstream.
+ * Table 83 of SMPTE 421M after processing up to POSTPROC by
+ * vc1_ParsePictureHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *
+ * ctxt:  opaque bitstream-reader handle used by the VC1_GET_BITS* macros.
+ * pInfo: parser state (metadata flags in, picture-layer fields out).
+ * Returns VC1_STATUS_OK or an error status from a sub-parser.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParsePictureHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* MVRANGE. */
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* DMVRANGE. */
+    if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    VC1_GET_BITS9(1, picLayerHeader->MV4SWITCH);
+
+    /* Intensity compensation: luma scale/shift follow the INTCOMP flag and
+       are also stored in the metadata (LUMSCALE2/LUMSHIFT2). */
+    VC1_GET_BITS9(1, picLayerHeader->INTCOMP);
+    if (picLayerHeader->INTCOMP)
+    {
+        VC1_GET_BITS9(6, picLayerHeader->LUMSCALE);
+        VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT);
+        md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+        md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+    }
+
+    if ((status = vc1_DecodeBitplane(ctxt, pInfo,
+                                     md->widthMB, md->heightMB, BPP_SKIPMB)) != VC1_STATUS_OK)
+    {
+        return status;
+    }
+
+    /* Coding-table selectors. */
+    VC1_GET_BITS9(2, picLayerHeader->MBMODETAB);
+    VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+    VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+    VC1_GET_BITS9(2, picLayerHeader->MV2BPTAB); /* 2MVBPTAB. */
+
+    /* 4MVBPTAB is only coded when MV4SWITCH is set. */
+    if (picLayerHeader->MV4SWITCH == 1)
+    {
+        VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+    }
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM VLC: 0 -> 0, 10 -> 2, 11 -> 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Parse picture layer.  This function parses interlace P field for advanced
+ * profile bitstream.
+ * Table 88 of SMPTE 421M after processing up to BFRACTION by
+ * vc1_ParseFieldHeader_Adv() but stopping before processing of macroblock
+ * layer.
+ *
+ * ctxt:  opaque bitstream-reader handle used by the VC1_GET_BITS* macros.
+ * pInfo: parser state (metadata flags in, picture-layer fields out).
+ * Returns VC1_STATUS_OK or an error status from a sub-parser.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_ParseFieldHeader_InterlacePpicture_Adv(void* ctxt, vc1_Info *pInfo)
+{
+    uint8_t bit_count;
+    const uint8_t *table;   /* MVMODE VLC lookup, chosen by PQUANT */
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+
+    /* PQINDEX drives PQUANT via vc1_CalculatePQuant(); HALFQP is only
+       coded for PQINDEX <= 8. */
+    VC1_GET_BITS9(5, picLayerHeader->PQINDEX);
+    if ((status = vc1_CalculatePQuant(pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (picLayerHeader->PQINDEX <= 8)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->HALFQP);
+    }
+    else
+        picLayerHeader->HALFQP = 0;
+
+
+    if (md->QUANTIZER == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->PQUANTIZER);
+        picLayerHeader->UniformQuant = picLayerHeader->PQUANTIZER;
+    }
+
+    if (md->POSTPROCFLAG == 1)
+    {
+        VC1_GET_BITS9(2, picLayerHeader->POSTPROC);
+    }
+
+    /* REFFIELD is only coded when NUMREF is 0. */
+    VC1_GET_BITS9(1, picLayerHeader->NUMREF);
+
+    if (picLayerHeader->NUMREF == 0)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->REFFIELD);
+    }
+
+    if ((status = vc1_MVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK) {
+        DEB("Error in vc1_MVRangeDecode \n");
+        return status;
+    }
+
+    if ((status = vc1_DMVRangeDecode(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    /* MVMODE VLC (up to 4 bits); the code-to-mode table depends on PQUANT.
+       This field-picture VLC differs from the progressive one: after three
+       zero bits a fourth bit selects between the last two table entries. */
+    if (picLayerHeader->PQUANT > 12)
+        table = VC1_MVMODE_LOW_TBL;
+    else
+        table = VC1_MVMODE_HIGH_TBL;
+
+    bit_count = 0;
+    VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+    while ((picLayerHeader->MVMODE == 0) && (bit_count < 2))
+    {
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+        bit_count++;
+    }
+    if (bit_count == 2 && picLayerHeader->MVMODE == 0) {
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE);
+
+        if ( picLayerHeader->MVMODE == 1)
+            bit_count ++;
+
+        bit_count++;
+    }
+    picLayerHeader->MVMODE = table[bit_count];
+
+    if (picLayerHeader->MVMODE == VC1_MVMODE_INTENSCOMP)
+    {
+        bit_count = 0;
+        VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+        while ((picLayerHeader->MVMODE2 == 0) && (bit_count < 2))
+        {
+            VC1_GET_BITS9(1, picLayerHeader->MVMODE2);
+            bit_count++;
+        }
+        if (bit_count == 2 && picLayerHeader->MVMODE2 == 0)
+            bit_count++;
+        picLayerHeader->MVMODE2 = table[bit_count];
+
+        /* INTCOMPFIELD VLC: 1 = both fields, 01 = bottom field, 00 = top. */
+        VC1_GET_BITS9(1, md->INTCOMPFIELD);
+        if (md->INTCOMPFIELD == 1)
+            md->INTCOMPFIELD = VC1_INTCOMP_BOTH_FIELD;
+        else
+        {
+            VC1_GET_BITS9(1, md->INTCOMPFIELD);
+            if (md->INTCOMPFIELD == 1)
+                md->INTCOMPFIELD = VC1_INTCOMP_BOTTOM_FIELD;
+            else
+                md->INTCOMPFIELD = VC1_INTCOMP_TOP_FIELD;
+        }
+        VC1_GET_BITS9(6, picLayerHeader->LUMSCALE); /* LUMSCALE1. */
+        VC1_GET_BITS9(6, picLayerHeader->LUMSHIFT); /* LUMSHIFT1. */
+        /* Bottom-field-only compensation stores its parameters in the
+           second (LUMSCALE2/LUMSHIFT2) metadata slots. */
+        if ( md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD ) {
+            md->LUMSCALE2 = picLayerHeader->LUMSCALE;
+            md->LUMSHIFT2 = picLayerHeader->LUMSHIFT;
+        }
+        /* Both-field compensation carries a second scale/shift pair. */
+        if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD)
+        {
+            VC1_GET_BITS9(6, md->LUMSCALE2);
+            VC1_GET_BITS9(6, md->LUMSHIFT2);
+        }
+    }
+    else
+#ifdef VBP
+        picLayerHeader->MVMODE2 = 0;
+#else
+        picLayerHeader->MVMODE2 = picLayerHeader->MVMODE;
+#endif
+
+    VC1_GET_BITS9(3, picLayerHeader->MBMODETAB);
+
+    /* IMVTAB is 3 bits when NUMREF is set, 2 bits otherwise. */
+    if (picLayerHeader->NUMREF)
+    {
+        VC1_GET_BITS9(3, picLayerHeader->MVTAB); /* IMVTAB. */
+    }
+    else
+    {
+        VC1_GET_BITS9(2, picLayerHeader->MVTAB); /* IMVTAB. */
+    }
+
+    VC1_GET_BITS9(3, picLayerHeader->CBPTAB); /* ICBPTAB. */
+
+    /* VBP builds zero MVMODE2 in the non-INTENSCOMP path above, so they
+       gate 4MVBPTAB on MVMODE instead of MVMODE2. */
+#ifdef VBP
+    if (picLayerHeader->MVMODE == VC1_MVMODE_MIXED_MV)
+#else
+    if (picLayerHeader->MVMODE2 == VC1_MVMODE_MIXED_MV)
+#endif
+    {
+        VC1_GET_BITS9(2, picLayerHeader->MV4BPTAB); /* 4MVBPTAB. */
+    }
+
+    if ((status = vc1_VOPDQuant(ctxt, pInfo)) != VC1_STATUS_OK)
+        return status;
+
+    if (md->VSTRANSFORM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TTMBF);
+        if (picLayerHeader->TTMBF == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->TTFRM);
+        }
+    }
+
+    /* TRANSACFRM VLC: 0 -> 0, 10 -> 2, 11 -> 3. */
+    VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+    if (picLayerHeader->TRANSACFRM == 1)
+    {
+        VC1_GET_BITS9(1, picLayerHeader->TRANSACFRM);
+        picLayerHeader->TRANSACFRM += 2;
+    }
+    picLayerHeader->TRANSACFRM2 = 0;  /* not read here; cleared explicitly */
+
+    VC1_GET_BITS9(1, picLayerHeader->TRANSDCTAB);
+
+    /* Skip parsing of macroblock layer. */
+
+    return status;
+}
diff --git a/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c b/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c
new file mode 100755
index 0000000..95b556c
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/vc1parse_vopdq.c
@@ -0,0 +1,130 @@
+/* ///////////////////////////////////////////////////////////////////////
+//
+//               INTEL CORPORATION PROPRIETARY INFORMATION
+//  This software is supplied under the terms of a license agreement or
+//  nondisclosure agreement with Intel Corporation and may not be copied
+//  or disclosed except in accordance with the terms of that agreement.
+//        Copyright (c) 2008 Intel Corporation. All Rights Reserved.
+//
+//  Description: Parses VC-1 syntax elements VOPDQUANT and DQUANT.
+//
+*/
+
+#include "vc1parse.h"
+
+#define VC1_UNDEF_PQUANT 0
+
+/* Implicit quantizer map: PQUANT = table[PQINDEX] for PQINDEX >= 9 when
+ * QUANTIZER == 0 (see vc1_CalculatePQuant below). Index 0 is a placeholder
+ * since PQINDEX is indexed from 1. Presumably taken from the SMPTE 421M
+ * implicit-quantizer table — TODO confirm table number against the spec. */
+static const uint8_t MapPQIndToQuant_Impl[] =
+{
+    VC1_UNDEF_PQUANT,
+    1,  2,  3,  4,  5, 6,   7,  8,
+    6,  7,  8,  9, 10, 11, 12, 13,
+    14, 15, 16, 17, 18, 19, 20, 21,
+    22, 23, 24, 25, 27, 29, 31
+};
+
+/*------------------------------------------------------------------------------
+ * Parse syntax element VOPDQuant as defined in Table 24 of SMPTE 421M.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_VOPDQuant(void* ctxt, vc1_Info *pInfo)
+{
+    vc1_Status status = VC1_STATUS_OK;
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* DQUANT == 0: VOPDQUANT is not present in the picture layer. */
+    if (md->DQUANT == 0)
+        return status;
+
+    if (md->DQUANT == 2)
+    {
+        /* DQUANT == 2: only PQDIFF (plus ABSPQ when PQDIFF == 7) is coded. */
+        VC1_GET_BITS9(3, picLayerHeader->PQDIFF);
+        if (picLayerHeader->PQDIFF == 7)
+        {
+            VC1_GET_BITS9(5, picLayerHeader->ABSPQ);
+        }
+    }
+    else
+    {
+        /* DQUANT == 1: DQUANTFRM selects whether this picture carries the
+         * full DQPROFILE / edge / bilevel syntax. */
+        VC1_GET_BITS9(1, picLayerHeader->DQUANTFRM);
+        if (picLayerHeader->DQUANTFRM == 1)
+        {
+            VC1_GET_BITS9(2, picLayerHeader->DQPROFILE);
+            if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_SNGLEDGES)
+            {
+                VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE);
+            }
+            else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_DBLEDGES)
+            {
+#ifdef VBP
+                VC1_GET_BITS9(2, picLayerHeader->DQDBEDGE);
+#else
+                VC1_GET_BITS9(2, picLayerHeader->DQSBEDGE); /* DQDBEDGE. */
+#endif
+            }
+            else if (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS)
+            {
+                VC1_GET_BITS9(1, picLayerHeader->DQBILEVEL);
+            }
+            /* PQDIFF/ABSPQ are absent only for ALLMBLKS with DQBILEVEL == 0
+             * (each macroblock then codes its own quantizer). */
+            if (! (picLayerHeader->DQPROFILE == VC1_DQPROFILE_ALLMBLKS &&
+                    picLayerHeader->DQBILEVEL == 0))
+            {
+                VC1_GET_BITS9(3, picLayerHeader->PQDIFF);
+                if (picLayerHeader->PQDIFF == 7)
+                {
+                    VC1_GET_BITS9(5, picLayerHeader->ABSPQ);
+                }
+            }
+        }
+    }
+#ifdef VBP
+    /* Derive ALTPQUANT: absolute value (ABSPQ) when PQDIFF == 7, otherwise a
+     * +1-biased delta from PQUANT.
+     * NOTE(review): in the DQUANT==1 / ALLMBLKS / DQBILEVEL==0 path PQDIFF is
+     * not parsed above, so a stale PQDIFF from a previous picture could feed
+     * this computation — verify callers reset picLayerHeader per picture. */
+    if ((picLayerHeader->DQUANTFRM == 1 && md->DQUANT == 1) || (md->DQUANT == 2))
+    {
+        if (picLayerHeader->PQDIFF == 7)
+        {
+            picLayerHeader->ALTPQUANT = picLayerHeader->ABSPQ;
+        }
+        else
+        {
+            picLayerHeader->ALTPQUANT = picLayerHeader->PQUANT + picLayerHeader->PQDIFF + 1;
+        }
+    }
+#endif
+    return status;
+}
+
+/*------------------------------------------------------------------------------
+ * Compute value for PQUANT syntax element that does not exist in bitstreams for
+ * progressive I and BI pictures.
+ *------------------------------------------------------------------------------
+ */
+
+vc1_Status vc1_CalculatePQuant(vc1_Info *pInfo)
+{
+    vc1_metadata_t *md = &pInfo->metadata;
+    vc1_PictureLayerHeader *picLayerHeader = &pInfo->picLayerHeader;
+
+    /* Default: PQUANT tracks PQINDEX directly with a uniform quantizer. */
+    picLayerHeader->PQUANT = picLayerHeader->PQINDEX;
+    picLayerHeader->UniformQuant = VC1_QUANTIZER_UNIFORM;
+
+    if (md->QUANTIZER == 0)
+    {
+        /* Implicit quantizer: PQINDEX >= 9 switches to the non-uniform
+         * quantizer and remaps PQINDEX through the implicit table. */
+        if (picLayerHeader->PQINDEX >= 9)
+        {
+            picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM;
+            picLayerHeader->PQUANT =
+                MapPQIndToQuant_Impl[picLayerHeader->PQINDEX];
+        }
+    }
+    else if (md->QUANTIZER == 2)
+    {
+        /* Non-uniform quantizer explicitly signalled for the whole frame. */
+        picLayerHeader->UniformQuant = VC1_QUANTIZER_NONUNIFORM;
+    }
+
+    return VC1_STATUS_OK;
+}
diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c
new file mode 100755
index 0000000..a9644d9
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/viddec_vc1_parse.c
@@ -0,0 +1,401 @@
+#include "viddec_parser_ops.h"  // For parser helper functions
+#include "vc1.h"                // For the parser structure
+#include "vc1parse.h"           // For vc1 parser helper functions
+#ifdef VBP
+#include "viddec_pm.h"
+#endif
+#define vc1_is_frame_start_code( ch )                                   \
+    (( vc1_SCField == ch ||vc1_SCSlice == ch || vc1_SCFrameHeader == ch ) ? 1 : 0)
+
+/* Parser init: reset reference-frame bookkeeping, intensity-compensation
+ * state and the picture-layer header. persist_mem is unused (persist_size
+ * is 0, see viddec_vc1_get_context_size). When preserve is non-zero, keep
+ * sequence/entry-point level state and clear only the bits below it;
+ * otherwise invalidate all start-code state and wipe the metadata. */
+#ifdef VBP
+void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#else
+static void viddec_vc1_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    int i;
+
+    (void) persist_mem; /* unused; idiomatic suppression instead of the self-assignment `x = x;` */
+
+    for (i = 0; i < VC1_NUM_REFERENCE_FRAMES; i++)
+    {
+        parser->ref_frame[i].id   = -1; /* first I frame checks that value */
+        parser->ref_frame[i].anchor[0] = 1;
+        parser->ref_frame[i].anchor[1] = 1;
+        parser->ref_frame[i].intcomp_top = 0;
+        parser->ref_frame[i].intcomp_bot = 0;
+        parser->ref_frame[i].tff = 0;
+    }
+
+    parser->intcomp_top[0] = 0;
+    parser->intcomp_bot[0] = 0;
+    parser->intcomp_top[1] = 0;
+    parser->intcomp_bot[1] = 0;
+    parser->is_reference_picture = false;
+
+    memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+
+    if (preserve)
+    {
+        /* Flush/preserve: keep seq/entry-point bits, drop everything below. */
+        parser->sc_seen &= VC1_EP_MASK;
+        parser->sc_seen_since_last_wkld &= VC1_EP_MASK;
+    }
+    else
+    {
+        /* Cold start: no valid start codes seen, metadata cleared. */
+        parser->sc_seen = VC1_SC_INVALID;
+        parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+        memset(&parser->info.metadata, 0, sizeof(parser->info.metadata));
+    }
+} // viddec_vc1_init
+
+/* Age the intensity-compensation state: the current values (index 0) become
+ * the previous values (index 1), and index 0 is cleared for the next frame. */
+static void vc1_swap_intcomp(vc1_viddec_parser_t *parser)
+{
+    parser->intcomp_top[1] = parser->intcomp_top[0];
+    parser->intcomp_top[0] = 0;
+    parser->intcomp_bot[1] = parser->intcomp_bot[0];
+    parser->intcomp_bot[0] = 0;
+} // vc1_swap_intcomp
+
+/* Top-level VC-1 syntax parser: consumes one start-code-delimited unit
+ * (sequence header, entry point, frame, field, slice, user data, or end of
+ * sequence), dispatches to the matching layer parser, and updates the
+ * start-code tracking state. Always returns VIDDEC_PARSE_SUCESS; the layer
+ * parser status is kept locally and not propagated. */
+#ifdef VBP
+uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#else
+static uint32_t viddec_vc1_parse(void *parent, void *ctxt)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    uint32_t sc=0x0;
+    int32_t ret=0, status=0;
+
+#ifdef VBP
+    /* This works only if there is one slice and no start codes */
+    /* A better fix would be to insert start codes if there aren't any. */
+    ret = viddec_pm_peek_bits(parent, &sc, 32);
+    if ((sc > 0x0100) && (sc < 0x0200)) /* a Start code will be in this range. */
+    {
+        ret = viddec_pm_get_bits(parent, &sc, 32);
+    }
+    else
+    {
+        /* In cases where we get a buffer with no start codes, we assume */
+        /* that this is a frame of data. We may have to fix this later. */
+        sc = vc1_SCFrameHeader;
+    }
+#else
+    ret = viddec_pm_get_bits(parent, &sc, 32);
+#endif
+    sc = sc & 0xFF; /* low byte of the start code identifies the unit type */
+    parser->is_frame_start = 0;
+    parser->is_second_start = 0;
+    DEB("START_CODE = %02x\n", sc);
+    switch ( sc )
+    {
+    case vc1_SCSequenceHeader:
+    {
+        uint32_t data;
+        /* A new sequence invalidates old anchors and metadata. */
+        parser->ref_frame[0].anchor[0] = 1;
+        parser->ref_frame[0].anchor[1] = 1;
+        parser->ref_frame[1].anchor[0] = 1;
+        parser->ref_frame[1].anchor[1] = 1;
+        memset( &parser->info.metadata, 0, sizeof(parser->info.metadata));
+        /* look if we have a rcv header for main or simple profile */
+        ret = viddec_pm_peek_bits(parent,&data ,2);
+
+        if (data == 3)
+        {
+            status = vc1_ParseSequenceLayer(parent, &parser->info);
+        }
+        else
+        {
+            status = vc1_ParseRCVSequenceLayer(parent, &parser->info);
+        }
+        parser->sc_seen = VC1_SC_SEQ;
+        parser->sc_seen_since_last_wkld |= VC1_SC_SEQ;
+#ifdef VBP
+        parser->start_code = VC1_SC_SEQ;
+        /* Stream carries no HRD leaky-bucket info: seed bucket 0 with a
+         * default rate chosen by profile/level.
+         * NOTE(review): values look like per-level maximum rates — confirm
+         * against the SMPTE 421M HRD annex. */
+        if (parser->info.metadata.HRD_NUM_LEAKY_BUCKETS == 0)
+        {
+            if (parser->info.metadata.PROFILE == VC1_PROFILE_SIMPLE)
+            {
+                switch(parser->info.metadata.LEVEL)
+                {
+                case 0:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 96000;
+                    break;
+                case 1:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 384000;
+                    break;
+                }
+            }
+            else if (parser->info.metadata.PROFILE == VC1_PROFILE_MAIN)
+            {
+                switch(parser->info.metadata.LEVEL)
+                {
+                case 0:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 2000000;
+                    break;
+                case 1:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 10000000;
+                    break;
+                case 2:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 20000000;
+                    break;
+                }
+            }
+            else if (parser->info.metadata.PROFILE == VC1_PROFILE_ADVANCED)
+            {
+                switch(parser->info.metadata.LEVEL)
+                {
+                case 0:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 2000000;
+                    break;
+                case 1:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 10000000;
+                    break;
+                case 2:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 20000000;
+                    break;
+                case 3:
+                    parser->info.metadata.hrd_initial_state.sLeakyBucket[0].HRD_RATE = 45000000;
+                    break;
+                }
+            }
+        }
+
+#endif
+        break;
+    }
+
+    case vc1_SCEntryPointHeader:
+    {
+        status = vc1_ParseEntryPointLayer(parent, &parser->info);
+        parser->sc_seen |= VC1_SC_EP;
+        // Clear all bits indicating data below ep header
+        parser->sc_seen &= VC1_EP_MASK;
+        parser->sc_seen_since_last_wkld |= VC1_SC_EP;
+#ifdef VBP
+        parser->start_code = VC1_SC_EP;
+#endif
+        break;
+    }
+
+    case vc1_SCFrameHeader:
+    {
+        memset(&parser->info.picLayerHeader, 0, sizeof(vc1_PictureLayerHeader));
+        status = vc1_ParsePictureLayer(parent, &parser->info);
+        /* Reference pictures (I/P, frame or first field) age the intensity
+         * compensation state. */
+        if ((parser->info.picLayerHeader.PTypeField1 == VC1_I_FRAME) ||
+                (parser->info.picLayerHeader.PTypeField1 == VC1_P_FRAME) ||
+                (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME) ||
+                (parser->info.picLayerHeader.PTYPE == VC1_P_FRAME))
+        {
+            vc1_swap_intcomp(parser);
+        }
+        parser->sc_seen |= VC1_SC_FRM;
+        // Clear all bits indicating data below frm header
+        parser->sc_seen &= VC1_FRM_MASK;
+        parser->sc_seen_since_last_wkld |= VC1_SC_FRM;
+        //vc1_start_new_frame ( parent, parser );
+
+        parser->is_frame_start = 1;
+        vc1_parse_emit_frame_start( parent, parser );
+#ifdef VBP
+        parser->start_code = VC1_SC_FRM;
+#endif
+        break;
+    }
+
+    case vc1_SCSlice:
+    {
+        status = vc1_ParseSliceLayer(parent, &parser->info);
+        parser->sc_seen_since_last_wkld |= VC1_SC_SLC;
+
+        vc1_parse_emit_current_slice( parent, parser );
+
+#ifdef VBP
+        parser->start_code = VC1_SC_SLC;
+#endif
+        break;
+    }
+
+    case vc1_SCField:
+    {
+        /* Second field of a field pair: reset the per-field picture-layer
+         * state before parsing (the rest persists from the first field). */
+        parser->info.picLayerHeader.SLICE_ADDR = 0;
+        parser->info.picLayerHeader.CurrField = 1;
+        parser->info.picLayerHeader.REFFIELD = 0;
+        parser->info.picLayerHeader.NUMREF = 0;
+        parser->info.picLayerHeader.MBMODETAB = 0;
+        parser->info.picLayerHeader.MV4SWITCH = 0;
+        parser->info.picLayerHeader.DMVRANGE = 0;
+        parser->info.picLayerHeader.MVTAB = 0;
+        parser->info.picLayerHeader.MVMODE = 0;
+        parser->info.picLayerHeader.MVRANGE = 0;
+#ifdef VBP
+        /* Clear raw-mode flags and bitplanes from the first field. */
+        parser->info.picLayerHeader.raw_MVTYPEMB = 0;
+        parser->info.picLayerHeader.raw_DIRECTMB = 0;
+        parser->info.picLayerHeader.raw_SKIPMB = 0;
+        parser->info.picLayerHeader.raw_ACPRED = 0;
+        parser->info.picLayerHeader.raw_FIELDTX = 0;
+        parser->info.picLayerHeader.raw_OVERFLAGS = 0;
+        parser->info.picLayerHeader.raw_FORWARDMB = 0;
+
+        memset(&(parser->info.picLayerHeader.MVTYPEMB), 0, sizeof(vc1_Bitplane));
+        memset(&(parser->info.picLayerHeader.DIRECTMB), 0, sizeof(vc1_Bitplane));
+        memset(&(parser->info.picLayerHeader.SKIPMB), 0, sizeof(vc1_Bitplane));
+        memset(&(parser->info.picLayerHeader.ACPRED), 0, sizeof(vc1_Bitplane));
+        memset(&(parser->info.picLayerHeader.FIELDTX), 0, sizeof(vc1_Bitplane));
+        memset(&(parser->info.picLayerHeader.OVERFLAGS), 0, sizeof(vc1_Bitplane));
+        memset(&(parser->info.picLayerHeader.FORWARDMB), 0, sizeof(vc1_Bitplane));
+
+        parser->info.picLayerHeader.ALTPQUANT = 0;
+        parser->info.picLayerHeader.DQDBEDGE = 0;
+#endif
+
+        status = vc1_ParseFieldLayer(parent, &parser->info);
+        /* NOTE(review): empty branch — the intcomp swap for second fields is
+         * deliberately disabled (see commented call below). */
+        if ((parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME) ||
+                (parser->info.picLayerHeader.PTypeField2 == VC1_P_FRAME))
+        {
+            //vc1_swap_intcomp(parser);
+        }
+        parser->sc_seen |= VC1_SC_FLD;
+        parser->sc_seen_since_last_wkld |= VC1_SC_FLD;
+
+        parser->is_second_start = 1;
+        vc1_parse_emit_second_field_start( parent, parser );
+#ifdef VBP
+        parser->start_code = VC1_SC_FLD;
+#endif
+        break;
+    }
+
+    case vc1_SCSequenceUser:
+    case vc1_SCEntryPointUser:
+    case vc1_SCFrameUser:
+    case vc1_SCSliceUser:
+    case vc1_SCFieldUser:
+    {/* Handle user data */
+        status = vc1_ParseAndAppendUserData(parent, sc); //parse and add items
+        parser->sc_seen_since_last_wkld |= VC1_SC_UD;
+#ifdef VBP
+        parser->start_code = VC1_SC_UD;
+#endif
+        break;
+    }
+
+    case vc1_SCEndOfSequence:
+    {
+        parser->sc_seen = VC1_SC_INVALID;
+        parser->sc_seen_since_last_wkld |= VC1_SC_INVALID;
+#ifdef VBP
+        parser->start_code = VC1_SC_INVALID;
+#endif
+        break;
+    }
+    default: /* Any other SC that is not handled */
+    {
+        DEB("SC = %02x - unhandled\n", sc );
+#ifdef VBP
+        parser->start_code = VC1_SC_INVALID;
+#endif
+        break;
+    }
+    }
+
+
+
+    return VIDDEC_PARSE_SUCESS;
+} // viddec_vc1_parse
+
+/**
+ * If a picture header was seen and the next start code is a sequence header,
+ * entry-point header, end of sequence or another frame header, this API
+ * returns frame done. If a sequence header and a frame header were not both
+ * seen before this point, the information needed for decode is incomplete
+ * and a not-decodable error is reported via codec_specific_errors.
+ */
+#ifdef VBP
+uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#else
+static uint32_t viddec_vc1_wkld_done(void *parent, void *ctxt, unsigned int next_sc, uint32_t *codec_specific_errors)
+#endif
+{
+    vc1_viddec_parser_t *parser = ctxt;
+    int ret = VIDDEC_PARSE_SUCESS;
+
+    (void) parent; /* unused; idiomatic suppression instead of the self-assignment `x = x;` */
+
+    switch (next_sc)
+    {
+    case vc1_SCFrameHeader:
+        /* First frame after a seq/entry-point header belongs to the current
+         * workload, so this is not yet "frame done". */
+        if (((parser->sc_seen_since_last_wkld & VC1_SC_EP) ||
+                (parser->sc_seen_since_last_wkld & VC1_SC_SEQ)) &&
+                (!(parser->sc_seen_since_last_wkld & VC1_SC_FRM)))
+        {
+            break;
+        }
+        // Deliberate fall-thru case
+    case vc1_SCEntryPointHeader:
+        /* Likewise, the first entry point after a sequence header stays in
+         * the current workload. */
+        if ((next_sc == vc1_SCEntryPointHeader) &&
+                (parser->sc_seen_since_last_wkld & VC1_SC_SEQ) &&
+                (!(parser->sc_seen_since_last_wkld & VC1_SC_EP)))
+        {
+            break;
+        }
+        // Deliberate fall-thru case
+    case vc1_SCSequenceHeader:
+    case vc1_SCEndOfSequence:
+    case VIDDEC_PARSE_EOS:
+    case VIDDEC_PARSE_DISCONTINUITY:
+        ret = VIDDEC_PARSE_FRMDONE;
+        // Set errors for progressive
+        if ((parser->sc_seen & VC1_SC_SEQ) && (parser->sc_seen & VC1_SC_FRM))
+            *codec_specific_errors = 0;
+        else
+            *codec_specific_errors |= VIDDEC_FW_WORKLOAD_ERR_NOTDECODABLE;
+        vc1_end_frame(parser);
+        parser->sc_seen_since_last_wkld = VC1_SC_INVALID;
+        // TODO: Need to check for interlaced
+        break;
+    default:
+        ret = VIDDEC_PARSE_SUCESS;
+        break;
+    } //switch
+    DEB("sc: 0x%x, sc_seen: 0x%x, sc_since_last_wkld:%d, error:%d, ret: %d\n",
+        next_sc, parser->sc_seen, parser->sc_seen_since_last_wkld,
+        *codec_specific_errors, ret);
+
+    return ret;
+} // viddec_vc1_wkld_done
+
+/* Report the memory the parser manager must allocate: one parser context,
+ * no persistent region. */
+#ifdef VBP
+void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
+#else
+static void viddec_vc1_get_context_size(viddec_parser_memory_sizes_t *size)
+#endif
+{
+    size->persist_size = 0;
+    size->context_size = sizeof(vc1_viddec_parser_t);
+} // viddec_vc1_get_context_size
+
+/* Query hook: non-zero when the most recent parse consumed a frame header. */
+#ifdef VBP
+uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#else
+static uint32_t viddec_vc1_is_start_frame(void *ctxt)
+#endif
+{
+    vc1_viddec_parser_t *p = (vc1_viddec_parser_t *) ctxt;
+    return p->is_frame_start;
+} // viddec_vc1_is_start_frame
+
+/* Publish the VC-1 parser entry points to the parser manager. */
+void viddec_vc1_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init           = viddec_vc1_init;
+    ops->parse_syntax   = viddec_vc1_parse;
+    ops->get_cxt_size   = viddec_vc1_get_context_size;
+    ops->is_wkld_done   = viddec_vc1_wkld_done;
+    ops->is_frame_start = viddec_vc1_is_start_frame;
+} // viddec_vc1_get_ops
+
diff --git a/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c b/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c
new file mode 100755
index 0000000..cf6fa7f
--- /dev/null
+++ b/mixvbp/vbp_plugin/vc1/viddec_vc1_workload.c
@@ -0,0 +1,960 @@
+/* Any workload management goes in this file */
+
+#include "viddec_fw_debug.h"
+#include "vc1.h"
+#include "vc1parse.h"
+#include "viddec_fw_workload.h"
+#include <auto_eas/gen4_mfd.h>
+#include "viddec_pm_utils_bstream.h"
+
+/* Map a VC-1 picture type (PTYPE values, vc1parse_common_defs.h) to the
+ * generic workload frame type used by viddec_workload.h. Unrecognized
+ * values map to VIDDEC_FRAME_TYPE_INVALID. */
+static inline uint32_t vc1_populate_frame_type(uint32_t vc1_frame_type)
+{
+    switch (vc1_frame_type)
+    {
+    case VC1_I_FRAME:
+        return VIDDEC_FRAME_TYPE_I;
+    case VC1_P_FRAME:
+        return VIDDEC_FRAME_TYPE_P;
+    case VC1_B_FRAME:
+        return VIDDEC_FRAME_TYPE_B;
+    case VC1_BI_FRAME:
+        return VIDDEC_FRAME_TYPE_BI;
+    case VC1_SKIPPED_FRAME:
+        return VIDDEC_FRAME_TYPE_SKIP;
+    default:
+        return VIDDEC_FRAME_TYPE_INVALID;
+    }
+} // vc1_populate_frame_type
+
+/* Copy parsed sequence/picture state into the workload's frame-attribute
+ * record: content size, frame type(s), frame counter, pulldown flags and
+ * pan-scan windows. */
+static void translate_parser_info_to_frame_attributes(void *parent, vc1_viddec_parser_t *parser)
+{
+    viddec_workload_t *wl = viddec_pm_get_header( parent );
+    viddec_frame_attributes_t *attrs = &wl->attrs;
+    vc1_Info *info = &parser->info;
+    unsigned win;
+
+    /* Coded content size is reconstructed as 2 * stored + 2. */
+    attrs->cont_size.height = info->metadata.height * 2 + 2;
+    attrs->cont_size.width  = info->metadata.width  * 2 + 2;
+
+    /* Field-interlace pictures carry two field types; otherwise only the
+     * frame-level PTYPE is meaningful. */
+    if (info->picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+    {
+        attrs->frame_type        = vc1_populate_frame_type(info->picLayerHeader.PTypeField1);
+        attrs->bottom_field_type = vc1_populate_frame_type(info->picLayerHeader.PTypeField2);
+    }
+    else
+    {
+        attrs->frame_type        = vc1_populate_frame_type(info->picLayerHeader.PTYPE);
+        attrs->bottom_field_type = VIDDEC_FRAME_TYPE_INVALID; /* no separate bottom field */
+    }
+
+    /* Frame counter. */
+    attrs->vc1.tfcntr = info->picLayerHeader.TFCNTR;
+
+    /* Pulldown flags: top-field-first, repeat-frame, repeat-first-field. */
+    attrs->vc1.tff    = info->picLayerHeader.TFF;
+    attrs->vc1.rptfrm = info->picLayerHeader.RPTFRM;
+    attrs->vc1.rff    = info->picLayerHeader.RFF;
+
+    /* Pan-scan windows. */
+    attrs->vc1.ps_present = info->picLayerHeader.PS_PRESENT;
+    attrs->vc1.num_of_pan_scan_windows = info->picLayerHeader.number_of_pan_scan_window;
+    for (win = 0; win < attrs->vc1.num_of_pan_scan_windows; win++)
+    {
+        attrs->vc1.pan_scan_window[win].hoffset = info->picLayerHeader.PAN_SCAN_WINDOW[win].hoffset;
+        attrs->vc1.pan_scan_window[win].voffset = info->picLayerHeader.PAN_SCAN_WINDOW[win].voffset;
+        attrs->vc1.pan_scan_window[win].width   = info->picLayerHeader.PAN_SCAN_WINDOW[win].width;
+        attrs->vc1.pan_scan_window[win].height  = info->picLayerHeader.PAN_SCAN_WINDOW[win].height;
+    }
+} // translate_parser_info_to_frame_attributes
+
+/* Append a zero-initialized VIDDEC_WORKLOAD_VC1_PAST_FRAME reference item
+ * to the workload. */
+static inline void vc1_send_past_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type                   = VIDDEC_WORKLOAD_VC1_PAST_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_pm_append_workitem( parent, &wi, false );
+}
+
+/* Append a zero-initialized VIDDEC_WORKLOAD_VC1_FUTURE_FRAME reference item
+ * to the workload. */
+static inline void vc1_send_future_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type                   = VIDDEC_WORKLOAD_VC1_FUTURE_FRAME;
+    wi.ref_frame.reference_id     = 0;
+    wi.ref_frame.luma_phys_addr   = 0;
+    wi.ref_frame.chroma_phys_addr = 0;
+    viddec_pm_append_workitem( parent, &wi, false );
+}
+
+/* Append a reference-reorder item to the host: reference index 1 is
+ * presented as reference index 0 (future frame gets pushed to past), while
+ * indices 4..7 are left unchanged. */
+static inline void send_reorder_ref_items(void *parent)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type                         = VIDDEC_WORKLOAD_REFERENCE_FRAME_REORDER;
+    wi.ref_reorder.ref_table_offset     = 0;
+    wi.ref_reorder.ref_reorder_00010203 = 0x01010203; //put reference frame index 1 as reference index 0
+    wi.ref_reorder.ref_reorder_04050607 = 0x04050607; // index 4,5,6,7 stay the same
+    viddec_pm_append_workitem( parent, &wi, false );
+} // send_reorder_ref_items
+
+
+/* Append the frame coding modes (FCM) of the past and future reference
+ * frames as a VC1_REGS_REF_FRAME_TYPE register item. */
+static inline void vc1_send_ref_fcm_items(void *parent, uint32_t past_fcm, uint32_t future_fcm)
+{
+    viddec_workload_item_t wi;
+
+    wi.vwi_type       = VIDDEC_WORKLOAD_VC1_REGS_REF_FRAME_TYPE;
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = past_fcm;
+    wi.vwi_payload[2] = future_fcm;
+    viddec_pm_append_workitem( parent, &wi, false );
+}
+
+
+
+/* Pack sequence-layer metadata into the STREAM_FORMAT_1/2 register images
+ * and entry-point metadata into ENTRY_POINT_1, then append them as a single
+ * VC1_REGS_SEQ_ENTRY workload item. */
+static inline void send_SEQ_ENTRY_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t stream_format1 = 0;
+    uint32_t stream_format2 = 0;
+    uint32_t entrypoint1 = 0;
+    viddec_workload_item_t wi;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+
+
+    /* Sequence header fields. */
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, PROFILE, stream_format1, md->PROFILE);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, LEVEL, stream_format1, md->LEVEL);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, CHROMAFORMAT, stream_format1, md->CHROMAFORMAT);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, FRMRTQ, stream_format1, md->FRMRTQ);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, BITRTQ, stream_format1, md->BITRTQ);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_1, POSTPRO, stream_format1, md->POSTPROCFLAG);
+
+
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PULLDOWN, stream_format2, md->PULLDOWN);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, INTERLACE, stream_format2, md->INTERLACE);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, TFCNTRFLAG, stream_format2, md->TFCNTRFLAG);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, FINTERPFLAG, stream_format2, md->FINTERPFLAG);
+    BF_WRITE(VC1_0_SEQPIC_STREAM_FORMAT_2, PSF, stream_format2, md->PSF);
+
+
+    /* Entry-point header fields. */
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, BROKEN_LINK,   entrypoint1, md->BROKEN_LINK);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, CLOSED_ENTRY,  entrypoint1, md->CLOSED_ENTRY);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, PANSCAN_FLAG,  entrypoint1, md->PANSCAN_FLAG);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, REFDIST_FLAG,  entrypoint1, md->REFDIST_FLAG);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, LOOPFILTER,    entrypoint1, md->LOOPFILTER);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, FASTUVMC,      entrypoint1, md->FASTUVMC);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_MV,   entrypoint1, md->EXTENDED_MV);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, DQUANT,        entrypoint1, md->DQUANT);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, VS_TRANSFORM,  entrypoint1, md->VSTRANSFORM);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, OVERLAP,       entrypoint1, md->OVERLAP);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, QUANTIZER,     entrypoint1, md->QUANTIZER);
+    BF_WRITE(VC1_0_SEQPIC_ENTRY_POINT_1, EXTENDED_DMV,  entrypoint1, md->EXTENDED_DMV);
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SEQ_ENTRY;
+
+
+    wi.vwi_payload[0] = stream_format1;
+    wi.vwi_payload[1] = stream_format2;
+    wi.vwi_payload[2] = entrypoint1;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_SEQ_ENTRY_registers
+
+
+/* Pack the coded frame size and — for advanced profile only — the range-map
+ * parameters into register images, then append them as a
+ * VC1_REGS_SIZE_AND_AP_RANGEMAP workload item. */
+static inline void send_SIZE_AND_AP_RANGEMAP_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t coded_size = 0;
+    uint32_t ap_range_map = 0;
+
+    viddec_workload_item_t wi;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+
+    BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, WIDTH, coded_size, md->width);
+    BF_WRITE(VC1_0_SEQPIC_CODED_SIZE, HEIGHT, coded_size, md->height);
+
+
+    /* if range reduction is indicated at seq. layer, populate range reduction registers for the frame*/
+    if (VC1_PROFILE_ADVANCED == md->PROFILE)
+    {
+
+
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, ap_range_map, md->RANGE_MAPY_FLAG);
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y, ap_range_map, md->RANGE_MAPY);
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, ap_range_map, md->RANGE_MAPUV_FLAG);
+        BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV, ap_range_map, md->RANGE_MAPUV);
+
+
+
+
+    }
+    else
+    {
+        /* Range mapping is an advanced-profile feature only. */
+        ap_range_map = 0;
+    }
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SIZE_AND_AP_RANGEMAP;
+
+
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = coded_size;
+    wi.vwi_payload[2] = ap_range_map;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_SIZE_AND_AP_RANGEMAP_registers
+
+
+
+/* Pack FCM/PTYPE into the FRAME_TYPE register image plus an ALT_FRAME_TYPE
+ * variant that also carries PQUANT (forced to 0 for simple/main-profile
+ * skipped frames), then append them as a SLICE_FRAME_TYPE_INFO item. */
+static inline void send_SLICE_FRAME_TYPE_INFO_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t alt_frame_type = 0;
+    uint32_t frame_type = 0;
+
+    vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader);
+    viddec_workload_item_t wi;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+
+    BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, FCM, frame_type, pic->FCM);
+    BF_WRITE(VC1_0_SEQPIC_FRAME_TYPE, PTYPE, frame_type, pic->PTYPE);
+
+    /* ALT_FRAME_TYPE starts as a copy of FRAME_TYPE; PQUANT is added below. */
+    alt_frame_type = frame_type;
+
+    if (VC1_PROFILE_ADVANCED == md->PROFILE)
+    {
+        /* Advanced profile: PQUANT is only meaningful for P and B frames. */
+        if ( (VC1_P_FRAME == pic->PTYPE)||(VC1_B_FRAME == pic->PTYPE) )
+        {
+            BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT);
+        }
+    }
+    else
+    {
+        /* Simple/main profile: skipped frames get PQUANT 0. */
+        if ( VC1_SKIPPED_FRAME== pic->PTYPE)
+        {
+            BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, 0);
+        } else {
+            BF_WRITE(VC1_0_SEQPIC_ALT_FRAME_TYPE, PQUANT, alt_frame_type, pic->PQUANT);
+        }
+    }
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_FRAME_TYPE_INFO;
+
+
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = frame_type;
+    wi.vwi_payload[2] = alt_frame_type;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_SLICE_FRAME_TYPE_INFO_registers
+
+/* Pack the reconstruction, motion-vector and block control register images
+ * for the current picture and append them as a SLICE_CONTROL_INFO workload
+ * item. Also accounts for range reduction relative to the reference frame
+ * and rebases SLICE_ADDR for the second field of a field pair. */
+static inline void send_SLICE_CONTROL_INFO_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t recon_control = 0;
+    uint32_t mv_control = 0;
+    uint32_t blk_control = 0;
+
+    vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader);
+    viddec_workload_item_t wi;
+
+    int is_previous_ref_rr=0;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, RNDCTRL, recon_control, md->RNDCTRL);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UVSAMP, recon_control, pic->UVSAMP);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQUANT, recon_control, pic->PQUANT);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, HALFQP, recon_control, pic->HALFQP);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, UNIFORM_QNT, recon_control, pic->UniformQuant);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, POSTPROC, recon_control, pic->POSTPROC);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, CONDOVER, recon_control, pic->CONDOVER);
+    BF_WRITE( VC1_0_SEQPIC_RECON_CONTROL, PQINDEX_LE8, recon_control, (pic->PQINDEX <= 8));
+
+    /* Get the range reduced status of the previous frame */
+    switch (pic->PTYPE)
+    {
+    case VC1_P_FRAME:
+    {
+        is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].rr_frm;
+        break;
+    }
+    case VC1_B_FRAME:
+    {
+        is_previous_ref_rr = parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].rr_frm;
+        break;
+    }
+    default:
+    {
+        break;
+    }
+    }
+
+    if (pic->RANGEREDFRM)
+    {
+        /* Current frame is range-reduced: scale the reference down
+         * (RANGE_REF_RED_TYPE = 1) only if it was not already reduced. */
+        if (!is_previous_ref_rr)
+        {
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1);
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 1);
+        }
+    }
+    else
+    {
+        /* if current frame is not RR but previous was RR,  scale up the reference frame ( RANGE_REF_RED_TYPE = 0) */
+        if (is_previous_ref_rr)
+        {
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_EN, recon_control, 1);
+            BF_WRITE(VC1_0_SEQPIC_RECON_CONTROL, RANGE_REF_RED_TYPE, recon_control, 0);
+        }
+    } // end for RR upscale
+
+
+
+
+
+    /* Motion-vector control register. MVMODE2 replaces MVMODE when intensity
+     * compensation is signalled. */
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVRANGE,   mv_control, pic->MVRANGE);
+    if ( pic->MVMODE == VC1_MVMODE_INTENSCOMP)
+        BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE,    mv_control, pic->MVMODE2);
+    else
+        BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVMODE,    mv_control, pic->MVMODE);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MVTAB,  mv_control,  pic->MVTAB);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, DMVRANGE,  mv_control, pic->DMVRANGE);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MV4SWITCH, mv_control, pic->MV4SWITCH);
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, MBMODETAB, mv_control, pic->MBMODETAB);
+    /* B field pictures in field-interlace mode always use two references. */
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, NUMREF,    mv_control,
+              pic->NUMREF || ((pic->PTYPE == VC1_B_FRAME) && ( pic->FCM == VC1_FCM_FIELD_INTERLACE )  ));
+    BF_WRITE( VC1_0_SEQPIC_MOTION_VECTOR_CONTROL, REFFIELD,  mv_control, pic->REFFIELD);
+
+
+
+    // BLOCK CONTROL REGISTER Offset 0x2C
+    BF_WRITE( VC1_0_SEQPIC_BLOCK_CONTROL, CBPTAB, blk_control, pic->CBPTAB);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTMFB, blk_control, pic->TTMBF);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, TTFRM, blk_control, pic->TTFRM);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV2BPTAB, blk_control, pic->MV2BPTAB);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, MV4BPTAB, blk_control, pic->MV4BPTAB);
+    /* Second field of a field pair: SLICE_ADDR is frame-relative, so rebase
+     * it to the field by subtracting half the frame's macroblock rows. */
+    if ((pic->CurrField == 1) && (pic->SLICE_ADDR))
+    {
+        int mby = md->height * 2 + 2;
+        mby = (mby + 15 ) / 16; /* round up to whole macroblock rows */
+        pic->SLICE_ADDR -= (mby/2);
+    }
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, INITIAL_MV_Y, blk_control, pic->SLICE_ADDR);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID2, blk_control, md->bp_raw[0]);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID1, blk_control, md->bp_raw[1]);
+    BF_WRITE(VC1_0_SEQPIC_BLOCK_CONTROL, BP_RAW_ID0, blk_control, md->bp_raw[2]);
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_CONTROL_INFO;
+
+
+    wi.vwi_payload[0] = recon_control;
+    wi.vwi_payload[1] = mv_control;
+    wi.vwi_payload[2] = blk_control;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_SLICE_CONTROL_INFO_registers
+
+/* Pack the REFERENCE_B_FRACTION, VOP_DEQUANT and TRANSFORM_DATA register
+ * values for the current picture and emit them as one SLICE_OTHER_INFO workitem. */
+static inline void send_SLICE_OTHER_INFO_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t trans_data = 0;
+    uint32_t vop_dquant = 0;
+    uint32_t ref_bfraction = 0;
+
+    vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader);
+    viddec_workload_item_t wi;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+    BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_DEN, ref_bfraction, pic->BFRACTION_DEN);
+    BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, BFRACTION_NUM, ref_bfraction, pic->BFRACTION_NUM);
+    BF_WRITE(VC1_0_SEQPIC_REFERENCE_B_FRACTION, REFDIST, ref_bfraction, md->REFDIST);
+
+    if (md->DQUANT)  // sequence-level DQUANT set: select the alternate PQUANT source
+    {
+        if (pic->PQDIFF == 7)
+            BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->ABSPQ);
+        else if (pic->DQUANTFRM == 1)
+            BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, PQUANT_ALT, vop_dquant, pic->PQUANT + pic->PQDIFF + 1);
+    }
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQUANTFRM, vop_dquant, pic->DQUANTFRM);
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQPROFILE, vop_dquant, pic->DQPROFILE);
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQES,      vop_dquant, pic->DQSBEDGE);
+    BF_WRITE( VC1_0_SEQPIC_VOP_DEQUANT, DQBILEVEL, vop_dquant, pic->DQBILEVEL);
+
+    BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM,  trans_data, pic->TRANSACFRM);
+    BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSACFRM2, trans_data, pic->TRANSACFRM2);
+    BF_WRITE( VC1_0_SEQPIC_TRANSFORM_DATA, TRANSDCTAB,  trans_data, pic->TRANSDCTAB);
+
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_SLICE_OTHER_INFO;
+
+    // payload order is fixed by the workload consumer: trans_data, vop_dquant, ref_bfraction
+    wi.vwi_payload[0] = trans_data;
+    wi.vwi_payload[1] = vop_dquant;
+    wi.vwi_payload[2] = ref_bfraction;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_SLICE_OTHER_INFO_registers
+
+
+
+/* Pack IMAGE_STRUCTURE, FIELD_REF_FRAME_ID and (simple/main profile) range-map
+ * state and emit them as one STRUCT_FIELD_AND_SMP_RANGEMAP_INFO workitem. */
+static inline void send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t imgstruct = 0;
+    uint32_t fieldref_ctrl_id = 0;
+    uint32_t smp_rangemap = 0;
+
+    vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader);
+    viddec_workload_item_t wi;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+    if ( pic->FCM == VC1_FCM_FIELD_INTERLACE ) {  // 2 = bottom field, 1 = top field
+        BF_WRITE(VC1_0_SEQPIC_IMAGE_STRUCTURE, IMG_STRUC, imgstruct, (pic->BottomField) ? 2 : 1);
+    }
+
+    BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, TOP_FIELD,    fieldref_ctrl_id, pic->BottomField);  // NOTE(review): TOP_FIELD written with BottomField — verify field mapping
+    BF_WRITE( VC1_0_SEQPIC_FIELD_REF_FRAME_ID, SECOND_FIELD, fieldref_ctrl_id, pic->CurrField);
+    if (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME)
+    {
+        BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, 1);
+    }
+    else
+    {
+        BF_WRITE(VC1_0_SEQPIC_FIELD_REF_FRAME_ID, ANCHOR, fieldref_ctrl_id, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[pic->CurrField]);
+    }
+
+    if (VC1_PROFILE_ADVANCED != md->PROFILE)  // range reduction exists only in simple/main profile
+    {
+        if (pic->RANGEREDFRM)
+        {
+            //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_Y_FLAG, smp_rangemap, md->RANGE_MAPY_FLAG);
+            //BF_WRITE( VC1_0_SEQPIC_RANGE_MAP, RANGE_MAP_UV_FLAG, smp_rangemap, md->RANGE_MAPUV_FLAG);
+            smp_rangemap = 0x11;  // set both luma and chroma range-map flags at once
+        }
+
+    }
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO;
+
+    // payload order is fixed by the workload consumer
+    wi.vwi_payload[0] = imgstruct;
+    wi.vwi_payload[1] = fieldref_ctrl_id;
+    wi.vwi_payload[2] = smp_rangemap;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+    return;
+} // send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers
+
+
+/* Build the forward/backward intensity-compensation register values for the
+ * current picture and emit them as INT_COM_FW and INT_COM_BW workitems. */
+static inline void send_INT_COM_registers(void *parent, vc1_viddec_parser_t *parser)
+{
+    uint32_t intcomp_fwd_top = 0;
+    uint32_t intcomp_fwd_bot = 0;
+    uint32_t intcomp_bwd_top = 0;
+    uint32_t intcomp_bwd_bot = 0;
+    uint32_t intcomp_cur = 0;
+
+    uint32_t POS_2nd_INTCOMP = 13;          // bit position of the 2nd field's intcomp value
+    uint32_t MASK_1st_INTCOMP = 0x1fff;     // low 13 bits: 1st field's intcomp value
+    uint32_t MASK_2nd_INTCOMP = 0x3ffe000;  // bits 13..25: 2nd field's intcomp value
+
+    vc1_PictureLayerHeader *pic = &(parser->info.picLayerHeader);
+    viddec_workload_item_t wi;
+
+    vc1_metadata_t *md = &(parser->info.metadata);
+
+
+
+    if (VC1_SKIPPED_FRAME == pic->PTYPE)  // skipped frames carry no intensity compensation
+    {
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top =0;
+        return;
+    }
+
+    if ( VC1_FCM_FIELD_INTERLACE != pic->FCM )
+    {
+        // progressive / frame-interlace: a single intcomp value for the whole frame
+        BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1);
+        BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE);
+        BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT);
+
+        if ( !((pic->MVMODE == VC1_MVMODE_INTENSCOMP) || (pic->INTCOMP)) )
+            intcomp_cur = 0;
+
+        if ( (VC1_BI_FRAME==pic->PTYPE)||(VC1_B_FRAME==pic->PTYPE)  )
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = 0;
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = 0;
+
+            intcomp_bwd_top = parser->intcomp_top[0];
+            intcomp_bwd_bot = parser->intcomp_bot[0];
+            intcomp_fwd_bot = parser->intcomp_bot[1];
+
+            // forward-top source depends on whether a future reference frame exists
+            if ( parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != (-1) )
+            {
+                if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].type)
+                    intcomp_fwd_top = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].intcomp_top;
+            }
+            else
+            {
+                if (VC1_SKIPPED_FRAME != parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type)
+                    intcomp_fwd_top = parser->intcomp_top[1];
+            }
+        }
+        else
+        {  //I,P TYPE
+
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur;
+
+            if (VC1_FCM_FIELD_INTERLACE == parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm)
+            {
+                intcomp_fwd_top = parser->intcomp_top[1];
+                intcomp_fwd_top |= intcomp_cur << POS_2nd_INTCOMP;
+
+                intcomp_fwd_bot = parser->intcomp_bot[1];
+                intcomp_fwd_bot |= intcomp_cur << POS_2nd_INTCOMP;
+            }
+            else
+            {
+                intcomp_fwd_top = intcomp_cur;// << POS_2nd_INTCOMP;
+                intcomp_fwd_bot = 0;
+            }
+        }
+    }
+    else
+    {
+        //FIELD INTERLACE
+        //if(0!=md->INTCOMPFIELD)
+        //No debugging
+
+        if (md->INTCOMPFIELD == VC1_INTCOMP_BOTTOM_FIELD)
+        {
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2);
+        }
+        else
+        {
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_1, intcomp_cur, 1);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_1, intcomp_cur, pic->LUMSCALE);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_1, intcomp_cur, pic->LUMSHIFT);
+        }
+
+        if (md->INTCOMPFIELD == VC1_INTCOMP_BOTH_FIELD)  // BOTH: field-2 values go in on top of field-1's
+        {
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, INT_COMP_2, intcomp_cur, 1);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SCALE_2, intcomp_cur, md->LUMSCALE2);
+            BF_WRITE(VC1_0_SEQPIC_INTENSITY_COMPENSATION, LUMA_SHIFT_2, intcomp_cur, md->LUMSHIFT2);
+        }
+
+        if (pic->MVMODE != VC1_MVMODE_INTENSCOMP)
+        {
+            intcomp_cur = 0;
+        }
+
+        // record current field's intcomp in the slot (top/bot) it belongs to, per TFF
+        if (pic->CurrField == 0)
+        {
+            if (pic->TFF)
+            {
+                parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur;
+            }
+            else
+            {
+                parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur;
+            }
+        }
+        else
+        {
+            if (pic->TFF)
+            {
+                parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_bot = intcomp_cur;
+            }
+            else
+            {
+                parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].intcomp_top = intcomp_cur;
+            }
+        }
+
+        if (pic->CurrField == 1)
+        {  //SECOND FIELD
+
+            if (VC1_B_FRAME != pic->PTYPE)
+            {
+                if (pic->TFF)
+                {
+                    intcomp_bwd_top = intcomp_cur & MASK_1st_INTCOMP;
+
+                    intcomp_fwd_bot = (parser->intcomp_bot[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP;  // NOTE(review): verify this 2nd-field shift-down is intended
+                    intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP);
+
+                    intcomp_fwd_top = parser->intcomp_top[1];
+                }
+                else
+                {
+                    intcomp_bwd_bot= (intcomp_cur & MASK_2nd_INTCOMP)>>POS_2nd_INTCOMP;
+
+                    intcomp_fwd_top = (parser->intcomp_top[1] & MASK_2nd_INTCOMP) >> POS_2nd_INTCOMP;
+                    intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP) << POS_2nd_INTCOMP;
+
+                    intcomp_fwd_bot = parser->intcomp_bot[1];
+                }
+            }
+            else
+            {    //B TYPE
+                intcomp_fwd_top = parser->intcomp_top[1];
+                intcomp_fwd_bot = parser->intcomp_bot[1];
+
+                intcomp_bwd_top = parser->intcomp_top[0];
+                intcomp_bwd_bot = parser->intcomp_bot[0];
+            }
+        }
+        else
+        {  //FIRST FIELD
+
+            if ( (VC1_B_FRAME==pic->PTYPE)||(VC1_BI_FRAME==pic->PTYPE) )
+            {
+                if (VC1_SKIPPED_FRAME!=parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].type)
+                {
+                    intcomp_fwd_top = parser->intcomp_top[1];
+                    intcomp_fwd_bot = parser->intcomp_bot[1];
+                }
+
+                intcomp_bwd_top = parser->intcomp_top[0];
+                intcomp_bwd_bot = parser->intcomp_bot[0];
+
+            }
+            else
+            {  //I,P TYPE
+
+                intcomp_fwd_top = parser->intcomp_top[1] & MASK_1st_INTCOMP;
+                intcomp_fwd_top |= (intcomp_cur&MASK_1st_INTCOMP)<<POS_2nd_INTCOMP;
+
+                intcomp_fwd_bot = parser->intcomp_bot[1] & MASK_1st_INTCOMP;
+                intcomp_fwd_bot |= (intcomp_cur & MASK_2nd_INTCOMP);
+            }   //pic->PTYPE == I,P TYPE
+        }   //pic->CurrField == 0
+    }  //VC1_FCM_FIELD_INTERLACE != pic->FCM
+
+    if ( (VC1_B_FRAME != pic->PTYPE) && (VC1_BI_FRAME != pic->PTYPE) )  // persist only for reference pictures
+    {
+        parser->intcomp_top[1] = intcomp_fwd_top;
+        parser->intcomp_bot[1] = intcomp_fwd_bot;
+
+        parser->intcomp_top[0] = intcomp_bwd_top;
+        parser->intcomp_bot[0] = intcomp_bwd_bot;
+    }
+
+    //OS_INFO("intcomp_fwd_top = %d\n", intcomp_fwd_top);
+    //OS_INFO("intcomp_fwd_bot = %d\n", intcomp_fwd_bot);
+
+    // two workitems: forward then backward intensity-compensation registers
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_FW;
+
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = intcomp_fwd_top;
+    wi.vwi_payload[2] = intcomp_fwd_bot;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+
+    wi.vwi_type = VIDDEC_WORKLOAD_VC1_REGS_INT_COM_BW;
+
+    wi.vwi_payload[0] = 0;
+    wi.vwi_payload[1] = intcomp_bwd_top;
+    wi.vwi_payload[2] = intcomp_bwd_bot;
+
+    viddec_pm_append_workitem( parent, &wi, false );
+
+
+    return;
+} // send_INT_COM_registers
+
+
+/** Emit the start-of-frame workload for a new VC-1 frame: reference reorder
+ *  items, register workitems, the slice bit offset, and the pixel data. */
+void vc1_parse_emit_frame_start(void *parent, vc1_viddec_parser_t *parser)
+{
+    vc1_metadata_t *md = &(parser->info.metadata);
+    viddec_workload_t *wl = viddec_pm_get_header(parent);
+    int frame_type = parser->info.picLayerHeader.PTYPE;
+    int frame_id = 1; // new reference frame is assigned index 1
+
+    /* init */
+    memset(&parser->spr, 0, sizeof(parser->spr));
+    wl->is_reference_frame = 0;
+
+    /* set flag - extra output frame needed for range adjustment (range mapping or range reduction) */
+    if (parser->info.metadata.RANGE_MAPY_FLAG ||
+            parser->info.metadata.RANGE_MAPUV_FLAG ||
+            parser->info.picLayerHeader.RANGEREDFRM)
+    {
+        wl->is_reference_frame |= WORKLOAD_FLAGS_RA_FRAME;
+    }
+
+    LOG_CRIT("vc1_start_new_frame: frame_type=%d \n",frame_type);
+
+    parser->is_reference_picture = ((VC1_B_FRAME != frame_type) && (VC1_BI_FRAME != frame_type));
+
+    /* reference / anchor frames processing
+     * we need to send reorder before reference frames */
+    if (parser->is_reference_picture)
+    {
+        /* one frame has been sent */
+        if (parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].id != -1)
+        {
+            /* there is a frame in the reference buffer, move it to the past */
+            send_reorder_ref_items(parent);
+        }
+    }
+
+    /* send workitems for reference frames */
+    switch ( frame_type )
+    {
+    case VC1_B_FRAME:  // needs both past and future references
+    {
+        vc1_send_past_ref_items(parent);
+        vc1_send_future_ref_items(parent);
+        vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_2].fcm, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm);
+        break;
+    }
+    case VC1_SKIPPED_FRAME:
+    {
+        wl->is_reference_frame |= WORKLOAD_SKIPPED_FRAME;
+        vc1_send_past_ref_items(parent);
+        vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone);
+        break;
+    }
+    case VC1_P_FRAME:  // needs only the past reference
+    {
+        vc1_send_past_ref_items( parent);
+        vc1_send_ref_fcm_items(parent, parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].fcm, vc1_PictureFormatNone);
+        break;
+    }
+    default:
+        break;
+    }
+
+    /* reference / anchor frames from previous code
+     * we may need it for frame reduction */
+    if (parser->is_reference_picture)
+    {
+        wl->is_reference_frame |= WORKLOAD_REFERENCE_FRAME | (frame_id & WORKLOAD_REFERENCE_FRAME_BMASK);
+
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].id      = frame_id;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].fcm     = parser->info.picLayerHeader.FCM;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0]  = (parser->info.picLayerHeader.PTYPE == VC1_I_FRAME);
+        if (parser->info.picLayerHeader.FCM == VC1_FCM_FIELD_INTERLACE)
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = (parser->info.picLayerHeader.PTypeField2 == VC1_I_FRAME);
+        }
+        else
+        {
+            parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].anchor[0];
+        }
+
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].type = parser->info.picLayerHeader.PTYPE;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_en = md->RANGERED;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].rr_frm = parser->info.picLayerHeader.RANGEREDFRM;
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_0].tff = parser->info.picLayerHeader.TFF;
+
+        LOG_CRIT("anchor[0] = %d, anchor[1] = %d",
+                 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[0],
+                 parser->ref_frame[VC1_REF_FRAME_T_MINUS_1].anchor[1] );
+    }
+
+    if ( parser->info.picLayerHeader.PTYPE == VC1_SKIPPED_FRAME )  // skipped frame: attributes only, no registers/pixels
+    {
+        translate_parser_info_to_frame_attributes( parent, parser );
+        return;
+    }
+
+    translate_parser_info_to_frame_attributes( parent, parser );
+
+    // full register set for a decodable frame
+    send_SEQ_ENTRY_registers(parent, parser);
+    send_SIZE_AND_AP_RANGEMAP_registers(parent, parser);
+    send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+    send_SLICE_CONTROL_INFO_registers(parent, parser);
+    send_SLICE_OTHER_INFO_registers(parent, parser);
+    send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+    send_INT_COM_registers(parent, parser);
+
+    {
+        viddec_workload_item_t wi;
+        uint32_t bit, byte;
+        uint8_t is_emul = 0;
+
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+        // Send current bit offset and current slice
+        wi.vwi_type          = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Special Case1----[is_emul = 1]:
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts are offset 8 in this bit sequence.
+
+        // Special Case 2----[is_emul = 2]:
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte.
+        //
+
+        wi.vwi_payload[0]    = bit + (is_emul*8) ;
+        wi.vwi_payload[1]    = 0xdeaddead;
+        wi.vwi_payload[2]    = 0xdeaddead;
+        viddec_pm_append_workitem( parent, &wi, false );
+    }
+
+
+    viddec_pm_append_pixeldata( parent );
+
+    return;
+} // vc1_parse_emit_frame_start
+/* Emit the start-of-second-field workload: per-field register workitems,
+ * the slice bit offset, and the pixel data (sequence/size registers are not re-sent). */
+void vc1_parse_emit_second_field_start(void *parent, vc1_viddec_parser_t *parser)
+{
+
+    send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+    send_SLICE_CONTROL_INFO_registers(parent, parser);
+    send_SLICE_OTHER_INFO_registers(parent, parser);
+    send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+    send_INT_COM_registers(parent, parser);
+
+    {
+        viddec_workload_item_t wi;
+        uint32_t bit, byte;
+        uint8_t is_emul = 0;
+
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+
+        // Send current bit offset and current slice
+        wi.vwi_type          = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Special Case1----[is_emul = 1]:
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts are offset 8 in this bit sequence.
+
+        // Special Case 2----[is_emul = 2]:
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte.
+        //
+
+
+        wi.vwi_payload[0]	 = bit + (is_emul*8);
+        wi.vwi_payload[1]	 = 0xdeaddead;
+        wi.vwi_payload[2]	 = 0xdeaddead;
+        viddec_pm_append_workitem( parent, &wi, false );
+    }
+
+    viddec_pm_append_pixeldata( parent );
+
+    return;
+
+}
+/* Emit the workload for an additional slice of the current picture:
+ * slice-level register workitems, the slice bit offset, and the pixel data. */
+void vc1_parse_emit_current_slice(void *parent, vc1_viddec_parser_t *parser)
+{
+    send_SLICE_FRAME_TYPE_INFO_registers(parent, parser);
+    send_SLICE_CONTROL_INFO_registers(parent, parser);
+    send_SLICE_OTHER_INFO_registers(parent, parser);
+    //send_STRUCT_FIELD_AND_SMP_RANGEMAP_INFO_registers(parent, parser);
+    //send_INT_COM_registers(parent, parser);
+
+    {
+        viddec_workload_item_t wi;
+        uint32_t bit, byte;
+        uint8_t is_emul = 0;
+
+        viddec_pm_get_au_pos(parent, &bit, &byte, &is_emul);
+
+        // Send current bit offset and current slice
+        wi.vwi_type          = VIDDEC_WORKLOAD_VC1_BITOFFSET;
+
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Special Case1----[is_emul = 1]:
+        // Eg: 00 00 03 01 - slice data starts at the second byte of 0s, we still feed the data
+        // to the decoder starting at the first byte of 0s so that the decoder can detect the
+        // emulation prevention. But the actual data starts are offset 8 in this bit sequence.
+
+        // Special Case 2----[is_emul = 2]:
+        // If slice data starts in the middle of the emulation prevention sequence -
+        // Eg: [00 00] 03 00 - slice data starts at the third byte (0x03), we need readout this byte.
+        //
+
+        wi.vwi_payload[0]    = bit + (is_emul*8);
+        wi.vwi_payload[1]    = 0xdeaddead;
+        wi.vwi_payload[2]    = 0xdeaddead;
+        viddec_pm_append_workitem( parent, &wi, false );
+    }
+
+    viddec_pm_append_pixeldata( parent );
+
+    return;
+}
+
+/* End-of-frame bookkeeping: age the reference list by one frame. */
+void vc1_end_frame(vc1_viddec_parser_t *parser)
+{
+    /* update status of reference frames: T-1 becomes T-2, current becomes T-1 */
+    if (parser->is_reference_picture)
+    {
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_2] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_1];
+        parser->ref_frame[VC1_REF_FRAME_T_MINUS_1] = parser->ref_frame[VC1_REF_FRAME_T_MINUS_0];
+    }
+
+    return;
+} // vc1_end_frame
+
diff --git a/mixvbp/vbp_plugin/vp8/Android.mk b/mixvbp/vbp_plugin/vp8/Android.mk
new file mode 100755
index 0000000..03de2cf
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/Android.mk
@@ -0,0 +1,24 @@
+LOCAL_PATH:= $(call my-dir)
+# Builds libmixvbp_vp8, the VP8 bitstream parser plugin for libmixvbp.
+include $(CLEAR_VARS)
+
+LOCAL_SRC_FILES := \
+        vp8parse.c \
+        bool_coder.c \
+        viddec_vp8_parse.c
+
+LOCAL_CFLAGS := -DVBP -DHOST_ONLY
+
+LOCAL_C_INCLUDES := \
+        $(MIXVBP_DIR)/include \
+        $(MIXVBP_DIR)/vbp_manager/include \
+        $(LOCAL_PATH)/include
+
+LOCAL_MODULE_TAGS := optional
+LOCAL_MODULE := libmixvbp_vp8
+
+LOCAL_SHARED_LIBRARIES := \
+        libmixvbp \
+        liblog
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/mixvbp/vbp_plugin/vp8/bool_coder.c b/mixvbp/vbp_plugin/vp8/bool_coder.c
new file mode 100755
index 0000000..746d63e
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/bool_coder.c
@@ -0,0 +1,95 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include "bool_coder.h"
+/* Read `bits` bits MSB-first from the boolean decoder (each bit decoded with probability 128). */
+uint32_t vp8_read_bits(BOOL_CODER *br, int32_t bits)
+{
+    uint32_t z = 0;
+    int bit;
+    for (bit=bits-1; bit>=0; bit--)
+    {
+        z |= (vp8_decode_bool(br, 128)<<bit);
+    }
+    return z;
+}
+/* Initialize the boolean decoder over `source`; preloads 4 bytes unchecked, so the caller must supply at least 4 readable bytes. */
+void vp8_start_decode(BOOL_CODER *br, uint8_t *source)
+{
+    br->range    = 255;
+    br->count    = 8;
+    br->buffer   = source;
+    br->pos      = 0;
+    br->value    = (br->buffer[0]<<24)+(br->buffer[1]<<16)+(br->buffer[2]<<8)+(br->buffer[3]);
+    br->pos     += 4;
+}
+/* Decode one boolean with the given probability (0..255); renormalizes range/value, refilling one byte at a time. */
+int32_t vp8_decode_bool(BOOL_CODER *br, int32_t probability)
+{
+    uint32_t bit=0;
+    uint32_t split;
+    uint32_t bigsplit;
+    uint32_t count = br->count;
+    uint32_t range = br->range;
+    uint32_t value = br->value;
+
+    split = 1 +  (((range-1) * probability) >> 8);
+    bigsplit = (split<<24);  // split aligned to the top byte of `value`
+
+    range = split;
+    if(value >= bigsplit)
+    {
+        range = br->range-split;
+        value = value-bigsplit;
+        bit = 1;
+    }
+
+    if(range>=0x80)  // still normalized: no refill needed
+    {
+        br->value = value;
+        br->range = range;
+        return bit;
+    }
+    else
+    {
+        do
+        {
+            range +=range;
+            value +=value;
+
+            if (!--count)  // current byte exhausted: pull the next one
+            {
+                count = 8;
+                value |= br->buffer[br->pos];  // NOTE(review): no bounds check against the buffer end — verify callers bound the read
+                br->pos++;
+            }
+        }
+        while(range < 0x80 );
+    }
+    br->count = count;
+    br->value = value;
+    br->range = range;
+    return bit;
+}
diff --git a/mixvbp/vbp_plugin/vp8/include/bool_coder.h b/mixvbp/vbp_plugin/vp8/include/bool_coder.h
new file mode 100755
index 0000000..57660b7
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/include/bool_coder.h
@@ -0,0 +1,54 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+/* Boolean (arithmetic) decoder state and entry points for the VP8 parser. */
+#ifndef _BOOL_CODER_H_
+#define _BOOL_CODER_H_
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <stddef.h>
+/* NOTE(review): uint32_t/int32_t/uint8_t are used below without <stdint.h>; confirm it is pulled in transitively on all toolchains. */
+typedef struct _BOOL_CODER
+{
+    uint32_t  range; // always identical to encoder's range
+    uint32_t  value; // contains at least 24 significant bits
+    int32_t   count; // bits left before the next byte refill (set to 8 at init and on refill in vp8_decode_bool)
+    uint32_t  pos;   // index of the next unread byte in `buffer`
+    uint8_t   *buffer; // pointer to next compressed data byte to be read
+} BOOL_CODER;
+
+typedef struct _BITREADER
+{
+    int32_t        bitsinremainder; // # of bits still used in remainder
+    uint32_t       remainder;       // remaining bits from original long
+    const uint8_t *position;        // character pointer position within data
+} BITREADER;
+
+void vp8_start_decode(BOOL_CODER *br, uint8_t *source);
+int32_t vp8_decode_bool(BOOL_CODER *br, int32_t probability);
+uint32_t vp8_read_bits(BOOL_CODER *br, int32_t bits);
+
+#endif
diff --git a/mixvbp/vbp_plugin/vp8/include/vp8.h b/mixvbp/vbp_plugin/vp8/include/vp8.h
new file mode 100755
index 0000000..06a7e61
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/include/vp8.h
@@ -0,0 +1,356 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef _VP8_H_
+#define _VP8_H_
+#include "bool_coder.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* VP8 specifies only frame is supported */
+#define VP8_MAX_NUM_PICTURES    1
+/* VP8 has no definition of slice */
+#define VP8_MAX_NUM_SLICES      1
+
+#define MAX_MB_SEGMENTS         4
+#define MB_FEATURE_TREE_PROBS   3
+#define MAX_REF_LF_DELTAS       4
+#define MAX_MODE_LF_DELTAS      4
+#define MAX_PARTITIONS          9
+#define BLOCK_TYPES             4
+#define COEF_BANDS              8
+#define PREV_COEF_CONTEXTS      3
+#define MAX_COEF_TOKENS         12
+#define MAX_ENTROPY_TOKENS      12
+#define SEGMENT_DELTADATA       0
+#define SEGMENT_ABSDATA         1
+#define MAX_LOOP_FILTER         63
+#define MAX_QINDEX              127
+
+    typedef uint8_t vp8_prob;
+
+    typedef enum
+    {
+        /*!\brief Operation completed without error */
+        VP8_NO_ERROR,
+
+        /*!\brief Unspecified error */
+        VP8_UNKNOWN_ERROR,
+
+        /*!\brief Memory operation failed */
+        VP8_MEMORY_ERROR,
+
+        VP8_NO_INITIALIZATION,
+
+        VP8_CORRUPT_FRAME,
+
+        VP8_UNSUPPORTED_BITSTREAM,
+
+        VP8_UNSUPPORTED_VERSION,
+
+        VP8_INVALID_FRAME_SYNC_CODE,
+
+        VP8_UNEXPECTED_END_OF_BITSTREAM,
+
+    } vp8_Status;
+
+    enum
+    {
+        VP8_MV_max  = 1023,                   /* max absolute value of a MV component */
+        VP8_MV_vals = (2 * VP8_MV_max) + 1,   /* # possible values "" */
+
+        VP8_MV_long_width = 10,       /* Large MVs have 9 bit magnitudes */
+        VP8_MV_num_short = 8,         /* magnitudes 0 through 7 */
+
+        /* probability offsets for coding each MV component */
+        VP8_MV_pis_short = 0,        /* short (<= 7) vs long (>= 8) */
+        VP8_MV_Psign,                /* sign for non-zero */
+        VP8_MV_Pshort,               /* 8 short values = 7-position tree */
+
+        VP8_MV_Pbits = VP8_MV_Pshort + VP8_MV_num_short - 1, /* mvlong_width long value bits */
+        VP8_MV_Pcount = VP8_MV_Pbits + VP8_MV_long_width     /* (with independent probabilities) */
+    };
+
+    typedef enum
+    {
+        DC_PRED,            // average of above and left pixels
+        V_PRED,             // vertical prediction
+        H_PRED,             // horizontal prediction
+        TM_PRED,            // Truemotion prediction
+        B_PRED,             // block based prediction, each block has its own prediction mode
+        NEARESTMV,
+        NEARMV,
+        ZEROMV,
+        NEWMV,
+        SPLITMV,
+        MB_MODE_COUNT
+    } VP8_MB_PREDICTION_MODE;
+
+// Segment Feature Masks
+#define VP8_SEGMENT_ALTQ    0x01
+#define VP8_SEGMENT_ALT_LF  0x02
+
+#define VP8_YMODES  (B_PRED + 1)
+#define VP8_UV_MODES (TM_PRED + 1)
+
+#define VP8_MVREFS (1 + SPLITMV - NEARESTMV)
+
+    typedef enum
+    {
+        B_DC_PRED,          // average of above and left pixels
+        B_TM_PRED,
+
+        B_VE_PRED,           // vertical prediction
+        B_HE_PRED,           // horizontal prediction
+
+        B_LD_PRED,
+        B_RD_PRED,
+
+        B_VR_PRED,
+        B_VL_PRED,
+        B_HD_PRED,
+        B_HU_PRED,
+
+        LEFT4X4,
+        ABOVE4X4,
+        ZERO4X4,
+        NEW4X4,
+
+        B_MODE_COUNT
+    } VP8_B_PREDICTION_MODE;
+
+#define VP8_BINTRAMODES (B_HU_PRED + 1)  /* 10 */
+#define VP8_SUBMVREFS (1 + NEW4X4 - LEFT4X4)
+
+// frame type
+    typedef enum
+    {
+        KEY_FRAME = 0,
+        INTER_FRAME,
+        SKIPPED_FRAME
+    } FRAME_TYPE;
+
+
+// Color Space
+    typedef enum
+    {
+        REG_YUV = 0,    /* Regular yuv */
+        INT_YUV = 1     /* The type of yuv that can be transferred to and from RGB through integer transform */
+    } YUV_TYPE;
+
+// Clamp type
+    typedef enum
+    {
+        RECON_CLAMP_REQUIRED        = 0,
+        RECON_CLAMP_NOTREQUIRED     = 1
+    } CLAMP_TYPE;
+
+    /* Token partition */
+    typedef enum
+    {
+        ONE_PARTITION  = 0,
+        TWO_PARTITION  = 1,
+        FOUR_PARTITION = 2,
+        EIGHT_PARTITION = 3
+    } TOKEN_PARTITION;
+
+// Buffer copied
+    typedef enum
+    {
+        BufferCopied_NoneToGolden   = 0,
+        BufferCopied_LastToGolden   = 1,
+        BufferCopied_AltRefToGolden = 2
+    } GoldenBufferCopiedType;
+
+    typedef enum
+    {
+        BufferCopied_NoneToAltref   = 0,
+        BufferCopied_LastToAltRef   = 1,
+        BufferCopied_GoldenToAltRef = 2
+    } AltRefBufferCopiedType;
+
+// Macroblock level features
+    typedef enum
+    {
+        MB_LVL_ALT_Q = 0,   /* Use alternate Quantizer .... */
+        MB_LVL_ALT_LF = 1,  /* Use alternate loop filter value... */
+        MB_LVL_MAX = 2      /* Number of MB level features supported */
+    } MB_LVL_FEATURES;
+
+// Loop filter Type
+    typedef enum
+    {
+        NORMAL_LOOPFILTER = 0,
+        SIMPLE_LOOPFILTER = 1
+    } LoopFilterType;
+
+// Segmentation data
+    typedef struct
+    {
+        uint8_t              Enabled;
+        uint8_t              UpdateMap;
+        uint8_t              UpdateData;
+        uint8_t              AbsDelta;
+        int8_t               FeatureData[MB_LVL_MAX][MAX_MB_SEGMENTS];
+        vp8_prob             TreeProbs[MB_FEATURE_TREE_PROBS];
+    } SegmentationData;
+
+// Loop filter data
+    typedef struct
+    {
+        LoopFilterType       Type;
+        uint8_t              Level;
+        uint8_t              Sharpness;
+        uint8_t              DeltaEnabled;
+        uint8_t              DeltaUpdate;
+        int8_t               DeltasRef[MAX_REF_LF_DELTAS];
+        int8_t               DeltasMode[MAX_MODE_LF_DELTAS];
+    } LoopFilterData;
+
+// Quantization data
+    typedef struct
+    {
+        int8_t               Y1_AC;
+        int8_t               Y1_DC_Delta;
+        int8_t               Y2_DC_Delta;
+        int8_t               Y2_AC_Delta;
+        int8_t               UV_DC_Delta;
+        int8_t               UV_AC_Delta;
+    } QuantizationData;
+
+// Frame context
+    typedef struct
+    {
+        vp8_prob            B_Mode_Prob[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1];
+        vp8_prob            Y_Mode_Prob [VP8_YMODES-1];   /* interframe intra mode probs */
+        vp8_prob            UV_Mode_Prob [VP8_UV_MODES-1];
+        vp8_prob            DCT_Coefficients [BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1];
+        vp8_prob            MVContext[2][VP8_MV_Pcount];
+        vp8_prob            Pre_MVContext[2][VP8_MV_Pcount];  // kept so MV costs need not be recalculated when the MV context is unchanged
+    } FrameContextData;
+
+// Extern to tables
+    extern const vp8_prob    VP8_Coefficient_Default_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1];
+    extern const vp8_prob    VP8_Coefficient_Update_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1];
+    extern const int         VP8_MB_FeatureDataBits[MB_LVL_MAX];
+    extern const vp8_prob    VP8_BMode_Const[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1];
+    extern const vp8_prob    VP8_YMode_Const[VP8_YMODES-1];
+    extern const vp8_prob    VP8_UVMode_Const[VP8_UV_MODES-1];
+    extern const vp8_prob    VP8_MV_UpdateProbs[2][VP8_MV_Pcount], VP8_MV_DefaultMVContext[2][VP8_MV_Pcount];
+
+    typedef struct
+    {
+        FRAME_TYPE            frame_type;      // key frame vs. inter frame, from the frame tag
+        uint8_t               version;         // bitstream version number (0-3 in VP8)
+        uint8_t               show_frame;      // nonzero if this frame is meant to be displayed
+        uint32_t              first_part_size; // size in bytes of the first data partition
+    } FrameTagHeader;
+
+    typedef struct _vp8_Info
+    {
+        // Frame Tag Header
+        FrameTagHeader         frame_tag;
+
+        // Key Frame data
+        uint32_t               width;
+        uint32_t               height;
+        uint32_t               horiz_scale;
+        uint32_t               vert_scale;
+        YUV_TYPE               clr_type;
+        CLAMP_TYPE             clamp_type;
+
+        vp8_prob               prob_intra;
+        vp8_prob               prob_lf;
+        vp8_prob               prob_gf;
+
+        uint8_t                y_prob_valid;
+        uint8_t                c_prob_valid;
+
+        uint32_t               header_bits;
+        uint32_t               frame_data_offset;
+
+        uint8_t                *source;
+        uint32_t               source_sz;
+
+        // Decoded picture number
+        uint32_t               decoded_frame_number;
+
+        BOOL_CODER             bool_coder;
+
+        // Refresh flags
+        uint8_t                refresh_lf;
+
+        uint8_t                refresh_gf;
+        uint8_t                refresh_af;
+        uint8_t                sign_bias_golden;
+        uint8_t                sign_bias_alternate;
+
+        GoldenBufferCopiedType golden_copied;
+        AltRefBufferCopiedType altref_copied;
+
+        // Segmentation data
+        SegmentationData       Segmentation;
+
+        // Loop filter data
+        LoopFilterData         LoopFilter;
+
+        // Partitions
+        uint8_t                partition_count;
+        uint8_t                partition_number;
+        uint32_t               partition_size[1<<EIGHT_PARTITION];
+
+        // Quantization
+        QuantizationData       Quantization;
+
+        // Refresh entropy
+        uint8_t                refresh_entropy;
+        // Refresh entropy for the loop filter -- TODO confirm (comment was duplicated upstream)
+        uint8_t                refresh_entropy_lf;
+
+        // Macroblock No Coeff Skip
+        uint8_t                mb_no_coeff_skip;
+        vp8_prob               prob_skip_false;
+        vp8_prob               mb_skip_coeff;
+
+        // Frame context
+        FrameContextData       FrameContext;
+        // The same context exists in the reference decoder;
+        // the RefreshEntropy flag controls whether it is stored/restored.
+        FrameContextData       LastFrameContext;
+    } vp8_Info;
+
+    typedef struct _vp8_viddec_parser
+    {
+        int got_start;
+
+        vp8_Info info;
+    } vp8_viddec_parser;
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/mixvbp/vbp_plugin/vp8/include/vp8_tables.h b/mixvbp/vbp_plugin/vp8/include/vp8_tables.h
new file mode 100755
index 0000000..6980834
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/include/vp8_tables.h
@@ -0,0 +1,538 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#ifndef _VP8_TABLES_H_
+#define _VP8_TABLES_H_
+
+#include "vp8.h"
+const int VP8_MB_FeatureDataBits[MB_LVL_MAX] = {7, 6};
+
+const vp8_prob VP8_BMode_Const[VP8_BINTRAMODES][VP8_BINTRAMODES][VP8_BINTRAMODES-1] =
+{
+    {
+        { 231, 120, 48, 89, 115, 113, 120, 152, 112},
+        { 152, 179, 64, 126, 170, 118, 46, 70, 95},
+        { 175, 69, 143, 80, 85, 82, 72, 155, 103},
+        { 56, 58, 10, 171, 218, 189, 17, 13, 152},
+        { 144, 71, 10, 38, 171, 213, 144, 34, 26},
+        { 114, 26, 17, 163, 44, 195, 21, 10, 173},
+        { 121, 24, 80, 195, 26, 62, 44, 64, 85},
+        { 170, 46, 55, 19, 136, 160, 33, 206, 71},
+        { 63, 20, 8, 114, 114, 208, 12, 9, 226},
+        { 81, 40, 11, 96, 182, 84, 29, 16, 36}
+    },
+    {
+        { 134, 183, 89, 137, 98, 101, 106, 165, 148},
+        { 72, 187, 100, 130, 157, 111, 32, 75, 80},
+        { 66, 102, 167, 99, 74, 62, 40, 234, 128},
+        { 41, 53, 9, 178, 241, 141, 26, 8, 107},
+        { 104, 79, 12, 27, 217, 255, 87, 17, 7},
+        { 74, 43, 26, 146, 73, 166, 49, 23, 157},
+        { 65, 38, 105, 160, 51, 52, 31, 115, 128},
+        { 87, 68, 71, 44, 114, 51, 15, 186, 23},
+        { 47, 41, 14, 110, 182, 183, 21, 17, 194},
+        { 66, 45, 25, 102, 197, 189, 23, 18, 22}
+    },
+    {
+        { 88, 88, 147, 150, 42, 46, 45, 196, 205},
+        { 43, 97, 183, 117, 85, 38, 35, 179, 61},
+        { 39, 53, 200, 87, 26, 21, 43, 232, 171},
+        { 56, 34, 51, 104, 114, 102, 29, 93, 77},
+        { 107, 54, 32, 26, 51, 1, 81, 43, 31},
+        { 39, 28, 85, 171, 58, 165, 90, 98, 64},
+        { 34, 22, 116, 206, 23, 34, 43, 166, 73},
+        { 68, 25, 106, 22, 64, 171, 36, 225, 114},
+        { 34, 19, 21, 102, 132, 188, 16, 76, 124},
+        { 62, 18, 78, 95, 85, 57, 50, 48, 51}
+    },
+    {
+        { 193, 101, 35, 159, 215, 111, 89, 46, 111},
+        { 60, 148, 31, 172, 219, 228, 21, 18, 111},
+        { 112, 113, 77, 85, 179, 255, 38, 120, 114},
+        { 40, 42, 1, 196, 245, 209, 10, 25, 109},
+        { 100, 80, 8, 43, 154, 1, 51, 26, 71},
+        { 88, 43, 29, 140, 166, 213, 37, 43, 154},
+        { 61, 63, 30, 155, 67, 45, 68, 1, 209},
+        { 142, 78, 78, 16, 255, 128, 34, 197, 171},
+        { 41, 40, 5, 102, 211, 183, 4, 1, 221},
+        { 51, 50, 17, 168, 209, 192, 23, 25, 82}
+    },
+    {
+        { 125, 98, 42, 88, 104, 85, 117, 175, 82},
+        { 95, 84, 53, 89, 128, 100, 113, 101, 45},
+        { 75, 79, 123, 47, 51, 128, 81, 171, 1},
+        { 57, 17, 5, 71, 102, 57, 53, 41, 49},
+        { 115, 21, 2, 10, 102, 255, 166, 23, 6},
+        { 38, 33, 13, 121, 57, 73, 26, 1, 85},
+        { 41, 10, 67, 138, 77, 110, 90, 47, 114},
+        { 101, 29, 16, 10, 85, 128, 101, 196, 26},
+        { 57, 18, 10, 102, 102, 213, 34, 20, 43},
+        { 117, 20, 15, 36, 163, 128, 68, 1, 26}
+    },
+    {
+        { 138, 31, 36, 171, 27, 166, 38, 44, 229},
+        { 67, 87, 58, 169, 82, 115, 26, 59, 179},
+        { 63, 59, 90, 180, 59, 166, 93, 73, 154},
+        { 40, 40, 21, 116, 143, 209, 34, 39, 175},
+        { 57, 46, 22, 24, 128, 1, 54, 17, 37},
+        { 47, 15, 16, 183, 34, 223, 49, 45, 183},
+        { 46, 17, 33, 183, 6, 98, 15, 32, 183},
+        { 65, 32, 73, 115, 28, 128, 23, 128, 205},
+        { 40, 3, 9, 115, 51, 192, 18, 6, 223},
+        { 87, 37, 9, 115, 59, 77, 64, 21, 47}
+    },
+    {
+        { 104, 55, 44, 218, 9, 54, 53, 130, 226},
+        { 64, 90, 70, 205, 40, 41, 23, 26, 57},
+        { 54, 57, 112, 184, 5, 41, 38, 166, 213},
+        { 30, 34, 26, 133, 152, 116, 10, 32, 134},
+        { 75, 32, 12, 51, 192, 255, 160, 43, 51},
+        { 39, 19, 53, 221, 26, 114, 32, 73, 255},
+        { 31, 9, 65, 234, 2, 15, 1, 118, 73},
+        { 88, 31, 35, 67, 102, 85, 55, 186, 85},
+        { 56, 21, 23, 111, 59, 205, 45, 37, 192},
+        { 55, 38, 70, 124, 73, 102, 1, 34, 98}
+    },
+    {
+        { 102, 61, 71, 37, 34, 53, 31, 243, 192},
+        { 69, 60, 71, 38, 73, 119, 28, 222, 37},
+        { 68, 45, 128, 34, 1, 47, 11, 245, 171},
+        { 62, 17, 19, 70, 146, 85, 55, 62, 70},
+        { 75, 15, 9, 9, 64, 255, 184, 119, 16},
+        { 37, 43, 37, 154, 100, 163, 85, 160, 1},
+        { 63, 9, 92, 136, 28, 64, 32, 201, 85},
+        { 86, 6, 28, 5, 64, 255, 25, 248, 1},
+        { 56, 8, 17, 132, 137, 255, 55, 116, 128},
+        { 58, 15, 20, 82, 135, 57, 26, 121, 40}
+    },
+    {
+        { 164, 50, 31, 137, 154, 133, 25, 35, 218},
+        { 51, 103, 44, 131, 131, 123, 31, 6, 158},
+        { 86, 40, 64, 135, 148, 224, 45, 183, 128},
+        { 22, 26, 17, 131, 240, 154, 14, 1, 209},
+        { 83, 12, 13, 54, 192, 255, 68, 47, 28},
+        { 45, 16, 21, 91, 64, 222, 7, 1, 197},
+        { 56, 21, 39, 155, 60, 138, 23, 102, 213},
+        { 85, 26, 85, 85, 128, 128, 32, 146, 171},
+        { 18, 11, 7, 63, 144, 171, 4, 4, 246},
+        { 35, 27, 10, 146, 174, 171, 12, 26, 128}
+    },
+    {
+        { 190, 80, 35, 99, 180, 80, 126, 54, 45},
+        { 85, 126, 47, 87, 176, 51, 41, 20, 32},
+        { 101, 75, 128, 139, 118, 146, 116, 128, 85},
+        { 56, 41, 15, 176, 236, 85, 37, 9, 62},
+        { 146, 36, 19, 30, 171, 255, 97, 27, 20},
+        { 71, 30, 17, 119, 118, 255, 17, 18, 138},
+        { 101, 38, 60, 138, 55, 70, 43, 26, 142},
+        { 138, 45, 61, 62, 219, 1, 81, 188, 64},
+        { 32, 41, 20, 117, 151, 142, 20, 21, 163},
+        { 112, 19, 12, 61, 195, 128, 48, 4, 24}
+    }
+};
+
+const vp8_prob VP8_Coefficient_Update_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1] =
+{
+    {
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {176, 246, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {223, 241, 252, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {249, 253, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 244, 252, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {234, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 246, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {239, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 248, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {251, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {251, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 254, 253, 255, 254, 255, 255, 255, 255, 255, 255, },
+            {250, 255, 254, 255, 254, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+    },
+    {
+        {
+            {217, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {225, 252, 241, 253, 255, 255, 254, 255, 255, 255, 255, },
+            {234, 250, 241, 250, 253, 255, 253, 254, 255, 255, 255, },
+        },
+        {
+            {255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {223, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {238, 253, 254, 254, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 248, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {249, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 253, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {247, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {252, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {253, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 254, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+    },
+    {
+        {
+            {186, 251, 250, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {234, 251, 244, 254, 255, 255, 255, 255, 255, 255, 255, },
+            {251, 251, 243, 253, 254, 255, 254, 255, 255, 255, 255, },
+        },
+        {
+            {255, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {236, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {251, 253, 253, 254, 254, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 254, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+    },
+    {
+        {
+            {248, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {250, 254, 252, 254, 255, 255, 255, 255, 255, 255, 255, },
+            {248, 254, 249, 253, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 253, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {246, 253, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {252, 254, 251, 254, 254, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 254, 252, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {248, 254, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {253, 255, 254, 254, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 251, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {245, 251, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {253, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 251, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {252, 253, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 254, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 252, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {249, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 254, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 253, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {250, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+        {
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {254, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+            {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, },
+        },
+    },
+};
+
+const vp8_prob VP8_MV_UpdateProbs[2][VP8_MV_Pcount] =
+{
+    {
+            237,
+            246,
+            253, 253, 254, 254, 254, 254, 254,
+            254, 254, 254, 254, 254, 250, 250, 252, 254, 254
+    },
+    {
+            231,
+            243,
+            245, 253, 254, 254, 254, 254, 254,
+            254, 254, 254, 254, 254, 251, 251, 254, 254, 254
+    }
+};
+
+const vp8_prob VP8_MV_DefaultMVContext[2][VP8_MV_Pcount] =
+{
+    {
+            // row
+            162,                                        // is short
+            128,                                        // sign
+            225, 146, 172, 147, 214,  39, 156,          // short tree
+            128, 129, 132,  75, 145, 178, 206, 239, 254, 254 // long bits
+    },
+    {
+            // same for column
+            164,                                        // is short
+            128,
+            204, 170, 119, 235, 140, 230, 228,
+            128, 130, 130,  74, 148, 180, 203, 236, 254, 254 // long bits
+    }
+};
+
+const vp8_prob VP8_Coefficient_Default_Probabilites[BLOCK_TYPES] [COEF_BANDS] [PREV_COEF_CONTEXTS] [MAX_COEF_TOKENS-1] =
+{
+    {
+        {
+            { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128}
+        },
+        {
+            { 253, 136, 254, 255, 228, 219, 128, 128, 128, 128, 128},
+            { 189, 129, 242, 255, 227, 213, 255, 219, 128, 128, 128},
+            { 106, 126, 227, 252, 214, 209, 255, 255, 128, 128, 128}
+        },
+        {
+            { 1, 98, 248, 255, 236, 226, 255, 255, 128, 128, 128},
+            { 181, 133, 238, 254, 221, 234, 255, 154, 128, 128, 128},
+            { 78, 134, 202, 247, 198, 180, 255, 219, 128, 128, 128}
+        },
+        {
+            { 1, 185, 249, 255, 243, 255, 128, 128, 128, 128, 128},
+            { 184, 150, 247, 255, 236, 224, 128, 128, 128, 128, 128},
+            { 77, 110, 216, 255, 236, 230, 128, 128, 128, 128, 128}
+        },
+        {
+            { 1, 101, 251, 255, 241, 255, 128, 128, 128, 128, 128},
+            { 170, 139, 241, 252, 236, 209, 255, 255, 128, 128, 128},
+            { 37, 116, 196, 243, 228, 255, 255, 255, 128, 128, 128}
+        },
+        {
+            { 1, 204, 254, 255, 245, 255, 128, 128, 128, 128, 128},
+            { 207, 160, 250, 255, 238, 128, 128, 128, 128, 128, 128},
+            { 102, 103, 231, 255, 211, 171, 128, 128, 128, 128, 128}
+        },
+        {
+            { 1, 152, 252, 255, 240, 255, 128, 128, 128, 128, 128},
+            { 177, 135, 243, 255, 234, 225, 128, 128, 128, 128, 128},
+            { 80, 129, 211, 255, 194, 224, 128, 128, 128, 128, 128}
+        },
+        {
+            { 1, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 246, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 255, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128}
+        }
+    },
+    {
+        {
+            { 198, 35, 237, 223, 193, 187, 162, 160, 145, 155, 62},
+            { 131, 45, 198, 221, 172, 176, 220, 157, 252, 221, 1},
+            { 68, 47, 146, 208, 149, 167, 221, 162, 255, 223, 128}
+        },
+        {
+            { 1, 149, 241, 255, 221, 224, 255, 255, 128, 128, 128},
+            { 184, 141, 234, 253, 222, 220, 255, 199, 128, 128, 128},
+            { 81, 99, 181, 242, 176, 190, 249, 202, 255, 255, 128}
+        },
+        {
+            { 1, 129, 232, 253, 214, 197, 242, 196, 255, 255, 128},
+            { 99, 121, 210, 250, 201, 198, 255, 202, 128, 128, 128},
+            { 23, 91, 163, 242, 170, 187, 247, 210, 255, 255, 128}
+        },
+        {
+            { 1, 200, 246, 255, 234, 255, 128, 128, 128, 128, 128},
+            { 109, 178, 241, 255, 231, 245, 255, 255, 128, 128, 128},
+            { 44, 130, 201, 253, 205, 192, 255, 255, 128, 128, 128}
+        },
+        {
+            { 1, 132, 239, 251, 219, 209, 255, 165, 128, 128, 128},
+            { 94, 136, 225, 251, 218, 190, 255, 255, 128, 128, 128},
+            { 22, 100, 174, 245, 186, 161, 255, 199, 128, 128, 128}
+        },
+        {
+            { 1, 182, 249, 255, 232, 235, 128, 128, 128, 128, 128},
+            { 124, 143, 241, 255, 227, 234, 128, 128, 128, 128, 128},
+            { 35, 77, 181, 251, 193, 211, 255, 205, 128, 128, 128}
+        },
+        {
+            { 1, 157, 247, 255, 236, 231, 255, 255, 128, 128, 128},
+            { 121, 141, 235, 255, 225, 227, 255, 255, 128, 128, 128},
+            { 45, 99, 188, 251, 195, 217, 255, 224, 128, 128, 128}
+        },
+        {
+            { 1, 1, 251, 255, 213, 255, 128, 128, 128, 128, 128},
+            { 203, 1, 248, 255, 255, 128, 128, 128, 128, 128, 128},
+            { 137, 1, 177, 255, 224, 255, 128, 128, 128, 128, 128}
+        }
+    },
+    {
+        {
+            { 253, 9, 248, 251, 207, 208, 255, 192, 128, 128, 128},
+            { 175, 13, 224, 243, 193, 185, 249, 198, 255, 255, 128},
+            { 73, 17, 171, 221, 161, 179, 236, 167, 255, 234, 128}
+        },
+        {
+            { 1, 95, 247, 253, 212, 183, 255, 255, 128, 128, 128},
+            { 239, 90, 244, 250, 211, 209, 255, 255, 128, 128, 128},
+            { 155, 77, 195, 248, 188, 195, 255, 255, 128, 128, 128}
+        },
+        {
+            { 1, 24, 239, 251, 218, 219, 255, 205, 128, 128, 128},
+            { 201, 51, 219, 255, 196, 186, 128, 128, 128, 128, 128},
+            { 69, 46, 190, 239, 201, 218, 255, 228, 128, 128, 128}
+        },
+        {
+            { 1, 191, 251, 255, 255, 128, 128, 128, 128, 128, 128},
+            { 223, 165, 249, 255, 213, 255, 128, 128, 128, 128, 128},
+            { 141, 124, 248, 255, 255, 128, 128, 128, 128, 128, 128}
+        },
+        {
+            { 1, 16, 248, 255, 255, 128, 128, 128, 128, 128, 128},
+            { 190, 36, 230, 255, 236, 255, 128, 128, 128, 128, 128},
+            { 149, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128}
+        },
+        {
+            { 1, 226, 255, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 247, 192, 255, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 240, 128, 255, 128, 128, 128, 128, 128, 128, 128, 128}
+        },
+        {
+            { 1, 134, 252, 255, 255, 128, 128, 128, 128, 128, 128},
+            { 213, 62, 250, 255, 255, 128, 128, 128, 128, 128, 128},
+            { 55, 93, 255, 128, 128, 128, 128, 128, 128, 128, 128}
+        },
+        {
+            { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128}
+        }
+    },
+    {
+        {
+            { 202, 24, 213, 235, 186, 191, 220, 160, 240, 175, 255},
+            { 126, 38, 182, 232, 169, 184, 228, 174, 255, 187, 128},
+            { 61, 46, 138, 219, 151, 178, 240, 170, 255, 216, 128}
+        },
+        {
+            { 1, 112, 230, 250, 199, 191, 247, 159, 255, 255, 128},
+            { 166, 109, 228, 252, 211, 215, 255, 174, 128, 128, 128},
+            { 39, 77, 162, 232, 172, 180, 245, 178, 255, 255, 128}
+        },
+        {
+            { 1, 52, 220, 246, 198, 199, 249, 220, 255, 255, 128},
+            { 124, 74, 191, 243, 183, 193, 250, 221, 255, 255, 128},
+            { 24, 71, 130, 219, 154, 170, 243, 182, 255, 255, 128}
+        },
+        {
+            { 1, 182, 225, 249, 219, 240, 255, 224, 128, 128, 128},
+            { 149, 150, 226, 252, 216, 205, 255, 171, 128, 128, 128},
+            { 28, 108, 170, 242, 183, 194, 254, 223, 255, 255, 128}
+        },
+        {
+            { 1, 81, 230, 252, 204, 203, 255, 192, 128, 128, 128},
+            { 123, 102, 209, 247, 188, 196, 255, 233, 128, 128, 128},
+            { 20, 95, 153, 243, 164, 173, 255, 203, 128, 128, 128}
+        },
+        {
+            { 1, 222, 248, 255, 216, 213, 128, 128, 128, 128, 128},
+            { 168, 175, 246, 252, 235, 205, 255, 255, 128, 128, 128},
+            { 47, 116, 215, 255, 211, 212, 255, 255, 128, 128, 128}
+        },
+        {
+            { 1, 121, 236, 253, 212, 214, 255, 255, 128, 128, 128},
+            { 141, 84, 213, 252, 201, 202, 255, 219, 128, 128, 128},
+            { 42, 80, 160, 240, 162, 185, 255, 205, 128, 128, 128}
+        },
+        {
+            { 1, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 244, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128},
+            { 238, 1, 255, 128, 128, 128, 128, 128, 128, 128, 128}
+        }
+    }
+};
+
+const vp8_prob VP8_YMode_Const[VP8_YMODES-1] = {112,86,140,37};
+
+const vp8_prob VP8_UVMode_Const[VP8_UV_MODES-1] = {162, 101,204};
+
+#endif
diff --git a/mixvbp/vbp_plugin/vp8/include/vp8parse.h b/mixvbp/vbp_plugin/vp8/include/vp8parse.h
new file mode 100755
index 0000000..a5c9c13
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/include/vp8parse.h
@@ -0,0 +1,72 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+/* NOTE(review): guard name _VP8PARSE_H_ (leading underscore + capital) is a
+ * reserved identifier in C; consider renaming to VP8PARSE_H_. */
+#ifndef _VP8PARSE_H_
+#define _VP8PARSE_H_
+
+#include "vp8.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Internal API of the VP8 frame-header parser (vp8parse.c).  All functions
+ * operate on the vp8_Info context embedded in vp8_viddec_parser (vp8.h). */
+
+/* Decode the 3-byte uncompressed frame tag; returns a vp8_Status code. */
+int32_t vp8_parse_frame_tag(FrameTagHeader *frame_tag, uint8_t *data, uint32_t data_sz);
+
+//vp8_Status vp8_translate_parse_status(vp8_Status status);
+
+/* Reset the parser context to its initial state. */
+void vp8_init_Info(vp8_Info *pi);
+
+/* Per-frame (re)initialization of probabilities and refresh flags. */
+void vp8_init_frame(vp8_Info *pi);
+
+/* Frame-header field parsers, called in order by vp8_parse_frame_header. */
+void vp8_parse_segmentation_adjustments_data(vp8_Info *pi);
+
+void vp8_parse_loop_filter_type_level(vp8_Info *pi);
+
+void vp8_parse_loop_filter_adjustments_data(vp8_Info *pi);
+
+/* Read a 24-bit little-endian partition size from 3 bytes at cx_size. */
+int32_t vp8_read_partition_size(uint8_t *cx_size);
+
+void vp8_parse_token_partition_data(vp8_Info *pi, uint8_t *cx_size);
+
+/* Read an optional signed quantizer delta from the bool coder. */
+int read_q_delta(BOOL_CODER *bool_coder);
+
+void vp8_parse_dequantization_indices(vp8_Info *pi);
+
+void vp8_parse_gf_af_refresh_flags(vp8_Info *pi);
+
+void vp8_parse_coef_probs_tree(vp8_Info *pi);
+
+void vp8_parse_mb_mv_info(vp8_Info *pi);
+
+void vp8_parse_yuv_probs_update(vp8_Info *pi);
+
+void vp8_parse_remaining_frame_header_data(vp8_Info *pi);
+
+/* Top-level entry: parse the whole frame header; returns a vp8_Status. */
+int32_t vp8_parse_frame_header(vp8_viddec_parser *parser);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c
new file mode 100755
index 0000000..03726c6
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/viddec_vp8_parse.c
@@ -0,0 +1,119 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include "viddec_parser_ops.h"
+
+#include "viddec_fw_workload.h"
+#include "viddec_pm.h"
+
+#include <utils/Log.h>
+#include "vp8.h"
+#include "vp8parse.h"
+
+/* Init function which can be called to initialize the local context on open,
+ * flush and preserve.  With preserve == 0 the whole context is reset; with
+ * preserve != 0 only the frame counter and entropy/loop-filter refresh flag
+ * are re-armed, keeping the rest of the parsed state. */
+void viddec_vp8_init(void *ctxt, uint32_t *persist_mem, uint32_t preserve)
+{
+    vp8_viddec_parser* parser = ctxt;
+    vp8_Info *pi = &(parser->info);
+
+    /* Avoid compiler warning */
+    persist_mem = persist_mem;
+
+    if (!preserve)
+    {
+        /* Init frame header information */
+        vp8_init_Info(pi);
+    }
+    else
+    {
+       /* Initialise the parser */
+       pi->decoded_frame_number = 0;
+       pi->refresh_entropy_lf = 1;
+    }
+
+    /* Mark the parser usable; checked by viddec_vp8_parse/is_frame_start. */
+    parser->got_start = 1;
+    return;
+}
+
+/* Parse entry point: runs the VP8 frame-header parser over the current
+ * access unit held in the parser-manager cubby.  Returns a vp8_Status
+ * value (VP8_NO_ERROR on success, including the empty/skipped case). */
+uint32_t viddec_vp8_parse(void *parent, void *ctxt)
+{
+    vp8_Status status = VP8_NO_ERROR;
+
+    vp8_viddec_parser *parser = (vp8_viddec_parser*)ctxt;
+    /* viddec_vp8_init() must have been called first */
+    if (1 != parser->got_start) return VP8_NO_INITIALIZATION;
+
+    vp8_Info *pi = &(parser->info);
+    viddec_pm_cxt_t *pm_cxt = (viddec_pm_cxt_t *)parent;
+    pi->source = pm_cxt->parse_cubby.buf;
+    pi->source_sz = pm_cxt->parse_cubby.size;
+
+    if (pi->source_sz < 0)
+    {
+        /* NOTE(review): if vp8_Info.source_sz is an unsigned type this
+         * branch is unreachable -- confirm its declaration in vp8.h. */
+        return VP8_UNEXPECTED_END_OF_BITSTREAM;
+    }
+    else if (pi->source_sz == 0)
+    {
+        /* Empty cubby: treat as a skipped frame, not an error. */
+        pi->frame_tag.frame_type = SKIPPED_FRAME;
+        status = VP8_NO_ERROR;
+    }
+    else if (pi->source_sz > 0)
+    {
+        status = vp8_parse_frame_header(parser);
+    }
+
+    return status;
+}
+
+/* Workload-done hook required by viddec_parser_ops_t; this parser does not
+ * use the workload mechanism, so it is a no-op that always returns 0. */
+uint32_t viddec_vp8_wkld_done(void *parent, void *ctxt, unsigned int next_sc,
+                              uint32_t *codec_specific_errors)
+{
+    return 0;
+}
+
+/* Report the context memory this parser needs; no persistent memory is used. */
+void viddec_vp8_get_context_size(viddec_parser_memory_sizes_t *size)
+{
+    /* Should return size of my structure */
+    size->context_size = sizeof(vp8_viddec_parser);
+    size->persist_size = 0;
+    return;
+}
+
+/* Frame-start query: non-zero once viddec_vp8_init() has run (got_start). */
+uint32_t viddec_vp8_is_frame_start(void *ctxt)
+{
+    vp8_viddec_parser* parser = ctxt;
+
+    return parser->got_start;
+}
+
+/* Fill in the parser-ops vtable with this plugin's entry points. */
+void viddec_vp8_get_ops(viddec_parser_ops_t *ops)
+{
+    ops->init = viddec_vp8_init;
+
+    ops->parse_syntax = viddec_vp8_parse;
+    ops->get_cxt_size = viddec_vp8_get_context_size;
+    ops->is_wkld_done = viddec_vp8_wkld_done;
+    ops->is_frame_start = viddec_vp8_is_frame_start;
+    return;
+}
diff --git a/mixvbp/vbp_plugin/vp8/vp8parse.c b/mixvbp/vbp_plugin/vp8/vp8parse.c
new file mode 100755
index 0000000..4f15736
--- /dev/null
+++ b/mixvbp/vbp_plugin/vp8/vp8parse.c
@@ -0,0 +1,605 @@
+/* INTEL CONFIDENTIAL
+* Copyright (c) 2012 Intel Corporation.  All rights reserved.
+* Copyright (c) Imagination Technologies Limited, UK
+*
+* The source code contained or described herein and all documents
+* related to the source code ("Material") are owned by Intel
+* Corporation or its suppliers or licensors.  Title to the
+* Material remains with Intel Corporation or its suppliers and
+* licensors.  The Material contains trade secrets and proprietary
+* and confidential information of Intel or its suppliers and
+* licensors. The Material is protected by worldwide copyright and
+* trade secret laws and treaty provisions.  No part of the Material
+* may be used, copied, reproduced, modified, published, uploaded,
+* posted, transmitted, distributed, or disclosed in any way without
+* Intel's prior express written permission.
+*
+* No license under any patent, copyright, trade secret or other
+* intellectual property right is granted to or conferred upon you
+* by disclosure or delivery of the Materials, either expressly, by
+* implication, inducement, estoppel or otherwise. Any license
+* under such intellectual property rights must be express and
+* approved by Intel in writing.
+*
+*/
+
+#include "vp8_tables.h"
+#include "vp8parse.h"
+
+/* VP8 key-frame start (sync) code: the 3 bytes that follow the frame tag. */
+static const uint8_t kVp8SyncCodeByte[] = {0x9d, 0x01, 0x2a};
+
+/* Reset the whole parser context to zero, then set the fields whose
+ * initial value must be non-zero. */
+void vp8_init_Info(vp8_Info *pi)
+{
+    memset(pi, 0, sizeof(vp8_Info));
+
+    /* Initialise the parser */
+    pi->decoded_frame_number = 0;
+    /* Start with "entropy context is fresh" so the first frame header does
+     * not try to restore a saved context (see vp8_parse_frame_header). */
+    pi->refresh_entropy_lf = 1;
+}
+
+/* Decode the 3-byte uncompressed frame tag that starts every VP8 frame:
+ * bit 0 frame type (0 = key frame), bits 1-3 bitstream version,
+ * bit 4 show-frame flag, bits 5-23 size of the first partition in bytes.
+ * Returns VP8_CORRUPT_FRAME if fewer than 3 bytes are available. */
+int32_t vp8_parse_frame_tag(FrameTagHeader *frame_tag, uint8_t *data, uint32_t data_sz)
+{
+    if (data_sz < 3)
+    {
+        return VP8_CORRUPT_FRAME;
+    }
+
+    /* 1-bit frame type */
+    frame_tag->frame_type = (FRAME_TYPE)(data[0] & 1);
+
+    /* 3-bit version number */
+    frame_tag->version = (data[0] >> 1) & 7;
+    if (frame_tag->version > 3)
+    {
+        return VP8_UNSUPPORTED_VERSION ;
+    }
+
+    /* 1-bit show frame flag */
+    frame_tag->show_frame = (data[0] >> 4) & 1;
+
+    /* 19-bit field containing the size of the first data partition in bytes */
+    frame_tag->first_part_size = (data[0] | (data[1] << 8) | (data[2] << 16)) >> 5;
+
+    return VP8_NO_ERROR;
+}
+
+/* Per-frame initialization performed before bool-decoding the header.
+ * Key frames reset all probability tables, segmentation feature data and
+ * loop-filter deltas to their defaults and implicitly refresh the
+ * golden/altref buffers; inter frames only clear the refresh flags. */
+void vp8_init_frame(vp8_Info *pi)
+{
+    pi->golden_copied = BufferCopied_NoneToGolden;
+    pi->altref_copied = BufferCopied_NoneToAltref;
+
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        /* Various keyframe initializations */
+        /* vp8_prob data initialization */
+        memcpy(pi->FrameContext.B_Mode_Prob, VP8_BMode_Const, sizeof(VP8_BMode_Const));
+        memcpy(pi->FrameContext.Y_Mode_Prob, VP8_YMode_Const, sizeof(VP8_YMode_Const));
+        memcpy(pi->FrameContext.UV_Mode_Prob, VP8_UVMode_Const, sizeof(VP8_UVMode_Const));
+        memcpy(pi->FrameContext.MVContext, VP8_MV_DefaultMVContext, sizeof(VP8_MV_DefaultMVContext));
+        memcpy(pi->FrameContext.DCT_Coefficients, VP8_Coefficient_Default_Probabilites, sizeof(VP8_Coefficient_Default_Probabilites));
+
+        /* reset the segment feature data to 0 with delta coding (Default state)*/
+        memset(pi->Segmentation.FeatureData, 0, sizeof(pi->Segmentation.FeatureData));
+        pi->Segmentation.AbsDelta = SEGMENT_DELTADATA;
+
+        /* reset the mode/ref deltas for the loop filter */
+        memset(pi->LoopFilter.DeltasRef, 0, sizeof(pi->LoopFilter.DeltasRef));
+        memset(pi->LoopFilter.DeltasMode, 0, sizeof(pi->LoopFilter.DeltasMode));
+
+        /* All buffers are implicitly updated on key frames */
+        pi->refresh_gf = 1;
+        pi->refresh_af = 1;
+
+        pi->sign_bias_golden = 0;
+        pi->sign_bias_alternate = 0;
+    }
+    else if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        /* Inter frames signal refresh explicitly (vp8_parse_gf_af_refresh_flags) */
+        pi->refresh_gf = 0;
+        pi->refresh_af = 0;
+    }
+}
+
+/* This function provides vp8_prob and value information for implementing
+ * segment adaptive adjustments to default decoder behaviors.
+ * The data parsed here applies to the entire frame. The adjustments can be
+ * quantization level or loop filter strength.
+ * */
+void vp8_parse_segmentation_adjustments_data(vp8_Info *pi)
+{
+    int i,j;
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    const int *const mb_feature_data_bits = VP8_MB_FeatureDataBits;
+
+    /* Is segmentation enabled */
+    pi->Segmentation.Enabled = (uint8_t)vp8_decode_bool(bc, 128); //chapter 9.2 - macroblock uses segments ?  1: 0
+
+    if(pi->Segmentation.Enabled )
+    {
+        /* Signal whether or not the segmentation map is being explicitly updated this frame */
+        pi->Segmentation.UpdateMap = (uint8_t)vp8_decode_bool(bc, 128);
+        pi->Segmentation.UpdateData = (uint8_t)vp8_decode_bool(bc, 128);
+
+        if (pi->Segmentation.UpdateData)
+        {
+            /* Absolute vs delta interpretation of the feature data below */
+            pi->Segmentation.AbsDelta = (uint8_t)vp8_decode_bool(bc, 128);
+
+            memset(pi->Segmentation.FeatureData, 0, sizeof(pi->Segmentation.FeatureData));
+
+            /* For each segmentation feature (Quant and loop filter level) */
+            for (i = 0; i < MB_LVL_MAX; ++i)
+            {
+                for (j = 0; j < MAX_MB_SEGMENTS; ++j)
+                {
+                    /* Frame level data */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        /* Parse magnitude (feature-specific bit width) */
+                        pi->Segmentation.FeatureData[i][j] = (int8_t) vp8_read_bits(bc, mb_feature_data_bits[i]) ;
+
+                        /* Parse sign data */
+                        if (vp8_decode_bool(bc, 128))
+                        {
+                            pi->Segmentation.FeatureData[i][j] = -pi->Segmentation.FeatureData[i][j];
+                        }
+                    }
+                    else
+                    {
+                        pi->Segmentation.FeatureData[i][j] = 0;
+                    }
+                }
+            }
+
+        }
+
+        if (pi->Segmentation.UpdateMap)
+        {
+            /* Which macro block level features are enabled */
+            memset(pi->Segmentation.TreeProbs, 255, sizeof(pi->Segmentation.TreeProbs));
+
+            /* Read the probs used to decode the segment id for each macro block */
+            for (i = 0; i < MB_FEATURE_TREE_PROBS; ++i)
+            {
+                /* If not explicitly set value is defaulted to 255 by memset above */
+                if (vp8_decode_bool(bc, 128))
+                {
+                    pi->Segmentation.TreeProbs[i] = (uint8_t)vp8_read_bits(bc, 8);
+                }
+            }
+        }
+    }
+}
+
+/* VP8 supports two types of loop filter. The data parsed from the header
+ * here drives the selection of the type, strength and sharpness behavior
+ * of the loop filter used for the current frame.
+ */
+void vp8_parse_loop_filter_type_level(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read the loop filter level and type: 1-bit type, 6-bit level, 3-bit sharpness */
+    pi->LoopFilter.Type = (LoopFilterType)vp8_decode_bool(bc, 128);
+    pi->LoopFilter.Level = (uint8_t)vp8_read_bits(bc, 6);
+    pi->LoopFilter.Sharpness = (uint8_t)vp8_read_bits(bc, 3);
+}
+
+/* This function provides flag and value information for implementing
+ * per-macroblock loop filter level adjustments to default decoder
+ * behaviors. Data parsed here applies to the entire frame.
+ */
+void vp8_parse_loop_filter_adjustments_data(vp8_Info *pi)
+{
+    int i;
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read in loop filter deltas applied at the MB level based on mode or ref frame */
+    pi->LoopFilter.DeltaUpdate = 0;
+    pi->LoopFilter.DeltaEnabled =  (uint8_t)vp8_decode_bool(bc, 128);
+
+    if (pi->LoopFilter.DeltaEnabled)
+    {
+        /* Do the deltas need to be updated */
+        pi->LoopFilter.DeltaUpdate = (uint8_t)vp8_decode_bool(bc, 128);
+
+        if (pi->LoopFilter.DeltaUpdate)
+        {
+            /* Update based on reference */
+            for (i = 0; i < MAX_REF_LF_DELTAS; ++i)
+            {
+                if (vp8_decode_bool(bc, 128))
+                {
+                    /* 6-bit magnitude */
+                    pi->LoopFilter.DeltasRef[i] = (int8_t)vp8_read_bits(bc, 6);
+
+                    /* Parse sign */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        pi->LoopFilter.DeltasRef[i] = -1 * pi->LoopFilter.DeltasRef[i];
+                    }
+                }
+            }
+
+            /* Update based on macroblock mode */
+            for (i = 0; i < MAX_MODE_LF_DELTAS; ++i)
+            {
+                if (vp8_decode_bool(bc, 128))
+                {
+                    /* 6-bit magnitude */
+                    pi->LoopFilter.DeltasMode[i] = (int8_t)vp8_read_bits(bc, 6);
+
+                    /* Parse sign */
+                    if (vp8_decode_bool(bc, 128))
+                    {
+                        pi->LoopFilter.DeltasMode[i] = -1 * pi->LoopFilter.DeltasMode[i];
+                    }
+                }
+            } /* End for (i = 0; i < MAX_MODE_LF_DELTAS; ++i) */
+        } /* End if (pi->LoopFilter.DeltaUpdate) */
+    }
+}
+
+/* Token partition and partition data offsets.
+ * Reads the 2-bit partition-count field, then the 3-byte size of each
+ * partition except the last, whose size is derived from the buffer end.
+ * NOTE(review): the 3-byte size reads at cx_size and the implied last-size
+ * subtraction are not bounds-checked against source_end; a corrupt
+ * first_part_size could cause an out-of-bounds read or a negative last
+ * partition size -- TODO confirm the caller guarantees enough data. */
+void vp8_parse_token_partition_data(vp8_Info *pi, uint8_t *cx_size)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+    uint8_t *partition = NULL;
+    uint8_t *source_end = pi->source + pi->source_sz;
+    uint32_t partition_size = 0, i = 0;
+    uint8_t *partition_size_ptr = NULL;
+
+    /* Parse number of token partitions to use (1, 2, 4 or 8) */
+    pi->partition_count = 1 << (uint8_t)vp8_read_bits(bc, 2);
+
+    /* Set up pointers to the first partition */
+    partition = cx_size;
+    if (pi->partition_count > 1)
+    {
+        /* Each partition offset is written in 3 bytes */
+        partition += 3 * (pi->partition_count - 1);
+    }
+
+    for (i = 0; i < pi->partition_count; i++)
+    {
+        partition_size_ptr = cx_size + i * 3;
+
+        if (i < pi->partition_count - 1)
+        {
+            pi->partition_size[i] = vp8_read_partition_size(partition_size_ptr);
+        }
+        else
+        {
+            /* Last offset can be calculated implicitly */
+            pi->partition_size[i] = source_end - partition;
+        }
+
+        partition += pi->partition_size[i];
+    }
+}
+
+/* Read a 24-bit little-endian partition size from the 3 bytes at cx_size. */
+int32_t vp8_read_partition_size(uint8_t  *cx_size)
+{
+    uint32_t size = cx_size[0] + (cx_size[1] << 8) + (cx_size[2] << 16);
+
+    return size;
+}
+
+/* Read an optional signed quantizer delta: a presence flag, then a 4-bit
+ * magnitude and a sign bit.  Returns 0 when the delta is absent. */
+int read_q_delta(BOOL_CODER   *bool_coder)
+{
+    int q_delta = 0;
+
+    /* presence flag */
+    if (vp8_decode_bool(bool_coder, 128))
+    {
+        /* magnitude */
+        q_delta = (uint8_t)vp8_read_bits(bool_coder, 4) ;
+
+        /* sign */
+        if (vp8_decode_bool(bool_coder, 128))
+        {
+            q_delta = -q_delta;
+        }
+    }
+
+    return q_delta;
+}
+
+/* Read the default quantizers: a 7-bit Y1 AC baseline index followed by
+ * five optional signed deltas relative to it (see read_q_delta). */
+void vp8_parse_dequantization_indices(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* AC 1st order Q = default as a baseline for other 5 items */
+    pi->Quantization.Y1_AC       = (int8_t)vp8_read_bits(bc, 7);
+    pi->Quantization.Y1_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.Y2_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.Y2_AC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.UV_DC_Delta = (int8_t)read_q_delta(bc);
+    pi->Quantization.UV_AC_Delta = (int8_t)read_q_delta(bc);
+}
+
+
+/* Determine if the golden frame or ARF buffer should be updated and how.
+ * For all non key frames the GF and ARF refresh flags and sign bias
+ * flags must be set explicitly.
+ */
+void vp8_parse_gf_af_refresh_flags(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read Golden and AltRef frame refresh */
+    pi->refresh_gf = (uint8_t)vp8_decode_bool(bc, 128);
+    pi->refresh_af = (uint8_t)vp8_decode_bool(bc, 128);
+
+    /* If not refreshed using the current reconstructed frame */
+    if (0 == pi->refresh_gf)
+    {
+        /* 2 bit indicating which buffer is copied to golden frame */
+        pi->golden_copied = (GoldenBufferCopiedType)(int8_t)vp8_read_bits(bc, 2);
+    }
+    else
+    {
+        /* No buffer is copied */
+        pi->golden_copied = (GoldenBufferCopiedType)0;
+    }
+
+    if (0 == pi->refresh_af)
+    {
+        /* 2 bit indicating which buffer is copied to alternative frame */
+        pi->altref_copied = (AltRefBufferCopiedType)vp8_read_bits(bc, 2);
+    }
+    else
+    {
+        pi->altref_copied = (AltRefBufferCopiedType)0;
+    }
+
+    /* Sign bias used for motion-vector prediction from GF/ARF */
+    pi->sign_bias_golden = (uint8_t)vp8_decode_bool(bc, 128);
+    pi->sign_bias_alternate = (uint8_t)vp8_decode_bool(bc, 128);
+
+}
+
+/* Parse conditional updates to the DCT coefficient probability tables:
+ * for every node, an update flag (coded with the per-node update
+ * probability) followed by an 8-bit replacement probability. */
+void vp8_parse_coef_probs_tree(vp8_Info *pi)
+{
+    int i, j, k, l;
+
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* DCT coefficient probability tree update */
+    for (i = 0; i < BLOCK_TYPES; i++)
+    {
+        for (j = 0; j < COEF_BANDS; j++)
+        {
+            for (k = 0; k < PREV_COEF_CONTEXTS; k++)
+            {
+                for (l = 0; l < MAX_COEF_TOKENS - 1; l++)
+                {
+                    if (vp8_decode_bool(bc, VP8_Coefficient_Update_Probabilites[i][j][k][l]))
+                    {
+                        pi->FrameContext.DCT_Coefficients[i][j][k][l] = (vp8_prob)vp8_read_bits(bc, 8);
+                    }
+                }
+            }
+        }
+    }
+}
+
+/* Parse updates to the motion-vector probability contexts (inter frames
+ * only): for each of the 2 MV components, each context probability may be
+ * replaced by a 7-bit value (0 is encoded as 1 to keep probs non-zero). */
+void vp8_parse_mb_mv_info(vp8_Info *pi)
+{
+    // read_mvcontexts
+    int i = 0;
+
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    do
+    {
+        const vp8_prob *up = VP8_MV_UpdateProbs[i];
+        vp8_prob *p = pi->FrameContext.MVContext[i];
+        vp8_prob *const pstop = p + VP8_MV_Pcount;
+
+        do
+        {
+            if (vp8_decode_bool(bc , *up++ ))
+            {
+                const vp8_prob x = (vp8_prob)vp8_read_bits(bc, 7);
+
+                /* scale to 8 bits; map 0 to 1 so the prob stays valid */
+                *p = x ? x << 1 : 1;
+            }
+        }
+        while (++p < pstop);
+    }
+    while (++i < 2);
+}
+
+/* Parse the intra/last/golden prediction probabilities and the optional
+ * Y and UV mode probability updates (inter frames only). */
+void vp8_parse_yuv_probs_update(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* Read probabilities */
+    pi->prob_intra = (vp8_prob)vp8_read_bits(bc, 8);
+    pi->prob_lf = (vp8_prob)vp8_read_bits(bc, 8);
+    pi->prob_gf = (vp8_prob)vp8_read_bits(bc, 8);
+
+    /* Optional replacement of the 4 Y mode probabilities */
+    pi->y_prob_valid = (uint8_t)vp8_decode_bool(bc , 128);
+    if (1 == pi->y_prob_valid)
+    {
+        pi->FrameContext.Y_Mode_Prob[0] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.Y_Mode_Prob[1] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.Y_Mode_Prob[2] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.Y_Mode_Prob[3] = (vp8_prob)vp8_read_bits(bc, 8);
+    }
+
+    /* Optional replacement of the 3 UV mode probabilities */
+    pi->c_prob_valid = (uint8_t)vp8_decode_bool(bc , 128);
+    if (1 == pi->c_prob_valid)
+    {
+        pi->FrameContext.UV_Mode_Prob[0] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.UV_Mode_Prob[1] = (vp8_prob)vp8_read_bits(bc, 8);
+        pi->FrameContext.UV_Mode_Prob[2] = (vp8_prob)vp8_read_bits(bc, 8);
+    }
+}
+
+
+/* Parse the trailing frame-header fields: the mb_no_coeff_skip flag (plus
+ * its probability when enabled), and for inter frames the YUV prediction
+ * probabilities and motion-vector context updates. */
+void vp8_parse_remaining_frame_header_data(vp8_Info *pi)
+{
+    BOOL_CODER *bc = &(pi->bool_coder);
+
+    /* MB no coefficients skip */
+    pi->mb_no_coeff_skip = (uint8_t)vp8_decode_bool(bc, 128);
+
+    if (1 == pi->mb_no_coeff_skip)
+    {
+        pi->prob_skip_false = (vp8_prob)vp8_read_bits(bc, 8);
+    }
+    else
+    {
+        pi->mb_skip_coeff = 0;
+    }
+
+    if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        vp8_parse_yuv_probs_update(pi);
+
+        /* Read motion vector info */
+        vp8_parse_mb_mv_info(pi);
+    }
+
+}
+
+/* Legacy status-translation helper kept for reference only; this block is
+ * compiled out (its prototype in vp8parse.h is commented out too).
+ * Candidate for deletion in a follow-up cleanup. */
+#if 0
+vp8_Status vp8_translate_parse_status(vp8_Status status)
+{
+    switch (status)
+    {
+    case VP8_UNSUPPORTED_VERSION:
+        LOGE("Parser returned VP8_UNSUPPORTED_VERSION");
+       return VP8_UNSUPPORTED_VERSION;
+    case VP8_UNSUPPORTED_BITSTREAM:
+        LOGE("Parser returned VP8_UNSUPPORTED_BITSTREAM");
+        return VP8_UNSUPPORTED_BITSTREAM;
+    case VP8_INVALID_FRAME_SYNC_CODE:
+        LOGE("Parser returned VP8_INVALID_FRAME_SYNC_CODE");
+        return VP8_INVALID_FRAME_SYNC_CODE;
+    case VP8_UNEXPECTED_END_OF_BITSTREAM:
+        LOGE("Parser returned VP8_UNEXPECTED_END_OF_BITSTREAM");
+        return VP8_UNEXPECTED_END_OF_BITSTREAM;
+    default:
+        LOGE("Parser returned VP8_UNKNOWN_ERROR");
+        return VP8_UNKNOWN_ERROR;
+    }
+}
+#endif
+
+/* Parse the VP8 frame header from pi->source / pi->source_sz.
+ *
+ * On success the vp8_Info inside 'parser' is fully populated (frame tag,
+ * dimensions on key frames, segmentation, loop filter, token partitions,
+ * quantizers, reference refresh flags, coefficient probabilities) and
+ * VP8_NO_ERROR is returned; otherwise a vp8_Status error code is returned
+ * and the context contents must not be trusted.
+ *
+ * Fix: the key-frame path previously read data[0..6] (10 bytes total with
+ * the frame tag) without checking data_sz, an out-of-bounds read on
+ * truncated streams; a length check is added before the sync code. */
+int32_t vp8_parse_frame_header(vp8_viddec_parser *parser)
+{
+    vp8_Status ret = VP8_NO_ERROR;
+
+    vp8_Info *pi = &(parser->info);
+
+    uint8_t *data = pi->source;
+    uint32_t data_sz = pi->source_sz;
+
+    /* Restore the saved entropy context if the previous frame told us to
+     * (refresh_entropy_lf == 0 means "restore on the next frame"). */
+    if (0 == pi->refresh_entropy_lf)
+    {
+        memcpy(&(pi->FrameContext), &(pi->LastFrameContext), sizeof(FrameContextData));
+    }
+
+    /* Step 1 : parse frame tag containing 3 bytes*/
+    ret = vp8_parse_frame_tag(&(pi->frame_tag), data, data_sz);
+    if (ret != VP8_NO_ERROR)
+    {
+        return ret;
+    }
+
+    /* Pointer advances 3 bytes */
+    data += 3;
+
+    /* Start the frame data offset */
+    pi->frame_data_offset = 3;
+
+    /* Step 2 : parse key frame parameters*/
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        /* A key frame needs 3 tag bytes + 3 sync-code bytes + 4 size bytes;
+         * reject short buffers before touching data[0..6]. */
+        if (data_sz < 10)
+        {
+            return VP8_UNEXPECTED_END_OF_BITSTREAM;
+        }
+
+        /* Check sync code containing 3 bytes*/
+        if ((data[0] != kVp8SyncCodeByte[0]) || (data[1] != kVp8SyncCodeByte[1]) || (data[2] != kVp8SyncCodeByte[2]))
+        {
+            return VP8_INVALID_FRAME_SYNC_CODE;
+        }
+
+        /* 14-bit width/height plus 2-bit horizontal/vertical scale factors */
+        pi->width = (data[3] | (data[4] << 8)) & 0x3fff;
+        pi->horiz_scale = data[4] >> 6;
+        pi->height = (data[5] | (data[6] << 8)) & 0x3fff;
+        pi->vert_scale = data[6] >> 6;
+
+        /* Pointer advances 7 bytes in this case*/
+        data += 7;
+        pi->frame_data_offset += 7;
+    }
+
+    /* Inter frames inherit dimensions from the last key frame; zero means
+     * no valid key frame has been seen yet. */
+    if (0 == pi->width || 0 == pi->height)
+    {
+        return VP8_UNSUPPORTED_BITSTREAM;
+    }
+
+    /* Initialize frame parameters*/
+    vp8_init_frame(pi);
+
+    /* Initialize bool coder */
+    BOOL_CODER *bc = &(pi->bool_coder);
+    vp8_start_decode(bc, (uint8_t*)data);
+
+    /* Parse key frame parameters */
+    if (pi->frame_tag.frame_type == KEY_FRAME)
+    {
+        pi->clr_type   = (YUV_TYPE)vp8_decode_bool(bc, 128);
+        pi->clamp_type = (CLAMP_TYPE)vp8_decode_bool(bc, 128);
+    }
+
+    /* Step 3 : parse macroblock-level segmentation flag */
+    vp8_parse_segmentation_adjustments_data(pi);
+
+    /* Step 4 : parse loop filter type and levels */
+    vp8_parse_loop_filter_type_level(pi);
+
+    /* Step 5 : parse macroblock-level loop filter adjustments */
+    vp8_parse_loop_filter_adjustments_data(pi);
+
+    /* Step 6: parse token partition and partition data offsets */
+    vp8_parse_token_partition_data(pi, data + pi->frame_tag.first_part_size);
+
+    /* Step 7: parse dequantization indices */
+    vp8_parse_dequantization_indices(pi);
+
+    /* For key frames, both golden frame and altref frame are refreshed/replaced by the current reconstructed frame, by default */
+    if (pi->frame_tag.frame_type == INTER_FRAME)
+    {
+        /* Step 8: parse golden frame and altref frame refresh flags */
+        vp8_parse_gf_af_refresh_flags(pi);
+    }
+
+    /* Step 9: update probability to decode DCT coef */
+    pi->refresh_entropy = (uint8_t)vp8_decode_bool(bc, 128);
+    if (pi->refresh_entropy  == 0)
+    {
+        /* Save the context so the next frame can restore it (see above) */
+        memcpy(&(pi->LastFrameContext), &(pi->FrameContext), sizeof(FrameContextData));
+    }
+
+    /* Step 10: refresh last frame buffer */
+    pi->refresh_lf = (pi->frame_tag.frame_type == KEY_FRAME) || (uint8_t)(vp8_decode_bool(bc, 128));
+
+    /* Step 11: read coef vp8_prob tree */
+    vp8_parse_coef_probs_tree(pi);
+
+    /* Step 12: read remaining frame header data */
+    vp8_parse_remaining_frame_header_data(pi);
+
+    /* Hold the current offset in the bitstream */
+    pi->frame_data_offset += pi->bool_coder.pos;
+
+    /* Get the frame header bits; the 16 presumably accounts for the two
+     * bytes preloaded by vp8_start_decode -- NOTE(review): confirm. */
+    pi->header_bits = pi->frame_data_offset * 8 - 16 - pi->bool_coder.count;
+
+    pi->refresh_entropy_lf = pi->refresh_entropy;
+
+    return ret;
+}