Bug fixes and Video conferencing use case feature support: dynamic bitrate control(bitrate, bitrate percentage, window, minQP and initQP), NAL size, slice number setting for I/P frames, dynamic resolution change handling, AIR and decode error types

Change-Id: Id4b7b4a4c58c838b4adbf06116bb12897c53cf0b
diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog
index 5efdae9..8b6bad2 100644
--- a/mix_vbp/ChangeLog
+++ b/mix_vbp/ChangeLog
@@ -1,5 +1,14 @@
+2010-11-10 Andy Qiu <junhai.qiu@intel.com>
+	* Parse color matrix, video range and aspect ratio
+	* Changed version number to 0.1.20
+
+2010-10-25 Andy Qiu <junhai.qiu@intel.com>
+	* MPEG-4/H.263 partial frame support
+	* Changed VC1 slice parsing output to contain pic/slice header
+	* Changed version number to 0.1.19
+
 2010-09-29 Andy Qiu <junhai.qiu@intel.com>
-	* Enhanced H.264 bitstream parsing
+	* Enhanced H.264 bitstream parsing 
 	* Changed version number to 0.1.18
 
 2010-09-15 Tao Tao  <tao.q.tao@intel.com>
diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac
index 933f6ab..68e5d09 100644
--- a/mix_vbp/configure.ac
+++ b/mix_vbp/configure.ac
@@ -2,7 +2,7 @@
 
 AC_CONFIG_MACRO_DIR(m4)
 
-UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 18)
+UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 20)
 
 dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
 AM_MAINTAINER_MODE
diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec
index d3ccd71..1b6a353 100644
--- a/mix_vbp/mixvbp.spec
+++ b/mix_vbp/mixvbp.spec
@@ -6,7 +6,7 @@
 
 Summary: MIX Video Bitstream Parser
 Name: mixvbp
-Version: 0.1.18
+Version: 0.1.20
 Release: 1
 Source0: %{name}-%{version}.tar.bz2
 NoSource: 0
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
index 51f0602..4de5245 100644
--- a/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/include/h264.h
@@ -502,6 +502,10 @@
 	uint8_t   aspect_ratio_idc;                                // u(8)	
 	uint8_t   video_signal_type_present_flag;                  // u(1)
 	uint8_t   video_format;                                    // u(3)
+#ifdef VBP
+    uint8_t   video_full_range_flag;                           // u(1)
+    uint8_t   matrix_coefficients;                              // u(8) 
+#endif
 	
 	uint8_t   colour_description_present_flag;                 // u(1)	
 	uint8_t   colour_primaries;                                // u(8)	
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
index 29ef54d..c377e2d 100644
--- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_sps.c
@@ -166,6 +166,9 @@
 
 			viddec_pm_get_bits(parent, &code, 1);
 			pVUI_Seq_Not_Used->video_full_range_flag = (uint8_t)code;
+#ifdef VBP
+            SPS->sps_disp.vui_seq_parameters.video_full_range_flag = (uint8_t)code;
+#endif
 
 			viddec_pm_get_bits(parent, &code, 1);
 			SPS->sps_disp.vui_seq_parameters.colour_description_present_flag = (uint8_t)code;
@@ -180,6 +183,9 @@
 
 				viddec_pm_get_bits(parent, &code, 8);
 				pVUI_Seq_Not_Used->matrix_coefficients = (uint8_t)code;
+#ifdef VBP				
+                SPS->sps_disp.vui_seq_parameters.matrix_coefficients = (uint8_t)code;
+#endif
 			}
 		}	
 
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
index d98b38a..4973b1d 100644
--- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse.c
@@ -150,13 +150,19 @@
         if(result == 1)
         {
             if (sh.seq_disp_size.ASPECT_RATIO_FLAG == 1)
-            {
+            {   
                 result = viddec_pm_get_bits(ctxt, &tempValue, 4);
                 sh.ASPECT_RATIO = tempValue;
                 if (sh.ASPECT_RATIO == 15)
                 {
                     result = viddec_pm_get_bits(ctxt, &sh.aspect_size, 16);
                 }
+#ifdef VBP
+                md->ASPECT_RATIO_FLAG = 1;
+                md->ASPECT_RATIO = sh.ASPECT_RATIO;
+                md->ASPECT_HORIZ_SIZE = sh.seq_aspect_size.ASPECT_HORIZ_SIZE;
+                md->ASPECT_VERT_SIZE = sh.seq_aspect_size.ASPECT_VERT_SIZE;
+#endif
             }
 
             result = viddec_pm_get_bits(ctxt, &tempValue, 1);
@@ -182,6 +188,10 @@
             {
                 result = viddec_pm_get_bits(ctxt, &sh.color_format, 24);
             }
+#ifdef VBP
+            md->COLOR_FORMAT_FLAG = sh.COLOR_FORMAT_FLAG;
+            md->MATRIX_COEF = sh.seq_color_format.MATRIX_COEF;
+#endif
         } // Successful get of display size
     } // DISPLAY_EXT is 1
 
diff --git a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
index 149e364..ce36849 100644
--- a/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
+++ b/mix_vbp/viddec_fw/fw/codecs/vc1/parser/vc1parse_common_defs.h
@@ -207,7 +207,14 @@
     uint16_t heightMB;
 
 #ifdef VBP
+    uint8_t COLOR_FORMAT_FLAG;
+    uint8_t MATRIX_COEF;
     uint8_t SYNCMARKER;
+    uint8_t ASPECT_RATIO_FLAG;
+    uint8_t ASPECT_RATIO;
+    uint8_t ASPECT_HORIZ_SIZE;
+    uint8_t ASPECT_VERT_SIZE;
+    
 #endif
     
 } vc1_metadata_t;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
index e62c411..27436b9 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -102,6 +102,31 @@
     Default_8x8_Inter
 };
 
+static uint8 h264_aspect_ratio_table[][2] = 
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    {4, 3},
+    {3, 2},
+    {2, 1},
+    // reserved
+    {0, 0}
+};
+
+
+
 /**
  *
  */
@@ -468,7 +493,7 @@
 	/* set short term reference frames */
 	for (buffer_idx = 0; buffer_idx < dpb->ref_frames_in_buffer; buffer_idx++)
 	{
-		if (frame_idx >= 16)
+		if (frame_idx >= 16 || buffer_idx >= 16)
 		{
 			WTRACE("Frame index is out of bound.");
 			break;
@@ -508,7 +533,7 @@
 	/* set long term reference frames */
 	for (buffer_idx = 0; buffer_idx < dpb->ltref_frames_in_buffer; buffer_idx++)
 	{
-		if (frame_idx >= 16)
+		if (frame_idx >= 16 || buffer_idx >= 16)
 		{
 			WTRACE("Frame index is out of bound.");
 			break;
@@ -792,41 +817,67 @@
 	codec_data->frame_height = (2 - parser->info.active_SPS.sps_disp.frame_mbs_only_flag) * 
 			(parser->info.active_SPS.sps_disp.pic_height_in_map_units_minus1 + 1) * 16;
 			
-	/* frame cropping */
-	codec_data->frame_cropping_flag = 
-		parser->info.active_SPS.sps_disp.frame_cropping_flag;
 	
-	codec_data->frame_crop_rect_left_offset = 
-		parser->info.active_SPS.sps_disp.frame_crop_rect_left_offset;
-	
-	codec_data->frame_crop_rect_right_offset = 
-		parser->info.active_SPS.sps_disp.frame_crop_rect_right_offset;
-		                 
-	codec_data->frame_crop_rect_top_offset =
-		parser->info.active_SPS.sps_disp.frame_crop_rect_top_offset;
-		 
-	codec_data->frame_crop_rect_bottom_offset = 
-		parser->info.active_SPS.sps_disp.frame_crop_rect_bottom_offset;
-	
-	/* aspect ratio	  */
-	codec_data->aspect_ratio_info_present_flag = 
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag;
+	/* aspect ratio */
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_info_present_flag)
+    {		
+    	codec_data->aspect_ratio_idc = 
+		    parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;
+
+        if (codec_data->aspect_ratio_idc < 17)
+        {
+            codec_data->sar_width = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][0];
+            codec_data->sar_height = h264_aspect_ratio_table[codec_data->aspect_ratio_idc][1];            
+        }
+        else if (codec_data->aspect_ratio_idc == 255)
+        {
+        	codec_data->sar_width = 
+        		parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;
+        		                        
+        	codec_data->sar_height = 
+        		parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
+        }
+        else
+        {
+            codec_data->sar_width = 0;
+            codec_data->sar_height = 0;        
+        }
+    }
+    else
+    {
+        // unspecified
+    	codec_data->aspect_ratio_idc = 0;
+    	codec_data->sar_width = 0;
+    	codec_data->sar_height = 0;        
+    }
 		
-	codec_data->aspect_ratio_idc = 
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.aspect_ratio_idc;
-	
-	codec_data->sar_width = 
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_width;
-		                        
-	codec_data->sar_height = 
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.sar_height;
-		
-	 /* video format */
-	 codec_data->video_format =
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;  			
+    /* video format */
+	if (parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag)
+	{
+        codec_data->video_format =
+	    	parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;
+    }	    
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format = 5;
+    }
 	 
-	codec_data->video_format =
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag;  
+	codec_data->video_full_range_flag = 
+		parser->info.active_SPS.sps_disp.vui_seq_parameters.video_full_range_flag; 
+	
+
+    if (parser->info.active_SPS.sps_disp.vui_seq_parameters.colour_description_present_flag)
+    {
+    	codec_data->matrix_coefficients = 
+	    	parser->info.active_SPS.sps_disp.vui_seq_parameters.matrix_coefficients; 
+    }
+    else
+    {
+        // Unspecified
+    	codec_data->matrix_coefficients = 2;    
+    }
+
 
     /* picture order type and count */
     codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
@@ -1481,6 +1532,16 @@
     }
     query_data->num_pictures = 0;
 
+    if (query_data->new_sps && !query_data->has_pps)
+    {
+        // we are waiting for a new pps, so should not reset new_sps flag
+    }
+    else
+    {
+        query_data->new_sps = 0;
+    }
+    query_data->new_pps = 0;
+
     cxt->list.num_items = 0;
 
     /* reset start position of first item to 0 in case there is only one item */
@@ -1664,12 +1725,17 @@
        	break;
        		
      	case h264_NAL_UNIT_TYPE_SPS:
+     	if (query_data->has_sps)
+     	    query_data->new_sps = 1;
      	query_data->has_sps = 1;
      	query_data->has_pps = 0;
         ITRACE("SPS header is parsed.");
  		break;
        		
        	case h264_NAL_UNIT_TYPE_PPS:
+       	if (query_data->has_pps || query_data->new_sps)
+       	    query_data->new_pps = 1;
+       	    
        	query_data->has_pps = 1;
        	ITRACE("PPS header is parsed.");
        	break;
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
index e266ea6..60a30b1 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -39,32 +39,57 @@
 typedef struct _vbp_codec_data_mp42 
 {
     uint8  profile_and_level_indication;
+    uint32 video_object_layer_width;
+    uint32 video_object_layer_height;
+
+    // 0 for unspecified, PAL/NTSC/SECAM
+    uint8  video_format;
+
+    // 0 short range, 1 full range
+    uint8  video_range;
+    
+    // default 2 (unspecified), 1 for BT709.
+    uint8  matrix_coefficients;
+    
+    uint8  short_video_header;
+
+    // always exist for mpeg-4, 
+    uint8   aspect_ratio_info;
+    uint8   par_width;
+    uint8   par_height;
+
 } vbp_codec_data_mp42;
 
 typedef struct _vbp_slice_data_mp42 
-{
+{   
 	uint8* buffer_addr;
 	uint32 slice_offset;
 	uint32 slice_size;
 	VASliceParameterBufferMPEG4 slice_param;
 } vbp_slice_data_mp42;
 
-typedef struct _vbp_picture_data_mp42 
+typedef struct _vbp_picture_data_mp42 vbp_picture_data_mp42;
+
+struct _vbp_picture_data_mp42
 {
 	uint8 vop_coded;
+	uint16 vop_time_increment;
+	/* indicates if current buffer contains parameter for the first slice of the picture */
+	uint8 new_picture_flag;
 	VAPictureParameterBufferMPEG4 picture_param;
-	VAIQMatrixBufferMPEG4 iq_matrix_buffer;
+	vbp_slice_data_mp42 slice_data;
 
-	uint32 number_slices;
-	vbp_slice_data_mp42 *slice_data;
-
-} vbp_picture_data_mp42;
+	vbp_picture_data_mp42* next_picture_data;
+};
 
 typedef struct _vbp_data_mp42 
 {
 	vbp_codec_data_mp42 codec_data;
+	VAIQMatrixBufferMPEG4 iq_matrix_buffer;
 
+	uint32 number_picture_data;
 	uint32 number_pictures;
+
 	vbp_picture_data_mp42 *picture_data;
 
 } vbp_data_mp42;
@@ -91,25 +116,26 @@
 	int			frame_width;
 	int			frame_height;
 		                   
-	uint8	 	frame_cropping_flag;                     
-	int 		frame_crop_rect_left_offset;
-	int			frame_crop_rect_right_offset;                 
-	int 		frame_crop_rect_top_offset;                
-	int 		frame_crop_rect_bottom_offset; 
-
 	uint8	 	vui_parameters_present_flag;
+
 	/* aspect ratio */
-	uint8  		aspect_ratio_info_present_flag;
 	uint8  		aspect_ratio_idc;   
 	uint16		sar_width;                                    
 	uint16		sar_height;
 	
 	/* video fromat */
-	uint8   	video_signal_type_present_flag; 	
+
+	// default 5 unspecified
 	uint8  		video_format;  
+    uint8       video_full_range_flag;
+
+    // default 2 unspecified
+    uint8       matrix_coefficients;
 
     uint8       pic_order_cnt_type;
     int         log2_max_pic_order_cnt_lsb_minus4;
+
+    int         bit_rate;
 		
 } vbp_codec_data_h264;
 
@@ -150,6 +176,10 @@
     /* if PPS has been received */
     uint8  has_pps;
 
+    uint8  new_sps;
+
+    uint8  new_pps;
+    
     vbp_picture_data_h264* pic_data;
 
     /** 
@@ -177,6 +207,9 @@
 	uint8  FINTERPFLAG;
 	uint8  PSF;
 
+    // default 2: unspecified
+    uint8  MATRIX_COEF;
+    
 	/* Entry point layer. */
 	uint8  BROKEN_LINK;
 	uint8  CLOSED_ENTRY;
@@ -210,6 +243,15 @@
 	uint8  INTCOMPFIELD;
 	uint8  LUMSCALE2;
 	uint8  LUMSHIFT2;
+
+	// aspect ratio
+
+	// default unspecified
+	uint8 ASPECT_RATIO;
+	
+	uint8 ASPECT_HORIZ_SIZE;
+	uint8 ASPECT_VERT_SIZE;
+	
 } vbp_codec_data_vc1;
 
 typedef struct _vbp_slice_data_vc1 
@@ -255,15 +297,12 @@
 	VBP_OK,
 	VBP_TYPE,
 	VBP_LOAD,
-	VBP_UNLOAD,
 	VBP_INIT,
 	VBP_DATA,
 	VBP_DONE,
-	VBP_GLIB,
 	VBP_MEM,
 	VBP_PARM,
-	VBP_CXT,
-	VBP_IMPL
+	VBP_PARTIAL	
 };
 
 enum _vbp_parser_type
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index 7a65dbe..1acfd9b 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -16,31 +16,55 @@
 #include "vbp_mp42_parser.h"
 #include "../codecs/mp4/parser/viddec_mp4_parse.h"
 
-#define MIX_VBP_COMP 		"mixvbp"
+
+
+static bool short_video_header = TRUE;
+
+static uint8 mp4_aspect_ratio_table[][2] = 
+{
+    // forbidden
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    
+    // reserved
+    {0, 0}
+};
+
 
 /*
  * Some divX avi files contains 2 frames in one gstbuffer.
  */
-#define MAX_NUM_PICTURES_MP42 8
 
-uint32 vbp_get_sc_pos_mp42(uint8 *buf, uint32 length,
-		uint32* sc_phase, uint32 *sc_end_pos, uint8 *is_normal_sc);
+
+uint32 vbp_get_sc_pos_mp42(
+    uint8 *buf, 
+    uint32 length,
+    uint32 *sc_end_pos, 
+    uint8 *is_normal_sc, 
+    uint8* resync_marker);
 
 void vbp_on_vop_mp42(vbp_context *pcontext, int list_index);
 void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index);
-void vbp_dump_query_data(vbp_context *pcontext, int list_index);
-
+void vbp_fill_codec_data(vbp_context *pcontext);
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data);
 uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index);
 uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index);
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext);
 
-/* This is coppied from DHG mp42 parser */
-static inline mp4_Status_t
-vbp_sprite_trajectory_mp42(void *parent, mp4_VideoObjectLayer_t *vidObjLay,
-		mp4_VideoObjectPlane_t *vidObjPlane);
+static inline uint32 vbp_sprite_trajectory_mp42(
+    void *parent, 
+    mp4_VideoObjectLayer_t *vidObjLay,
+    mp4_VideoObjectPlane_t *vidObjPlane);
 
-/* This is coppied from DHG mp42 parser */
-static inline int32_t vbp_sprite_dmv_length_mp42(void * parent,
-		int32_t *dmv_length);
+
+static inline uint32 vbp_sprite_dmv_length_mp42(
+    void * parent,
+    int32_t *dmv_length);
+ 
 
 /**
  *
@@ -49,7 +73,7 @@
 {
 	if (NULL == pcontext->parser_ops)
 	{
-		/* absolutely impossible, just sanity check */
+		// absolutely impossible, just sanity check
 		return VBP_PARM;
 	}
 	pcontext->parser_ops->init = dlsym(pcontext->fd_parser, "viddec_mp4_init");
@@ -96,11 +120,9 @@
  */
 uint32 vbp_parse_init_data_mp42(vbp_context *pcontext)
 {
-	VTRACE ("begin\n");
-	vbp_parse_start_code_mp42(pcontext);
-	VTRACE ("end\n");
-
-	return VBP_OK;
+    uint32 ret = VBP_OK;
+	ret = vbp_parse_start_code_mp42(pcontext);
+	return ret;
 }
 
 uint32 vbp_process_parsing_result_mp42(vbp_context *pcontext, int list_index) 
@@ -113,78 +135,101 @@
 	uint32 current_sc = parser->current_sc;
 	is_svh = parser->cur_sc_prefix ? false : true;
 
-	VTRACE ("begin\n");
-
-	VTRACE ("current_sc = 0x%x  profile_and_level_indication = 0x%x\n",
-			parser->current_sc, parser->info.profile_and_level_indication);
-
 	if (!is_svh) 
 	{
-		/* remove prefix from current_sc */
+		// remove prefix from current_sc
 		current_sc &= 0x0FF;
 		switch (current_sc) 
 		{
 		case MP4_SC_VISUAL_OBJECT_SEQUENCE:
-			VTRACE ("MP4_SC_VISUAL_OBJECT_SEQUENCE\n");
-
+			VTRACE ("Visual Object Sequence is parsed.\n");            
 			query_data->codec_data.profile_and_level_indication
 					= parser->info.profile_and_level_indication;
-
+            VTRACE ("profile_and_level_indication = 0x%x\n", parser->info.profile_and_level_indication);
 			break;
+			
 		case MP4_SC_VIDEO_OBJECT_PLANE:
-			VTRACE ("MP4_SC_VIDEO_OBJECT_PLANE\n");
+			VTRACE ("Video Object Plane is parsed.\n");
 			vbp_on_vop_mp42(pcontext, list_index);
 			break;
-		default: {
-			if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && (current_sc
-					<= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) {
-				query_data->codec_data.profile_and_level_indication
-						= parser->info.profile_and_level_indication;
-			} else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX) {
-				if (parser->sc_seen == MP4_SC_SEEN_SVH) {
-					VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
+			
+		default:
+			if ((current_sc >= MP4_SC_VIDEO_OBJECT_LAYER_MIN) && 
+			    (current_sc <= MP4_SC_VIDEO_OBJECT_LAYER_MAX)) 
+            {
+                VTRACE ("Video Object Layer is parsed\n");
+                short_video_header = FALSE;
+                vbp_fill_codec_data(pcontext);				
+			} 
+			else if (current_sc <= MP4_SC_VIDEO_OBJECT_MAX &&
+			         current_sc >= MP4_SC_VIDEO_OBJECT_MIN) 
+			{
+				if (parser->sc_seen == MP4_SC_SEEN_SVH) 
+				{
+				    // this should never happen!!!!
+                    WTRACE ("Short video header is parsed.\n");
 					vbp_on_vop_svh_mp42(pcontext, list_index);
 				}
 			}
-		}
 			break;
 		}
-
-	} else {
-		if (parser->sc_seen == MP4_SC_SEEN_SVH) {
-			VTRACE ("parser->sc_seen == MP4_SC_SEEN_SVH\n");
+	} 
+	else 
+	{
+		if (parser->sc_seen == MP4_SC_SEEN_SVH) 
+		{
+			VTRACE ("Short video header is parsed.\n");
 			vbp_on_vop_svh_mp42(pcontext, list_index);
 		}
 	}
 
-	VTRACE ("End\n");
-
 	return VBP_OK;
 }
 
-/*
- * This function fills viddec_pm_cxt_t by start codes
- * I may change the codes to make it more efficient later
- */
 
+
+/* 
+* partial frame handling:
+* 
+* h.263: picture header is lost if the first GOB is discarded, a redundant pic header must be 
+* conveyed in the packet  (RFC 4629) for each following GOB, otherwise, 
+* picture can't be decoded.
+* 
+* MPEG4:  VideoObjectPlane header is lost if the first slice is discarded. However, picture
+* is still decodable as long as the header_extension_code is 1 in video_packet_header. 
+*
+* MPEG-4 with short header:   video_plane_with_short_header is lost if the first GOB
+* is discarded. As this header is not duplicated (RFC 3016), picture is not decodable.
+*
+* In sum:
+* If buffer contains the 32-bit start code (0x000001xx), proceed  as normal.
+*
+* If buffer contains 22-bits of "0000 0000 0000 0000 1000 00", which indicates h.263
+* picture start code or short_video_start_marker, proceed as normal. 
+*
+* If buffer contains 22-bits of "0000 0000 0000 0000 1XXX XX", (when XXX XX starts from 000 01), which 
+* indicates  h.263 Group Start code or gob_resync_marker of gob_layer in MPEG-4 with 
+* short header, we should report packet as a partial frame - no more parsing is needed.
+*
+* If buffer contains a string of 0 between 16 bits and 22 bits, followed by 1-bit of '1', which indicates a resync-marker,
+* the buffer will be immediately parsed and num_items is set to 0.
+*/
 uint32 vbp_parse_start_code_mp42(vbp_context *pcontext)
 {
 	viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
-	/*viddec_parser_ops_t *func = pcontext->parser_ops; */
 	uint8 *buf = NULL;
 	uint32 size = 0;
-	uint32 sc_phase = 0;
 	uint32 sc_end_pos = -1;
-
 	uint32 bytes_parsed = 0;
-
 	viddec_mp4_parser_t *pinfo = NULL;
-
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-	/* reset query data for the new sample buffer */
+
+	
+	// reset query data for the new sample buffer 
+	query_data->number_picture_data= 0;
 	query_data->number_pictures = 0;
 	
-	/* emulation prevention byte is always present */
+	// emulation prevention byte is always present 
 	cxt->getbits.is_emul_reqd = 1;
 
 	cxt->list.num_items = 0;
@@ -197,25 +242,21 @@
 	pinfo = (viddec_mp4_parser_t *) &(cxt->codec_data[0]);
 
 	uint8 is_normal_sc = 0;
-
+	uint8 resync_marker = 0;
 	uint32 found_sc = 0;
+	uint32 ret = VBP_OK;
 
-	VTRACE ("begin cxt->parse_cubby.size= %d\n", size);
+	while (1) 
+	{
+		found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size- bytes_parsed, 
+		        &sc_end_pos, &is_normal_sc, &resync_marker);
 
-	while (1) {
-
-		sc_phase = 0;
-
-		found_sc = vbp_get_sc_pos_mp42(buf + bytes_parsed, size
-				- bytes_parsed, &sc_phase, &sc_end_pos, &is_normal_sc);
-
-		if (found_sc) {
-
-			VTRACE ("sc_end_pos = %d\n", sc_end_pos);
-
+		if (found_sc)
+		{
 			cxt->list.data[cxt->list.num_items].stpos = bytes_parsed
 					+ sc_end_pos - 3;
-			if (cxt->list.num_items != 0) {
+			if (cxt->list.num_items != 0) 
+			{
 				cxt->list.data[cxt->list.num_items - 1].edpos = bytes_parsed
 						+ sc_end_pos - 3;
 			}
@@ -223,27 +264,53 @@
 
 			cxt->list.num_items++;
 			pinfo->cur_sc_prefix = is_normal_sc;
-
-		} else {
-
-			if (cxt->list.num_items != 0) {
+		} 
+		else 
+		{
+			if (cxt->list.num_items != 0) 
+			{
 				cxt->list.data[cxt->list.num_items - 1].edpos
 						= cxt->parse_cubby.size;
 				break;
-			} else {
-
-				VTRACE ("I didn't find any sc in cubby buffer! The size of cubby is %d\n",
-						size);
-
+			}
+	    	else 
+			{
+				WTRACE ("No start-code is found in cubby buffer! The size of cubby is %d\n", size);
 				cxt->list.num_items = 1;
 				cxt->list.data[0].stpos = 0;
 				cxt->list.data[0].edpos = cxt->parse_cubby.size;
+
+                if (resync_marker)
+			    {
+                    // either the first slice (GOB) is lost or parser receives a single slice (GOB)
+    			    if (short_video_header)
+    			    {
+    			        // TODO: revisit if HW supports GOB layer decoding for h.263
+    			        WTRACE("Partial frame: GOB buffer.\n");
+    			        ret = VBP_PARTIAL;    			        
+    			    }
+    			    else			    
+    			    {
+    			        WTRACE("Partial frame: video packet header buffer.\n");
+    			        ret =  vbp_process_video_packet_mp42(pcontext);
+    			    }
+
+    			    // set num_items to 0 so buffer will not be parsed again
+    			    cxt->list.num_items = 0;    			    
+			    }
+			    else
+			    {
+			        ETRACE("Invalid data received.\n");
+                    cxt->list.num_items = 0;
+                    return VBP_DATA;                    
+			    }
+			    
 				break;
 			}
 		}
 	}
 
-	return VBP_OK;
+	return ret;
 }
 
 uint32 vbp_populate_query_data_mp42(vbp_context *pcontext) 
@@ -254,33 +321,103 @@
 	return VBP_OK;
 }
 
-void vbp_fill_codec_data(vbp_context *pcontext, int list_index) 
+vbp_picture_data_mp42* vbp_get_mp42_picture_data(vbp_data_mp42 * query_data)
 {
+    vbp_picture_data_mp42 *picture_data = query_data->picture_data;
+    int num_pictures = query_data->number_picture_data;
+    while (num_pictures > 1)
+    {
+        picture_data = picture_data->next_picture_data;
+        num_pictures--;
+    }      
 
-	/* fill vbp_codec_data_mp42 data */
+    return picture_data;
+}
+
+void vbp_fill_codec_data(vbp_context *pcontext) 
+{
 	viddec_mp4_parser_t *parser =
 			(viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-	query_data->codec_data.profile_and_level_indication
+	vbp_codec_data_mp42* codec_data = &(query_data->codec_data);
+	
+	codec_data->profile_and_level_indication
 			= parser->info.profile_and_level_indication;
+
+    codec_data->video_object_layer_width = 
+            parser->info.VisualObject.VideoObject.video_object_layer_width;
+
+    codec_data->video_object_layer_height = 
+            parser->info.VisualObject.VideoObject.video_object_layer_height;
+
+    if (parser->info.VisualObject.VideoSignalType.is_video_signal_type)
+    {
+        codec_data->video_format =     
+            parser->info.VisualObject.VideoSignalType.video_format;
+    }
+    else
+    {
+        // Unspecified video format
+        codec_data->video_format =  5;
+    }
+            
+    codec_data->video_range = 
+            parser->info.VisualObject.VideoSignalType.video_range;
+
+    if (parser->info.VisualObject.VideoSignalType.is_colour_description)
+    {
+        codec_data->matrix_coefficients = 
+            parser->info.VisualObject.VideoSignalType.matrix_coefficients;
+    }
+    else if (short_video_header)
+    {
+        // SMPTE 170M
+        codec_data->matrix_coefficients = 6;        
+    }
+    else
+    {
+        // ITU-R Recommendation BT.709
+        codec_data->matrix_coefficients = 1;
+    }
+
+    codec_data->short_video_header = short_video_header;    
+
+    // aspect ratio
+    codec_data->aspect_ratio_info = parser->info.VisualObject.VideoObject.aspect_ratio_info;
+    if (codec_data->aspect_ratio_info < 6)
+    {
+        codec_data->par_width = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][0];
+        codec_data->par_height = mp4_aspect_ratio_table[codec_data->aspect_ratio_info][1];
+    }
+    else if (codec_data->aspect_ratio_info == 15)
+    {
+        codec_data->par_width = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_width;
+        codec_data->par_height = parser->info.VisualObject.VideoObject.aspect_ratio_info_par_height;
+    }
+    else
+    {
+        codec_data->par_width = 0;
+        codec_data->par_height = 0;
+    }
 }
 
 void vbp_fill_slice_data(vbp_context *pcontext, int list_index) 
 {
-
 	viddec_mp4_parser_t *parser =
 			(viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
 
-	if (!parser->info.VisualObject.VideoObject.short_video_header) {
+	if (!parser->info.VisualObject.VideoObject.short_video_header) 
+	{
 		vbp_process_slices_mp42(pcontext, list_index);
-	} else {
+	} 
+	else 
+	{
 		vbp_process_slices_svh_mp42(pcontext, list_index);
 	}
 }
 
-void vbp_fill_picture_param(vbp_context *pcontext, int list_index) 
+void vbp_fill_picture_param(vbp_context *pcontext, uint8 new_picture_flag) 
 {
-
 	viddec_mp4_parser_t *parser =
 			(viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
@@ -288,21 +425,61 @@
 	vbp_picture_data_mp42 *picture_data = NULL;
 	VAPictureParameterBufferMPEG4 *picture_param = NULL;
 
-	picture_data = &(query_data->picture_data[query_data->number_pictures]);
+    if (new_picture_flag)
+    {
+        query_data->number_pictures++;
+    }
+    
+    picture_data = query_data->picture_data;
+    if (picture_data == NULL || query_data->number_picture_data == 0)
+    {
+        // first entry
+        if (picture_data == NULL)
+        {
+            picture_data = (vbp_picture_data_mp42*)g_try_new0(vbp_picture_data_mp42, 1);
+            query_data->picture_data = picture_data;
+        }            
+        query_data->number_picture_data = 1;
+    }
+    else
+    {   
+        // find the last active one 
+        int i = query_data->number_picture_data;
+        while (i > 1)
+        {        
+            picture_data = picture_data->next_picture_data;
+            i--;
+        }           
+        if (picture_data->next_picture_data == NULL)
+        {
+            picture_data->next_picture_data = g_try_new0(vbp_picture_data_mp42, 1);
+        }
 
+        query_data->number_picture_data++;
+
+        picture_data = picture_data->next_picture_data;
+    }         
+         
 	picture_param = &(picture_data->picture_param);
 
 	uint8 idx = 0;
 
+    picture_data->new_picture_flag = new_picture_flag;
+    
 	picture_data->vop_coded
 			= parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coded;
+
 	VTRACE ("vop_coded = %d\n", picture_data->vop_coded);
 
-	/*
-	 * fill picture_param
-	 */
 
-	/* NOTE: for short video header, the parser saves vop_width and vop_height
+    picture_data->vop_time_increment = 
+            parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_time_increment;
+
+    // fill picture_param
+
+
+	/* 
+	 * NOTE: for short video header, the parser saves vop_width and vop_height
 	 * to VOL->video_object_layer_width and VOL->video_object_layer_height
 	 */
 	picture_param->vop_width
@@ -313,16 +490,15 @@
 	picture_param->forward_reference_picture = VA_INVALID_SURFACE;
 	picture_param->backward_reference_picture = VA_INVALID_SURFACE;
 
-	/*
-	 * VAPictureParameterBufferMPEG4::vol_fields
-	 */
+	// Fill VAPictureParameterBufferMPEG4::vol_fields
+
 	picture_param->vol_fields.bits.short_video_header
 			= parser->info.VisualObject.VideoObject.short_video_header;
 	picture_param->vol_fields.bits.chroma_format
 			= parser->info.VisualObject.VideoObject.VOLControlParameters.chroma_format;
 
 	/* TODO: find out why testsuite always set this value to be 0 */
-	//	picture_param->vol_fields.bits.chroma_format = 0;
+	picture_param->vol_fields.bits.chroma_format = 0;
 
 	picture_param->vol_fields.bits.interlaced
 			= parser->info.VisualObject.VideoObject.interlaced;
@@ -345,8 +521,9 @@
 
 	picture_param->no_of_sprite_warping_points
 			= parser->info.VisualObject.VideoObject.sprite_info.no_of_sprite_warping_points;
-
-	for (idx = 0; idx < 3; idx++) {
+   
+	for (idx = 0; idx < 3; idx++) 
+	{
 		picture_param->sprite_trajectory_du[idx]
 				= parser->info.VisualObject.VideoObject.VideoObjectPlane.warping_mv_code_du[idx];
 		picture_param->sprite_trajectory_dv[idx]
@@ -356,24 +533,28 @@
 	picture_param->quant_precision
 			= parser->info.VisualObject.VideoObject.quant_precision;
 
-	/*
-	 *  VAPictureParameterBufferMPEG4::vop_fields
-	 */
+    // fill VAPictureParameterBufferMPEG4::vop_fields
 
-	if (!parser->info.VisualObject.VideoObject.short_video_header) {
+
+	if (!parser->info.VisualObject.VideoObject.short_video_header) 
+	{
 		picture_param->vop_fields.bits.vop_coding_type
 				= parser->info.VisualObject.VideoObject.VideoObjectPlane.vop_coding_type;
-	} else {
+	} 
+	else 
+	{
 		picture_param->vop_fields.bits.vop_coding_type
 				= parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.picture_coding_type;
 	}
 
-	/* TODO:
+	/* 
+	  * TODO:
 	 * fill picture_param->vop_fields.bits.backward_reference_vop_coding_type
 	 * This shall be done in mixvideoformat_mp42. See M42 spec 7.6.7
 	 */
 
-	if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) {
+	if (picture_param->vop_fields.bits.vop_coding_type != MP4_VOP_TYPE_B) 
+	{
 		picture_param->vop_fields.bits.backward_reference_vop_coding_type
 				= picture_param->vop_fields.bits.vop_coding_type;
 	}
@@ -394,33 +575,19 @@
 	picture_param->vop_time_increment_resolution
 			= parser->info.VisualObject.VideoObject.vop_time_increment_resolution;
 
-	/* short header related */
+	// short header related 
 	picture_param->num_gobs_in_vop
 			= parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_gobs_in_vop;
 	picture_param->num_macroblocks_in_gob
 			= parser->info.VisualObject.VideoObject.VideoObjectPlaneH263.num_macroblocks_in_gob;
 
-	/* for direct mode prediction */
+	// for direct mode prediction 
 	picture_param->TRB = parser->info.VisualObject.VideoObject.TRB;
 	picture_param->TRD = parser->info.VisualObject.VideoObject.TRD;
-
-#if 0
-	printf(
-			"parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable = %d\n",
-			parser->info.VisualObject.VideoObject.reduced_resolution_vop_enable);
-
-	printf("parser->info.VisualObject.VideoObject.data_partitioned = %d\n",
-			parser->info.VisualObject.VideoObject.data_partitioned);
-
-	printf(
-			"####parser->info.VisualObject.VideoObject.resync_marker_disable = %d####\n",
-			parser->info.VisualObject.VideoObject.resync_marker_disable);
-#endif
 }
 
-void vbp_fill_iq_matrix_buffer(vbp_context *pcontext, int list_index) 
+void vbp_fill_iq_matrix_buffer(vbp_context *pcontext) 
 {
-
 	viddec_mp4_parser_t *parser =
 			(viddec_mp4_parser_t *) &(pcontext->parser_cxt->codec_data[0]);
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
@@ -428,52 +595,40 @@
 	mp4_VOLQuant_mat_t *quant_mat_info =
 			&(parser->info.VisualObject.VideoObject.quant_mat_info);
 
-	vbp_picture_data_mp42 *picture_data = NULL;
 	VAIQMatrixBufferMPEG4 *iq_matrix = NULL;
 
-	picture_data = &(query_data->picture_data[query_data->number_pictures]);
-	iq_matrix = &(picture_data->iq_matrix_buffer);
+	iq_matrix = &(query_data->iq_matrix_buffer);
 
 	iq_matrix->load_intra_quant_mat = quant_mat_info->load_intra_quant_mat;
 	iq_matrix->load_non_intra_quant_mat
 			= quant_mat_info->load_nonintra_quant_mat;
 	memcpy(iq_matrix->intra_quant_mat, quant_mat_info->intra_quant_mat, 64);
-	memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat,
-			64);
+	memcpy(iq_matrix->non_intra_quant_mat, quant_mat_info->nonintra_quant_mat, 64);
 }
 
+
 void vbp_on_vop_mp42(vbp_context *pcontext, int list_index) 
 {
-	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-
-	vbp_fill_codec_data(pcontext, list_index);
-
-	vbp_fill_picture_param(pcontext, list_index);
-	vbp_fill_iq_matrix_buffer(pcontext, list_index);
+	vbp_fill_codec_data(pcontext);
+	vbp_fill_picture_param(pcontext, 1);
+	vbp_fill_iq_matrix_buffer(pcontext);
 	vbp_fill_slice_data(pcontext, list_index);
-
-	query_data->number_pictures++;
 }
 
 void vbp_on_vop_svh_mp42(vbp_context *pcontext, int list_index) 
 {
-	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-
-	vbp_fill_codec_data(pcontext, list_index);
-
-	vbp_fill_picture_param(pcontext, list_index);
-	vbp_fill_iq_matrix_buffer(pcontext, list_index);
+	vbp_fill_codec_data(pcontext);
+	vbp_fill_picture_param(pcontext, 1);
+	vbp_fill_iq_matrix_buffer(pcontext);
 	vbp_fill_slice_data(pcontext, list_index);
-
-	query_data->number_pictures++;
 }
 
 uint32 vbp_get_sc_pos_mp42(
 	uint8 *buf, 
 	uint32 length,
-	uint32* sc_phase,
 	uint32 *sc_end_pos,
-	uint8 *is_normal_sc) 
+	uint8 *is_normal_sc,
+	uint8 *resync_marker) 
 {
 	uint8 *ptr = buf;
 	uint32 size;
@@ -481,15 +636,17 @@
 	size = 0;
 
 	data_left = length;
-	phase = *sc_phase;
 	*sc_end_pos = -1;
 
 	/* parse until there is more data and start code not found */
-	while ((data_left > 0) && (phase < 3)) {
+	while ((data_left > 0) && (phase < 3)) 
+	{
 		/* Check if we are byte aligned & phase=0, if thats the case we can check
 		 work at a time instead of byte*/
-		if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) {
-			while (data_left > 3) {
+		if (((((uint32) ptr) & 0x3) == 0) && (phase == 0)) 
+		{
+			while (data_left > 3) 
+			{
 				uint32 data;
 				char mask1 = 0, mask2 = 0;
 
@@ -501,12 +658,16 @@
 				mask2 = (FIRST_STARTCODE_BYTE != (data & SC_BYTE_MASK1));
 				/* If second byte and fourth byte are not zero's then we cannot have a start code here as we need
 				 two consecutive zero bytes for a start code pattern */
-				if (mask1 && mask2) {/* Success so skip 4 bytes and start over */
+				if (mask1 && mask2) 
+				{
+				    /* Success so skip 4 bytes and start over */
 					ptr += 4;
 					size += 4;
 					data_left -= 4;
 					continue;
-				} else {
+				} 
+				else 
+				{
 					break;
 				}
 			}
@@ -514,18 +675,25 @@
 
 		/* At this point either data is not on a word boundary or phase > 0 or On a word boundary but we detected
 		 two zero bytes in the word so we look one byte at a time*/
-		if (data_left > 0) {
-			if (*ptr == FIRST_STARTCODE_BYTE) {/* Phase can be 3 only if third start code byte is found */
+		if (data_left > 0) 
+		{
+			if (*ptr == FIRST_STARTCODE_BYTE) 
+			{
+			    /* Phase can be 3 only if third start code byte is found */
 				phase++;
 				ptr++;
 				size++;
 				data_left--;
-				if (phase > 2) {
+				if (phase > 2) 
+				{
 					phase = 2;
 
-					if ((((uint32) ptr) & 0x3) == 0) {
-						while (data_left > 3) {
-							if (*((uint32 *) ptr) != 0) {
+					if ((((uint32) ptr) & 0x3) == 0) 
+					{
+						while (data_left > 3) 
+						{
+							if (*((uint32 *) ptr) != 0) 
+							{
 								break;
 							}
 							ptr += 4;
@@ -534,30 +702,31 @@
 						}
 					}
 				}
-			} else {
+			} 
+			else 
+			{
 				uint8 normal_sc = 0, short_sc = 0;
-				if (phase == 2) {
+				if (phase == 2) 
+				{
 					normal_sc = (*ptr == THIRD_STARTCODE_BYTE);
 					short_sc = (SHORT_THIRD_STARTCODE_BYTE == (*ptr & 0xFC));
 
-					VTRACE ("short_sc = %d\n", short_sc);
-
 					*is_normal_sc = normal_sc;
+
+					// At least 16 bits of zeros seen; this may be a GOB start
+					// code or a resync marker.
+					*resync_marker = 1;
 				}
 
-				if (!(normal_sc | short_sc)) {
+				if (!(normal_sc | short_sc)) 
+				{
 					phase = 0;
-				} else {/* Match for start code so update context with byte position */
+				} 
+				else 
+				{
+				    /* Match for start code so update context with byte position */
 					*sc_end_pos = size;
 					phase = 3;
-
-					//if (normal_sc) {
-					//} else {
-					//	/* For short start code since start code is in one nibble just return at this point */
-					//	phase += 1;
-					//	ret = 1;
-					//	break;
-					//}
 				}
 				ptr++;
 				size++;
@@ -565,35 +734,38 @@
 			}
 		}
 	}
-	if ((data_left > 0) && (phase == 3)) {
+	if ((data_left > 0) && (phase == 3)) 
+	{
 		(*sc_end_pos)++;
 		phase++;
 		ret = 1;
 	}
-	*sc_phase = phase;
-	/* Return SC found only if phase is 4, else always success */
+	
+	// Return 1 only if phase is 4, else always return 0
 	return ret;
 }
 
+
 uint32 vbp_macroblock_number_length_mp42(uint32 numOfMbs)
 {
 	uint32 length = 0;
 	numOfMbs--;
-	do {
+	do 
+	{
 		numOfMbs >>= 1;
 		length++;
-	} while (numOfMbs);
+	} 
+	while (numOfMbs);
 	return length;
 }
 
-mp4_Status_t vbp_video_packet_header_mp42(	
+uint32 vbp_parse_video_packet_header_mp42(	
 	void *parent,
 	viddec_mp4_parser_t *parser_cxt,
 	uint16_t *quant_scale,
 	uint32 *macroblock_number)
 {
-
-	mp4_Status_t ret = MP4_STATUS_OK;
+	uint32 ret = VBP_DATA;
 	mp4_Info_t *pInfo = &(parser_cxt->info);
 	mp4_VideoObjectLayer_t *vidObjLay = &(pInfo->VisualObject.VideoObject);
 	mp4_VideoObjectPlane_t *vidObjPlane =
@@ -607,148 +779,165 @@
 	uint32 header_extension_codes = 0;
 	uint8 vop_coding_type = vidObjPlane->vop_coding_type;
 
-	do {
-		if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) {
-			ret = MP4_STATUS_NOTSUPPORT;
-			break;
-		}
+	if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_RECTANGULAR) 
+	{
+		return VBP_DATA;
+	}
+	
+    do 
+	{
+		// get macroblock_number
+		uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
+		uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
+		uint32 length = vbp_macroblock_number_length_mp42(mbs_x * mbs_y);
 
-		/* get macroblock_number */
+		getbits = viddec_pm_get_bits(parent, &code, length);
+		BREAK_GETBITS_FAIL(getbits, ret);
+
+		_macroblock_number = code;
+
+		// quant_scale
+		if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) 
 		{
-			uint16_t mbs_x = (vidObjLay->video_object_layer_width + 15) >> 4;
-			uint16_t mbs_y = (vidObjLay->video_object_layer_height + 15) >> 4;
-			uint32 length = vbp_macroblock_number_length_mp42(mbs_x
-					* mbs_y);
-
-			getbits = viddec_pm_get_bits(parent, &code, length);
-			BREAK_GETBITS_FAIL(getbits, ret);
-
-			_macroblock_number = code;
-		}
-
-		/* quant_scale */
-		if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) {
-			getbits = viddec_pm_get_bits(parent, &code,
-					vidObjLay->quant_precision);
+			getbits = viddec_pm_get_bits(parent, &code, vidObjLay->quant_precision);
 			BREAK_GETBITS_FAIL(getbits, ret);
 			_quant_scale = code;
 		}
 
-		/* header_extension_codes */
-		if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) {
+		// header_extension_codes
+		if (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) 
+		{
 			getbits = viddec_pm_get_bits(parent, &code, 1);
 			BREAK_GETBITS_FAIL(getbits, ret);
 			header_extension_codes = code;
 		}
 
-		if (header_extension_codes) {
-			do {
+		if (header_extension_codes) 
+		{
+		    // modulo time base
+			do 
+			{
 				getbits = viddec_pm_get_bits(parent, &code, 1);
 				BREAK_GETBITS_FAIL(getbits, ret);
 			} while (code);
 
-			/* marker_bit */
+			// marker_bit
 			getbits = viddec_pm_get_bits(parent, &code, 1);
 			BREAK_GETBITS_FAIL(getbits, ret);
 
-			/* vop_time_increment */
+			// vop_time_increment
+			uint32 numbits = 0;
+			numbits = vidObjLay->vop_time_increment_resolution_bits;
+			if (numbits == 0) 
 			{
-				uint32 numbits = 0;
-				numbits = vidObjLay->vop_time_increment_resolution_bits;
-				if (numbits == 0) {
-					numbits = 1;
-				}
-				getbits = viddec_pm_get_bits(parent, &code, numbits);
-				BREAK_GETBITS_FAIL(getbits, ret);
+			    // resolution bit count may be 0; read at least 1 bit
+				numbits = 1;
 			}
-			/* marker_bit */
+			getbits = viddec_pm_get_bits(parent, &code, numbits);
+			BREAK_GETBITS_FAIL(getbits, ret);
+			vidObjPlane->vop_time_increment = code;
+
+
+			// marker_bit
 			getbits = viddec_pm_get_bits(parent, &code, 1);
 			BREAK_GETBITS_FAIL(getbits, ret);
 
-			/* vop_coding_type */
+			// vop_coding_type
 			getbits = viddec_pm_get_bits(parent, &code, 2);
 			BREAK_GETBITS_FAIL(getbits, ret);
 
 			vop_coding_type = code & 0x3;
+			vidObjPlane->vop_coding_type = vop_coding_type;
 
-		/* Fixed Klocwork issue: Code is unreachable.
-		 * Comment the following codes because we have
-		 * already checked video_object_layer_shape
-		 */
-		 /* if (vidObjLay->video_object_layer_shape
-					!= MP4_SHAPE_TYPE_RECTANGULAR) {
-				ret = MP4_STATUS_NOTSUPPORT;
-				break;
-			}
-		 */
-			if (vidObjLay->video_object_layer_shape
-					!= MP4_SHAPE_TYPE_BINARYONLY) {
-				/* intra_dc_vlc_thr */
+	
+			if (vidObjLay->video_object_layer_shape != MP4_SHAPE_TYPE_BINARYONLY) 
+			{
+				// intra_dc_vlc_thr
 				getbits = viddec_pm_get_bits(parent, &code, 3);
 				BREAK_GETBITS_FAIL(getbits, ret);
-				if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC)
-						&& (vop_coding_type == MP4_VOP_TYPE_S)
-						&& (vidObjLay->sprite_info.no_of_sprite_warping_points
-								> 0)) {
+
+				vidObjPlane->intra_dc_vlc_thr = code;
+				if ((vidObjLay->sprite_enable == MP4_SPRITE_GMC) && 
+				    (vop_coding_type == MP4_VOP_TYPE_S) && 
+				    (vidObjLay->sprite_info.no_of_sprite_warping_points> 0)) 
+                {
 					if (vbp_sprite_trajectory_mp42(parent, vidObjLay,
-							vidObjPlane) != MP4_STATUS_OK) {
+							vidObjPlane) != VBP_OK) 
+                    {                        
 						break;
 					}
 				}
 
-				if (vidObjLay->reduced_resolution_vop_enable
-						&& (vidObjLay->video_object_layer_shape
-								== MP4_SHAPE_TYPE_RECTANGULAR)
-						&& ((vop_coding_type == MP4_VOP_TYPE_I)
-								|| (vop_coding_type == MP4_VOP_TYPE_P))) {
-					/* vop_reduced_resolution */
+				if (vidObjLay->reduced_resolution_vop_enable && 
+				    (vidObjLay->video_object_layer_shape == MP4_SHAPE_TYPE_RECTANGULAR) &&
+					((vop_coding_type == MP4_VOP_TYPE_I) ||
+					(vop_coding_type == MP4_VOP_TYPE_P))) 
+                {
+					// vop_reduced_resolution
 					getbits = viddec_pm_get_bits(parent, &code, 1);
 					BREAK_GETBITS_FAIL(getbits, ret);
 				}
 
-				if (vop_coding_type == MP4_VOP_TYPE_I) {
-					/* vop_fcode_forward */
-					getbits = viddec_pm_get_bits(parent, &code, 3);
+				if (vop_coding_type != MP4_VOP_TYPE_I) 
+				{
+					// vop_fcode_forward
+					getbits = viddec_pm_get_bits(parent, &code, 3);					
 					BREAK_GETBITS_FAIL(getbits, ret);
+					vidObjPlane->vop_fcode_forward = code;
 				}
 
-				if (vop_coding_type == MP4_VOP_TYPE_B) {
-					/* vop_fcode_backward */
+				if (vop_coding_type == MP4_VOP_TYPE_B) 
+				{
+					// vop_fcode_backward
 					getbits = viddec_pm_get_bits(parent, &code, 3);
 					BREAK_GETBITS_FAIL(getbits, ret);
+					vidObjPlane->vop_fcode_backward = code;
 				}
 			}
 		}
 
-		if (vidObjLay->newpred_enable) {
-			/* New pred mode not supported in HW, but, does libva support this? */
-			ret = MP4_STATUS_NOTSUPPORT;
+		if (vidObjLay->newpred_enable) 
+		{
+			// New pred mode not supported in HW, but, does libva support this?
+			ret = VBP_DATA;
 			break;
 		}
 
 		*quant_scale = _quant_scale;
 		*macroblock_number = _macroblock_number;
-	} while (0);
+
+		ret = VBP_OK;
+	}
+	while (0);
 	return ret;
 }
 
 uint32 vbp_resync_marker_Length_mp42(viddec_mp4_parser_t *parser_cxt)
 {
-
 	mp4_Info_t *pInfo = &(parser_cxt->info);
 	mp4_VideoObjectPlane_t *vidObjPlane =
 			&(pInfo->VisualObject.VideoObject.VideoObjectPlane);
 
 	uint32 resync_marker_length = 0;
-	if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I) {
+	if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_I)
+	{
 		resync_marker_length = 17;
-	} else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) {
+	} 
+	else if (vidObjPlane->vop_coding_type == MP4_VOP_TYPE_B) 
+	{
 		uint8 fcode_max = vidObjPlane->vop_fcode_forward;
-		if (fcode_max < vidObjPlane->vop_fcode_backward) {
+		if (fcode_max < vidObjPlane->vop_fcode_backward) 
+		{
 			fcode_max = vidObjPlane->vop_fcode_backward;
-		}
+		}	    
 		resync_marker_length = 16 + fcode_max;
-	} else {
+		
+        // resync_marker is max(15+fcode,17) zeros followed by a one		   
+		if (resync_marker_length < 18)
+		    resync_marker_length = 18;
+	} 
+	else 
+	{
 		resync_marker_length = 16 + vidObjPlane->vop_fcode_forward;
 	}
 	return resync_marker_length;
@@ -756,27 +945,22 @@
 
 uint32 vbp_process_slices_svh_mp42(vbp_context *pcontext, int list_index)
 {
-	uint32 ret = MP4_STATUS_OK;
+	uint32 ret = VBP_OK;
 
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
 	viddec_pm_cxt_t *parent = pcontext->parser_cxt;
 	viddec_mp4_parser_t *parser_cxt =
 			(viddec_mp4_parser_t *) &(parent->codec_data[0]);
 
-	VTRACE ("begin\n");
-
-	vbp_picture_data_mp42 *picture_data =
-			&(query_data->picture_data[query_data->number_pictures]);
-	vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data[0]);
+	vbp_picture_data_mp42 *picture_data = vbp_get_mp42_picture_data(query_data);
+	vbp_slice_data_mp42 *slice_data = &(picture_data->slice_data);
 	VASliceParameterBufferMPEG4* slice_param = &(slice_data->slice_param);
 
-	picture_data->number_slices = 1;
-
 	uint8 is_emul = 0;
 	uint32 bit_offset = 0;
 	uint32 byte_offset = 0;
 
-	/* The offsets are relative to parent->parse_cubby.buf */
+	// The offsets are relative to parent->parse_cubby.buf
 	viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
 
 	slice_data->buffer_addr = parent->parse_cubby.buf;
@@ -794,24 +978,20 @@
 	slice_param->quant_scale
 			= parser_cxt->info.VisualObject.VideoObject.VideoObjectPlaneH263.vop_quant;
 
-	VTRACE ("end\n");
-
 	return ret;
 }
 
-mp4_Status_t vbp_process_slices_mp42(vbp_context *pcontext, int list_index) 
+uint32 vbp_process_slices_mp42(vbp_context *pcontext, int list_index) 
 {
-
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
 	viddec_pm_cxt_t *parent = pcontext->parser_cxt;
-	viddec_mp4_parser_t *parser_cxt =
-			(viddec_mp4_parser_t *) &(parent->codec_data[0]);
+	viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
 
 	vbp_picture_data_mp42 *picture_data = NULL;
 	vbp_slice_data_mp42 *slice_data = NULL;
 	VASliceParameterBufferMPEG4* slice_param = NULL;
 
-	uint32 ret = MP4_STATUS_OK;
+	uint32 ret = VBP_OK;
 
 	uint8 is_emul = 0;
 	uint32 bit_offset = 0;
@@ -821,12 +1001,9 @@
 	int32_t getbits = 0;
 	uint32 resync_marker_length = 0;
 
-	uint32 slice_index = 0;
-
 #ifdef VBP_TRACE
 	uint32 list_size_at_index = parent->list.data[list_index].edpos
 	- parent->list.data[list_index].stpos;
-#endif
 
 	VTRACE ("list_index = %d list_size_at_index = %d\n", list_index,
 			list_size_at_index);
@@ -834,6 +1011,7 @@
 	VTRACE ("list_index = %d edpos = %d stpos = %d\n", list_index,
 			parent->list.data[list_index].edpos,
 			parent->list.data[list_index].stpos);
+#endif
 
 	/* The offsets are relative to parent->parse_cubby.buf */
 	viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
@@ -845,8 +1023,9 @@
 	}
 #endif
 
-	picture_data = &(query_data->picture_data[query_data->number_pictures]);
-	slice_data = &(picture_data->slice_data[slice_index]);
+
+	picture_data = vbp_get_mp42_picture_data(query_data);
+	slice_data = &(picture_data->slice_data);
 	slice_param = &(slice_data->slice_param);
 
 	slice_data->buffer_addr = parent->parse_cubby.buf;
@@ -864,216 +1043,339 @@
 	slice_param->quant_scale
 			= parser_cxt->info.VisualObject.VideoObject.VideoObjectPlane.vop_quant;
 
-	slice_index++;
-	picture_data->number_slices = slice_index;
+	if (parser_cxt->info.VisualObject.VideoObject.resync_marker_disable)
+	{
+	    // no resync_marker
+        return VBP_OK;
+    }
 
-	/*
-	 * scan for resync_marker
-	 */
-
-	if (!parser_cxt->info.VisualObject.VideoObject.resync_marker_disable) {
-
-		viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
-		if (bit_offset) {
-			getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
-			if (getbits == -1) {
-				ret = MP4_STATUS_PARSE_ERROR;
-				return ret;
-			}
-		}
-
-		/*
-		 * get resync_marker_length
-		 */
-		resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
-
-		while (1) {
-
-			uint16_t quant_scale = 0;
-			uint32 macroblock_number = 0;
-
-			getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
-			BREAK_GETBITS_FAIL(getbits, ret);
-
-			if (code != 1) {
-				getbits = viddec_pm_get_bits(parent, &code, 8);
-				BREAK_GETBITS_FAIL(getbits, ret);
-				continue;
-			}
-
-			/*
-			 * We found resync_marker
-			 */
-
-			viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
-
-			slice_data->slice_size -= (parent->list.data[list_index].edpos
-					- parent->list.data[list_index].stpos - byte_offset);
-			slice_param->slice_data_size = slice_data->slice_size;
-
-			slice_data = &(picture_data->slice_data[slice_index]);
-			slice_param = &(slice_data->slice_param);
-
-			/*
-			 * parse video_packet_header
-			 */
-			getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
-			BREAK_GETBITS_FAIL(getbits, ret);
-
-			vbp_video_packet_header_mp42(parent, parser_cxt,
-					&quant_scale, &macroblock_number);
-
-			viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
-
-			slice_data->buffer_addr = parent->parse_cubby.buf;
-
-			slice_data->slice_offset = byte_offset
-					+ parent->list.data[list_index].stpos;
-			slice_data->slice_size = parent->list.data[list_index].edpos
-					- parent->list.data[list_index].stpos - byte_offset;
-
-			slice_param->slice_data_size = slice_data->slice_size;
-			slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
-			slice_param->slice_data_offset = 0;
-			slice_param->macroblock_offset = bit_offset;
-			slice_param->macroblock_number = macroblock_number;
-			slice_param->quant_scale = quant_scale;
-
-			slice_index++;
-
-			if (slice_index >= MAX_NUM_SLICES) {
-				ret = MP4_STATUS_PARSE_ERROR;
-				break;
-			}
-
-			if (bit_offset)
-			{
-				/* byte-align parsing position */
-				getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
-				if (getbits == -1)
-				{
-					ret = MP4_STATUS_PARSE_ERROR;
-					return ret;
-				}
-			}
-
-			picture_data->number_slices = slice_index;
+	// scan for resync_marker
+	viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+	if (bit_offset) 
+	{
+	    // not byte-aligned; skip remaining bits to reach a byte boundary
+		getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+		if (getbits == -1) 
+		{
+			return VBP_DATA;
 		}
 	}
-	return ret;
+
+    // get resync_marker_length
+	resync_marker_length = vbp_resync_marker_Length_mp42(parser_cxt);
+
+    uint16_t quant_scale = 0;
+    uint32 macroblock_number = 0;
+
+    while (1) 
+    {            
+		getbits = viddec_pm_peek_bits(parent, &code, resync_marker_length);
+
+		// return VBP_OK as resync_marker may not be present
+		BREAK_GETBITS_FAIL(getbits, ret);
+
+		if (code != 1) 
+		{
+			getbits = viddec_pm_get_bits(parent, &code, 8);
+			BREAK_GETBITS_FAIL(getbits, ret);
+			continue;
+		}
+
+        // We found resync_marker
+		viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+        // update slice data as we found resync_marker
+		slice_data->slice_size -= (parent->list.data[list_index].edpos
+				- parent->list.data[list_index].stpos - byte_offset);
+		slice_param->slice_data_size = slice_data->slice_size;
+
+        // skip resync marker
+		getbits = viddec_pm_get_bits(parent, &code, resync_marker_length);
+
+		// return VBP_DATA, this should never happen!
+		BREAK_GETBITS_FAIL(getbits, ret);
+	
+		// parse video_packet_header 
+		ret = vbp_parse_video_packet_header_mp42(parent, parser_cxt,
+				&quant_scale, &macroblock_number);
+
+        if (ret != VBP_OK)
+        {
+            ETRACE("Failed to parse video packet header.\n");
+            return ret;
+        }
+        
+        // new_picture_flag = 0, this is not the first slice of a picture
+        vbp_fill_picture_param(pcontext, 0);
+        
+        picture_data = vbp_get_mp42_picture_data(query_data);
+        slice_data = &(picture_data->slice_data);
+        slice_param = &(slice_data->slice_param);
+                    
+
+		viddec_pm_get_au_pos(parent, &bit_offset, &byte_offset, &is_emul);
+
+		slice_data->buffer_addr = parent->parse_cubby.buf;
+
+		slice_data->slice_offset = byte_offset
+				+ parent->list.data[list_index].stpos;
+		slice_data->slice_size = parent->list.data[list_index].edpos
+				- parent->list.data[list_index].stpos - byte_offset;
+
+		slice_param->slice_data_size = slice_data->slice_size;
+		slice_param->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
+		slice_param->slice_data_offset = 0;
+		slice_param->macroblock_offset = bit_offset;
+		slice_param->macroblock_number = macroblock_number;
+		slice_param->quant_scale = quant_scale;
+
+		if (bit_offset)
+		{
+			// byte-align parsing position 
+			getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+			if (getbits == -1)
+			{
+                ETRACE("Failed to align parser to byte position.\n");
+				return VBP_DATA;
+			}
+		}
+
+	}
+
+	return VBP_OK;
 }
 
-/* This is coppied from DHG MP42 parser */
-static inline int32_t vbp_sprite_dmv_length_mp42(
+uint32 vbp_process_video_packet_mp42(vbp_context *pcontext)
+{
+	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
+	viddec_pm_cxt_t *parent = pcontext->parser_cxt;
+	viddec_mp4_parser_t *parser_cxt = (viddec_mp4_parser_t *) &(parent->codec_data[0]);
+    uint32 code = 0;
+    int32_t getbits = 0;
+    
+	uint32 ret = VBP_DATA;
+
+
+    // setup bitstream parser 
+	parent->getbits.list = &(parent->list);
+	
+	parent->getbits.bstrm_buf.buf = parent->parse_cubby.buf;
+    parent->getbits.bstrm_buf.buf_index = 0;
+    parent->getbits.bstrm_buf.buf_st = 0;
+    parent->getbits.bstrm_buf.buf_end = parent->parse_cubby.size;    
+    parent->getbits.bstrm_buf.buf_bitoff = 0;
+    
+    parent->getbits.au_pos = 0;    
+    parent->getbits.list_off = 0;
+    parent->getbits.phase = 0;
+    parent->getbits.emulation_byte_counter = 0;
+    
+    parent->list.start_offset = 0;
+    parent->list.end_offset = parent->parse_cubby.size;
+    parent->list.total_bytes = parent->parse_cubby.size;
+	
+	
+    // skip leading zero bytes
+    while (code == 0)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 8);
+		BREAK_GETBITS_FAIL(getbits, ret);
+		getbits = viddec_pm_peek_bits(parent, &code, 8);
+		BREAK_GETBITS_FAIL(getbits, ret);
+    }
+
+    if (getbits != 0)
+    {   
+        return VBP_DATA;
+    }
+    
+    // resync-marker is represented as 17-23 bits. (16-22 bits of 0)
+    // as 16-bit '0' has been skipped, we try to parse buffer bit by bit
+    // until bit 1 is encountered or up to 7 bits are parsed.
+    code = 0;
+    uint8 count = 0;
+    while (code == 0  && count < 7)
+    {
+        getbits = viddec_pm_get_bits(parent, &code, 1);
+		BREAK_GETBITS_FAIL(getbits, ret);
+		count++;
+    }
+    
+    if (code == 0 || getbits != 0)
+    {
+        ETRACE("no resync-marker in the buffer.\n");
+        return ret;
+    }
+
+    // resync marker is skipped    
+	uint16_t quant_scale = 0;
+	uint32 macroblock_number = 0;
+
+	// parse video_packet_header
+	vbp_parse_video_packet_header_mp42(parent, parser_cxt,
+			&quant_scale, &macroblock_number);
+
+    // new_picture_flag = 0, this is not the first slice of a picture
+    vbp_fill_picture_param(pcontext, 0);
+    
+	vbp_picture_data_mp42 *picture_data = NULL;
+	vbp_slice_data_mp42 *slice_data = NULL;
+	VASliceParameterBufferMPEG4* slice_param = NULL;
+	
+	picture_data = vbp_get_mp42_picture_data(query_data);
+	slice_data = &(picture_data->slice_data);
+	slice_param = &(slice_data->slice_param);
+
+	ret = vbp_process_slices_mp42(pcontext, 0);
+
+    // update slice's QP and macro_block number as it is set to 0 by default.
+    slice_param->macroblock_number = macroblock_number;
+    slice_param->quant_scale = quant_scale;
+
+    // VOP must be coded!
+    picture_data->vop_coded = 1;
+	return ret;
+
+}
+
+
+static inline uint32 vbp_sprite_dmv_length_mp42(
 	void * parent,
 	int32_t *dmv_length) 
 {
 	uint32 code, skip;
 	int32_t getbits = 0;
-	mp4_Status_t ret = MP4_STATUS_PARSE_ERROR;
+	uint32 ret = VBP_DATA;
 	*dmv_length = 0;
 	skip = 3;
-	do {
+	do 
+	{
 		getbits = viddec_pm_peek_bits(parent, &code, skip);
 		BREAK_GETBITS_FAIL(getbits, ret);
 
-		if (code == 7) {
+		if (code == 7) 
+		{
 			viddec_pm_skip_bits(parent, skip);
 			getbits = viddec_pm_peek_bits(parent, &code, 9);
 			BREAK_GETBITS_FAIL(getbits, ret);
 
 			skip = 1;
-			while ((code & 256) != 0) {/* count number of 1 bits */
+			while ((code & 256) != 0) 
+			{
+			    // count number of 1 bits 
 				code <<= 1;
 				skip++;
 			}
 			*dmv_length = 5 + skip;
-		} else {
+		} 
+		else 
+		{
 			skip = (code <= 1) ? 2 : 3;
 			*dmv_length = code - 1;
 		}
 		viddec_pm_skip_bits(parent, skip);
-		ret = MP4_STATUS_OK;
+		ret = VBP_OK;
 
-	} while (0);
+	} 
+	while (0);
 	return ret;
 }
 
-/* This is coppied from DHG MP42 parser */
-static inline mp4_Status_t vbp_sprite_trajectory_mp42(
+
+static inline uint32 vbp_sprite_trajectory_mp42(
 	void *parent,
 	mp4_VideoObjectLayer_t *vidObjLay, 
 	mp4_VideoObjectPlane_t *vidObjPlane) 
 {
 	uint32 code, i;
 	int32_t dmv_length = 0, dmv_code = 0, getbits = 0;
-	mp4_Status_t ret = MP4_STATUS_OK;
-	for (i = 0; i
-			< (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) {
+	uint32 ret = VBP_OK;
+	for (i = 0; i < (uint32) vidObjLay->sprite_info.no_of_sprite_warping_points; i++) 
+	{
+	    ret = VBP_DATA;
 		ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
-		if (ret != MP4_STATUS_OK) {
+		if (ret != VBP_OK) 
+		{
 			break;
 		}
-		if (dmv_length <= 0) {
+		if (dmv_length <= 0) 
+		{
 			dmv_code = 0;
-		} else {
+		} 
+		else 
+		{
 			getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
 			BREAK_GETBITS_FAIL(getbits, ret);
 			dmv_code = (int32_t) code;
-			if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+			if ((dmv_code & (1 << (dmv_length - 1))) == 0) 
+			{
 				dmv_code -= (1 << dmv_length) - 1;
 			}
 		}
 		getbits = viddec_pm_get_bits(parent, &code, 1);
 		BREAK_GETBITS_FAIL(getbits, ret);
-		if (code != 1) {
-			ret = MP4_STATUS_PARSE_ERROR;
+		if (code != 1) 
+		{
+			ret = VBP_DATA;
 			break;
 		}
 		vidObjPlane->warping_mv_code_du[i] = dmv_code;
-		/* TODO: create another inline function to avoid code duplication */
+		// TODO: create another inline function to avoid code duplication 
 		ret = vbp_sprite_dmv_length_mp42(parent, &dmv_length);
-		if (ret != MP4_STATUS_OK) {
+		if (ret != VBP_OK) 
+		{
 			break;
 		}
-		if (dmv_length <= 0) {
+		// reset return value in case of an early break
+        ret = VBP_DATA;
+		if (dmv_length <= 0) 
+		{
 			dmv_code = 0;
-		} else {
+		} 
+		else 
+		{
 			getbits = viddec_pm_get_bits(parent, &code, (uint32) dmv_length);
 			BREAK_GETBITS_FAIL(getbits, ret);
 			dmv_code = (int32_t) code;
-			if ((dmv_code & (1 << (dmv_length - 1))) == 0) {
+			if ((dmv_code & (1 << (dmv_length - 1))) == 0) 
+			{
 				dmv_code -= (1 << dmv_length) - 1;
 			}
 		}
 		getbits = viddec_pm_get_bits(parent, &code, 1);
 		BREAK_GETBITS_FAIL(getbits, ret);
-		if (code != 1) {
-			ret = MP4_STATUS_PARSE_ERROR;
+		if (code != 1) 
+		{
 			break;
 		}
 		vidObjPlane->warping_mv_code_dv[i] = dmv_code;
 
+		// set to VBP_OK  
+		ret = VBP_OK;
+
 	}
 	return ret;
 }
 
+
 /*
  * free memory of vbp_data_mp42 structure and its members
  */
 uint32 vbp_free_query_data_mp42(vbp_context *pcontext) 
 {
-
 	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-	gint idx = 0;
+    vbp_picture_data_mp42* current = NULL;
+    vbp_picture_data_mp42* next = NULL;
 
-	if (query_data) {
-		if (query_data->picture_data) {
-			for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
-				g_free(query_data->picture_data[idx].slice_data);
-			}
-			g_free(query_data->picture_data);
-		}
+	if (query_data) 
+	{
+	    current = query_data->picture_data;
+	    while (current != NULL)
+	    {
+	        next = current->next_picture_data;
+	        g_free(current);
+	        current = next;
+        }	       
 
 		g_free(query_data);
 	}
@@ -1087,202 +1389,25 @@
  */
 uint32 vbp_allocate_query_data_mp42(vbp_context *pcontext) 
 {
-
-	gint idx = 0;
 	vbp_data_mp42 *query_data;
 	pcontext->query_data = NULL;
 
 	query_data = g_try_new0(vbp_data_mp42, 1);
-	if (query_data == NULL) {
+	if (query_data == NULL) 
+	{
 		goto cleanup;
 	}
 
-	query_data->picture_data = g_try_new0(vbp_picture_data_mp42,
-			MAX_NUM_PICTURES_MP42);
-	if (NULL == query_data->picture_data) {
-		goto cleanup;
-	}
-
-	for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
-		query_data->picture_data[idx].number_slices = 0;
-		query_data->picture_data[idx].slice_data = g_try_new0(
-				vbp_slice_data_mp42, MAX_NUM_SLICES);
-
-		if (query_data->picture_data[idx].slice_data == NULL) {
-			goto cleanup;
-		}
-	}
-
 	pcontext->query_data = (void *) query_data;
+	query_data->picture_data = NULL;
+    query_data->number_picture_data = 0;
+    query_data->number_pictures = 0;
+
 	return VBP_OK;
 
-	cleanup:
+cleanup:
 
-	if (query_data) {
-		if (query_data->picture_data) {
-			for (idx = 0; idx < MAX_NUM_PICTURES_MP42; idx++) {
-				g_free(query_data->picture_data[idx].slice_data);
-			}
-			g_free(query_data->picture_data);
-		}
-
-		g_free(query_data);
-	}
-
+    vbp_free_query_data_mp42(pcontext);
+    
 	return VBP_MEM;
 }
-
-void vbp_dump_query_data(vbp_context *pcontext, int list_index) 
-{
-	vbp_data_mp42 *query_data = (vbp_data_mp42 *) pcontext->query_data;
-
-	vbp_picture_data_mp42 *picture_data = NULL;
-	VAPictureParameterBufferMPEG4 *picture_param = NULL;
-	vbp_slice_data_mp42 *slice_data = NULL;
-
-	uint32 idx = 0, jdx = 0;
-
-	for (idx = 0; idx < query_data->number_pictures; idx++) {
-
-		picture_data = &(query_data->picture_data[idx]);
-		picture_param = &(picture_data->picture_param);
-		slice_data = &(picture_data->slice_data[0]);
-
-		g_print("======================= dump_begin ======================\n\n");
-		g_print("======================= codec_data ======================\n");
-
-		/* codec_data */
-		g_print("codec_data.profile_and_level_indication = 0x%x\n",
-				query_data->codec_data.profile_and_level_indication);
-
-		g_print("==================== picture_param =======================\n");
-
-		/* picture_param */
-		g_print("picture_param->vop_width = %d\n", picture_param->vop_width);
-		g_print("picture_param->vop_height = %d\n", picture_param->vop_height);
-
-		g_print("picture_param->vol_fields.bits.short_video_header = %d\n",
-				picture_param->vol_fields.bits.short_video_header);
-		g_print("picture_param->vol_fields.bits.chroma_format = %d\n",
-				picture_param->vol_fields.bits.chroma_format);
-		g_print("picture_param->vol_fields.bits.interlaced = %d\n",
-				picture_param->vol_fields.bits.interlaced);
-		g_print("picture_param->vol_fields.bits.obmc_disable = %d\n",
-				picture_param->vol_fields.bits.obmc_disable);
-		g_print("picture_param->vol_fields.bits.sprite_enable = %d\n",
-				picture_param->vol_fields.bits.sprite_enable);
-		g_print(
-				"picture_param->vol_fields.bits.sprite_warping_accuracy = %d\n",
-				picture_param->vol_fields.bits.sprite_warping_accuracy);
-		g_print("picture_param->vol_fields.bits.quant_type = %d\n",
-				picture_param->vol_fields.bits.quant_type);
-		g_print("picture_param->vol_fields.bits.quarter_sample = %d\n",
-				picture_param->vol_fields.bits.quarter_sample);
-		g_print("picture_param->vol_fields.bits.data_partitioned = %d\n",
-				picture_param->vol_fields.bits.data_partitioned);
-		g_print("picture_param->vol_fields.bits.reversible_vlc = %d\n",
-				picture_param->vol_fields.bits.reversible_vlc);
-
-		g_print("picture_param->no_of_sprite_warping_points = %d\n",
-				picture_param->no_of_sprite_warping_points);
-		g_print("picture_param->quant_precision = %d\n",
-				picture_param->quant_precision);
-		g_print("picture_param->sprite_trajectory_du = %d, %d, %d\n",
-				picture_param->sprite_trajectory_du[0],
-				picture_param->sprite_trajectory_du[1],
-				picture_param->sprite_trajectory_du[2]);
-		g_print("picture_param->sprite_trajectory_dv = %d, %d, %d\n",
-				picture_param->sprite_trajectory_dv[0],
-				picture_param->sprite_trajectory_dv[1],
-				picture_param->sprite_trajectory_dv[2]);
-
-		g_print("picture_param->vop_fields.bits.vop_coding_type = %d\n",
-				picture_param->vop_fields.bits.vop_coding_type);
-		g_print(
-				"picture_param->vop_fields.bits.backward_reference_vop_coding_type = %d\n",
-				picture_param->vop_fields.bits.backward_reference_vop_coding_type);
-		g_print("picture_param->vop_fields.bits.vop_rounding_type = %d\n",
-				picture_param->vop_fields.bits.vop_rounding_type);
-		g_print("picture_param->vop_fields.bits.intra_dc_vlc_thr = %d\n",
-				picture_param->vop_fields.bits.intra_dc_vlc_thr);
-		g_print("picture_param->vop_fields.bits.top_field_first = %d\n",
-				picture_param->vop_fields.bits.top_field_first);
-		g_print(
-				"picture_param->vop_fields.bits.alternate_vertical_scan_flag = %d\n",
-				picture_param->vop_fields.bits.alternate_vertical_scan_flag);
-
-		g_print("picture_param->vop_fcode_forward = %d\n",
-				picture_param->vop_fcode_forward);
-		g_print("picture_param->vop_fcode_backward = %d\n",
-				picture_param->vop_fcode_backward);
-		g_print("picture_param->num_gobs_in_vop = %d\n",
-				picture_param->num_gobs_in_vop);
-		g_print("picture_param->num_macroblocks_in_gob = %d\n",
-				picture_param->num_macroblocks_in_gob);
-		g_print("picture_param->TRB = %d\n", picture_param->TRB);
-		g_print("picture_param->TRD = %d\n", picture_param->TRD);
-
-		g_print("==================== slice_data ==========================\n");
-
-		g_print("slice_data.buffer_addr = 0x%x\n",
-				(unsigned int) slice_data->buffer_addr);
-		g_print("slice_data.slice_offset = 0x%x\n", slice_data->slice_offset);
-		g_print("slice_data.slice_size = 0x%x\n", slice_data->slice_size);
-
-		g_print("slice_data.slice_param.macroblock_number = %d\n",
-				slice_data->slice_param.macroblock_number);
-		g_print("slice_data.slice_param.macroblock_offset = 0x%x\n",
-				slice_data->slice_param.macroblock_offset);
-		g_print("slice_data.slice_param.quant_scale = %d\n",
-				slice_data->slice_param.quant_scale);
-		g_print("slice_data.slice_param.slice_data_flag = %d\n",
-				slice_data->slice_param.slice_data_flag);
-		g_print("slice_data.slice_param.slice_data_offset = %d\n",
-				slice_data->slice_param.slice_data_offset);
-		g_print("slice_data.slice_param.slice_data_size = %d\n",
-				slice_data->slice_param.slice_data_size);
-
-		g_print("================= iq_matrix_buffer ======================\n");
-		g_print("iq_matrix_buffer.load_intra_quant_mat = %d\n",
-				picture_data->iq_matrix_buffer.load_intra_quant_mat);
-		g_print("iq_matrix_buffer.load_non_intra_quant_mat = %d\n",
-				picture_data->iq_matrix_buffer.load_non_intra_quant_mat);
-
-		g_print("------- iq_matrix_buffer.intra_quant_mat ----------\n");
-		for (jdx = 0; jdx < 64; jdx++) {
-
-			g_print("%02x ",
-					picture_data->iq_matrix_buffer.intra_quant_mat[jdx]);
-
-			if ((jdx + 1) % 8 == 0) {
-				g_print("\n");
-			}
-		}
-
-		g_print("----- iq_matrix_buffer.non_intra_quant_mat --------\n");
-		for (jdx = 0; jdx < 64; jdx++) {
-
-			g_print("%02x ",
-					picture_data->iq_matrix_buffer.non_intra_quant_mat[jdx]);
-
-			if ((jdx + 1) % 8 == 0) {
-				g_print("\n");
-			}
-		}
-
-		g_print("-------- slice buffer begin ------------\n");
-
-		for (jdx = 0; jdx < 64; jdx++) {
-			g_print("%02x ", *(slice_data->buffer_addr
-					+ slice_data->slice_offset + jdx));
-			if ((jdx + 1) % 8 == 0) {
-				g_print("\n");
-			}
-		}
-		g_print("-------- slice buffer begin ------------\n");
-
-		g_print("\n\n============== dump_end ==========================\n\n");
-
-	}
-}
-
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
index a26a9f1..b5548ab 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -234,7 +234,7 @@
 		{
 			/* mandatory for H.264 */
 			ETRACE("Failed to allocate memory");
-			error =  VBP_CXT;
+			error =  VBP_TYPE;
 			goto cleanup;
 		}
 	}
@@ -377,23 +377,6 @@
 		}			
 	}
 
-	/* currently always assume a complete frame is supplied for parsing, so
-	 * there is no need to check if workload is done
-	 */
-	 
-	/*
-	uint32_t codec_errors = 0;
-	uint32_t state;
-
-	error = ops->is_wkld_done(
-		(void *)cxt, 
-		(void *)&(cxt->codec_data[0]),
-		(uint32_t)cxt->sc_prefix_info.next_sc, 
-		&codec_errors);
-	state = (ret == VIDDEC_PARSE_FRMDONE) ? VBP_DONE : VBP_OK;
-	return state;
-	*/
-
 	return VBP_OK;
 }
 
@@ -554,6 +537,6 @@
  */
 uint32 vbp_utils_flush(vbp_context *pcontext)
 {
-	return VBP_IMPL;
+	return VBP_OK;
 }
 
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
index af16e8d..4739798 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
@@ -34,6 +34,28 @@
 };
 
 
+static uint8 vc1_aspect_ratio_table[][2] = 
+{
+    {0, 0},
+    {1, 1},
+    {12, 11},
+    {10, 11},
+    {16, 11},
+    {40, 33},
+    {24, 11},
+    {20, 11},
+    {32, 11},
+    {80, 33},
+    {18, 11},
+    {15, 11},
+    {64, 33},
+    {160, 99},
+    
+    // reserved
+    {0, 0}
+};
+
+
 
 /**
  * set parser entry points
@@ -678,6 +700,46 @@
 	se_data->TFCNTRFLAG = seqLayerHeader->TFCNTRFLAG;
 	se_data->FINTERPFLAG = seqLayerHeader->FINTERPFLAG;
 	se_data->PSF = seqLayerHeader->PSF;
+
+    // color matrix
+    if (seqLayerHeader->COLOR_FORMAT_FLAG)
+    {
+    	se_data->MATRIX_COEF = seqLayerHeader->MATRIX_COEF;
+    }    	
+    else
+    {
+        //ITU-R BT. 601-5.
+        se_data->MATRIX_COEF = 6;
+    }
+
+    // aspect ratio
+    if (seqLayerHeader->ASPECT_RATIO_FLAG == 1)
+    {
+        se_data->ASPECT_RATIO = seqLayerHeader->ASPECT_RATIO;
+        if (se_data->ASPECT_RATIO < 14)
+        {
+            se_data->ASPECT_HORIZ_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][0];
+            se_data->ASPECT_VERT_SIZE = vc1_aspect_ratio_table[se_data->ASPECT_RATIO][1];            
+        }
+        else if (se_data->ASPECT_RATIO == 15)
+        {
+            se_data->ASPECT_HORIZ_SIZE = seqLayerHeader->ASPECT_HORIZ_SIZE;
+            se_data->ASPECT_VERT_SIZE = seqLayerHeader->ASPECT_VERT_SIZE;
+        }
+        else  // se_data->ASPECT_RATIO == 14
+        { 
+            se_data->ASPECT_HORIZ_SIZE = 0;
+            se_data->ASPECT_VERT_SIZE = 0;
+        }
+    }
+    else
+    {
+        // unspecified
+        se_data->ASPECT_RATIO = 0;
+        se_data->ASPECT_HORIZ_SIZE = 0;
+        se_data->ASPECT_VERT_SIZE = 0;
+    }
+	
 	se_data->BROKEN_LINK = seqLayerHeader->BROKEN_LINK;
 	se_data->CLOSED_ENTRY = seqLayerHeader->CLOSED_ENTRY;
 	se_data->PANSCAN_FLAG = seqLayerHeader->PANSCAN_FLAG;
@@ -739,9 +801,16 @@
 	pic_parms->inloop_decoded_picture = VA_INVALID_SURFACE;
 
 	pic_parms->sequence_fields.value = 0;
+	pic_parms->sequence_fields.bits.pulldown = seqLayerHeader->PULLDOWN;
 	pic_parms->sequence_fields.bits.interlace = seqLayerHeader->INTERLACE;
+	pic_parms->sequence_fields.bits.tfcntrflag =  seqLayerHeader->TFCNTRFLAG;
+    pic_parms->sequence_fields.bits.finterpflag = seqLayerHeader->FINTERPFLAG;
+    pic_parms->sequence_fields.bits.psf = seqLayerHeader->PSF;
+    pic_parms->sequence_fields.bits.multires = seqLayerHeader->MULTIRES;
+    pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
 	pic_parms->sequence_fields.bits.syncmarker = seqLayerHeader->SYNCMARKER;
-	pic_parms->sequence_fields.bits.overlap = seqLayerHeader->OVERLAP;
+    pic_parms->sequence_fields.bits.rangered = seqLayerHeader->RANGERED;
+    pic_parms->sequence_fields.bits.max_b_frames = seqLayerHeader->MAXBFRAMES;
 
 	pic_parms->coded_width = (seqLayerHeader->width + 1) << 1;
 	pic_parms->coded_height = (seqLayerHeader->height + 1) << 1;
@@ -750,6 +819,7 @@
 	pic_parms->entrypoint_fields.bits.closed_entry = seqLayerHeader->CLOSED_ENTRY;
 	pic_parms->entrypoint_fields.bits.broken_link = seqLayerHeader->BROKEN_LINK;
 	pic_parms->entrypoint_fields.bits.loopfilter = seqLayerHeader->LOOPFILTER;
+	pic_parms->entrypoint_fields.bits.panscan_flag = seqLayerHeader->PANSCAN_FLAG;
 
 	pic_parms->conditional_overlap_flag = picLayerHeader->CONDOVER;
 	pic_parms->fast_uvmc_flag = seqLayerHeader->FASTUVMC;
@@ -939,8 +1009,8 @@
 	/*uint32 data_offset = byte - cxt->list.data[index].stpos;*/
 
 	slc_data->buffer_addr = cxt->parse_cubby.buf + cxt->list.data[index].stpos;
-	slc_data->slice_size = slice_size - byte;
-	slc_data->slice_offset = byte;
+	slc_data->slice_size = slice_size;
+	slc_data->slice_offset = 0;
 
 	slc_parms->slice_data_size = slc_data->slice_size;
 	slc_parms->slice_data_offset = 0;
@@ -948,7 +1018,7 @@
 	/* fix this.  we need to be able to handle partial slices. */
 	slc_parms->slice_data_flag = VA_SLICE_DATA_FLAG_ALL;
 
-	slc_parms->macroblock_offset = bit;
+	slc_parms->macroblock_offset = bit + byte * 8;
 
 	/* fix this.  we need o get the slice_vertical_position from the code */
 	slc_parms->slice_vertical_position = pic_data->num_slices;
diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog
index 654fed0..6ac8f39 100644
--- a/mix_video/ChangeLog
+++ b/mix_video/ChangeLog
@@ -1,3 +1,37 @@
+2010-11-18 Andy Qiu <junhai.qiu@intel.com>
+		* Changed version number to 0.1.24
+
+2010-11-17 Weian Chen <weian.chen@intel.com>
+		* Support dynamic frame rate change in MI-X
+
+2010-11-16 Weian Chen <weian.chen@intel.com>
+		* Change to use one API to set AIR parameters
+		* Support to set AIR dynamically
+
+2010-11-15 Weian Chen <weian.chen@intel.com>
+		* Support to set slice number for I and P frame separately
+
+2010-11-12 Tao Tao <tao.q.tao@intel.com>
+		* Added new error codes to MI-X video and changed some return values in mixvideoformat_h264.c
+
+2010-11-12 Andy Qiu <junhai.qiu@intel.com>
+		* Changed version number to 0.1.23
+
+2010-11-11 Weian Chen <weian.chen@intel.com>
+		* Add new feature (VCM, MTU, etc)
+
+2010-11-10 Andy Qiu <junhai.qiu@intel.com>
+		* Support color matrix, video range, aspect ratio.
+		* Support h264 dynamic stream detection
+		* Support H264 RTSP streaming
+		* Changed version number to 0.1.22
+
+2010-10-25 Andy Qiu <junhai.qiu@intel.com>
+		* H.264 error robust improvement
+		* MPEG-4/H.263 partial frame support
+		* MPEG-4/H.263 RTSP stream support
+		* Changed version number to 0.1.21
+
 2010-09-29 Andy Qiu <junhai.qiu@intel.com>
 		* Supported FLV playback
 		* Re-factored frame manager
diff --git a/mix_video/configure.ac b/mix_video/configure.ac
index ec50fd4..93f2986 100644
--- a/mix_video/configure.ac
+++ b/mix_video/configure.ac
@@ -2,7 +2,7 @@
 
 AC_CONFIG_MACRO_DIR(m4)
 
-UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 20)
+UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 24)
 
 dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
 AM_MAINTAINER_MODE
diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec
index f8f1947..dec7602 100644
--- a/mix_video/mixvideo.spec
+++ b/mix_video/mixvideo.spec
@@ -6,7 +6,7 @@
 
 Summary: MIX Video
 Name: mixvideo
-Version: 0.1.20
+Version: 0.1.24
 Release: 1
 Source0: %{name}-%{version}.tar.bz2
 NoSource: 0
diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c
index 6be0ace..b3edd90 100644
--- a/mix_video/src/mixframemanager.c
+++ b/mix_video/src/mixframemanager.c
@@ -13,7 +13,7 @@
 
 #define INITIAL_FRAME_ARRAY_SIZE 	16
 
-// Assume only one backward reference is used. This will hold up to 2 frames before forcing
+// Assume only one backward reference is used. This will hold up to 2 frames before forcing 
 // the earliest frame out of queue.
 #define MIX_MAX_ENQUEUE_SIZE        2
 
@@ -101,10 +101,10 @@
 
 	MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
-	if (!MIX_IS_FRAMEMANAGER(fm) ||
+	if (!MIX_IS_FRAMEMANAGER(fm) || 
 	    mode <= MIX_DISPLAY_ORDER_UNKNOWN ||
-	    mode >= MIX_DISPLAY_ORDER_LAST ||
-	    framerate_numerator <= 0 ||
+	    mode >= MIX_DISPLAY_ORDER_LAST || 
+	    framerate_numerator <= 0 || 
 	    framerate_denominator <= 0) {
 		return MIX_RESULT_INVALID_PARAM;
 	}
@@ -259,7 +259,7 @@
     {
 		return MIX_RESULT_FAIL;
     }
-
+    
 	g_mutex_lock(fm->lock);
 
 	fm->max_enqueue_size = size;
@@ -286,12 +286,12 @@
         // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16.
 		return MIX_RESULT_INVALID_PARAM;
     }
-
+    
 	g_mutex_lock(fm->lock);
 
     // max_picture_number is exclusie (range from 0 to num - 1).
     // Note that this number may not be reliable if encoder does not conform to the spec, as of this, the
-    // implementaion will not automatically roll-over fm->next_frame_picnumber when it reaches
+    // implementation will not automatically roll-over fm->next_frame_picnumber when it reaches 
     // fm->max_picture_number.
 	fm->max_picture_number = num;
 	LOG_V("max picture number is %d\n", num);
@@ -322,7 +322,7 @@
         fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame);
         mix_videoframe_unref(frame);
 	    LOG_V("one frame is flushed\n");
-    };
+    };     
 
 	fm->eos = FALSE;
 	fm->is_first_frame = TRUE;
@@ -377,29 +377,29 @@
     }
 #endif
 
-	g_mutex_lock(fm->lock);
+	g_mutex_lock(fm->lock);	
     fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf);
 	g_mutex_unlock(fm->lock);
-
+	
     LOG_V("End\n");
 
 	return ret;
 }
 
-void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf)
+void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) 
 {
     // this function finds the lowest time stamp in the list and assign it to the dequeued video frame,
     // if that timestamp is smaller than the timestamp of dequeued video frame.
     int i;
-    guint64 ts, min_ts;
-    MixVideoFrame *p, *min_p;
+    guint64 ts = 0, min_ts = 0;
+    MixVideoFrame *p = NULL, *min_p = NULL;
     int len = g_slist_length(fm->frame_list);
     if (len == 0)
     {
         // nothing to update
         return;
     }
-
+    
     // find video frame with the smallest timestamp, take rollover into account when
     // comparing timestamp.
     for (i = 0; i < len; i++)
@@ -408,19 +408,19 @@
         mix_videoframe_get_timestamp(p, &ts);
         if (i == 0 ||
             (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
-            (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))
+            (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))        
         {
             min_ts = ts;
             min_p = p;
-        }
+        }       
     }
 
     mix_videoframe_get_timestamp(mvf, &ts);
     if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
-        (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))
+        (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) 
     {
         // frame to be updated has smaller time stamp
-    }
+    }  
     else
     {
         // time stamp needs to be monotonically non-decreasing so swap timestamp.
@@ -432,7 +432,7 @@
 }
 
 
-MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
+MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) 
 {
     int i, num_i_or_p;
     MixVideoFrame *p, *first_i_or_p;
@@ -441,7 +441,7 @@
 
     num_i_or_p = 0;
     first_i_or_p = NULL;
-
+    
     for (i = 0; i < len; i++)
     {
         p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
@@ -455,15 +455,15 @@
             *mvf = p;
             LOG_V("B frame is dequeued.\n");
             return MIX_RESULT_SUCCESS;
-        }
-
+        }  
+        
         if (type != TYPE_I && type != TYPE_P)
         {
-            // this should never happen
+            // this should never happen 
             LOG_E("Frame typs is invalid!!!\n");
             fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
             mix_videoframe_unref(p);
-            return MIX_RESULT_FRAME_NOTAVAIL;
+            return MIX_RESULT_FRAME_NOTAVAIL;                       
         }
         num_i_or_p++;
         if (first_i_or_p == NULL)
@@ -472,7 +472,7 @@
         }
     }
 
-    // if there are more than one reference frame in the list, the first one is dequeued.
+    // if there are more than one reference frame in the list, the first one is dequeued. 
     if (num_i_or_p > 1 || fm->eos)
     {
         if (first_i_or_p == NULL)
@@ -493,40 +493,40 @@
         else
         {
             LOG_V("P frame is dequeued.\n");
-        }
+        }                    
 #endif
-        return MIX_RESULT_SUCCESS;
+        return MIX_RESULT_SUCCESS;            
     }
-
-    return MIX_RESULT_FRAME_NOTAVAIL;
+    
+    return MIX_RESULT_FRAME_NOTAVAIL;   
 }
 
-MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
+MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) 
 {
     int i, len;
     MixVideoFrame *p, *p_out_of_dated;
     guint64 ts, ts_next_pending, ts_out_of_dated;
     guint64 tolerance = fm->frame_timestamp_delta/4;
 
-retry:
+retry:    
     // len may be changed during retry!
     len = g_slist_length(fm->frame_list);
-    ts_next_pending = (guint64)-1;
+    ts_next_pending = (guint64)-1; 
     ts_out_of_dated = 0;
     p_out_of_dated = NULL;
-
-
+    
+    
     for (i = 0; i < len; i++)
     {
         p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
         mix_videoframe_get_timestamp(p, &ts);
-        if (ts >= fm->last_frame_timestamp &&
+        if (ts >= fm->last_frame_timestamp && 
             ts <= fm->next_frame_timestamp + tolerance)
         {
             fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
             *mvf = p;
             mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp));
-            fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;
+            fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;            
             LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts);
             return MIX_RESULT_SUCCESS;
         }
@@ -536,7 +536,7 @@
         {
             ts_next_pending = ts;
         }
-        if (ts < fm->last_frame_timestamp &&
+        if (ts < fm->last_frame_timestamp && 
             ts >= ts_out_of_dated)
         {
             // video frame that is most recently out-of-dated.
@@ -544,10 +544,10 @@
             // the "next frame" criteria, and the one with larger timestamp is dequeued first.
             ts_out_of_dated = ts;
             p_out_of_dated = p;
-        }
+        }        
     }
 
-    if (p_out_of_dated &&
+    if (p_out_of_dated && 
         fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD)
     {
         fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated);
@@ -556,10 +556,10 @@
             ts_out_of_dated, fm->last_frame_timestamp);
         return MIX_RESULT_FRAME_NOTAVAIL;
     }
-
+    
     if (len <= fm->max_enqueue_size && fm->eos == FALSE)
     {
-        LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n",
+        LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", 
             fm->next_frame_timestamp, ts_next_pending, len);
         return MIX_RESULT_FRAME_NOTAVAIL;
     }
@@ -569,13 +569,13 @@
     {
         LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n",
                 fm->next_frame_timestamp, ts_next_pending);
-
+                
         fm->next_frame_timestamp = ts_next_pending;
         goto retry;
     }
 
     // time stamp roll-over
-    LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n",
+    LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", 
         fm->next_frame_timestamp);
 
     fm->next_frame_timestamp = 0;
@@ -587,7 +587,7 @@
     return MIX_RESULT_FAIL;
 }
 
-MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf)
+MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) 
 {
     int i, len;
     MixVideoFrame* p;
@@ -596,9 +596,9 @@
 
     len = g_slist_length(fm->frame_list);
 
-retry:
+retry:    
     next_picnum_pending = (guint32)-1;
-
+    
     for (i = 0; i < len; i++)
     {
         p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
@@ -607,11 +607,11 @@
         {
             fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
             mix_framemanager_update_timestamp(fm, p);
-            *mvf = p;
+            *mvf = p;           
             LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber);
             fm->next_frame_picnumber++;
             //if (fm->next_frame_picnumber == fm->max_picture_number)
-            //    fm->next_frame_picnumber = 0;
+            //    fm->next_frame_picnumber = 0;            
             return MIX_RESULT_SUCCESS;
         }
 
@@ -624,7 +624,7 @@
         if (picnum < fm->next_frame_picnumber &&
             fm->next_frame_picnumber - picnum < 8)
         {
-            // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number"
+            // the smallest value of "max_pic_order_cnt_lsb_minus4" is 16. If the distance of "next frame pic number"  
             // to the pic number  in the list is less than half of 16, it is safe to assume that pic number
             // is reset when a new IDR is encoded. (where pic numbfer of top or bottom field must be 0, subclause 8.2.1).
             LOG_V("picture number is reset to %d, next pic number is %d, next pending number is %d.\n",
@@ -632,10 +632,10 @@
             break;
         }
     }
-
+    
     if (len <= fm->max_enqueue_size && fm->eos == FALSE)
     {
-        LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n",
+        LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", 
                 fm->next_frame_picnumber, next_picnum_pending, len);
         return MIX_RESULT_FRAME_NOTAVAIL;
     }
@@ -645,13 +645,13 @@
     {
         LOG_V("picture number has gap, jumping from %d to %d.\n",
                 fm->next_frame_picnumber, next_picnum_pending);
-
+                
         fm->next_frame_picnumber = next_picnum_pending;
         goto retry;
     }
 
     // picture number roll-over
-    LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n",
+    LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", 
         fm->next_frame_picnumber);
 
     fm->next_frame_picnumber = 0;
@@ -693,29 +693,29 @@
         {
             LOG_V("No frame is dequeued as queue is empty!\n");
             ret = MIX_RESULT_FRAME_NOTAVAIL;
-        }
+        }            
 	}
 	else if (fm->is_first_frame)
 	{
 	    // dequeue the first entry in the list. Not need to update the time stamp as
 	    // the list should contain only one frame.
-#ifdef MIX_LOG_ENABLE
+#ifdef MIX_LOG_ENABLE	    
     	if (g_slist_length(fm->frame_list) != 1)
     	{
-    	    LOG_W("length of list is not equal to 1 for the first frame.\n");
+    	    LOG_W("length of list is not equal to 1 for the first frame.\n");    	    
     	}
-#endif
+#endif    	
         *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
         fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf));
 
         if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP)
-        {
-            mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp));
+        {            
+            mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp));             
             fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;
             LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp);
         }
         else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER)
-        {
+        {            
             mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber));
             LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber);
             fm->next_frame_picnumber++;
@@ -724,15 +724,15 @@
         }
         else
         {
-#ifdef MIX_LOG_ENABLE
+#ifdef MIX_LOG_ENABLE     
             MixFrameType type;
             mix_videoframe_get_frame_type(*mvf, &type);
             LOG_V("The first frame is dequeud, frame type is %d.\n", type);
-#endif
+#endif            
         }
 	    fm->is_first_frame = FALSE;
-
-        ret = MIX_RESULT_SUCCESS;
+	    
+        ret = MIX_RESULT_SUCCESS;	       
 	}
 	else
 	{
@@ -751,14 +751,14 @@
             ret = mix_framemanager_pictype_based_dequeue(fm, mvf);
             break;
 
-        case MIX_DISPLAY_ORDER_FIFO:
+        case MIX_DISPLAY_ORDER_FIFO:        
             *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
             fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf));
-            ret = MIX_RESULT_SUCCESS;
+            ret = MIX_RESULT_SUCCESS;          
             LOG_V("One frame is dequeued.\n");
             break;
-
-        default:
+            
+        default:         
             LOG_E("Invalid frame order mode\n");
             ret = MIX_RESULT_FAIL;
             break;
@@ -784,7 +784,7 @@
 		return MIX_RESULT_NOT_INIT;
 	}
 
-	g_mutex_lock(fm->lock);
+	g_mutex_lock(fm->lock);		
 	fm->eos = TRUE;
 	LOG_V("EOS is received.\n");
 	g_mutex_unlock(fm->lock);
diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c
index 9f92ae1..5ed6007 100644
--- a/mix_video/src/mixsurfacepool.c
+++ b/mix_video/src/mixsurfacepool.c
@@ -396,7 +396,7 @@
 
 		LOG_E( "out of surfaces\n");
 
-		return MIX_RESULT_NO_MEMORY;
+		return MIX_RESULT_OUTOFSURFACES;
 	}
 
 	//Remove a frame from the free pool
@@ -482,7 +482,7 @@
 
 		LOG_E( "out of surfaces\n");
 
-		return MIX_RESULT_NO_MEMORY;
+		return MIX_RESULT_OUTOFSURFACES;
 	}
 
 	//Remove a frame from the free pool
diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c
index 718d355..56bd264 100644
--- a/mix_video/src/mixvideo.c
+++ b/mix_video/src/mixvideo.c
@@ -10,16 +10,16 @@
  * SECTION:mixvideo
  * @short_description: Object to support a single stream decoding or encoding using hardware accelerated decoder/encoder.
  * @include: mixvideo.h
- * 
+ *
  * #MixVideo objects are created by the MMF/App and utilized for main MI-X API functionality for video.
- * 
+ *
  * The MixVideo object handles any of the video formats internally.
  * The App/MMF will pass a MixVideoConfigParamsDecH264/MixVideoConfigParamsDecVC1/
  * MixVideoConfigParamsEncH264/etc object to MixVideo in the mix_video_configure()
  * call. MixVideoInitParams, MixVideoDecodeParams, MixVideoEncodeParams, and
  * MixVideoRenderParams objects will be passed in the mix_video_initialize(),
  * mix_video_decode(), mix_video_encode() and mix_video_render() calls respectively.
- * 
+ *
  * The application can take the following steps to decode video:
  * <itemizedlist>
  * <listitem>Create a mix_video object using mix_video_new()</listitem>
@@ -30,19 +30,19 @@
  * <listitem>At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). The frame can be retained for redrawing until the next frame is retrieved.</listitem>
  * <listitem>When the frame is no longer needed for redrawing, release the frame using mix_video_release_frame().</listitem>
  * </itemizedlist>
- * 
+ *
  * For encoding, the application can take the following steps to encode video:
  * <itemizedlist>
  * <listitem>Create a mix_video object using mix_video_new()</listitem>
  * <listitem>Initialize the object using mix_video_initialize()</listitem>
  * <listitem>Configure the stream using mix_video_configure()</listitem>
  * <listitem>Encode frames using mix_video_encode()</listitem>
- * <listitem>Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file.</listitem> 
+ * <listitem>Use the encoded data buffers as desired; for example, forward to a muxing component for saving to a file.</listitem>
  * <listitem>Retrieve the uncompressed frames for display using mix_video_get_frame().</listitem>
  * <listitem>At the presentation time, using the timestamp provided with the decoded frame, render the frame to an X11 Window using mix_video_render(). For encode, the frame should not be retained for redrawing after the initial rendering, due to resource limitations.</listitem>
  * <listitem>Release the frame using mix_video_release_frame().</listitem>
  * </itemizedlist>
- * 
+ *
  */
 
 #include <va/va.h>             /* libVA */
@@ -103,7 +103,7 @@
 #define mix_strcmp g_strcmp0
 #endif
 
-#define USE_OPAQUE_POINTER 
+#define USE_OPAQUE_POINTER
 
 #ifdef USE_OPAQUE_POINTER
 #define MIX_VIDEO_PRIVATE(mix) (MixVideoPrivate *)(mix->context)
@@ -632,20 +632,20 @@
 
 	if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER)
 	{
-    	display_order_mode = MIX_DISPLAY_ORDER_FIFO;	
+        display_order_mode = MIX_DISPLAY_ORDER_FIFO;
     }
-	else if (mix_strcmp(mime_type, "video/x-wmv")  == 0 || 
+	else if (mix_strcmp(mime_type, "video/x-wmv")  == 0 ||
             mix_strcmp(mime_type, "video/mpeg")   == 0 ||
-            mix_strcmp(mime_type, "video/x-divx") == 0 || 
+            mix_strcmp(mime_type, "video/x-divx") == 0 ||
             mix_strcmp(mime_type, "video/x-h263") == 0 ||
-            mix_strcmp(mime_type, "video/x-xvid") == 0 ) 
+            mix_strcmp(mime_type, "video/x-xvid") == 0 )
     {
-        display_order_mode = MIX_DISPLAY_ORDER_PICTYPE;           
-	} 
-	else 
+        display_order_mode = MIX_DISPLAY_ORDER_PICTYPE;
+	}
+	else
 	{
-        //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP;           
-        display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER;           
+        //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP;
+        display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER;
 	}
 
 	/* initialize frame manager */
@@ -702,15 +702,16 @@
 
 		priv->video_format = MIX_VIDEOFORMAT(video_format);
 
-	} else if (mix_strcmp(mime_type, "video/mpeg")   == 0 || 
-                   mix_strcmp(mime_type, "video/x-divx") == 0 || 
+	} else if (mix_strcmp(mime_type, "video/mpeg")   == 0 ||
+                   mix_strcmp(mime_type, "video/x-divx") == 0 ||
                    mix_strcmp(mime_type, "video/x-h263") == 0 ||
-                   mix_strcmp(mime_type, "video/x-xvid") == 0) {
+                   mix_strcmp(mime_type, "video/x-xvid") == 0 ||
+                   mix_strcmp(mime_type, "video/x-dx50") == 0) {
 
 		guint version = 0;
 
 		/* Is this mpeg4:2 ? */
-		if (mix_strcmp(mime_type, "video/mpeg") == 0 || 
+		if (mix_strcmp(mime_type, "video/mpeg") == 0 ||
                     mix_strcmp(mime_type, "video/x-h263") == 0 ) {
 
 			/*
@@ -962,7 +963,7 @@
 		priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
 
 	}
-        
+
         else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263
             && MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) {
 
@@ -978,7 +979,7 @@
 		priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
 
 	}
-  
+
         else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW
             && MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) {
 
@@ -1044,7 +1045,7 @@
 	MixVideoPrivate *priv = NULL;
 
 	LOG_V( "Begin\n");
-	
+
 	CHECK_INIT(mix, priv);
 	if(!config_params) {
 		LOG_E("!config_params\n");
@@ -1113,6 +1114,9 @@
 		return MIX_RESULT_NULL_PTR;
 	}
 
+    // reset new sequence flag
+    decode_params->new_sequence = FALSE;
+
 	//First check that we have surfaces available for decode
 	ret = mix_surfacepool_check_available(priv->surface_pool);
 
@@ -1138,7 +1142,7 @@
 
 	MIX_RESULT ret = MIX_RESULT_FAIL;
 	MixVideoPrivate *priv = NULL;
-	
+
 	CHECK_INIT_CONFIG(mix, priv);
 
 	if (!frame) {
@@ -1205,7 +1209,7 @@
 
 MIX_RESULT mix_video_render_default(MixVideo * mix,
                 MixVideoRenderParams * render_params, MixVideoFrame *frame) {
-	
+
 	return MIX_RESULT_NOTIMPL;
 }
 
@@ -1395,6 +1399,15 @@
 		return MIX_RESULT_NULL_PTR;
 	}
 
+	//First check that we have surfaces available for decode
+	ret = mix_surfacepool_check_available(priv->surface_pool);
+
+	if (ret == MIX_RESULT_POOLEMPTY) {
+		LOG_I( "Out of surface\n");
+		return MIX_RESULT_OUTOFSURFACES;
+	}
+
+
 	g_mutex_lock(priv->objlock);
 
 	ret = mix_videofmtenc_encode(priv->video_format_enc, bufin, bufincnt,
@@ -1447,7 +1460,7 @@
 	MixVideoPrivate *priv = NULL;
 
 	LOG_V( "Begin\n");
-	
+
 	CHECK_INIT_CONFIG(mix, priv);
 
 	/* ---------------------- begin lock --------------------- */
@@ -1456,11 +1469,11 @@
 	if (priv->codec_mode == MIX_CODEC_MODE_DECODE && priv->video_format != NULL) {
 		ret = mix_videofmt_eos(priv->video_format);
 
-		/* We should not call mix_framemanager_eos() here. 
-		 * MixVideoFormat* is responsible to call this function. 
+		/* We should not call mix_framemanager_eos() here.
+		 * MixVideoFormat* is responsible to call this function.
 		 * Commnet the function call here!
-		 */	
-		/* frame manager will set EOS flag to be TRUE */		
+		 */
+		/* frame manager will set EOS flag to be TRUE */
 		/* ret = mix_framemanager_eos(priv->frame_manager); */
 	} else if (priv->codec_mode == MIX_CODEC_MODE_ENCODE
 			&& priv->video_format_enc != NULL) {
@@ -1581,7 +1594,7 @@
 }
 
 
-MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix,  
+MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix,
 	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params)
 {
 	MIX_RESULT ret = MIX_RESULT_FAIL;
@@ -1608,7 +1621,7 @@
 	else {
 		LOG_E(
 			"priv->config_params is invalid\n");
-		return MIX_RESULT_FAIL;		
+		return MIX_RESULT_FAIL;
 	}
 
 	g_mutex_lock(priv->objlock);
@@ -1620,38 +1633,120 @@
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n");
 				goto cleanup;
-			}				
+			}
 		}
 			break;
-		case MIX_ENC_PARAMS_SLICE_SIZE:
+
+		case MIX_ENC_PARAMS_INIT_QP:
+		{
+			ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->init_QP);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n");
+				goto cleanup;
+			}
+		}
+			break;
+
+		case MIX_ENC_PARAMS_MIN_QP:
+		{
+			ret = mix_videoconfigparamsenc_set_min_qp (priv_config_params_enc, dynamic_params->min_QP);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_min_qp\n");
+				goto cleanup;
+			}
+		}
+			break;
+
+		case MIX_ENC_PARAMS_WINDOW_SIZE:
+		{
+			ret = mix_videoconfigparamsenc_set_window_size (priv_config_params_enc, dynamic_params->window_size);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_window_size\n");
+				goto cleanup;
+			}
+		}
+			break;
+
+		case MIX_ENC_PARAMS_TARGET_PERCENTAGE:
+		{
+			ret = mix_videoconfigparamsenc_set_target_percentage (priv_config_params_enc, dynamic_params->target_percentage);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_target_percentage\n");
+				goto cleanup;
+			}
+		}
+			break;
+
+
+		case MIX_ENC_PARAMS_MTU_SLICE_SIZE:
+		{
+			ret = mix_videoconfigparamsenc_set_max_slice_size(priv_config_params_enc, dynamic_params->max_slice_size);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_max_slice_size\n");
+				goto cleanup;
+			}
+		}
+			break;
+
+		case MIX_ENC_PARAMS_SLICE_NUM:
 		{
 			/*
 			*/
-			MixVideoConfigParamsEncH264 * config_params_enc_h264 = 
+			MixVideoConfigParamsEncH264 * config_params_enc_h264 =
 				MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
 
 			ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num);
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n");
 				goto cleanup;
-			}				
+			}
 		}
 			break;
-			
+
+		case MIX_ENC_PARAMS_I_SLICE_NUM:
+		{
+			/*
+			*/
+			MixVideoConfigParamsEncH264 * config_params_enc_h264 =
+				MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+			ret = mix_videoconfigparamsenc_h264_set_I_slice_num (config_params_enc_h264, dynamic_params->I_slice_num);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_h264_set_I_slice_num\n");
+				goto cleanup;
+			}
+		}
+			break;
+
+		case MIX_ENC_PARAMS_P_SLICE_NUM:
+		{
+			/*
+			*/
+			MixVideoConfigParamsEncH264 * config_params_enc_h264 =
+				MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+			ret = mix_videoconfigparamsenc_h264_set_P_slice_num (config_params_enc_h264, dynamic_params->P_slice_num);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_h264_set_P_slice_num\n");
+				goto cleanup;
+			}
+		}
+			break;
+
 		case MIX_ENC_PARAMS_IDR_INTERVAL:
 		{
-			MixVideoConfigParamsEncH264 * config_params_enc_h264 = 
+			MixVideoConfigParamsEncH264 * config_params_enc_h264 =
 				MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
 
 			ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval);
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n");
 				goto cleanup;
-			}				
+			}
 		}
 			break;
 
-		case MIX_ENC_PARAMS_RC_MODE:			
+		case MIX_ENC_PARAMS_RC_MODE:
 		case MIX_ENC_PARAMS_RESOLUTION:
 		{
 			/*
@@ -1660,8 +1755,8 @@
 			if (priv->video_format_enc) {
 				mix_videofmtenc_deinitialize(priv->video_format_enc);
 			}
-			
-			MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref)	
+
+			MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref)
 
 			//priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0
 
@@ -1671,30 +1766,30 @@
 
 			/*
 			 * Step 2: Change configuration parameters (frame size)
-			 */			
+			 */
 
 			if (params_type == MIX_ENC_PARAMS_RESOLUTION) {
 				ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height);
 				if (ret != MIX_RESULT_SUCCESS) {
 					LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n");
 					goto cleanup;
-				}			
+				}
 			}
 			else if (params_type == MIX_ENC_PARAMS_RC_MODE) {
 				ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode);
 				if (ret != MIX_RESULT_SUCCESS) {
 					LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n");
 					goto cleanup;
-				}					
+				}
 			}
 
 
 			/*
 			 * Step 3: Renew mixvideofmtenc object
-			 */	
+			 */
 
 			MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264;
-			
+
 			ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc,
 				&encode_format);
 			if (ret != MIX_RESULT_SUCCESS) {
@@ -1734,7 +1829,7 @@
 				priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
 
 			}
-        
+
         		else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263
 				&& MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) {
 
@@ -1775,10 +1870,10 @@
 
 			/*
 			 * Step 4: Re-initialize and start a new encode session, of course with new resolution value
-			 */				
+			 */
 
-			/* 
-			  * Initialize MixVideoEncFormat 
+			/*
+			  * Initialize MixVideoEncFormat
 			  */
 
 			/*
@@ -1800,11 +1895,11 @@
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed initialize video format\n");
 				goto cleanup;
-			}				
+			}
 
 			mix_surfacepool_ref(priv->surface_pool);
-			
-			
+
+
 		}
 			break;
 		case MIX_ENC_PARAMS_GOP_SIZE:
@@ -1813,7 +1908,7 @@
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n");
 				goto cleanup;
-			}						
+			}
 
 		}
 			break;
@@ -1823,7 +1918,7 @@
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n");
 				goto cleanup;
-			}				
+			}
 		}
 			break;
 		case MIX_ENC_PARAMS_FORCE_KEY_FRAME:
@@ -1833,39 +1928,51 @@
 			 */
 		}
 			break;
-		case MIX_ENC_PARAMS_QP:
+
+		case MIX_ENC_PARAMS_REFRESH_TYPE:
 		{
-			ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->QP);
+			ret = mix_videoconfigparamsenc_set_refresh_type(priv_config_params_enc, dynamic_params->refresh_type);
 			if (ret != MIX_RESULT_SUCCESS) {
-				LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n");
+				LOG_E("Failed mix_videoconfigparamsenc_set_refresh_type\n");
 				goto cleanup;
-			}				
+			}
 		}
 			break;
+
+		case MIX_ENC_PARAMS_AIR:
+		{
+			ret = mix_videoconfigparamsenc_set_AIR_params(priv_config_params_enc, dynamic_params->air_params);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_AIR_params\n");
+				goto cleanup;
+			}
+		}
+			break;
+
 		case MIX_ENC_PARAMS_CIR_FRAME_CNT:
 		{
 			ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt);
 			if (ret != MIX_RESULT_SUCCESS) {
 				LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n");
 				goto cleanup;
-			}				
-			
+			}
+
 		}
 			break;
-			
+
 		default:
 			break;
 	}
 
 	ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type);
 
-cleanup:	
+cleanup:
 
 	g_mutex_unlock(priv->objlock);
 
 	LOG_V( "End ret = 0x%x\n", ret);
 
-	return ret;	
+	return ret;
 }
 /*
  * API functions
@@ -1951,7 +2058,7 @@
 	CHECK_AND_GET_MIX_CLASS(mix, klass);
 
 	if (klass->decode_func) {
-		return klass->decode_func(mix, bufin, bufincnt, 
+		return klass->decode_func(mix, bufin, bufincnt,
 				decode_params);
 	}
 	return MIX_RESULT_NOTIMPL;
@@ -2074,13 +2181,13 @@
 	return MIX_RESULT_NOTIMPL;
 }
 
-MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, 
+MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix,
 	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params)
 {
        MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix);
 	if (klass->set_dynamic_enc_config_func) {
 		return klass->set_dynamic_enc_config_func(mix, params_type, dynamic_params);
-	}	   
-	return MIX_RESULT_NOTIMPL;	
+	}
+	return MIX_RESULT_NOTIMPL;
 
 }
diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h
index f8e4828..678ba65 100644
--- a/mix_video/src/mixvideo.h
+++ b/mix_video/src/mixvideo.h
@@ -169,10 +169,10 @@
 /**
  * mix_video_get_version:
  * @mix: #MixVideo object.
- * @major: Pointer to an unsigned integer indicating the major version number of this MI-X Video library 
+ * @major: Pointer to an unsigned integer indicating the major version number of this MI-X Video library
  * @minor: Pointer to an unsigned integer indicating the minor version number of this MI-X Video library
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function will return the major and minor version numbers of the library.
  */
 MIX_RESULT mix_video_get_version(MixVideo * mix, guint * major, guint * minor);
@@ -182,16 +182,16 @@
 /**
  * mix_video_initialize:
  * @mix: #MixVideo object.
- * @mode: Enum value to indicate encode or decode mode 
+ * @mode: Enum value to indicate encode or decode mode
  * @init_params: MixVideoInitParams object which includes display type and pointer to display, encode or decode mode
- * @drm_init_params: MixDrmParams defined in <emphasis>Moorestown MI-X DRM API</emphasis>. 
+ * @drm_init_params: MixDrmParams defined in <emphasis>Moorestown MI-X DRM API</emphasis>.
  *                   This can be null if content is not protected.
- * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>, 
+ * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>,
  *           the following error codes may be returned.
  * <itemizedlist>
  * <listitem>MIX_RESULT_ALREADY_INIT, mix_video_initialize() has already been called.</listitem>
  * </itemizedlist>
- * 
+ *
  * This function will return the major and minor version numbers of the library.
  */
 MIX_RESULT mix_video_initialize(MixVideo * mix, MixCodecMode mode,
@@ -201,7 +201,7 @@
  * mix_video_deinitialize:
  * @mix: #MixVideo object.
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function will un-initialize a session with this MI-X instance. During this call, the
  * LibVA session is closed and all resources including surface buffers, #MixBuffers and
  * #MixVideoFrame objects are freed. This function is called by the application once
@@ -213,18 +213,18 @@
 /**
  * mix_video_configure:
  * @mix: #MixVideo object.
- * @config_params: Pointer to #MixVideoConfigParams object (either #MixVideoConfigParamsDec or 
+ * @config_params: Pointer to #MixVideoConfigParams object (either #MixVideoConfigParamsDec or
  *                 #MixVideoConfigParamsEnc for specific media type)
- * @drm_config_params: Pointer to #MixDrmParams defined in <emphasis>Moorestown MI-X DRM API</emphasis>. 
- *                     This can be null if content is not protected. 
- * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>, 
+ * @drm_config_params: Pointer to #MixDrmParams defined in <emphasis>Moorestown MI-X DRM API</emphasis>.
+ *                     This can be null if content is not protected.
+ * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>,
  *           the following error codes may be returned.
  * <itemizedlist>
  * <listitem>MIX_RESULT_RESOURCES_NOTAVAIL, HW accelerated decoding is not available.</listitem>
  * <listitem>MIX_RESULT_NOTSUPPORTED, A requested parameter is not supported or not available.</listitem>
  * </itemizedlist>
- * 
- * This function can be used to configure a stream for the current session. 
+ *
+ * This function can be used to configure a stream for the current session.
  *         The caller can use this function to do the following:
  * <itemizedlist>
  * <listitem>Choose frame ordering mode (display order or decode order)</listitem>
@@ -243,18 +243,18 @@
 /**
  * mix_video_get_config:
  * @mix: #MixVideo object.
- * @config_params: Pointer to pointer to #MixVideoConfigParams object defined in 
+ * @config_params: Pointer to pointer to #MixVideoConfigParams object defined in
  *                 description of mix_video_configure()
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function can be used to get the current configuration of a stream for the current session.
  * A #MixVideoConfigParams object will be returned, which can be used to get each of the
  * parameter current values. The caller will need to release this object when it is no
- * longer needed. 
- * 
+ * longer needed.
+ *
  * This function can only be called once mix_video_configure() has been called.
- * 
- * <note> See description of mix_video_configure() for #MixVideoConfigParams object details. 
+ *
+ * <note> See description of mix_video_configure() for #MixVideoConfigParams object details.
  * For mix_video_get_config(), all input parameter fields become OUT parameters.
  * </note>
  */
@@ -264,18 +264,18 @@
 /**
  * mix_video_decode:
  * @mix: #MixVideo object.
- * @bufin: Array of pointers to #MixBuffer objects, described in mix_video_get_mixbuffer() * 
+ * @bufin: Array of pointers to #MixBuffer objects, described in mix_video_get_mixbuffer() *
  * @bufincnt: Number of #MixBuffer objects
  * @decode_params: #MixVideoDecodeParams object
- * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>, 
+ * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>,
  *           the following error codes may be returned.
  * <itemizedlist>
  *     <listitem>
- *           MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done. 
+ *           MIX_RESULT_OUTOFSURFACES, No surfaces available for decoding. Nothing will be done.
  *           Caller can try again with the same MixBuffers later when surfaces may have been freed.
  *     </listitem>
  * </itemizedlist>
- * 
+ *
  * <para>
  * This function is used to initiate HW accelerated decoding of encoded data buffers. This
  * function is used to decode to a surface buffer, which can then be rendered using
@@ -289,7 +289,7 @@
  * and provided for the #MixVideoFrame object that contains the decoded data for this
  * frame data.
  * </para>
- * 
+ *
  * <para>
  * As only one timestamp is passed in for the buffer, there should be no more than one
  * video frame included in the encoded data buffer provided in a single call to
@@ -297,8 +297,8 @@
  * mix_video_decode(), the same timestamp should be provided with each call having
  * data associated with the same frame.
  * </para>
- * 
- * <para> 
+ *
+ * <para>
  * The application should request a #MixBuffer object using mix_video_get_mixbuffer(),
  * initialize the #MixBuffer with the data pointer to the coded input data, along with the
  * size of the input data buffer, and optionally can provide a token value and a callback
@@ -318,18 +318,18 @@
  * mix_video_get_frame:
  * @mix: #MixVideo object.
  * @frame: A pointer to a pointer to a #MixVideoFrame object
- * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>, 
+ * @returns: In addition to the <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>,
  *           the following error codes may be returned.
  * <itemizedlist>
  *    <listitem>
  *        MIX_RESULT_FRAME_NOTAVAIL, No decoded frames are available.
  *    </listitem>
  *    <listitem>
- *        MIX_RESULT_EOS, No more decoded frames are available, 
+ *        MIX_RESULT_EOS, No more decoded frames are available,
  *        since end of stream has been encountered.
  *     </listitem>
  * </itemizedlist>
- * 
+ *
  * <para>
  * This function returns a frame object that represents the next frame ID and includes
  * timestamp and discontinuity information. If display frame ordering has been
@@ -337,7 +337,7 @@
  * configured, it is the next frame decoded. In both cases the timestamp reflects the
  * presentation timestamp. For encode mode the frame order is always display order.
  * </para>
- * 
+ *
  * <para>
  * The frame object is a reference counted object that represents the frame. The
  * application can retain this frame object as long as needed to display the frame and
@@ -357,7 +357,7 @@
  * @mix: #MixVideo object.
  * @frame: A pointer to a #MixVideoFrame object, described in mix_video_get_frame()
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function releases a frame object that was acquired from mix_video_get_frame().
  */
 MIX_RESULT mix_video_release_frame(MixVideo * mix, MixVideoFrame * frame);
@@ -366,14 +366,14 @@
 /**
  * mix_video_render:
  * @mix: #MixVideo object.
- * @render_params: #MixVideoRenderParams object defined below, 
- *                 which includes the display window and type, 
- *                 src and dest image sizes, deinterlace info, clipping rectangles, 
+ * @render_params: #MixVideoRenderParams object defined below,
+ *                 which includes the display window and type,
+ *                 src and dest image sizes, deinterlace info, clipping rectangles,
  *                 some post processing parameters, and so forth.
  * @frame: Pointer to a #MixVideoFrame object returned from mix_video_get_frame().
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
- * This function renders a video frame associated with a MixVideoFrame object to the display. 
+ *
+ * This function renders a video frame associated with a MixVideoFrame object to the display.
  * The display is either an X11 Pixmap or an X11 Window using the overlay.
  */
 MIX_RESULT mix_video_render(MixVideo * mix,
@@ -384,12 +384,12 @@
  * mix_video_encode:
  * @mix: #MixVideo object.
  * @bufin: Array of pointers to #MixBuffer objects, structure defined in mix_video_decode()
- * @bufincnt: Number of #MixBuffer objects 
+ * @bufincnt: Number of #MixBuffer objects
  * @iovout: Array of #MixIOVec structures, pointing to buffers allocated by the application
  * @iovoutcnt: Number of items in iovout array
- * @encode_params: #MixVideoEncodeParams object 
+ * @encode_params: #MixVideoEncodeParams object
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * <para>
  * This function is used to initiate HW accelerated encoding of uncompressed video input
  * buffers. The input buffers may either be uncompressed video in user space buffers, or
@@ -397,14 +397,14 @@
  * shared buffer mode should be indicated in the #MixVideoConfigParamsEnc object
  * provided to mix_video_configure().
  * </para>
- * 
+ *
  * <para>
  * Video uncompressed data input buffers are provided in a scatter/gather list of
  * reference counted MixBuffers. The input #MixBuffers are considered a complete frame
  * of data, and are used for encoding before the input buffers are released. LibCI frame
  * indices may also be provided in MixBuffers.
  * </para>
- * 
+ *
  * <para>
  * The encoded data will be copied to the output buffers provided in the array of
  * #MixIOVec structures, also in a scatter/gather list. These output buffers are allocated
@@ -416,19 +416,19 @@
  * the encoded data size placed in the buffer. For any buffer not used for encoded data,
  * the data_size will be set to zero.
  * </para>
- * 
+ *
  * <para>
  * Alternatively, if the application does not allocate the output buffers, the data pointers
  * in the #MixIOVec structures (still provided by the application) can be set to NULL,
  * whereupon #MixVideo will allocate a data buffer for each frame and set the data,
- * buffer_size and data_size pointers in the #MixIOVec structures accordingly. 
+ * buffer_size and data_size pointers in the #MixIOVec structures accordingly.
  * </para>
- * 
- * <note> 
+ *
+ * <note>
  * This is not an efficient method to handle these buffers and it is preferred that
  * the application provide pre-allocated buffers.
  * </note>
- * 
+ *
  * <para>
  * The application should request a #MixBuffer object using mix_video_get_mixbuffer(),
  * initialize the #MixBuffer with the data pointer to the uncompressed input data or a LibCI
@@ -461,7 +461,7 @@
  * mix_video_flush:
  * @mix: #MixVideo object.
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function will flush all encoded and decoded buffers that are currently enqueued or
  * in the process of decoding. After this call, decoding can commence again, but would
  * need to start at the beginning of a sequence (for example, with no dependencies on
@@ -473,7 +473,7 @@
  * mix_video_eos:
  * @mix: #MixVideo object.
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function will signal end of stream to #MixVideo. This can be used to finalize
  * decoding of the last frame and other end of stream processing. #MixVideo will complete
  * the decoding of all buffers received, and will continue to provide the decoded frame
@@ -488,7 +488,7 @@
  * @mix: #MixVideo object.
  * @state: Current state of MI-X session.
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function returns the current state of the MI-X session.
  */
 MIX_RESULT mix_video_get_state(MixVideo * mix, MixState * state);
@@ -498,7 +498,7 @@
  * @mix: #MixVideo object.
  * @buf: A pointer to a pointer to a #MixBuffer object
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * <para>
  * This function returns a frame object that represents the next frame ID and includes
  * timestamp and discontinuity information. If display frame ordering has been
@@ -506,7 +506,7 @@
  * configured, it is the next frame decoded. In both cases the timestamp reflects the
  * presentation timestamp.
  * </para>
- * 
+ *
  * <para>
  * The frame object is a reference counted object that represents the frame. The
  * application can retain this frame object as long as needed to display the frame and
@@ -516,7 +516,7 @@
  * mix_video_release_frame(). The application should not modify the reference count or
  * delete this object directly.
  * </para>
- * 
+ *
  */
 MIX_RESULT mix_video_get_mixbuffer(MixVideo * mix, MixBuffer ** buf);
 
@@ -526,7 +526,7 @@
  * @mix: #MixVideo object.
  * @buf: A pointer to a #MixBuffer object, described in mix_video_get_mixbuffer().
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * This function releases a frame object that was acquired from mix_video_get_mixbuffer().
  */
 MIX_RESULT mix_video_release_mixbuffer(MixVideo * mix, MixBuffer * buf);
@@ -537,7 +537,7 @@
  * @mix: #MixVideo object.
  * @bufsize: Pointer to guint.
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * <para>
  * This function can be used to get the maximum size of encoded data buffer needed for
  * the mix_video_encode() call.
@@ -555,7 +555,7 @@
  * @params_type: Dynamic encoder configuration type
  * @dynamic_params: Point to dynamic control data structure which includes the new value to be changed to
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
- * 
+ *
  * <para>
  * This function can be used to change the encoder parameters at run-time
  * </para>
@@ -563,7 +563,7 @@
  * Usually this function is after the encoding session is started.
  * </para>
  */
-MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, 
+MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix,
 	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
 
 G_END_DECLS
diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c
index 7ad334f..e9659dd 100644
--- a/mix_video/src/mixvideoconfigparamsdec.c
+++ b/mix_video/src/mixvideoconfigparamsdec.c
@@ -50,6 +50,13 @@
 	self->mixbuffer_pool_size = 0;
 	self->extra_surface_allocation = 0;
 
+	self->video_range = 0;
+	self->color_matrix = 0;
+	self->bit_rate = 0;
+
+	self->par_num = 0;
+	self->par_denom = 0;
+
 	/* TODO: initialize other properties */
 	self->reserved1 = NULL;
 	self->reserved2 = NULL;
@@ -165,6 +172,11 @@
 		this_target->rate_control = this_src->rate_control;
 		this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size;
 		this_target->extra_surface_allocation = this_src->extra_surface_allocation;
+		this_target->video_range = this_src->video_range;
+		this_target->color_matrix = this_src->color_matrix;
+		this_target->bit_rate = this_src->bit_rate;
+		this_target->par_num = this_src->par_num;
+		this_target->par_denom = this_src->par_denom;
 
 		/* copy properties of non-primitive */
 
@@ -297,6 +309,31 @@
 			goto not_equal;
 		}
 
+        if (this_first->video_range != this_second->video_range)
+        {
+            goto not_equal;
+        }
+        
+        if (this_first->color_matrix != this_second->color_matrix)
+        {
+            goto not_equal;
+        }
+        
+        if (this_first->bit_rate != this_second->bit_rate)
+        {
+            goto not_equal;
+        }
+
+        if (this_first->par_num != this_second->par_num)
+        {
+            goto not_equal;
+        }  
+
+        if (this_first->par_denom != this_second->par_denom)
+        {
+            goto not_equal;
+        }        
+		        
 		ret = TRUE;
 
 		not_equal:
@@ -533,5 +570,80 @@
 }
 
 
+MIX_RESULT mix_videoconfigparamsdec_set_video_range(
+		MixVideoConfigParamsDec * obj,
+        guint8 video_range) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+	obj->video_range = video_range;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_video_range(
+		MixVideoConfigParamsDec * obj,
+        guint8 *video_range) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, video_range);
+	*video_range = obj->video_range;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_color_matrix(
+		MixVideoConfigParamsDec * obj,
+        guint8 color_matrix) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+	obj->color_matrix = color_matrix;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_color_matrix(
+		MixVideoConfigParamsDec * obj,
+        guint8 *color_matrix) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, color_matrix);
+	*color_matrix = obj->color_matrix;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_bit_rate(
+		MixVideoConfigParamsDec * obj,
+        guint bit_rate) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+	obj->bit_rate = bit_rate;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_bit_rate(
+		MixVideoConfigParamsDec * obj,
+        guint *bit_rate) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, bit_rate);
+	*bit_rate = obj->bit_rate;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio(
+    MixVideoConfigParamsDec * obj,
+    guint par_num, 
+    guint par_denom) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
+	obj->par_num = par_num;
+	obj->par_denom = par_denom;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(
+    MixVideoConfigParamsDec * obj,
+    guint * par_num, 
+    guint * par_denom) 
+{
+	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT_PAIR (obj, par_num, par_denom);
+	*par_num = obj->par_num;
+	*par_denom = obj->par_denom;
+	return MIX_RESULT_SUCCESS;
+}
 
 
diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h
index 809eb1e..b4574c1 100644
--- a/mix_video/src/mixvideoconfigparamsdec.h
+++ b/mix_video/src/mixvideoconfigparamsdec.h
@@ -105,6 +105,26 @@
 	
 	/* Extra surfaces for MixVideoFrame objects to be allocated */
 	guint extra_surface_allocation;
+
+    /* video range, 0 for short range and 1 for full range, output only */
+	guint8 video_range;
+
+    /* 
+        color matrix, output only. Possible values defined in va.h
+        #define VA_SRC_BT601            0x00000010
+        #define VA_SRC_BT709            0x00000020
+        #define VA_SRC_SMPTE_240     0x00000040
+      */
+    guint8  color_matrix;
+
+    /* bit rate in bps, output only */
+    guint bit_rate;
+
+	/* Pixel aspect ratio numerator value */
+	guint par_num;
+	
+	/* Pixel aspect ratio  denominator value */	
+	guint par_denom;
 	
 	/* Reserved for future use */
 	void *reserved1;
@@ -375,6 +395,102 @@
 MIX_RESULT mix_videoconfigparamsdec_get_extra_surface_allocation(MixVideoConfigParamsDec * obj,
 		guint *extra_surface_allocation);
 
+
+/**
+ * mix_videoconfigparamsdec_set_video_range:
+ * @obj: #MixVideoConfigParamsDec object
+ * @video_range: 1 for full video range, 0 for short video range.
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set video range
+ */
+MIX_RESULT mix_videoconfigparamsdec_set_video_range(MixVideoConfigParamsDec * obj,
+		guint8 video_range);
+
+/**
+ * mix_videoconfigparamsdec_get_video_range:
+ * @obj: #MixVideoConfigParamsDec object
+ * @video_range: video range to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get video range
+ */
+MIX_RESULT mix_videoconfigparamsdec_get_video_range(MixVideoConfigParamsDec * obj,
+		guint8 *video_range);
+
+
+/**
+ * mix_videoconfigparamsdec_set_color_matrix:
+ * @obj: #MixVideoConfigParamsDec object
+ * @color_matrix: BT601 or BT709, defined in va.h. 0 for any other including unspecified color matrix.
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set color matrix
+ */
+MIX_RESULT mix_videoconfigparamsdec_set_color_matrix(MixVideoConfigParamsDec * obj,
+		guint8 color_matrix);
+
+/**
+ * mix_videoconfigparamsdec_get_color_matrix:
+ * @obj: #MixVideoConfigParamsDec object
+ * @color_matrix: color matrix to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get color matrix
+ */
+MIX_RESULT mix_videoconfigparamsdec_get_color_matrix(MixVideoConfigParamsDec * obj,
+		guint8 *color_matrix);
+
+
+/**
+ * mix_videoconfigparamsdec_set_bit_rate:
+ * @obj: #MixVideoConfigParamsDec object
+ * @bit_rate: bit rate in bits per second.
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set bit rate
+ */
+MIX_RESULT mix_videoconfigparamsdec_set_bit_rate(MixVideoConfigParamsDec * obj,
+		guint bit_rate);
+
+/**
+ * mix_videoconfigparamsdec_get_bit_rate:
+ * @obj: #MixVideoConfigParamsDec object
+ * @bit_rate: bit rate to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get bit rate
+ */
+MIX_RESULT mix_videoconfigparamsdec_get_bit_rate(MixVideoConfigParamsDec * obj,
+		guint *bit_rate);		
+
+
+
+/**
+ * mix_videoconfigparamsdec_set_pixel_aspect_ratio:
+ * @obj: #MixVideoConfigParamsDec object
+ * @par_num: Pixel aspect ratio numerator value
+ * @par_denom: Pixel aspect ratio denominator value
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set pixel aspect ratio
+ */
+MIX_RESULT mix_videoconfigparamsdec_set_pixel_aspect_ratio(MixVideoConfigParamsDec * obj,
+		guint par_num, guint par_denom);
+
+/**
+ * mix_videoconfigparamsdec_get_pixel_aspect_ratio:
+ * @obj: #MixVideoConfigParamsDec object
+ * @par_num: Pixel aspect ratio  numerator value to be returned 
+ * @par_denom: Pixel aspect ratio denominator value to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get pixel aspect ratio
+ */
+MIX_RESULT mix_videoconfigparamsdec_get_pixel_aspect_ratio(MixVideoConfigParamsDec * obj,
+		guint * par_num, guint * par_denom);
+		
+
 /* TODO: Add getters and setters for other properties */
 
 G_END_DECLS
diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c
index 040b612..c35ade2 100644
--- a/mix_video/src/mixvideoconfigparamsenc.c
+++ b/mix_video/src/mixvideoconfigparamsenc.c
@@ -34,12 +34,16 @@
 		MIX_TYPE_VIDEOCONFIGPARAMS, _do_init);
 
 static void mix_videoconfigparamsenc_init(MixVideoConfigParamsEnc * self) {
-    /* initialize properties here */	
+    /* initialize properties here */
 	self->bitrate = 0;
 	self->frame_rate_num = 30;
-	self->frame_rate_denom = 1;	
+	self->frame_rate_denom = 1;
 	self->initial_qp = 15;
 	self->min_qp = 0;
+	self->target_percentage = 95;
+	self->window_size = 500;
+
+	self->max_slice_size = 0;  /*Set to 0 means it won't take effect*/
 
 	self->picture_width = 0;
 	self->picture_height = 0;
@@ -54,15 +58,20 @@
 
 	self->ci_frame_id = NULL;
 	self->ci_frame_num = 0;
-	
-	self->need_display = TRUE;	
+
+	self->need_display = TRUE;
 
 	self->rate_control = MIX_RATE_CONTROL_NONE;
 	self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420;
-	self->profile = MIX_PROFILE_H264BASELINE;	
+	self->profile = MIX_PROFILE_H264BASELINE;
 	self->level = 30;
 
 	self->CIR_frame_cnt = 15;
+	self->refresh_type = MIX_VIDEO_NONIR;
+
+	self->air_params.air_MBs = 0;
+	self->air_params.air_threshold = 0;
+	self->air_params.air_auto = 0;
 
 	/* TODO: initialize other properties */
 	self->reserved1 = NULL;
@@ -73,10 +82,10 @@
 
 static void mix_videoconfigparamsenc_class_init(MixVideoConfigParamsEncClass * klass) {
     MixParamsClass *mixparams_class = MIX_PARAMS_CLASS(klass);
-    
+
     /* setup static parent class */
     parent_class = (MixParamsClass *) g_type_class_peek_parent(klass);
-    
+
     mixparams_class->finalize = mix_videoconfigparamsenc_finalize;
     mixparams_class->copy = (MixParamsCopyFunction) mix_videoconfigparamsenc_copy;
     mixparams_class->dup = (MixParamsDupFunction) mix_videoconfigparamsenc_dup;
@@ -89,7 +98,7 @@
     MixVideoConfigParamsEnc *ret =
         (MixVideoConfigParamsEnc *) g_type_create_instance(
                 MIX_TYPE_VIDEOCONFIGPARAMSENC);
-    
+
     return ret;
 }
 
@@ -128,9 +137,9 @@
 MixParams *
 mix_videoconfigparamsenc_dup(const MixParams * obj) {
     MixParams *ret = NULL;
-    
-    LOG_V( "Begin\n");	
-    
+
+    LOG_V( "Begin\n");
+
     if (MIX_IS_VIDEOCONFIGPARAMSENC(obj)) {
         MixVideoConfigParamsEnc *duplicate = mix_videoconfigparamsenc_new();
         if (mix_videoconfigparamsenc_copy(MIX_PARAMS(duplicate), MIX_PARAMS(obj))) {
@@ -156,7 +165,7 @@
 	MixVideoConfigParamsEnc *this_target, *this_src;
 	MIX_RESULT mix_result = MIX_RESULT_FAIL;
 
-    LOG_V( "Begin\n");	
+    LOG_V( "Begin\n");
 
 	if (MIX_IS_VIDEOCONFIGPARAMSENC(target) && MIX_IS_VIDEOCONFIGPARAMSENC(src)) {
 
@@ -168,24 +177,31 @@
 
 		this_target->bitrate   = this_src->bitrate;
 		this_target->frame_rate_num = this_src->frame_rate_num;
-		this_target->frame_rate_denom = this_src->frame_rate_denom;		
+		this_target->frame_rate_denom = this_src->frame_rate_denom;
 		this_target->initial_qp = this_src->initial_qp;
 		this_target->min_qp = this_src->min_qp;
+		this_target->target_percentage = this_src->target_percentage;
+		this_target->window_size = this_src->window_size;
+		this_target->max_slice_size = this_src->max_slice_size;
 		this_target->intra_period    = this_src->intra_period;
-		this_target->picture_width    = this_src->picture_width;		
+		this_target->picture_width    = this_src->picture_width;
 		this_target->picture_height   = this_src->picture_height;
 		this_target->mixbuffer_pool_size = this_src->mixbuffer_pool_size;
 		this_target->share_buf_mode = this_src->share_buf_mode;
-		this_target->encode_format = this_src->encode_format;		
-		this_target->ci_frame_num = this_src->ci_frame_num;		
-		this_target->draw= this_src->draw;		
+		this_target->encode_format = this_src->encode_format;
+		this_target->ci_frame_num = this_src->ci_frame_num;
+		this_target->draw= this_src->draw;
 		this_target->need_display = this_src->need_display;
-	       this_target->rate_control = this_src->rate_control;
-	       this_target->raw_format = this_src->raw_format;
-	       this_target->profile = this_src->profile;		
-	       this_target->level = this_src->level;			   
-	       this_target->CIR_frame_cnt = this_src->CIR_frame_cnt;	
-		
+		this_target->rate_control = this_src->rate_control;
+		this_target->raw_format = this_src->raw_format;
+		this_target->profile = this_src->profile;
+		this_target->level = this_src->level;
+		this_target->CIR_frame_cnt = this_src->CIR_frame_cnt;
+		this_target->refresh_type = this_src->refresh_type;
+		this_target->air_params.air_MBs = this_src->air_params.air_MBs;
+		this_target->air_params.air_threshold = this_src->air_params.air_threshold;
+		this_target->air_params.air_auto = this_src->air_params.air_auto;
+
 		/* copy properties of non-primitive */
 
 		/* copy mime_type */
@@ -193,32 +209,32 @@
 		if (this_src->mime_type) {
 #ifdef MDEBUG
             if (this_src->mime_type->str) {
-                
-                LOG_I( "this_src->mime_type->str = %s  %x\n", 
-                        this_src->mime_type->str, (unsigned int)this_src->mime_type->str);	
+
+                LOG_I( "this_src->mime_type->str = %s  %x\n",
+                        this_src->mime_type->str, (unsigned int)this_src->mime_type->str);
             }
 #endif
 
             mix_result = mix_videoconfigparamsenc_set_mime_type(this_target,
                     this_src->mime_type->str);
         } else {
-            
+
             LOG_I( "this_src->mime_type = NULL\n");
-            
+
             mix_result = mix_videoconfigparamsenc_set_mime_type(this_target, NULL);
         }
-        
+
         if (mix_result != MIX_RESULT_SUCCESS) {
-            
-            LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n");	
+
+            LOG_E( "Failed to mix_videoconfigparamsenc_set_mime_type\n");
             return FALSE;
-        }	
-        
+        }
+
         mix_result = mix_videoconfigparamsenc_set_ci_frame_info (this_target, this_src->ci_frame_id,
                 this_src->ci_frame_num);
-        
+
         /* TODO: copy other properties if there's any */
-        
+
 		/* Now chainup base class */
 		if (parent_class->copy) {
             return parent_class->copy(MIX_PARAMS_CAST(target), MIX_PARAMS_CAST(
@@ -227,7 +243,7 @@
             return TRUE;
         }
     }
-    
+
     return FALSE;
 }
 
@@ -273,10 +289,22 @@
 		if (this_first->min_qp != this_second->min_qp) {
 			goto not_equal;
 		}
-		
+
+		if (this_first->target_percentage != this_second->target_percentage) {
+			goto not_equal;
+		}
+
+		if (this_first->window_size != this_second->window_size) {
+			goto not_equal;
+		}
+
+		if (this_first->max_slice_size != this_second->max_slice_size) {
+			goto not_equal;
+		}
+
 		if (this_first->intra_period != this_second->intra_period) {
 			goto not_equal;
-		}		
+		}
 
 		if (this_first->picture_width != this_second->picture_width
 				&& this_first->picture_height != this_second->picture_height) {
@@ -289,11 +317,11 @@
 
 		if (this_first->mixbuffer_pool_size != this_second->mixbuffer_pool_size) {
 			goto not_equal;
-		}	
+		}
 
 		if (this_first->share_buf_mode != this_second->share_buf_mode) {
 			goto not_equal;
-		}		
+		}
 
 		if (this_first->ci_frame_id != this_second->ci_frame_id) {
 			goto not_equal;
@@ -301,34 +329,52 @@
 
 		if (this_first->ci_frame_num != this_second->ci_frame_num) {
 			goto not_equal;
-		}		
+		}
 
 		if (this_first->draw != this_second->draw) {
 			goto not_equal;
-		}	
+		}
 
 		if (this_first->need_display!= this_second->need_display) {
 			goto not_equal;
-		}		
+		}
 
-	      if (this_first->rate_control != this_second->rate_control) {
-		  	goto not_equal;
-		}	  
+        if (this_first->rate_control != this_second->rate_control) {
+            goto not_equal;
+        }
 
-	      if (this_first->raw_format != this_second->raw_format) {
-		  	goto not_equal;
-		}	  
+        if (this_first->raw_format != this_second->raw_format) {
+            goto not_equal;
+        }
 
-	      if (this_first->profile != this_second->profile) {
-		  	goto not_equal;
-		}	  	
-	      if (this_first->level != this_second->level) {
-		  	goto not_equal;
-		}		
+        if (this_first->profile != this_second->profile) {
+            goto not_equal;
+        }
 
-	      if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) {
-		  	goto not_equal;
-		}	
+        if (this_first->level != this_second->level) {
+            goto not_equal;
+		}
+
+        if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) {
+            goto not_equal;
+        }
+
+        if (this_first->refresh_type != this_second->refresh_type) {
+            goto not_equal;
+        }
+
+        if (this_first->air_params.air_MBs != this_second->air_params.air_MBs) {
+            goto not_equal;
+        }
+
+        if (this_first->air_params.air_threshold != this_second->air_params.air_threshold) {
+            goto not_equal;
+        }
+
+        if (this_first->air_params.air_auto != this_second->air_params.air_auto) {
+            goto not_equal;
+        }
+
 		/* check the equalitiy of the none-primitive type properties */
 
 		/* compare mime_type */
@@ -340,7 +386,7 @@
 			}
 		} else if (!(!this_first->mime_type && !this_second->mime_type)) {
 			goto not_equal;
-		}	
+		}
 
 		ret = TRUE;
 
@@ -385,7 +431,7 @@
 		return MIX_RESULT_NULL_PTR;
 	}
 
-	LOG_I( "mime_type = %s  %x\n", 
+	LOG_I( "mime_type = %s  %x\n",
 		mime_type, (unsigned int)mime_type);
 
 	if (obj->mime_type) {
@@ -396,9 +442,9 @@
 	}
 
 
-	LOG_I( "mime_type = %s  %x\n", 
+	LOG_I( "mime_type = %s  %x\n",
 		mime_type, (unsigned int)mime_type);
-	
+
 	obj->mime_type = g_string_new(mime_type);
 	if (!obj->mime_type) {
 		return MIX_RESULT_NO_MEMORY;
@@ -479,13 +525,13 @@
 	obj->bitrate= bitrate;
 	return MIX_RESULT_SUCCESS;
 
-}              
+}
 
 MIX_RESULT mix_videoconfigparamsenc_get_bit_rate (MixVideoConfigParamsEnc * obj,
         guint *bitrate) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, bitrate);
 	*bitrate = obj->bitrate;
-	return MIX_RESULT_SUCCESS;              
+	return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_set_init_qp (MixVideoConfigParamsEnc * obj,
@@ -493,20 +539,20 @@
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
 	obj->initial_qp = initial_qp;
 	return MIX_RESULT_SUCCESS;
-}              
+}
 
 MIX_RESULT mix_videoconfigparamsenc_get_init_qp (MixVideoConfigParamsEnc * obj,
         guint *initial_qp) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, initial_qp);
 	*initial_qp = obj->initial_qp;
 	return MIX_RESULT_SUCCESS;
-             
-}              
+
+}
 
 MIX_RESULT mix_videoconfigparamsenc_set_min_qp (MixVideoConfigParamsEnc * obj,
         guint min_qp) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-	obj->min_qp = min_qp;	
+	obj->min_qp = min_qp;
 	return MIX_RESULT_SUCCESS;
 }
 
@@ -514,15 +560,51 @@
         guint *min_qp) {
     MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, min_qp);
     *min_qp = obj->min_qp;
-    
+
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj,
+        guint target_percentage) {
+
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->target_percentage = target_percentage;
+	return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj,
+        guint *target_percentage) {
+
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, target_percentage);
+    *target_percentage = obj->target_percentage;
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj,
+        guint window_size) {
+
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->window_size = window_size;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj,
+        guint *window_size) {
+
+    MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, window_size);
+    *window_size = obj->window_size;
+
     return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_set_intra_period (MixVideoConfigParamsEnc * obj,
         guint intra_period) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-	obj->intra_period = intra_period;	
-	
+	obj->intra_period = intra_period;
+
 	return MIX_RESULT_SUCCESS;
 }
 
@@ -530,7 +612,7 @@
         guint *intra_period) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, intra_period);
 	*intra_period = obj->intra_period;
-	
+
 	return MIX_RESULT_SUCCESS;
 }
 
@@ -564,15 +646,15 @@
 		gboolean *share_buf_mod) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, share_buf_mod);
 
-	*share_buf_mod = obj->share_buf_mode;	
-	return MIX_RESULT_SUCCESS;		
+	*share_buf_mod = obj->share_buf_mode;
+	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj,
         gulong * ci_frame_id, guint ci_frame_num) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
-	
-	
+
+
 	if (!ci_frame_id || !ci_frame_num) {
 		obj->ci_frame_id = NULL;
 		obj->ci_frame_num = 0;
@@ -584,7 +666,7 @@
 
 	guint size = ci_frame_num * sizeof (gulong);
 	obj->ci_frame_num = ci_frame_num;
-	
+
 	obj->ci_frame_id = g_malloc (ci_frame_num * sizeof (gulong));
 	if (!(obj->ci_frame_id)) {
 		return MIX_RESULT_NO_MEMORY;
@@ -600,7 +682,7 @@
     MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT_PAIR (obj, ci_frame_id, ci_frame_num);
 
 	*ci_frame_num = obj->ci_frame_num;
-	
+
 	if (!obj->ci_frame_id) {
 		*ci_frame_id = NULL;
 		return MIX_RESULT_SUCCESS;
@@ -608,36 +690,36 @@
 
 	if (obj->ci_frame_num) {
 		*ci_frame_id = g_malloc (obj->ci_frame_num * sizeof (gulong));
-		
+
 		if (!*ci_frame_id) {
 			return MIX_RESULT_NO_MEMORY;
-		}		
-		
+		}
+
 		memcpy (*ci_frame_id, obj->ci_frame_id, obj->ci_frame_num * sizeof (gulong));
-		
+
 	} else {
 		*ci_frame_id = NULL;
 	}
-	
-	return MIX_RESULT_SUCCESS;		
+
+	return MIX_RESULT_SUCCESS;
 }
 
 
-MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj,
         gulong draw) {
-		
+
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
 	obj->draw = draw;
 	return MIX_RESULT_SUCCESS;
-		
+
 }
 
 MIX_RESULT mix_videoconfigparamsenc_get_drawable (MixVideoConfigParamsEnc * obj,
         gulong *draw) {
 
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, draw);
-	*draw = obj->draw;	
-	return MIX_RESULT_SUCCESS;		
+	*draw = obj->draw;
+	return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_set_need_display (
@@ -652,8 +734,8 @@
 		gboolean *need_display) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, need_display);
 
-	*need_display = obj->need_display;	
-	return MIX_RESULT_SUCCESS;		
+	*need_display = obj->need_display;
+	return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj,
@@ -668,62 +750,110 @@
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, rate_control);
 	*rate_control = obj->rate_control;
 	return MIX_RESULT_SUCCESS;
-}	
+}
 
 MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj,
 		MixRawTargetFormat raw_format) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
 	obj->raw_format = raw_format;
-	return MIX_RESULT_SUCCESS;		
+	return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj,
 		MixRawTargetFormat * raw_format) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, raw_format);
 	*raw_format = obj->raw_format;
-	return MIX_RESULT_SUCCESS;		
+	return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj,
 		MixProfile profile) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
 	obj->profile = profile;
-	return MIX_RESULT_SUCCESS;			
+	return MIX_RESULT_SUCCESS;
 }
 
 MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
 		MixProfile * profile) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, profile);
 	*profile = obj->profile;
-	return MIX_RESULT_SUCCESS;			
+	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj,
 		guint8 level) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
 	obj->level = level;
-	return MIX_RESULT_SUCCESS;			
+	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj,
 		guint8 * level) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level);
 	*level = obj->level;
-	return MIX_RESULT_SUCCESS;			
+	return MIX_RESULT_SUCCESS;
 }
 
 
-MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
 		guint CIR_frame_cnt) {
 	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
 	obj->CIR_frame_cnt = CIR_frame_cnt;
-	return MIX_RESULT_SUCCESS;			
+	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
 		guint * CIR_frame_cnt) {
 	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt);
 	*CIR_frame_cnt = obj->CIR_frame_cnt;
-	return MIX_RESULT_SUCCESS;			
+	return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj,
+		guint max_slice_size) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->max_slice_size = max_slice_size;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj,
+		guint * max_slice_size) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, max_slice_size);
+	*max_slice_size = obj->max_slice_size;
+	return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_refresh_type(MixVideoConfigParamsEnc * obj,
+		MixVideoIntraRefreshType refresh_type) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->refresh_type = refresh_type;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj,
+		MixVideoIntraRefreshType * refresh_type) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, refresh_type);
+	*refresh_type = obj->refresh_type;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj,
+		MixAIRParams air_params) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->air_params.air_MBs = air_params.air_MBs;
+	obj->air_params.air_threshold = air_params.air_threshold;
+	obj->air_params.air_auto = air_params.air_auto;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj,
+		MixAIRParams * air_params) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, air_params);
+	air_params->air_MBs = obj->air_params.air_MBs;
+	air_params->air_threshold = obj->air_params.air_threshold;
+	air_params->air_auto = obj->air_params.air_auto;
+	return MIX_RESULT_SUCCESS;
 }
 
diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h
index adb5f25..f45cc50 100644
--- a/mix_video/src/mixvideoconfigparamsenc.h
+++ b/mix_video/src/mixvideoconfigparamsenc.h
@@ -83,35 +83,44 @@
 	MixRawTargetFormat raw_format;
 
 	/* Rate control mode */
-	MixRateControl rate_control;  	
+	MixRateControl rate_control;
 
 	/* Bitrate when rate control is used */
 	guint bitrate;
-	
+
 	/* Numerator of frame rate */
 	guint frame_rate_num;
-	
+
 	/* Denominator of frame rate */
 	guint frame_rate_denom;
-	
+
 	/* The initial QP value */
 	guint initial_qp;
-	
+
 	/* The minimum QP value */
 	guint min_qp;
-	
+
+	/* this is the bit-rate the rate control is targeting, as a percentage of the maximum bit-rate
+	* for example if target_percentage is 95 then the rate control will target a bit-rate that is
+	* 95% of the maximum bit-rate
+	*/
+	guint target_percentage;
+
+	/* window size in milliseconds. For example if this is set to 500, then the rate control will guarantee the target bit-rate over a 500 ms window */
+	guint window_size;
+
 	/* Number of frames between key frames (GOP size) */
 	guint intra_period;
-	
+
 	/* Width of video frame */
 	guint16 picture_width;
-	
+
 	/* Height of the video frame */
-	guint16 picture_height;	
+	guint16 picture_height;
 
 	/* Mime type, reserved */
 	GString * mime_type;
-	
+
 	/* Encode target format */
 	MixEncodeTargetFormat encode_format;
 
@@ -119,35 +128,45 @@
 	guint mixbuffer_pool_size;
 
 	/* Are buffers shared between capture and encoding drivers */
-	gboolean share_buf_mode;	
+	gboolean share_buf_mode;
 
 	/* Array of frame IDs created by capture library */
 	gulong *	ci_frame_id;
-	
+
 	/* Size of the array ci_frame_id */
 	guint	ci_frame_num;
-	
+
 	guint 	CIR_frame_cnt;
-	
+
+    /* The maximum slice size to be set to video driver (in bits).
+     * The encoder hardware will try to make sure the single slice does not exceed this size
+     * If not, mix_video_encode() will report a specific error
+     */
+	guint	max_slice_size;
+
+	MixVideoIntraRefreshType refresh_type;
+
+	MixAIRParams air_params;
+
 	/* < private > */
 	gulong draw;
-	
+
 	/*< public > */
-	
-	/* Indicates whether MixVideoFrames suitable for displaying 
+
+	/* Indicates whether MixVideoFrames suitable for displaying
 	 * need to be enqueued for retrieval using mix_video_get_frame() */
 	gboolean need_display;
-	
+
 	/* Reserved for future use */
 	void *reserved1;
-	
-	/* Reserved for future use */	
+
+	/* Reserved for future use */
 	void *reserved2;
-	
-	/* Reserved for future use */	
+
+	/* Reserved for future use */
 	void *reserved3;
-	
-	/* Reserved for future use */	
+
+	/* Reserved for future use */
 	void *reserved4;
 };
 
@@ -215,10 +234,10 @@
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Get mime type
- * 
+ *
  * <note>
  * Caller is responsible to g_free *mime_type
- * </note> 
+ * </note>
  */
 MIX_RESULT mix_videoconfigparamsenc_get_mime_type(MixVideoConfigParamsEnc * obj,
 		gchar ** mime_type);
@@ -360,6 +379,51 @@
 MIX_RESULT mix_videoconfigparamsenc_get_min_qp(MixVideoConfigParamsEnc * obj,
         guint *min_qp);
 
+
+/**
+ * mix_videoconfigparamsenc_set_target_percentage:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @target_percentage: The target percentage value
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set The  target percentage value
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_target_percentage (MixVideoConfigParamsEnc * obj,
+        guint target_percentage);
+
+/**
+ * mix_videoconfigparamsenc_get_target_percentage:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @target_percentage: The target percentage value to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get The target percentage value
+ */
+MIX_RESULT mix_videoconfigparamsenc_get_target_percentage(MixVideoConfigParamsEnc * obj,
+        guint *target_percentage);
+
+/**
+ * mix_videoconfigparamsenc_set_window_size:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @window_size: The window size for rate control
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set The window size value
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_window_size (MixVideoConfigParamsEnc * obj,
+        guint window_size);
+
+/**
+ * mix_videoconfigparamsenc_get_window_size:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @window_size: The window size for rate control
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get The window size value
+ */
+MIX_RESULT mix_videoconfigparamsenc_get_window_size (MixVideoConfigParamsEnc * obj,
+        guint *window_size);
+
 /**
  * mix_videoconfigparamsenc_set_intra_period:
  * @obj: #MixVideoConfigParamsEnc object
@@ -407,7 +471,7 @@
 /**
  * mix_videoconfigparamsenc_set_share_buf_mode:
  * @obj: #MixVideoConfigParamsEnc object
- * @share_buf_mod: A flag to indicate whether buffers are shared 
+ * @share_buf_mod: A flag to indicate whether buffers are shared
  *                 between capture and encoding drivers or not
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
@@ -419,7 +483,7 @@
 /**
  * mix_videoconfigparamsenc_get_share_buf_mode:
  * @obj: #MixVideoConfigParamsEnc object
- * @share_buf_mod: the flag to be returned that indicates whether buffers 
+ * @share_buf_mod: the flag to be returned that indicates whether buffers
  *                 are shared between capture and encoding drivers or not
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
@@ -431,26 +495,26 @@
 /**
  * mix_videoconfigparamsenc_set_ci_frame_info:
  * @obj: #MixVideoConfigParamsEnc object
- * @ci_frame_id: Array of frame IDs created by capture library * 
- * @ci_frame_num: Size of the array ci_frame_id 
+ * @ci_frame_id: Array of frame IDs created by capture library
+ * @ci_frame_num: Size of the array ci_frame_id
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Set CI frame information
  */
-MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_ci_frame_info(MixVideoConfigParamsEnc * obj,
 		gulong *	ci_frame_id, guint  ci_frame_num);
 
 /**
  * mix_videoconfigparamsenc_get_ci_frame_info:
  * @obj: #MixVideoConfigParamsEnc object
- * @ci_frame_id: Array of frame IDs created by capture library to be returned 
+ * @ci_frame_id: Array of frame IDs created by capture library to be returned
  * @ci_frame_num: Size of the array ci_frame_id to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Get CI frame information
  * <note>
  * Caller is responsible to g_free *ci_frame_id
- * </note> 
+ * </note>
  */
 MIX_RESULT mix_videoconfigparamsenc_get_ci_frame_info (MixVideoConfigParamsEnc * obj,
 		gulong * *ci_frame_id, guint *ci_frame_num);
@@ -459,18 +523,18 @@
 /**
  * mix_videoconfigparamsenc_set_drawable:
  * @obj: #MixVideoConfigParamsEnc object
- * @draw: drawable 
+ * @draw: drawable
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Set drawable
  */
-MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_drawable (MixVideoConfigParamsEnc * obj,
 		gulong draw);
 
 /**
  * mix_videoconfigparamsenc_get_drawable:
  * @obj: #MixVideoConfigParamsEnc object
- * @draw: drawable to be returned 
+ * @draw: drawable to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Get drawable
@@ -481,11 +545,11 @@
 /**
  * mix_videoconfigparamsenc_set_need_display:
  * @obj: #MixVideoConfigParamsEnc object
- * @need_display: Flag to indicates whether MixVideoFrames suitable for displaying 
+ * @need_display: Flag to indicate whether MixVideoFrames suitable for displaying
  *                need to be enqueued for retrieval using mix_video_get_frame()
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Set the flag used to indicate whether MixVideoFrames suitable for displaying 
+ * Set the flag used to indicate whether MixVideoFrames suitable for displaying
  * need to be enqueued for retrieval using mix_video_get_frame()
  */
 MIX_RESULT mix_videoconfigparamsenc_set_need_display (
@@ -495,11 +559,11 @@
 /**
  * mix_videoconfigparamsenc_get_need_display:
  * @obj: #MixVideoConfigParamsEnc object
- * @need_display: A flag to be returned to indicates whether MixVideoFrames suitable for displaying 
+ * @need_display: A flag to be returned to indicate whether MixVideoFrames suitable for displaying
  *                need to be enqueued for retrieval using mix_video_get_frame()
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Get the flag used to indicate whether MixVideoFrames suitable for displaying 
+ * Get the flag used to indicate whether MixVideoFrames suitable for displaying
  * need to be enqueued for retrieval using mix_video_get_frame()
  */
 MIX_RESULT mix_videoconfigparamsenc_get_need_display(MixVideoConfigParamsEnc * obj,
@@ -508,10 +572,10 @@
 /**
  * mix_videoconfigparamsenc_set_rate_control:
  * @obj: #MixVideoConfigParamsEnc object
- * @rcmode: Rate control mode  
+ * @rcmode: Rate control mode
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Set Rate control mode 
+ * Set Rate control mode
  */
 MIX_RESULT mix_videoconfigparamsenc_set_rate_control(MixVideoConfigParamsEnc * obj,
 		MixRateControl rcmode);
@@ -519,10 +583,10 @@
 /**
  * mix_videoconfigparamsenc_set_rate_control:
  * @obj: #MixVideoConfigParamsEnc object
- * @rcmode: Rate control mode to be returned  
+ * @rcmode: Rate control mode to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Get Rate control mode 
+ * Get Rate control mode
  */
 MIX_RESULT mix_videoconfigparamsenc_get_rate_control(MixVideoConfigParamsEnc * obj,
 		MixRateControl * rcmode);
@@ -530,10 +594,10 @@
 /**
  * mix_videoconfigparamsenc_set_raw_format:
  * @obj: #MixVideoConfigParamsEnc object
- * @raw_format: Raw format to be encoded  
+ * @raw_format: Raw format to be encoded
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Set Raw format to be encoded 
+ * Set Raw format to be encoded
  */
 MIX_RESULT mix_videoconfigparamsenc_set_raw_format (MixVideoConfigParamsEnc * obj,
 		MixRawTargetFormat raw_format);
@@ -541,10 +605,10 @@
 /**
  * mix_videoconfigparamsenc_get_raw_format:
  * @obj: #MixVideoConfigParamsEnc object
- * @raw_format: Raw format to be returned  
+ * @raw_format: Raw format to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Get Raw format 
+ * Get Raw format
  */
 MIX_RESULT mix_videoconfigparamsenc_get_raw_format (MixVideoConfigParamsEnc * obj,
 		MixRawTargetFormat * raw_format);
@@ -552,10 +616,10 @@
 /**
  * mix_videoconfigparamsenc_set_profile:
  * @obj: #MixVideoConfigParamsEnc object
- * @profile: Encoding profile  
+ * @profile: Encoding profile
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Set Encoding profile 
+ * Set Encoding profile
  */
 MIX_RESULT mix_videoconfigparamsenc_set_profile (MixVideoConfigParamsEnc * obj,
 		MixProfile profile);
@@ -563,10 +627,10 @@
 /**
  * mix_videoconfigparamsenc_get_profile:
  * @obj: #MixVideoConfigParamsEnc object
- * @profile: Encoding profile to be returned  
+ * @profile: Encoding profile to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Get Encoding profile 
+ * Get Encoding profile
  */
 MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
 		MixProfile * profile);
@@ -575,51 +639,122 @@
 /**
  * mix_videoconfigparamsenc_set_level:
  * @obj: #MixVideoConfigParamsEnc object
- * @level: Encoding level  
+ * @level: Encoding level
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Set Encoding level 
+ * Set Encoding level
  */
-MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj,
 		guint8 level);
 
 
 /**
  * mix_videoconfigparamsenc_get_level:
  * @obj: #MixVideoConfigParamsEnc object
- * @level: Encoding level to be returned  
+ * @level: Encoding level to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Get Encoding level 
+ * Get Encoding level
  */
 
-MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj,
 		guint8 * level);
 
 
 /**
  * mix_videoconfigparamsenc_set_CIR_frame_cnt:
  * @obj: #MixVideoConfigParamsEnc object
- * @CIR_frame_cnt: Encoding CIR frame count  
+ * @CIR_frame_cnt: Encoding CIR frame count
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Set Encoding CIR frame count
  */
-MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
 		guint CIR_frame_cnt);
 
 /**
  * mix_videoconfigparamsenc_set_CIR_frame_cnt:
  * @obj: #MixVideoConfigParamsEnc object
- * @CIR_frame_cnt: Encoding CIR frame count to be returned  
+ * @CIR_frame_cnt: Encoding CIR frame count to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
- * Get Encoding CIR frame count 
+ * Get Encoding CIR frame count
  */
 
-MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj,
 		guint * CIR_frame_cnt);
 
+
+/**
+ * mix_videoconfigparamsenc_set_max_slice_size:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @max_slice_size: Maximum encoded slice size
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set Maximum encoded slice size
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_max_slice_size (MixVideoConfigParamsEnc * obj,
+		guint max_slice_size);
+
+/**
+ * mix_videoconfigparamsenc_get_max_slice_size:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @max_slice_size: Maximum encoded slice size
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get Maximum encoded slice size
+ */
+
+MIX_RESULT mix_videoconfigparamsenc_get_max_slice_size (MixVideoConfigParamsEnc * obj,
+		guint * max_slice_size);
+
+
+/**
+ * mix_videoconfigparamsenc_set_refresh_type:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @refresh_type: The intra refresh type (CIR, AIR etc)
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set Intra Refresh Type
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_refresh_type (MixVideoConfigParamsEnc * obj,
+		MixVideoIntraRefreshType refresh_type);
+
+/**
+ * mix_videoconfigparamsenc_get_refresh_type:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @refresh_type: The intra refresh type (CIR, AIR etc)
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get Intra Refresh Type
+ */
+
+MIX_RESULT mix_videoconfigparamsenc_get_refresh_type (MixVideoConfigParamsEnc * obj,
+		MixVideoIntraRefreshType * refresh_type);
+
+/**
+ * mix_videoconfigparamsenc_set_AIR_params:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @air_params: AIR Parameters, including air_MBs, air_threshold and air_auto
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set AIR parameters
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_AIR_params (MixVideoConfigParamsEnc * obj,
+		MixAIRParams air_params);
+
+/**
+ * mix_videoconfigparamsenc_get_AIR_params:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @air_params: AIR Parameters, including air_MBs, air_threshold and air_auto
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get AIR parameters
+ */
+
+MIX_RESULT mix_videoconfigparamsenc_get_AIR_params (MixVideoConfigParamsEnc * obj,
+		MixAIRParams * air_params);
+
 /* TODO: Add getters and setters for other properties */
 
 G_END_DECLS
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c
index 620093d..12a5dd1 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h264.c
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.c
@@ -1,6 +1,6 @@
-/* 
+/*
 INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved. 
+Copyright 2009 Intel Corporation All Rights Reserved.
 The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
 
 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
@@ -53,6 +53,8 @@
   /* TODO: initialize properties */
   self->basic_unit_size = 0;
   self->slice_num = 1;
+  self->I_slice_num = 1;
+  self->P_slice_num = 1;
   self->disable_deblocking_filter_idc = 0;
 
   self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX;
@@ -119,7 +121,7 @@
 * mix_videoconfigparamsenc_h264_dup:
 * @obj: a #MixVideoConfigParams object
 * @returns: a newly allocated duplicate of the object.
-* 
+*
 * Copy duplicate of the object.
 */
 MixParams *
@@ -148,7 +150,7 @@
 * @target: copy to target
 * @src: copy from src
 * @returns: boolean indicates if copy is successful.
-* 
+*
 * Copy instance data from @src to @target.
 */
 gboolean
@@ -157,7 +159,7 @@
     MixVideoConfigParamsEncH264 *this_target, *this_src;
     MixParamsClass *root_class;
 
-    LOG_V( "Begin\n");	
+    LOG_V( "Begin\n");
 
     if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (target)
       && MIX_IS_VIDEOCONFIGPARAMSENC_H264 (src))
@@ -169,11 +171,13 @@
       //add properties
       this_target->basic_unit_size = this_src->basic_unit_size;
       this_target->slice_num = this_src->slice_num;
+      this_target->I_slice_num = this_src->I_slice_num;
+      this_target->P_slice_num = this_src->P_slice_num;
       this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
       this_target->delimiter_type = this_src->delimiter_type;
-      this_target->idr_interval = this_src->idr_interval;	  
+      this_target->idr_interval = this_src->idr_interval;
 
-	  
+
 
       // Now chainup base class
       root_class = MIX_PARAMS_CLASS (parent_class);
@@ -196,7 +200,7 @@
 * @first: first object to compare
 * @second: seond object to compare
 * @returns: boolean indicates if instance are equal.
-* 
+*
 * Copy instance data from @src to @target.
 */
 gboolean
@@ -216,23 +220,31 @@
       if (this_first->basic_unit_size != this_second->basic_unit_size) {
 	  	goto not_equal;
 	}
-	  
+
       if (this_first->slice_num != this_second->slice_num) {
 	  	goto not_equal;
 	}
 
+      if (this_first->I_slice_num != this_second->I_slice_num) {
+		goto not_equal;
+	}
+
+      if (this_first->P_slice_num != this_second->P_slice_num) {
+		goto not_equal;
+	}
+
       if (this_first->disable_deblocking_filter_idc != this_second->disable_deblocking_filter_idc) {
 	  	goto not_equal;
-	}  
+	}
 
       if (this_first->delimiter_type != this_second->delimiter_type) {
 	  	goto not_equal;
-	}  	  
+	}
 
       if (this_first->idr_interval != this_second->idr_interval) {
 	  	goto not_equal;
-	}  	  
-	  	  
+	}
+
 
 	ret = TRUE;
 
@@ -240,7 +252,7 @@
 
 	if (ret != TRUE) {
 		return ret;
-	}		
+	}
 
       /* TODO: add comparison for properties */
       {
@@ -283,7 +295,7 @@
 	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, basic_unit_size);
 	*basic_unit_size = obj->basic_unit_size;
 	return MIX_RESULT_SUCCESS;
-}	
+}
 
 
 MIX_RESULT mix_videoconfigparamsenc_h264_set_dlk (MixVideoConfigParamsEncH264 * obj,
@@ -298,13 +310,15 @@
 	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, disable_deblocking_filter_idc);
 	*disable_deblocking_filter_idc = obj->disable_deblocking_filter_idc;
 	return MIX_RESULT_SUCCESS;
-}	
+}
 
 
 MIX_RESULT mix_videoconfigparamsenc_h264_set_slice_num(MixVideoConfigParamsEncH264 * obj,
 		guint slice_num) {
 	MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
 	obj->slice_num = slice_num;
+	obj->I_slice_num = slice_num;
+	obj->P_slice_num = slice_num;
 	return MIX_RESULT_SUCCESS;
 }
 
@@ -313,7 +327,35 @@
 	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, slice_num);
 	*slice_num = obj->slice_num;
 	return MIX_RESULT_SUCCESS;
-}	
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint I_slice_num) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+	obj->I_slice_num = I_slice_num;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint * I_slice_num) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, I_slice_num);
+	*I_slice_num = obj->I_slice_num;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint P_slice_num) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+	obj->P_slice_num = P_slice_num;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint * P_slice_num) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, P_slice_num);
+	*P_slice_num = obj->P_slice_num;
+	return MIX_RESULT_SUCCESS;
+}
 
 MIX_RESULT mix_videoconfigparamsenc_h264_set_delimiter_type (MixVideoConfigParamsEncH264 * obj,
 		MixDelimiterType delimiter_type) {
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h
index 1885846..4eddcb5 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h264.h
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.h
@@ -1,6 +1,6 @@
-/* 
+/*
 INTEL CONFIDENTIAL
-Copyright 2009 Intel Corporation All Rights Reserved. 
+Copyright 2009 Intel Corporation All Rights Reserved.
 The source code contained or described herein and all documents related to the source code ("Material") are owned by Intel Corporation or its suppliers or licensors. Title to the Material remains with Intel Corporation or its suppliers and licensors. The Material contains trade secrets and proprietary and confidential information of Intel or its suppliers and licensors. The Material is protected by worldwide copyright and trade secret laws and treaty provisions. No part of the Material may be used, copied, reproduced, modified, published, uploaded, posted, transmitted, distributed, or disclosed in any way without Intel’s prior express written permission.
 
 No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
@@ -16,7 +16,7 @@
 
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSENC_H264:
-* 
+*
 * Get type of class.
 */
 #define MIX_TYPE_VIDEOCONFIGPARAMSENC_H264 (mix_videoconfigparamsenc_h264_get_type ())
@@ -30,7 +30,7 @@
 /**
 * MIX_IS_VIDEOCONFIGPARAMSENC_H264:
 * @obj: an object.
-* 
+*
 * Checks if the given object is an instance of #MixVideoConfigParamsEncH264
 */
 #define MIX_IS_VIDEOCONFIGPARAMSENC_H264(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264))
@@ -44,7 +44,7 @@
 /**
 * MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS:
 * @klass: a class.
-* 
+*
 * Checks if the given class is #MixVideoConfigParamsEncH264Class
 */
 #define MIX_IS_VIDEOCONFIGPARAMSENC_H264_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264))
@@ -52,7 +52,7 @@
 /**
 * MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS:
 * @obj: a #MixParams object.
-* 
+*
 * Get the class instance of the object.
 */
 #define MIX_VIDEOCONFIGPARAMSENC_H264_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIX_TYPE_VIDEOCONFIGPARAMSENC_H264, MixVideoConfigParamsEncH264Class))
@@ -73,37 +73,43 @@
   /*< public > */
 
   /* TODO: Add H.264 configuration paramters */
-  
-  /* The basic unit size used by rate control */  
+
+  /* The basic unit size used by rate control */
   guint basic_unit_size;
-  
+
   /* Number of slices in one frame */
   guint slice_num;
-  
+
+  /* Number of slices in one I frame */
+  guint I_slice_num;
+
+  /* Number of slices in one P frame */
+  guint P_slice_num;
+
   /* enable/disable deblocking */
-  guint8 disable_deblocking_filter_idc;	
+  guint8 disable_deblocking_filter_idc;
 
   /* delimiter_type */
   MixDelimiterType delimiter_type;
 
   guint idr_interval;
-  
-  /* Reserved for future use */  
+
+  /* Reserved for future use */
   void *reserved1;
-  
-  /* Reserved for future use */  
+
+  /* Reserved for future use */
   void *reserved2;
-  
-  /* Reserved for future use */  
+
+  /* Reserved for future use */
   void *reserved3;
-  
-  /* Reserved for future use */  
+
+  /* Reserved for future use */
   void *reserved4;
 };
 
 /**
 * MixVideoConfigParamsEncH264Class:
-* 
+*
 * MI-X VideoConfig object class
 */
 struct _MixVideoConfigParamsEncH264Class
@@ -117,7 +123,7 @@
 /**
 * mix_videoconfigparamsenc_h264_get_type:
 * @returns: type
-* 
+*
 * Get the type of object.
 */
 GType mix_videoconfigparamsenc_h264_get_type (void);
@@ -125,7 +131,7 @@
 /**
 * mix_videoconfigparamsenc_h264_new:
 * @returns: A newly allocated instance of #MixVideoConfigParamsEncH264
-* 
+*
 * Use this method to create new instance of #MixVideoConfigParamsEncH264
 */
 MixVideoConfigParamsEncH264 *mix_videoconfigparamsenc_h264_new (void);
@@ -133,7 +139,7 @@
 * mix_videoconfigparamsenc_h264_ref:
 * @mix: object to add reference
 * @returns: the #MixVideoConfigParamsEncH264 instance where reference count has been increased.
-* 
+*
 * Add reference count.
 */
 MixVideoConfigParamsEncH264
@@ -142,7 +148,7 @@
 /**
 * mix_videoconfigparamsenc_h264_unref:
 * @obj: object to unref.
-* 
+*
 * Decrement reference count of the object.
 */
 #define mix_videoconfigparamsenc_h264_unref(obj) mix_params_unref(MIX_PARAMS(obj))
@@ -155,7 +161,7 @@
 /**
  * mix_videoconfigparamsenc_h264_set_bus:
  * @obj: #MixVideoConfigParamsEncH264 object
- * @basic_unit_size: The basic unit size used by rate control  
+ * @basic_unit_size: The basic unit size used by rate control
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Set The basic unit size used by rate control
@@ -166,7 +172,7 @@
 /**
  * mix_videoconfigparamsenc_h264_get_bus:
  * @obj: #MixVideoConfigParamsEncH264 object
- * @basic_unit_size: The basic unit size to be returned  
+ * @basic_unit_size: The basic unit size to be returned
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Get The basic unit size used by rate control
@@ -218,6 +224,51 @@
 MIX_RESULT mix_videoconfigparamsenc_h264_get_slice_num(MixVideoConfigParamsEncH264 * obj,
 		guint * slice_num);
 
+
+/**
+ * mix_videoconfigparamsenc_h264_set_I_slice_num:
+ * @obj: #MixVideoConfigParamsEncH264 object
+ * @I_slice_num: Number of slices in one I frame
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set the Number of slices in one I frame
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_set_I_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint I_slice_num);
+
+/**
+ * mix_videoconfigparamsenc_h264_get_I_slice_num:
+ * @obj: #MixVideoConfigParamsEncH264 object
+ * @I_slice_num: Number of slices in one I frame to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get the Number of slices in one I frame
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_get_I_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint * I_slice_num);
+
+/**
+ * mix_videoconfigparamsenc_h264_set_P_slice_num:
+ * @obj: #MixVideoConfigParamsEncH264 object
+ * @P_slice_num: Number of slices in one P frame
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set the Number of slices in one P frame
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_set_P_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint P_slice_num);
+
+/**
+ * mix_videoconfigparamsenc_h264_get_P_slice_num:
+ * @obj: #MixVideoConfigParamsEncH264 object
+ * @P_slice_num: Number of slices in one P frame to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get the Number of slices in one P frame
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_get_P_slice_num(MixVideoConfigParamsEncH264 * obj,
+		guint * P_slice_num);
+
 /**
  * mix_videoconfigparamsenc_h264_set_delimiter_type:
  * @obj: #MixVideoConfigParamsEncH264 object
diff --git a/mix_video/src/mixvideodecodeparams.c b/mix_video/src/mixvideodecodeparams.c
index 0c74eb0..1e403ab 100644
--- a/mix_video/src/mixvideodecodeparams.c
+++ b/mix_video/src/mixvideodecodeparams.c
@@ -36,6 +36,7 @@
 
 	self->timestamp = 0;
 	self->discontinuity = FALSE;
+	self->new_sequence = FALSE;
 	self->reserved1 = NULL;
 	self->reserved2 = NULL;
 	self->reserved3 = NULL;
@@ -203,3 +204,17 @@
 	return MIX_RESULT_SUCCESS;
 }
 
+MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj,
+		gboolean new_sequence) {
+	MIX_VIDEODECODEPARAMS_SETTER_CHECK_INPUT (obj);
+	obj->new_sequence = new_sequence;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj,
+		gboolean *new_sequence) {
+	MIX_VIDEODECODEPARAMS_GETTER_CHECK_INPUT (obj, new_sequence);
+	*new_sequence = obj->new_sequence;
+	return MIX_RESULT_SUCCESS;
+}
+
diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h
index dfd614b..50ec502 100644
--- a/mix_video/src/mixvideodecodeparams.h
+++ b/mix_video/src/mixvideodecodeparams.h
@@ -80,6 +80,9 @@
 	/* Indicates a discontinuity in the stream */
 	gboolean discontinuity;
 
+    /* output only, indicate if stream contains a new sequence */
+    gboolean new_sequence;
+    
 	/* Reserved for future use */	
 	void *reserved1;
 	
@@ -167,7 +170,7 @@
 /**
  * mix_videodecodeparams_set_discontinuity:
  * @obj: #MixVideoDecodeParams object
- * @discontinuity: Flag to in Indicates a discontinuity in the stream.     
+ * @discontinuity: Flag to indicate a discontinuity in the stream.     
  * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
  *
  * Set discontinuity flag
@@ -187,6 +190,31 @@
 MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj,
 		gboolean *discontinuity);
 
+
+/**
+ * mix_videodecodeparams_set_new_sequence:
+ * @obj: #MixVideoDecodeParams object
+ * @new_sequence: Flag to indicate if stream contains a new sequence.     
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set new_sequence flag
+ */
+MIX_RESULT mix_videodecodeparams_set_new_sequence(MixVideoDecodeParams * obj,
+		gboolean new_sequence);
+
+
+/**
+ * mix_videodecodeparams_get_new_sequence:
+ * @obj: #MixVideoDecodeParams object
+ * @new_sequence: new_sequence flag to be returned     
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get new_sequence flag
+ */
+MIX_RESULT mix_videodecodeparams_get_new_sequence(MixVideoDecodeParams * obj,
+		gboolean *new_sequence);
+		
+
 G_END_DECLS
 
 #endif /* __MIX_VIDEODECODEPARAMS_H__ */
diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h
index 9f8651a..29bb2ac 100644
--- a/mix_video/src/mixvideodef.h
+++ b/mix_video/src/mixvideodef.h
@@ -11,8 +11,8 @@
  * @title: MI-X Video Data Definitons And Common Error Code
  * @short_description: MI-X Video data definitons and common error code
  * @include: mixvideodef.h
- * 
- * The section includes the definition of enum and struct as well as 
+ *
+ * The section includes the definition of enum and struct as well as
  * <note>
  * <title>Common Video Error Return Codes of MI-X video functions</title>
  * <itemizedlist>
@@ -44,6 +44,10 @@
 	MIX_RESULT_OUTOFSURFACES,
 	MIX_RESULT_DROPFRAME,
 	MIX_RESULT_NOTIMPL,
+	MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW,
+	MIX_RESULT_NOT_PERMITTED,
+	MIX_RESULT_ERROR_PROCESS_STREAM,
+	MIX_RESULT_MISSING_CONFIG,
 	MIX_RESULT_VIDEO_LAST
 } MIX_VIDEO_ERROR_CODE;
 
@@ -89,7 +93,7 @@
     MIX_RAW_TARGET_FORMAT_YUV420 = 1,
     MIX_RAW_TARGET_FORMAT_YUV422 = 2,
     MIX_RAW_TARGET_FORMAT_YUV444 = 4,
-    MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000,    
+    MIX_RAW_TARGET_FORMAT_PROTECTED = 0x80000000,
     MIX_RAW_TARGET_FORMAT_LAST
 } MixRawTargetFormat;
 
@@ -109,6 +113,7 @@
     MIX_RATE_CONTROL_NONE = 1,
     MIX_RATE_CONTROL_CBR = 2,
     MIX_RATE_CONTROL_VBR = 4,
+    MIX_RATE_CONTROL_VCM = 8,
     MIX_RATE_CONTROL_LAST
 } MixRateControl;
 
@@ -134,38 +139,65 @@
     MIX_DELIMITER_ANNEXB
 } MixDelimiterType;
 
+typedef enum {
+    MIX_VIDEO_NONIR,
+    MIX_VIDEO_CIR, 		/*Cyclic intra refresh*/
+    MIX_VIDEO_AIR, 		/*Adaptive intra refresh*/
+    MIX_VIDEO_BOTH,
+    MIX_VIDEO_LAST
+} MixVideoIntraRefreshType;
+
+typedef struct _MixAIRParams
+{
+	guint air_MBs;
+	guint air_threshold;
+	guint air_auto;
+} MixAIRParams;
 
 typedef enum {
 	MIX_ENC_PARAMS_START_UNUSED = 0x01000000,
 	MIX_ENC_PARAMS_BITRATE,
-	MIX_ENC_PARAMS_SLICE_SIZE,
+	MIX_ENC_PARAMS_INIT_QP,
+	MIX_ENC_PARAMS_MIN_QP,
+	MIX_ENC_PARAMS_WINDOW_SIZE,
+	MIX_ENC_PARAMS_TARGET_PERCENTAGE,
+	MIX_ENC_PARAMS_SLICE_NUM,
+	MIX_ENC_PARAMS_I_SLICE_NUM,
+	MIX_ENC_PARAMS_P_SLICE_NUM,
 	MIX_ENC_PARAMS_RESOLUTION,
 	MIX_ENC_PARAMS_GOP_SIZE,
 	MIX_ENC_PARAMS_FRAME_RATE,
 	MIX_ENC_PARAMS_FORCE_KEY_FRAME,
 	MIX_ENC_PARAMS_IDR_INTERVAL,
 	MIX_ENC_PARAMS_RC_MODE,
-	MIX_ENC_PARAMS_MAX_ENCODED_SLICE_SIZE,
-	MIX_ENC_PARAMS_QP,
+	MIX_ENC_PARAMS_MTU_SLICE_SIZE,
+	MIX_ENC_PARAMS_REFRESH_TYPE,
+	MIX_ENC_PARAMS_AIR,
 	MIX_ENC_PARAMS_CIR_FRAME_CNT,
 	MIX_ENC_PARAMS_LAST
 } MixEncParamsType;
 
 typedef struct _MixEncDynamicParams {
 	guint bitrate;
+	guint init_QP;
+	guint min_QP;
+	guint window_size;
+	guint target_percentage;
 	guint slice_num;
+	guint I_slice_num;
+	guint P_slice_num;
 	guint width;
 	guint height;
 	guint frame_rate_num;
-	guint frame_rate_denom;	
+	guint frame_rate_denom;
 	guint intra_period;
-	guint idr_interval;	
-	guint QP;
+	guint idr_interval;
 	guint CIR_frame_cnt;
 	guint max_slice_size;
 	gboolean force_idr;
 	MixRateControl rc_mode;
-	
+	MixVideoIntraRefreshType refresh_type;
+	MixAIRParams air_params;
 } MixEncDynamicParams;
 
 G_END_DECLS
diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c
index c78423f..7f34abd 100644
--- a/mix_video/src/mixvideoformat.c
+++ b/mix_video/src/mixvideoformat.c
@@ -62,6 +62,7 @@
     self->end_picture_pending = FALSE;
     self->video_frame = NULL;
     self->extra_surfaces = 0;
+    self->config_params = NULL;
 }
 
 static void mix_videoformat_class_init(MixVideoFormatClass * klass) {
@@ -117,6 +118,12 @@
 	  MIXUNREF(mix->surfacepool, mix_surfacepool_unref);
 	}
 
+    if (mix->config_params)
+	{
+	    mix_videoconfigparams_unref(mix->config_params);
+	    mix->config_params = NULL;
+	}	
+
 	//libVA cleanup (vaTerminate is called from MixVideo object)
 	if (mix->va_display) {
 		if (mix->va_context != VA_INVALID_ID)
@@ -223,6 +230,13 @@
 	mix->framemgr = frame_mgr;
 	mix_framemanager_ref(mix->framemgr);
 
+	if (mix->config_params)
+	{
+	    mix_videoconfigparams_unref(mix->config_params);
+	}
+	mix->config_params = config_params;
+	mix_videoconfigparams_ref(mix->config_params);
+
 	mix->va_display = va_display;
 
 	if (mix->mime_type)  //Clean up any previous mime_type
diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h
index 2499934..7a395b0 100644
--- a/mix_video/src/mixvideoformat.h
+++ b/mix_video/src/mixvideoformat.h
@@ -88,6 +88,7 @@
     gboolean end_picture_pending;
     MixVideoFrame* video_frame;    
     guint extra_surfaces;
+    MixVideoConfigParamsDec * config_params;
 };
 
 /**
diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c
index 04d517d..1108a0b 100644
--- a/mix_video/src/mixvideoformat_h264.c
+++ b/mix_video/src/mixvideoformat_h264.c
@@ -157,6 +157,94 @@
 	return ret;
 }
 
+MIX_RESULT mix_video_h264_update_config_params(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+
+    if (parent->picture_width == 0 ||
+        parent->picture_height == 0 ||
+        data->new_sps)
+    {
+        parent->picture_width = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
+        parent->picture_height = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
+
+        mix_videoconfigparamsdec_set_picture_res(
+            mix->config_params,
+            parent->picture_width,
+            parent->picture_height);
+    }
+
+
+    // video_range has default value of 0.
+    mix_videoconfigparamsdec_set_video_range(
+        mix->config_params,
+        data->codec_data->video_full_range_flag);
+
+
+    uint8 color_matrix;
+
+
+
+    switch (data->codec_data->matrix_coefficients)
+    {
+        case 1:
+            color_matrix = VA_SRC_BT709;
+            break;
+
+        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as
+        // SMPTE 170M/BT601
+        case 5:
+        case 6:
+            color_matrix = VA_SRC_BT601;
+            break;
+
+        default:
+            // unknown color matrix, set to 0 so color space flag will not be set.
+            color_matrix = 0;
+            break;
+    }
+    mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix);
+
+    mix_videoconfigparamsdec_set_pixel_aspect_ratio(
+        mix->config_params,
+        data->codec_data->sar_width,
+        data->codec_data->sar_height);
+
+    mix_videoconfigparamsdec_set_bit_rate(
+        mix->config_params,
+        data->codec_data->bit_rate);
+
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_video_h264_handle_new_sequence(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    LOG_V("new sequence is received.\n");
+
+    // original picture resolution
+    int width = mix->picture_width;
+    int height = mix->picture_height;
+
+    mix_video_h264_update_config_params(mix, data);
+
+    if (width != mix->picture_width || height != mix->picture_height)
+    {
+        // flush frame manager only if resolution is changed.
+        ret = mix_framemanager_flush(mix->framemgr);
+    }
+
+    // TO DO:  re-initialize VA
+
+    return ret;
+}
+
+
 MIX_RESULT mix_videofmt_h264_initialize_va(
     MixVideoFormat *mix,
     vbp_data_h264 *data)
@@ -166,7 +254,7 @@
     VAConfigAttrib attrib;
 
     MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
-    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+    //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
 
     if (parent->va_initialized)
     {
@@ -193,7 +281,7 @@
 
     if (vret != VA_STATUS_SUCCESS)
     {
-        ret = MIX_RESULT_FAIL;
+        ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
         LOG_E("vaCreateConfig failed\n");
         goto cleanup;
     }
@@ -215,7 +303,7 @@
     parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces);
     if (parent->va_surfaces == NULL)
     {
-        ret = MIX_RESULT_FAIL;
+        ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
         LOG_E( "parent->va_surfaces == NULL. \n");
         goto cleanup;
     }
@@ -227,15 +315,15 @@
 
     vret = vaCreateSurfaces(
         parent->va_display,
-        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16,
-        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16,
+        parent->picture_width,
+        parent->picture_height,
         VA_RT_FORMAT_YUV420,
         parent->va_num_surfaces,
         parent->va_surfaces);
 
     if (vret != VA_STATUS_SUCCESS)
     {
-        ret = MIX_RESULT_FAIL;
+        ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
         LOG_E( "Error allocating surfaces\n");
         goto cleanup;
     }
@@ -281,7 +369,7 @@
 
     if (vret != VA_STATUS_SUCCESS)
     {
-        ret = MIX_RESULT_FAIL;
+        ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
         LOG_E( "Error initializing video driver\n");
         goto cleanup;
     }
@@ -507,7 +595,7 @@
 
         if (vret != VA_STATUS_SUCCESS)
         {
-            ret = MIX_RESULT_FAIL;
+            ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
             LOG_E( "Video driver returned error from vaCreateBuffer\n");
             goto cleanup;
         }
@@ -529,7 +617,7 @@
 
         if (vret != VA_STATUS_SUCCESS)
         {
-            ret = MIX_RESULT_FAIL;
+            ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
             LOG_E( "Video driver returned error from vaCreateBuffer\n");
             goto cleanup;
         }
@@ -567,7 +655,7 @@
 
     if (vret != VA_STATUS_SUCCESS)
     {
-        ret = MIX_RESULT_FAIL;
+        ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
         LOG_E( "Video driver returned error from vaCreateBuffer\n");
         goto cleanup;
     }
@@ -598,7 +686,7 @@
 
     if (vret != VA_STATUS_SUCCESS)
     {
-        ret = MIX_RESULT_FAIL;
+        ret = MIX_RESULT_NO_MEMORY; // MIX_RESULT_FAIL;
         LOG_E( "Video driver returned error from vaCreateBuffer\n");
         goto cleanup;
     }
@@ -909,7 +997,8 @@
     MixVideoFormat *mix,
     MixBuffer * bufin,
     guint64 ts,
-    gboolean discontinuity)
+    gboolean discontinuity,
+    MixVideoDecodeParams * decode_params)
 {
     uint32 pret = 0;
     MixVideoFormat *parent = NULL;
@@ -929,7 +1018,7 @@
     LOG_V( "Called parse for current frame\n");
     if ((pret != VBP_DONE) &&(pret != VBP_OK))
     {
-        ret = MIX_RESULT_DROPFRAME;
+        ret = MIX_RESULT_ERROR_PROCESS_STREAM; // MIX_RESULT_DROPFRAME;
         LOG_E( "vbp_parse failed.\n");
         goto cleanup;
     }
@@ -948,13 +1037,27 @@
 
     if (data->has_sps == 0 || data->has_pps == 0)
     {
-        ret = MIX_RESULT_SUCCESS;
+        ret = MIX_RESULT_MISSING_CONFIG; // MIX_RESULT_SUCCESS;
         LOG_V("SPS or PPS is not available.\n");
         goto cleanup;
     }
 
+    if (data->new_sps)
+    {
+        decode_params->new_sequence = data->new_sps;
+
+        ret = mix_video_h264_handle_new_sequence(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_V("mix_video_h264_handle_new_sequence failed.\n");
+            goto cleanup;
+        }
+    }
+
     if (parent->va_initialized == FALSE)
     {
+        mix_video_h264_update_config_params(parent, data);
+
         LOG_V("try initializing VA...\n");
         ret = mix_videofmt_h264_initialize_va(parent, data);
         if (ret != MIX_RESULT_SUCCESS)
@@ -1029,8 +1132,6 @@
     vbp_data_h264 *data = NULL;
     MixVideoFormat *parent = NULL;
     MixIOVec *header = NULL;
-    guint pic_width_in_codec_data = 0;
-    guint pic_height_in_codec_data = 0;
 
     if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
     {
@@ -1152,17 +1253,7 @@
 
     LOG_V( "Queried parser for header data\n");
 
-    // Update the pic size according to the parsed codec_data
-    pic_width_in_codec_data  = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
-    pic_height_in_codec_data = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
-    mix_videoconfigparamsdec_set_picture_res (config_params, pic_width_in_codec_data, pic_height_in_codec_data);
-
-    if (parent->picture_width == 0 || parent->picture_height == 0)
-    {
-        // Update picture resolution only if it is not set. The derived picture res from mbs may not be accurate.
-        parent->picture_width  = pic_width_in_codec_data;
-        parent->picture_height = pic_height_in_codec_data;
-    }
+    mix_video_h264_update_config_params(mix, data);
 
     ret = mix_videofmt_h264_initialize_va(mix, data);
     if (ret != MIX_RESULT_SUCCESS)
@@ -1246,6 +1337,8 @@
         return MIX_RESULT_FAIL;
     }
 
+    decode_params->new_sequence = FALSE;
+
     //From now on, we exit this function through cleanup:
 
     LOG_V( "Locking\n");
@@ -1262,7 +1355,8 @@
             mix,
             bufin[i],
             ts,
-            discontinuity);
+            discontinuity,
+            decode_params);
 
         if (ret != MIX_RESULT_SUCCESS)
         {
@@ -1470,7 +1564,7 @@
 
 						pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id =
 							((MixVideoFrame *)self->last_decoded_frame)->frame_id;
-						LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);
+        					LOG_V( "Reference frame not found, substituting %d\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);
 
 					}
 					else
diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h
index 84bfc78..bcfe8bd 100644
--- a/mix_video/src/mixvideoformat_h264.h
+++ b/mix_video/src/mixvideoformat_h264.h
@@ -41,7 +41,7 @@
 	GHashTable *dpb_surface_table;
 #ifdef DECODER_ROBUSTNESS
 	//Can improve which frame is used for this at a later time
-	MixVideoFrame  *last_decoded_frame;  //last surface decoded, to be used as reference frame when reference frames are missing
+    	MixVideoFrame  *last_decoded_frame;  //last surface decoded, to be used as reference frame when reference frames are missing
 #endif
 };
 
@@ -96,7 +96,7 @@
 
 /* H.264 vmethods */
 MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg);
-MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix,
+MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, 
 				  MixVideoConfigParamsDec * config_params,
 				  MixFrameManager * frame_mgr,
 				  MixBufferPool * input_buf_pool,
@@ -116,12 +116,12 @@
 
 
 MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix,
-                                        vbp_data_h264 *data,
+                                        vbp_data_h264 *data, 
 					guint64 timestamp,
 					gboolean discontinuity);
 
 
-MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix,
+MIX_RESULT mix_videofmt_h264_release_input_buffers(MixVideoFormat *mix, 
 					guint64 timestamp);
 
 
diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c
index b11ab88..0ac6667 100644
--- a/mix_video/src/mixvideoformat_mp42.c
+++ b/mix_video/src/mixvideoformat_mp42.c
@@ -10,6 +10,8 @@
 #include "mixvideolog.h"
 #include "mixvideoformat_mp42.h"
 
+// The value of each VOP type defined here follows the MP4 spec and matches the value of the
+// corresponding frame type defined in the MixFrameType enumeration (except sprite (S))
 enum {
 	MP4_VOP_TYPE_I = 0,
 	MP4_VOP_TYPE_P = 1,
@@ -17,24 +19,6 @@
 	MP4_VOP_TYPE_S = 3,
 };
 
-/*
- * This is for divx packed stream
- */
-typedef struct _PackedStream PackedStream;
-struct _PackedStream {
-	vbp_picture_data_mp42 *picture_data;
-	MixBuffer *mix_buffer;
-};
-
-/*
- * Clone and destroy vbp_picture_data_mp42
- */
-static vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data(
-		vbp_picture_data_mp42 *picture_data);
-static void mix_videoformat_mp42_free_picture_data(
-		vbp_picture_data_mp42 *picture_data);
-static void mix_videoformat_mp42_flush_packed_stream_queue(
-		GQueue *packed_stream_queue);
 
 /* The parent class. The pointer will be saved
  * in this class's initialization. The pointer
@@ -57,8 +41,9 @@
 
 	self->last_frame = NULL;
 	self->last_vop_coding_type = -1;
-
-	self->packed_stream_queue = NULL;
+	self->last_vop_time_increment = 0;
+	self->next_nvop_for_PB_frame = FALSE;
+	self->iq_matrix_buf_sent = FALSE;
 
 	/* NOTE: we don't need to do this here.
 	 * This just demostrates how to access
@@ -104,6 +89,7 @@
 	MixVideoFormat *parent = NULL;
 	gint32 vbp_ret = VBP_OK;
 	MixVideoFormat_MP42 *self = NULL;
+    gint idx = 0;
 
 	LOG_V("Begin\n");
 
@@ -127,32 +113,32 @@
 	g_mutex_lock(parent->objectlock);
 
 	/* unref reference frames */
-	{
-		gint idx = 0;
-		for (idx = 0; idx < 2; idx++) {
-			if (self->reference_frames[idx] != NULL) {
-				mix_videoframe_unref(self->reference_frames[idx]);
-				self->reference_frames[idx] = NULL;
-			}
+	for (idx = 0; idx < 2; idx++) {
+		if (self->reference_frames[idx] != NULL) {
+			mix_videoframe_unref(self->reference_frames[idx]);
+			self->reference_frames[idx] = NULL;
 		}
 	}
-
+    if (self->last_frame)
+    {
+        mix_videoframe_unref(self->last_frame);
+        self->last_frame = NULL;
+    }
+    self->next_nvop_for_PB_frame = FALSE;
+    self->iq_matrix_buf_sent = FALSE;
 
 	/* Reset state */
 	parent->initialized = TRUE;
-	parent->parse_in_progress = FALSE;
+	parent->end_picture_pending = FALSE;
 	parent->discontinuity_frame_in_progress = FALSE;
 	parent->current_timestamp = (guint64)-1;
 
 	/* Close the parser */
-	vbp_ret = vbp_close(parent->parser_handle);
-	parent->parser_handle = NULL;
-
-	if (self->packed_stream_queue) {
-		mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue);
-		g_queue_free(self->packed_stream_queue);
-	}
-	self->packed_stream_queue = NULL;
+	if (parent->parser_handle)
+	{
+		vbp_ret = vbp_close(parent->parser_handle);
+		parent->parser_handle = NULL;
+	}
 
 	g_mutex_unlock(parent->objectlock);
 
@@ -183,938 +169,1097 @@
 	return MIX_RESULT_NOTIMPL;
 }
 
-MIX_RESULT mix_videofmt_mp42_initialize(MixVideoFormat *mix,
-		MixVideoConfigParamsDec * config_params, MixFrameManager * frame_mgr,
-		MixBufferPool * input_buf_pool, MixSurfacePool ** surface_pool,
-		VADisplay va_display) {
-	uint32 vbp_ret = 0;
-	MIX_RESULT ret = MIX_RESULT_FAIL;
 
-	vbp_data_mp42 *data = NULL;
-	MixVideoFormat *parent = NULL;
-	MixIOVec *header = NULL;
+MIX_RESULT mix_videofmt_mp42_update_config_params(
+    MixVideoFormat *mix,
+    vbp_data_mp42 *data)
+{
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);  
 
-	VAProfile va_profile = VAProfileMPEG4AdvancedSimple;
-	VAConfigAttrib attrib;
+    if (parent->picture_width == 0 ||
+        parent->picture_height == 0 ||
+        parent->picture_width < data->codec_data.video_object_layer_width ||
+        parent->picture_height < data->codec_data.video_object_layer_height)
+    {
+        parent->picture_width = data->codec_data.video_object_layer_width;
+        parent->picture_height = data->codec_data.video_object_layer_height;
 
-	VAStatus va_ret = VA_STATUS_SUCCESS;
-	guint number_extra_surfaces = 0;
-	VASurfaceID *surfaces = NULL;
-	guint numSurfaces = 0;
+        mix_videoconfigparamsdec_set_picture_res(
+            mix->config_params, 
+            parent->picture_width, 
+            parent->picture_height);        
+    }
+    
 
-	MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+    // video_range has default value of 0. Y ranges from 16 to 235.
+    mix_videoconfigparamsdec_set_video_range(mix->config_params, data->codec_data.video_range);
+    
+    uint8  color_matrix;
+    
+    switch (data->codec_data.matrix_coefficients)
+    {
+        case 1:
+            color_matrix = VA_SRC_BT709;
+            break;
 
-	if (mix == NULL || config_params == NULL || frame_mgr == NULL) {
-		return MIX_RESULT_NULL_PTR;
-	}
+        // ITU-R Recommendation BT.470-6 System B, G (MP4), same as 
+        // SMPTE 170M/BT601
+        case 5:
+        case 6:
+            color_matrix = VA_SRC_BT601;
+            break;
 
-	if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
-		return MIX_RESULT_INVALID_PARAM;
-	}
+        default:
+            // unknown color matrix, set to 0 so color space flag will not be set.
+            color_matrix = 0;
+            break;        
+    }            
+    mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix); 
 
-	LOG_V("begin\n");
-
-	if (parent_class->initialize) {
-		ret = parent_class->initialize(mix, config_params, frame_mgr,
-				input_buf_pool, surface_pool, va_display);
-		if (ret != MIX_RESULT_SUCCESS) {
-			LOG_E("Failed to initialize parent!\n");
-			return ret;
-		}
-	}
-
-	parent = MIX_VIDEOFORMAT(mix);
-
-	g_mutex_lock(parent->objectlock);
-
-	parent->initialized = FALSE;
-
-	vbp_ret = vbp_open(VBP_MPEG4, &(parent->parser_handle));
-
-	if (vbp_ret != VBP_OK) {
-		LOG_E("Failed to call vbp_open()\n");
-		ret = MIX_RESULT_FAIL;
-		goto cleanup;
-	}
-
-	/*
-	 * avidemux doesn't pass codec_data, we need handle this.
-	 */
-
-	LOG_V("Try to get header data from config_param\n");
-
-	ret = mix_videoconfigparamsdec_get_header(config_params, &header);
-	if (ret == MIX_RESULT_SUCCESS && header != NULL) {
-
-		LOG_V("Found header data from config_param\n");
-		vbp_ret = vbp_parse(parent->parser_handle, header->data, header->data_size,
-				TRUE);
-
-		LOG_V("vbp_parse() returns 0x%x\n", vbp_ret);
-
-		g_free(header->data);
-		g_free(header);
-
-		if (!((vbp_ret == VBP_OK) || (vbp_ret == VBP_DONE))) {
-			LOG_E("Failed to call vbp_parse() to parse header data!\n");
-			goto cleanup;
-		}
-
-		/* Get the header data and save */
-
-		LOG_V("Call vbp_query()\n");
-		vbp_ret = vbp_query(parent->parser_handle, (void *) &data);
-		LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
-
-		if ((vbp_ret != VBP_OK) || (data == NULL)) {
-			LOG_E("Failed to call vbp_query() to query header data parsing result\n");
-			goto cleanup;
-		}
-
-		if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) {
-			va_profile = VAProfileMPEG4AdvancedSimple;
-			LOG_V("The profile is VAProfileMPEG4AdvancedSimple from header data\n");
-		} else {
-			va_profile = VAProfileMPEG4Simple;
-			LOG_V("The profile is VAProfileMPEG4Simple from header data\n");
-		}
-	}
-
-	va_display = parent->va_display;
-
-	/* We are requesting RT attributes */
-	attrib.type = VAConfigAttribRTFormat;
-
-	va_ret = vaGetConfigAttributes(va_display, va_profile, VAEntrypointVLD,
-			&attrib, 1);
-	if (va_ret != VA_STATUS_SUCCESS) {
-		LOG_E("Failed to call vaGetConfigAttributes()\n");
-		goto cleanup;
-	}
-
-	if ((attrib.value & VA_RT_FORMAT_YUV420) == 0) {
-		LOG_E("The attrib.value is wrong!\n");
-		goto cleanup;
-	}
-
-	va_ret = vaCreateConfig(va_display, va_profile, VAEntrypointVLD, &attrib,
-			1, &(parent->va_config));
-
-	if (va_ret != VA_STATUS_SUCCESS) {
-		LOG_E("Failed to call vaCreateConfig()!\n");
-		goto cleanup;
-	}
-
-	ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
-			&number_extra_surfaces);
-
-	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to call mix_videoconfigparams_get_extra_surface_allocation()!\n");
-		goto cleanup;
-	}
-
-	parent->va_num_surfaces = number_extra_surfaces + 4;
-	if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) {
-		parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM;
-	}
-
-	numSurfaces = parent->va_num_surfaces;
-
-	parent->va_surfaces = g_malloc(sizeof(VASurfaceID) * numSurfaces);
-	if (!parent->va_surfaces) {
-		LOG_E("Not enough memory to allocate surfaces!\n");
-		ret = MIX_RESULT_NO_MEMORY;
-		goto cleanup;
-	}
-
-	surfaces = parent->va_surfaces;
-
-	va_ret = vaCreateSurfaces(va_display, parent->picture_width,
-			parent->picture_height, VA_RT_FORMAT_YUV420, numSurfaces,
-			surfaces);
-	if (va_ret != VA_STATUS_SUCCESS) {
-		LOG_E("Failed to call vaCreateSurfaces()!\n");
-		goto cleanup;
-	}
-
-	parent->surfacepool = mix_surfacepool_new();
-	if (parent->surfacepool == NULL) {
-		LOG_E("Not enough memory to create surface pool!\n");
-		ret = MIX_RESULT_NO_MEMORY;
-		goto cleanup;
-	}
-
-	*surface_pool = parent->surfacepool;
-
-	ret = mix_surfacepool_initialize(parent->surfacepool, surfaces,
-			numSurfaces, va_display);
-
-	/* Initialize and save the VA context ID
-	 * Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
-	 */
-	va_ret = vaCreateContext(va_display, parent->va_config,
-			parent->picture_width, parent->picture_height, 0, surfaces,
-			numSurfaces, &(parent->va_context));
-
-	if (va_ret != VA_STATUS_SUCCESS) {
-		LOG_E("Failed to call vaCreateContext()!\n");
-		ret = MIX_RESULT_FAIL;
-		goto cleanup;
-	}
-
-	/*
-	 * Packed stream queue
-	 */
-
-	self->packed_stream_queue = g_queue_new();
-	if (!self->packed_stream_queue) {
-		LOG_E("Failed to crate packed stream queue!\n");
-		ret = MIX_RESULT_NO_MEMORY;
-		goto cleanup;
-	}
-
-	self->last_frame = NULL;
-	self->last_vop_coding_type = -1;
-	parent->initialized = FALSE;
-	ret = MIX_RESULT_SUCCESS;
-
-	cleanup:
-
-	g_mutex_unlock(parent->objectlock);
-
-	LOG_V("End\n");
-
-	return ret;
+    mix_videoconfigparamsdec_set_pixel_aspect_ratio(
+        mix->config_params,
+        data->codec_data.par_width,
+        data->codec_data.par_height);
+    
+    return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[],
-		gint bufincnt, MixVideoDecodeParams * decode_params) {
-	uint32 vbp_ret = 0;
-	MixVideoFormat *parent = NULL;
-	MIX_RESULT ret = MIX_RESULT_FAIL;
-	guint64 ts = 0;
-	vbp_data_mp42 *data = NULL;
-	gboolean discontinuity = FALSE;
-	MixInputBufferEntry *bufentry = NULL;
-	gint i = 0;
 
-	LOG_V("Begin\n");
+MIX_RESULT mix_videofmt_mp42_initialize_va(
+    MixVideoFormat *mix,
+    vbp_data_mp42 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VAConfigAttrib attrib;
+    VAProfile va_profile;      
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);  
 
-	if (mix == NULL || bufin == NULL || decode_params == NULL) {
-		return MIX_RESULT_NULL_PTR;
+    LOG_V( "Begin\n");
+
+    if (parent->va_initialized)
+    {
+        LOG_W("va already initialized.\n");
+        return MIX_RESULT_SUCCESS;
+    }
+    
+    //We are requesting RT attributes
+    attrib.type = VAConfigAttribRTFormat;
+    attrib.value = VA_RT_FORMAT_YUV420;
+
+    //Initialize and save the VA config ID
+	if ((data->codec_data.profile_and_level_indication & 0xF8) == 0xF0) 
+	{
+		va_profile = VAProfileMPEG4AdvancedSimple;
+	} 
+	else 
+	{
+		va_profile = VAProfileMPEG4Simple;
 	}
+		
+    vret = vaCreateConfig(
+        parent->va_display, 
+        va_profile, 
+        VAEntrypointVLD, 
+        &attrib, 
+        1, 
+        &(parent->va_config));
 
-	if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
-		return MIX_RESULT_INVALID_PARAM;
-	}
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("vaCreateConfig failed\n");
+        goto cleanup;
+    }
 
-	parent = MIX_VIDEOFORMAT(mix);
+    // add 1 more surface for packed frame (PB frame), and another one
+    // for partial frame handling
+	parent->va_num_surfaces = parent->extra_surfaces + 4 + 1 + 1;
+	//if (parent->va_num_surfaces > MIX_VIDEO_MP42_SURFACE_NUM) 
+	//	parent->va_num_surfaces = MIX_VIDEO_MP42_SURFACE_NUM;
+                
+    parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces);
+    if (parent->va_surfaces == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "parent->va_surfaces == NULL. \n");
+        goto cleanup;
+    }
+    
+    vret = vaCreateSurfaces(
+        parent->va_display, 
+        parent->picture_width,
+        parent->picture_height,
+        VA_RT_FORMAT_YUV420,
+        parent->va_num_surfaces, 
+        parent->va_surfaces);
+    
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error allocating surfaces\n");
+        goto cleanup;
+    }
+    
+    LOG_V( "Created %d libva surfaces\n", parent->va_num_surfaces);
+    
+    //Initialize the surface pool
+    ret = mix_surfacepool_initialize(
+        parent->surfacepool,
+        parent->va_surfaces, 
+        parent->va_num_surfaces, 
+        parent->va_display);
+    
+    switch (ret)
+    {
+        case MIX_RESULT_SUCCESS:
+            break;
+        case MIX_RESULT_ALREADY_INIT:  //This case is for future use when we can be initialized multiple times.  It is to detect when we have not been reset before re-initializing.
+        default:
+            ret = MIX_RESULT_ALREADY_INIT;
+            LOG_E( "Error init surface pool\n");
+            goto cleanup;
+            break;
+    }
+    
+    //Initialize and save the VA context ID
+    //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
+    vret = vaCreateContext(
+        parent->va_display, 
+        parent->va_config,
+        parent->picture_width, 
+        parent->picture_height,
+        0, 
+        parent->va_surfaces, 
+        parent->va_num_surfaces,
+        &(parent->va_context));
+    
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error initializing video driver\n");
+        goto cleanup;
+    }
+    
+    parent->va_initialized = TRUE;
+    
+cleanup:
+    /* nothing to clean up */      
+      
+    return ret;
 
-	g_mutex_lock(parent->objectlock);
-
-	ret = mix_videodecodeparams_get_timestamp(decode_params, &ts);
-	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to get timestamp\n");
-		goto cleanup;
-	}
-
-	LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts);
-
-	ret
-			= mix_videodecodeparams_get_discontinuity(decode_params,
-					&discontinuity);
-	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to get discontinuity\n");
-		goto cleanup;
-	}
-
-	/*  If this is a new frame and we haven't retrieved parser
-	 *	workload data from previous frame yet, do so
-	 */
-
-	if ((ts != parent->current_timestamp) && (parent->parse_in_progress)) {
-
-		LOG_V("timestamp changed and parsing is still in progress\n");
-
-		/* this is new data and the old data parsing is not complete, continue
-		 * to parse the old data
-		 */
-		vbp_ret = vbp_query(parent->parser_handle, (void *) &data);
-		LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
-
-		if ((vbp_ret != VBP_OK) || (data == NULL)) {
-			ret = MIX_RESULT_FAIL;
-			LOG_E("vbp_ret != VBP_OK || data == NULL\n");
-			goto cleanup;
-		}
-
-		ret = mix_videofmt_mp42_process_decode(mix, data,
-				parent->current_timestamp,
-				parent->discontinuity_frame_in_progress);
-
-		if (ret != MIX_RESULT_SUCCESS) {
-			/* We log this but need to process 
-			 * the new frame data, so do not return
-			 */
-			LOG_W("process_decode failed.\n");
-		}
-
-		/* we are done parsing for old data */
-		parent->parse_in_progress = FALSE;
-	}
-
-	parent->current_timestamp = ts;
-	parent->discontinuity_frame_in_progress = discontinuity;
-
-	/* we parse data buffer one by one */
-	for (i = 0; i < bufincnt; i++) {
-
-		LOG_V(
-				"Calling parse for current frame, parse handle %d, buf %x, size %d\n",
-				(int) parent->parser_handle, (guint) bufin[i]->data,
-				bufin[i]->size);
-
-		vbp_ret = vbp_parse(parent->parser_handle, bufin[i]->data,
-				bufin[i]->size, FALSE);
-
-		LOG_V("vbp_parse() returns 0x%x\n", vbp_ret);
-
-		/* The parser failed to parse */
-		if (vbp_ret != VBP_DONE && vbp_ret != VBP_OK) {
-			LOG_E("vbp_parse() ret = %d\n", vbp_ret);
-			ret = MIX_RESULT_FAIL;
-			goto cleanup;
-		}
-
-		LOG_V("vbp_parse() ret = %d\n", vbp_ret);
-
-		if (vbp_ret == VBP_OK || vbp_ret == VBP_DONE) {
-
-			LOG_V("Now, parsing is done (VBP_DONE)!\n");
-
-			vbp_ret = vbp_query(parent->parser_handle, (void *) &data);
-			LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
-
-			if ((vbp_ret != VBP_OK) || (data == NULL)) {
-				ret = MIX_RESULT_FAIL;
-				goto cleanup;
-			}
-
-			/* Increase the ref count of this input buffer */
-			mix_buffer_ref(bufin[i]);
-
-			/* Create a new MixInputBufferEntry
-			 * TODO: make this from a pool later 
-			 */
-			bufentry = g_malloc(sizeof(MixInputBufferEntry));
-			if (bufentry == NULL) {
-				ret = MIX_RESULT_NO_MEMORY;
-				goto cleanup;
-			}
-
-			bufentry->buf = bufin[i];
-			bufentry->timestamp = ts;
-
-			LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_DONE = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp);
-
-			/* Enqueue this input buffer */
-			g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry);
-
-			/* process and decode data */
-			ret
-					= mix_videofmt_mp42_process_decode(mix, data, ts,
-							discontinuity);
-
-			if (ret != MIX_RESULT_SUCCESS) {
-				/* We log this but continue since we need 
-				 * to complete our processing
-				 */
-				LOG_W("process_decode failed.\n");
-			}
-
-			LOG_V("Called process and decode for current frame\n");
-
-			parent->parse_in_progress = FALSE;
-
-		}
-#if 0
-		/*
-		 * The DHG parser checks for next_sc, if next_sc is a start code, it thinks the current parsing is done: VBP_DONE.
-		 * For our situtation, this not the case. The start code is always begin with the gstbuffer. At the end of frame,
-		 * the start code is never found.
-		 */
-
-		else if (vbp_ret == VBP_OK) {
-
-			LOG_V("Now, parsing is not done (VBP_OK)!\n");
-
-			LOG_V(
-					"Enqueuing buffer and going on to next (if any) for this frame\n");
-
-			/* Increase the ref count of this input buffer */
-			mix_buffer_ref(bufin[i]);
-
-			/* Create a new MixInputBufferEntry
-			 * TODO make this from a pool later
-			 */
-			bufentry = g_malloc(sizeof(MixInputBufferEntry));
-			if (bufentry == NULL) {
-				ret = MIX_RESULT_FAIL;
-				goto cleanup;
-			}
-
-			bufentry->buf = bufin[i];
-			bufentry->timestamp = ts;
-			LOG_I("bufentry->buf = %x bufentry->timestamp FOR VBP_OK = %"G_GINT64_FORMAT"\n", bufentry->buf, bufentry->timestamp);
-
-			/* Enqueue this input buffer */
-			g_queue_push_tail(parent->inputbufqueue, (gpointer) bufentry);
-			parent->parse_in_progress = TRUE;
-		}
-#endif
-	}
-
-	cleanup:
-
-	g_mutex_unlock(parent->objectlock);
-
-	LOG_V("End\n");
-
-	return ret;
 }
 
-MIX_RESULT mix_videofmt_mp42_process_decode(MixVideoFormat *mix,
-		vbp_data_mp42 *data, guint64 timestamp, gboolean discontinuity) {
+MIX_RESULT mix_videofmt_mp42_decode_a_slice(
+    MixVideoFormat *mix,
+    vbp_data_mp42* data,
+    vbp_picture_data_mp42* pic_data)
+{  
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VADisplay vadisplay = NULL;
+    VAContextID vacontext;
+    guint buffer_id_cnt = 0;
+    gint frame_type = -1;
+    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+    VABufferID buffer_ids[4];
+    MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);    
+    VAPictureParameterBufferMPEG4* pic_params = &(pic_data->picture_param);
+    vbp_slice_data_mp42* slice_data = &(pic_data->slice_data);
+    VASliceParameterBufferMPEG4* slice_params = &(slice_data->slice_param);
+    
+    LOG_V( "Begin\n");
 
-	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	VAStatus va_ret = VA_STATUS_SUCCESS;
-	VADisplay va_display = NULL;
-	VAContextID va_context;
+    vadisplay = mix->va_display;
+    vacontext = mix->va_context;
 
-	MixVideoFormat_MP42 *self = NULL;
-	vbp_picture_data_mp42 *picture_data = NULL;
-	VAPictureParameterBufferMPEG4 *picture_param = NULL;
-	VAIQMatrixBufferMPEG4 *iq_matrix_buffer = NULL;
-	vbp_slice_data_mp42 *slice_data = NULL;
-	VASliceParameterBufferMPEG4 *slice_param = NULL;
+    if (!mix->end_picture_pending)
+    {
+        LOG_E("picture decoder is not started!\n");
+        ret = MIX_RESULT_FAIL;
+        goto cleanup;
+    }
 
-	gint frame_type = -1;
-	guint buffer_id_number = 0;
-	guint buffer_id_cnt = 0;
-	VABufferID *buffer_ids = NULL;
-	MixVideoFrame *frame = NULL;
-
-	gint idx = 0, jdx = 0;
-	gulong surface = 0;
-
-	MixBuffer *mix_buffer = NULL;
-	gboolean is_from_queued_data = FALSE;
-
-	LOG_V("Begin\n");
-
-	if ((mix == NULL) || (data == NULL)) {
-		return MIX_RESULT_NULL_PTR;
-	}
-
-	if (!MIX_IS_VIDEOFORMAT_MP42(mix)) {
-		return MIX_RESULT_INVALID_PARAM;
-	}
-
-	self = MIX_VIDEOFORMAT_MP42(mix);
-
-	LOG_V("data->number_pictures = %d\n", data->number_pictures);
-
-	if (data->number_pictures == 0) {
-		LOG_W("data->number_pictures == 0\n");
-		mix_videofmt_mp42_release_input_buffers(mix, timestamp);
-		return ret;
-	}
-
-	is_from_queued_data = FALSE;
-
-	/* Do we have packed frames? */
-	if (data->number_pictures > 1) {
-
-		/*
-
-		 Assumption:
-
-		 1. In one packed frame, there's only one P or I frame and the
-		 reference frame will be the first one in the packed frame
-		 2. In packed frame, there's no skipped frame(vop_coded = 0)
-		 3. In one packed frame, if there're n B frames, there will be
-		 n N-VOP frames to follow the packed frame.
-		 The timestamp of each N-VOP frame will be used for each B frames
-		 in the packed frame
-		 4. N-VOP frame is the frame with vop_coded = 0.
-
-		 {P, B, B, B }, N, N, N, P, P, P, I, ...
-
-		 */
-
-		MixInputBufferEntry *bufentry = NULL;
-		PackedStream *packed_stream = NULL;
-		vbp_picture_data_mp42 *cloned_picture_data = NULL;
-
-		LOG_V("This is packed frame\n");
-
-		/*
-		 * Is the packed_frame_queue empty? If not, how come
-		 * a packed frame can follow another packed frame without
-		 * necessary number of N-VOP between them?
-		 */
-
-		if (!g_queue_is_empty(self->packed_stream_queue)) {
-			ret = MIX_RESULT_DROPFRAME;
-			LOG_E("The previous packed frame is not fully processed yet!\n");
-			goto cleanup;
-		}
-
-		/* Packed frame shall be something like this {P, B, B, B, ... B } */
-		for (idx = 0; idx < data->number_pictures; idx++) {
-			picture_data = &(data->picture_data[idx]);
-			picture_param = &(picture_data->picture_param);
-			frame_type = picture_param->vop_fields.bits.vop_coding_type;
-
-			/* Is the first frame in the packed frames a reference frame? */
-			if (idx == 0 && frame_type != MP4_VOP_TYPE_I && frame_type
-					!= MP4_VOP_TYPE_P) {
-				ret = MIX_RESULT_DROPFRAME;;
-				LOG_E("The first frame in packed frame is not I or B\n");
-				goto cleanup;
-			}
-
-			if (idx != 0 && frame_type != MP4_VOP_TYPE_B) {
-				ret = MIX_RESULT_DROPFRAME;;
-				LOG_E("The frame other than the first one in packed frame is not B\n");
-				goto cleanup;
-			}
-
-			if (picture_data->vop_coded == 0) {
-				ret = MIX_RESULT_DROPFRAME;
-				LOG_E("In packed frame, there's unexpected skipped frame\n");
-				goto cleanup;
-			}
-		}
-
-		LOG_V("The packed frame looks valid\n");
-
-		/* Okay, the packed-frame looks ok. Now, we enqueue all the B frames */
-		bufentry
-				= (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue);
-		if (bufentry == NULL) {
-			ret = MIX_RESULT_FAIL;
-			LOG_E("There's data in in inputbufqueue\n");
-			goto cleanup;
-		}
-
-		LOG_V("Enqueue all B frames in the packed frame\n");
-
-		mix_buffer = bufentry->buf;
-		for (idx = 1; idx < data->number_pictures; idx++) {
-			picture_data = &(data->picture_data[idx]);
-			cloned_picture_data = mix_videoformat_mp42_clone_picture_data(
-					picture_data);
-			if (!cloned_picture_data) {
-				ret = MIX_RESULT_NO_MEMORY;
-				LOG_E("Failed to allocate memory for cloned picture_data\n");
-				goto cleanup;
-			}
-
-			packed_stream = g_malloc(sizeof(PackedStream));
-			if (packed_stream == NULL) {
-				ret = MIX_RESULT_NO_MEMORY;
-				LOG_E("Failed to allocate memory for packed_stream\n");
-				goto cleanup;
-			}
-
-			packed_stream->mix_buffer = mix_buffer_ref(mix_buffer);
-			packed_stream->picture_data = cloned_picture_data;
-
-			g_queue_push_tail(self->packed_stream_queue,
-					(gpointer) packed_stream);
-		}
-
-		LOG_V("Prepare to decode the first frame in the packed frame\n");
-
-		/* we are going to process the firs frame */
-		picture_data = &(data->picture_data[0]);
-
-	} else {
-
-		LOG_V("This is a single frame\n");
-
-		/* Okay, we only have one frame */
-		if (g_queue_is_empty(self->packed_stream_queue)) {
-			/* If the packed_stream_queue is empty, everything is fine */
-			picture_data = &(data->picture_data[0]);
-
-			LOG_V("There's no packed frame not processed yet\n");
-
-		} else {
-			/*	The packed_stream_queue is not empty, is this frame N-VOP? */
-			picture_data = &(data->picture_data[0]);
-			if (picture_data->vop_coded != 0) {
-
-				LOG_V("The packed frame queue is not empty, we will flush it\n");
-
-				/* 
-				 * Unexpected! We flush the packed_stream_queue and begin to process the 
-				 * current frame if it is not a B frame
-				 */
-				mix_videoformat_mp42_flush_packed_stream_queue(
-						self->packed_stream_queue);
-
-				picture_param = &(picture_data->picture_param);
-				frame_type = picture_param->vop_fields.bits.vop_coding_type;
-
-				if (frame_type == MP4_VOP_TYPE_B) {
-					ret = MIX_RESULT_DROPFRAME;
-					LOG_E("The frame right after packed frame is B frame!\n");
-					goto cleanup;
-				}
-
-			} else {
-				/*	This is N-VOP, process B frame from the packed_stream_queue */
-				PackedStream *packed_stream = NULL;
-
-				LOG_V("N-VOP found, we ignore it and start to process the B frame from the packed frame queue\n");
-
-				packed_stream = (PackedStream *) g_queue_pop_head(
-						self->packed_stream_queue);
-				picture_data = packed_stream->picture_data;
-				mix_buffer = packed_stream->mix_buffer;
-				g_free(packed_stream);
-				is_from_queued_data = TRUE;
-			}
-		}
-	}
-
-	picture_param = &(picture_data->picture_param);
-	iq_matrix_buffer = &(picture_data->iq_matrix_buffer);
-
-	if (picture_param == NULL) {
-		ret = MIX_RESULT_NULL_PTR;
-		LOG_E("picture_param == NULL\n");
-		goto cleanup;
-	}
-
-	/* If the frame type is not I, P or B */
-	frame_type = picture_param->vop_fields.bits.vop_coding_type;
-	if (frame_type != MP4_VOP_TYPE_I && frame_type != MP4_VOP_TYPE_P
-			&& frame_type != MP4_VOP_TYPE_B) {
-		ret = MIX_RESULT_FAIL;
-		LOG_E("frame_type is not I, P or B. frame_type = %d\n", frame_type);
-		goto cleanup;
-	}
-
-	/*
-	 * This is a skipped frame (vop_coded = 0)
-	 * Please note that this is not a N-VOP (DivX).
-	 */
-	if (picture_data->vop_coded == 0) {
-
-		MixVideoFrame *skip_frame = NULL;
-		gulong frame_id = VA_INVALID_SURFACE;
-
-		LOG_V("vop_coded == 0\n");
-		if (self->last_frame == NULL) {
-			LOG_W("Previous frame is NULL\n");
-
-			/*
-			 * We shouldn't get a skipped frame
-			 * before we are able to get a real frame
-			 */
-			ret = MIX_RESULT_DROPFRAME;
-			goto cleanup;
-		}
-
-		skip_frame = mix_videoframe_new();
-		ret = mix_videoframe_set_is_skipped(skip_frame, TRUE);
-		mix_videoframe_ref(self->last_frame);
-
-		ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id);
-		ret = mix_videoframe_set_frame_id(skip_frame, frame_id);
-		ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P);
-		ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame);
-		ret = mix_videoframe_set_timestamp(skip_frame, timestamp);
-		ret = mix_videoframe_set_discontinuity(skip_frame, FALSE);
-
-		LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n",
-				(guint)skip_frame, (guint)frame_id, timestamp);
-
-		/* Release our input buffers */
-		ret = mix_videofmt_mp42_release_input_buffers(mix, timestamp);
-
-		/* Enqueue the skipped frame using frame manager */
-		ret = mix_framemanager_enqueue(mix->framemgr, skip_frame);
-		goto cleanup;
-	}
-
-	/*
-	 * Decide the number of buffer to use
-	 */
-
-	buffer_id_number = picture_data->number_slices * 2 + 2;
-	LOG_V("number_slices is %d, allocating %d buffer_ids\n",
-			picture_data->number_slices, buffer_id_number);
-
-	/*
-	 * Check for B frames after a seek
-	 * We need to have both reference frames in hand before we can decode a B frame
-	 * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME
-	 */
-	if (frame_type == MP4_VOP_TYPE_B) {
-
-		if (self->reference_frames[1] == NULL) {
-			LOG_W("Insufficient reference frames for B frame\n");
-			ret = MIX_RESULT_DROPFRAME;
-			goto cleanup;
-		}
-	}
-
-	buffer_ids = g_malloc(sizeof(VABufferID) * buffer_id_number);
-	if (buffer_ids == NULL) {
-		ret = MIX_RESULT_NO_MEMORY;
-		LOG_E("Failed to allocate buffer_ids!\n");
-		goto cleanup;
-	}
-
-	LOG_V("Getting a new surface\n");LOG_V("frame type is %d\n", frame_type);
-
-	/* Get a frame from the surface pool */
-	ret = mix_surfacepool_get(mix->surfacepool, &frame);
-	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to get frame from surface pool!\n");
-		goto cleanup;
-	}
-
-	/*
-	 * Set the frame type for the frame object (used in reordering by frame manager)
-	 */
-	ret = mix_videoframe_set_frame_type(frame, frame_type);
-	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to set frame type!\n");
-		goto cleanup;
-	}
-
-	/* If I or P frame, update the reference array */
-	if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) {
-		LOG_V("Updating forward/backward references for libva\n");
-
-		self->last_vop_coding_type = frame_type;
-		mix_videofmt_mp42_handle_ref_frames(mix, frame_type, frame);
-	}
-
-	LOG_V("Setting reference frames in picparams, frame_type = %d\n",
-			frame_type);
-
+    // update reference pictures
+    frame_type = pic_params->vop_fields.bits.vop_coding_type;
+    
 	switch (frame_type) {
 	case MP4_VOP_TYPE_I:
-		picture_param->forward_reference_picture = VA_INVALID_SURFACE;
-		picture_param->backward_reference_picture = VA_INVALID_SURFACE;
-		LOG_V("I frame, surface ID %u\n", (guint) frame->frame_id);
+		pic_params->forward_reference_picture = VA_INVALID_SURFACE;
+		pic_params->backward_reference_picture = VA_INVALID_SURFACE;
 		break;
+		
 	case MP4_VOP_TYPE_P:
-		picture_param-> forward_reference_picture
+		pic_params-> forward_reference_picture
 				= self->reference_frames[0]->frame_id;
-		picture_param-> backward_reference_picture = VA_INVALID_SURFACE;
-
-		LOG_V("P frame, surface ID %u, forw ref frame is %u\n",
-				(guint) frame->frame_id,
-				(guint) self->reference_frames[0]->frame_id);
+		pic_params-> backward_reference_picture = VA_INVALID_SURFACE;
 		break;
+		
 	case MP4_VOP_TYPE_B:
-
-		picture_param->vop_fields.bits.backward_reference_vop_coding_type
+		pic_params->vop_fields.bits.backward_reference_vop_coding_type
 				= self->last_vop_coding_type;
-
-		picture_param->forward_reference_picture
-				= self->reference_frames[1]->frame_id;
-		picture_param->backward_reference_picture
+		pic_params->forward_reference_picture
+				= self->reference_frames[1]->frame_id;				
+		pic_params->backward_reference_picture
 				= self->reference_frames[0]->frame_id;
-
-		LOG_V("B frame, surface ID %u, forw ref %d, back ref %d\n",
-				(guint) frame->frame_id,
-				(guint) picture_param->forward_reference_picture,
-				(guint) picture_param->backward_reference_picture);
 		break;
+		
 	case MP4_VOP_TYPE_S:
-		LOG_W("MP4_VOP_TYPE_S, Will never reach here\n");
+		pic_params-> forward_reference_picture
+				= self->reference_frames[0]->frame_id;
+		pic_params-> backward_reference_picture = VA_INVALID_SURFACE;
 		break;
 
 	default:
 		LOG_W("default, Will never reach here\n");
+		ret = MIX_RESULT_FAIL;
+		goto cleanup;
 		break;
 
 	}
+	
+    //Now for slices
 
-	/* Libva buffer set up */
-	va_display = mix->va_display;
-	va_context = mix->va_context;
+    LOG_V( "Creating libva picture parameter buffer\n");
 
-	LOG_V("Creating libva picture parameter buffer\n");
+    //First the picture parameter buffer
+    vret = vaCreateBuffer(
+        vadisplay, 
+        vacontext,
+        VAPictureParameterBufferType,
+        sizeof(VAPictureParameterBufferMPEG4),
+        1,
+        pic_params,
+        &buffer_ids[buffer_id_cnt]);
 
-	/* First the picture parameter buffer */
-	buffer_id_cnt = 0;
-	va_ret = vaCreateBuffer(va_display, va_context,
-			VAPictureParameterBufferType,
-			sizeof(VAPictureParameterBufferMPEG4), 1, picture_param,
-			&buffer_ids[buffer_id_cnt]);
-	buffer_id_cnt++;
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
 
-	if (va_ret != VA_STATUS_SUCCESS) {
-		ret = MIX_RESULT_FAIL;
-		LOG_E("Failed to create va buffer of type VAPictureParameterBufferMPEG4!\n");
+    buffer_id_cnt++;
+            
+    if (pic_params->vol_fields.bits.quant_type && self->iq_matrix_buf_sent == FALSE) 
+    {
+        LOG_V( "Creating libva IQMatrix buffer\n");
+        // only send IQ matrix for the first slice in the picture
+        vret = vaCreateBuffer(
+            vadisplay,
+            vacontext,
+            VAIQMatrixBufferType,
+            sizeof(VAIQMatrixBufferMPEG4),
+            1,
+            &(data->iq_matrix_buffer),
+            &buffer_ids[buffer_id_cnt]);
+    
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaCreateBuffer\n");
+            goto cleanup;
+        }
+        self->iq_matrix_buf_sent = TRUE;
+        buffer_id_cnt++;      
+    }           
+
+    vret = vaCreateBuffer(
+        vadisplay, 
+        vacontext,
+        VASliceParameterBufferType,
+        sizeof(VASliceParameterBufferMPEG4),
+        1,
+        slice_params,
+        &buffer_ids[buffer_id_cnt]);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
+
+    buffer_id_cnt++;
+
+
+    //Do slice data
+
+    //slice data buffer pointer
+    //Note that this is the original data buffer ptr;
+    // offset to the actual slice data is provided in
+    // slice_data_offset in VASliceParameterBufferMPEG4
+
+    vret = vaCreateBuffer(
+        vadisplay, 
+        vacontext,
+        VASliceDataBufferType,
+        slice_data->slice_size, //size
+        1,        //num_elements
+        slice_data->buffer_addr + slice_data->slice_offset,
+        &buffer_ids[buffer_id_cnt]);
+
+    buffer_id_cnt++;
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
+     
+    
+    LOG_V( "Calling vaRenderPicture\n");
+    
+    //Render the picture
+    vret = vaRenderPicture(
+        vadisplay,
+        vacontext,
+        buffer_ids,
+        buffer_id_cnt);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaRenderPicture\n");
+        goto cleanup;
+    }
+    
+
+cleanup:
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+
+MIX_RESULT mix_videofmt_mp42_decode_end(
+    MixVideoFormat *mix, 
+    gboolean drop_picture)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;    
+    VAStatus vret = VA_STATUS_SUCCESS;
+    MixVideoFormat* parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);  
+    
+    if (!parent->end_picture_pending)
+    {
+        if (parent->video_frame)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("Unexpected: video_frame is not unreferenced.\n");
+        }
+        goto cleanup;
+    }    
+
+    if (parent->video_frame == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("Unexpected: video_frame has been unreferenced.\n");
+        goto cleanup;
+        
+    }
+    vret = vaEndPicture(parent->va_display, parent->va_context);
+    
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_DROPFRAME;
+        LOG_E( "Video driver returned error from vaEndPicture\n");
+        goto cleanup;
+    }
+    
+#if 0	/* we don't call vaSyncSurface here, the call is moved to mix_video_render() */
+    
+    LOG_V( "Calling vaSyncSurface\n");
+
+    //Block until the surface has finished decoding
+    vret = vaSyncSurface(vadisplay, surface);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaSyncSurface\n");
+        goto cleanup;
+    }
+#endif
+
+    if (drop_picture)
+    {
+        // we are asked to drop this decoded picture
+        mix_videoframe_unref(parent->video_frame);
+        parent->video_frame = NULL;
+        goto cleanup;
+    }
+  
+    //Enqueue the decoded frame using frame manager
+    ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error enqueuing frame object\n");
+        goto cleanup;
+    }
+    else
+    {
+        // video frame is passed to frame manager
+        parent->video_frame = NULL;
+    }
+    
+cleanup:
+   if (parent->video_frame)
+   {
+        /* this always indicates an error */        
+        mix_videoframe_unref(parent->video_frame);
+        parent->video_frame = NULL;
+   }
+   parent->end_picture_pending = FALSE;
+   return ret;
+
+}
+
+
+MIX_RESULT mix_videofmt_mp42_decode_continue(
+    MixVideoFormat *mix, 
+    vbp_data_mp42 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    int i;   
+    gint frame_type = -1;
+    vbp_picture_data_mp42* pic_data = NULL;
+    VAPictureParameterBufferMPEG4* pic_params = NULL;
+    MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+
+    /*
+	 Packed Frame Assumption:
+
+	 1. In one packed frame, there is only one P or I frame and only one B frame.
+	 2. In a packed frame, there is no skipped frame (vop_coded = 0).
+	 3. For each packed frame, one N-VOP frame follows the packed frame (possibly not immediately).
+	 4. An N-VOP frame is a frame with vop_coded = 0.
+	 5. The timestamp of the N-VOP frame is used for the P or I frame in the packed frame.
+
+
+	 I, P, {P, B}, B, N, P, N, I, ...
+	 I, P, {P, B}, N, P, N, I, ...
+
+	 The first N is a placeholder for the P frame in the packed frame.
+	 The second N is a skipped frame.
+	 */
+
+    pic_data = data->picture_data;
+	for (i = 0; i < data->number_picture_data; i++, pic_data = pic_data->next_picture_data)
+	{
+	    pic_params = &(pic_data->picture_param);
+	    frame_type = pic_params->vop_fields.bits.vop_coding_type;
+    	if (frame_type == MP4_VOP_TYPE_S && pic_params->no_of_sprite_warping_points > 1)
+    	{
+    	    // hardware only supports up to one warping point (stationary or translation)
+    	    LOG_E("sprite with %d warping points is not supported by HW.\n",
+    	        pic_params->no_of_sprite_warping_points);    	        
+    	    return MIX_RESULT_DROPFRAME;
+    	}
+        
+    	if (pic_data->vop_coded == 0)
+    	{
+    	    // this should never happen
+    	    LOG_E("VOP is not coded.\n");
+    	    return MIX_RESULT_DROPFRAME;
+    	}
+    	
+        if (pic_data->new_picture_flag == 1 || mix->end_picture_pending == FALSE)
+        {
+            if (pic_data->new_picture_flag == 0)
+            {
+                LOG_W("First slice of picture is lost!\n");
+            }
+            
+            gulong surface = 0;                    
+            if (mix->end_picture_pending)
+            {
+                // this indicates the start of a new frame in the packed frame
+                LOG_V("packed frame is found.\n");
+
+                // Update timestamp for packed frame as timestamp is for the B frame!
+                if (mix->video_frame && pic_params->vop_time_increment_resolution)
+                {
+                    guint64 ts, ts_inc;
+                    mix_videoframe_get_timestamp(mix->video_frame, &ts);
+                    ts_inc= self->last_vop_time_increment - pic_data->vop_time_increment + 
+                        pic_params->vop_time_increment_resolution;
+                    ts_inc = ts_inc % pic_params->vop_time_increment_resolution;
+                    LOG_V("timestamp is incremented by %d at %d resolution.\n",
+                        ts_inc, pic_params->vop_time_increment_resolution);
+                    // convert to nano-second
+                    ts_inc = ts_inc * 1e9 / pic_params->vop_time_increment_resolution;
+                    LOG_V("timestamp of P frame in packed frame is updated from %"G_GINT64_FORMAT"  to %"G_GUINT64_FORMAT".\n",
+                        ts, ts + ts_inc);
+
+                    ts += ts_inc; 
+                    mix_videoframe_set_timestamp(mix->video_frame, ts);
+                }
+                
+                mix_videofmt_mp42_decode_end(mix, FALSE);
+                self->next_nvop_for_PB_frame = TRUE;
+            }     
+            if (self->next_nvop_for_PB_frame == TRUE && frame_type != MP4_VOP_TYPE_B)
+            {
+                LOG_E("The second frame in the packed frame is not B frame.\n");
+                self->next_nvop_for_PB_frame = FALSE;
+                return MIX_RESULT_DROPFRAME;
+            }
+            
+        	//Get a frame from the surface pool
+        	ret = mix_surfacepool_get(mix->surfacepool, &(mix->video_frame));
+        	if (ret != MIX_RESULT_SUCCESS)
+        	{
+        		LOG_E( "Error getting frame from surfacepool\n");
+        		return MIX_RESULT_FAIL;
+        	}
+	
+        	/* the following calls will always succeed */
+
+            // set frame type
+            if (frame_type == MP4_VOP_TYPE_S)
+            {
+                // sprite is treated as P frame in the display order
+                mix_videoframe_set_frame_type(mix->video_frame, MP4_VOP_TYPE_P);
+            }
+            else
+            {
+                mix_videoframe_set_frame_type(mix->video_frame, frame_type);
+            }
+            
+
+            // set frame structure
+            if (pic_data->picture_param.vol_fields.bits.interlaced)
+            {
+                // only MPEG-4 studio profile can have field coding. All other profiles 
+                // use frame coding only, i.e, no field VOP.  (see vop_structure in MP4 spec)
+                mix_videoframe_set_frame_structure(
+                    mix->video_frame, 
+                    VA_BOTTOM_FIELD | VA_TOP_FIELD);
+
+                LOG_W("Interlaced content, set frame structure to 3 (TOP | BOTTOM field) !\n");                                    
+            }        
+            else
+            {   
+                mix_videoframe_set_frame_structure(mix->video_frame, VA_FRAME_PICTURE);
+            }
+
+            //Set the discontinuity flag
+            mix_videoframe_set_discontinuity(
+                mix->video_frame, 
+                mix->discontinuity_frame_in_progress);
+
+            //Set the timestamp
+            mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp);	
+       
+            //Get our surface ID from the frame object
+            ret = mix_videoframe_get_frame_id(mix->video_frame, &surface);    
+            if (ret != MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "Error getting surface ID from frame object\n");
+                goto cleanup;
+            }    
+
+            /* If I or P frame, update the reference array */
+        	if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) 
+        	{
+        		LOG_V("Updating forward/backward references for libva\n");
+
+        		self->last_vop_coding_type = frame_type;
+        		self->last_vop_time_increment = pic_data->vop_time_increment;
+        		mix_videofmt_mp42_handle_ref_frames(mix, frame_type, mix->video_frame);
+                if (self->last_frame != NULL) 
+                {
+                    mix_videoframe_unref(self->last_frame);
+                }
+                self->last_frame = mix->video_frame;
+                mix_videoframe_ref(self->last_frame);
+        	}        
+    	
+            //Now we can begin the picture
+            vret = vaBeginPicture(mix->va_display, mix->va_context, surface);
+            if (vret != VA_STATUS_SUCCESS)
+            {
+                ret = MIX_RESULT_FAIL;
+                LOG_E( "Video driver returned error from vaBeginPicture\n");
+                goto cleanup;
+            }            
+
+            // vaBeginPicture needs a matching vaEndPicture 
+            mix->end_picture_pending = TRUE;
+            self->iq_matrix_buf_sent = FALSE;
+        }
+        
+        
+        ret = mix_videofmt_mp42_decode_a_slice(mix, data, pic_data);
+		if (ret != 	MIX_RESULT_SUCCESS)
+    	{
+	    	LOG_E( "mix_videofmt_mp42_decode_a_slice failed, error =  %#X.", ret);
+		    goto cleanup;
+		}		
+	}
+
+cleanup:
+    // nothing to cleanup;
+
+    return ret;
+}
+
+
+MIX_RESULT mix_videofmt_mp42_decode_begin(
+    MixVideoFormat *mix, 
+    vbp_data_mp42* data)
+{
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;	
+	gint frame_type = -1;
+    VAPictureParameterBufferMPEG4* pic_params = NULL;
+    MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+    vbp_picture_data_mp42 *pic_data = NULL;
+
+    pic_data = data->picture_data;
+    pic_params = &(pic_data->picture_param);
+    frame_type = pic_params->vop_fields.bits.vop_coding_type;
+    
+    if (self->next_nvop_for_PB_frame)
+    {
+        // if we are waiting for n-vop for packed frame, and the new frame is coded, the coding type 
+        // of this frame must be B. 
+        // for example: {PB} B N P B B P...
+        if (pic_data->vop_coded == 1 && frame_type != MP4_VOP_TYPE_B)
+        {
+            LOG_E("Invalid coding type while waiting for n-vop for packed frame.\n");
+            // timestamp of P frame in the queue is not correct
+            mix_framemanager_flush(mix->framemgr);
+            self->next_nvop_for_PB_frame = FALSE;
+        }
+    }
+    
+    if (pic_data->vop_coded == 0)
+    {
+        if (self->last_frame == NULL)
+        {
+            LOG_E("The forward reference frame is NULL, couldn't reconstruct skipped frame.\n");
+            mix_framemanager_flush(mix->framemgr);
+            self->next_nvop_for_PB_frame = FALSE;
+            return MIX_RESULT_DROPFRAME;
+        }
+        
+        if (self->next_nvop_for_PB_frame)
+        {
+            // P frame is already in queue, just need to update time stamp.
+            mix_videoframe_set_timestamp(self->last_frame, mix->current_timestamp);
+            self->next_nvop_for_PB_frame = FALSE;
+        }
+        else
+        {
+            // handle skipped frame
+            MixVideoFrame *skip_frame = NULL;
+            gulong frame_id = VA_INVALID_SURFACE;
+	
+    		skip_frame = mix_videoframe_new();
+    		ret = mix_videoframe_set_is_skipped(skip_frame, TRUE);
+    		ret = mix_videoframe_get_frame_id(self->last_frame, &frame_id);
+    		ret = mix_videoframe_set_frame_id(skip_frame, frame_id);
+    		ret = mix_videoframe_set_frame_type(skip_frame, MP4_VOP_TYPE_P);
+    		ret = mix_videoframe_set_real_frame(skip_frame, self->last_frame);
+    		// add a reference as skip_frame holds the last_frame.
+    		mix_videoframe_ref(self->last_frame);
+    		ret = mix_videoframe_set_timestamp(skip_frame, mix->current_timestamp);
+    		ret = mix_videoframe_set_discontinuity(skip_frame, FALSE);
+
+    		LOG_V("Processing skipped frame %x, frame_id set to %d, ts %"G_GINT64_FORMAT"\n",
+    				(guint)skip_frame, (guint)frame_id, mix->current_timestamp);
+
+    		/* Enqueue the skipped frame using frame manager */
+    		ret = mix_framemanager_enqueue(mix->framemgr, skip_frame);         
+        }
+
+        if (data->number_picture_data > 1)
+        {
+            LOG_E("Unexpected to have more picture data following a not-coded VOP.\n");
+            //picture data is thrown away. No issue if the picture data is for an N-VOP; if it is for a
+            // coded picture, a frame is lost.
+        }
+        return MIX_RESULT_SUCCESS;
+    }
+    else
+    {     
+        /*
+             * Check for B frames after a seek
+             * We need to have both reference frames in hand before we can decode a B frame
+             * If we don't have both reference frames, we must return MIX_RESULT_DROPFRAME
+             */
+        if (frame_type == MP4_VOP_TYPE_B) 
+        {        
+            if (self->reference_frames[0] == NULL ||
+                self->reference_frames[1] == NULL) 
+            {
+                LOG_W("Insufficient reference frames for B frame\n");
+                return MIX_RESULT_DROPFRAME;
+            }
+        }
+        else if (frame_type == MP4_VOP_TYPE_P || frame_type == MP4_VOP_TYPE_S)
+        {
+            if (self->reference_frames[0] == NULL)
+            {
+                LOG_W("Reference frames for P/S frame is missing\n");
+                return MIX_RESULT_DROPFRAME;
+            }
+        }
+        
+        // all sanity check passes, continue decoding through mix_videofmt_mp42_decode_continue
+        ret = mix_videofmt_mp42_decode_continue(mix, data);
+    }  
+   
+	return ret;
+
+}
+
+
+MIX_RESULT mix_videofmt_mp42_decode_a_buffer(
+    MixVideoFormat *mix, 
+    MixBuffer * bufin,
+    guint64 ts,
+    gboolean discontinuity) 
+{
+    uint32 pret = 0;
+    MixVideoFormat *parent = NULL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	vbp_data_mp42 *data = NULL;
+
+    LOG_V( "Begin\n");
+
+    parent = MIX_VIDEOFORMAT(mix);
+
+	pret = vbp_parse(parent->parser_handle, 
+		bufin->data, 
+		bufin->size,
+		FALSE);
+        
+	if (pret != VBP_OK)
+    {
+        ret = MIX_RESULT_DROPFRAME;
+        LOG_E( "vbp_parse failed.\n");
+        goto cleanup;
+    }
+    else
+    {
+        LOG_V("vbp_parse succeeded.\n");
+    }
+
+	//query for data
+	pret = vbp_query(parent->parser_handle, (void *) &data);
+
+	if ((pret != VBP_OK) || (data == NULL))
+	{
+	    // never happen!
+	    ret = MIX_RESULT_FAIL;
+	    LOG_E( "vbp_query failed.\n");
+        goto cleanup;
+	}
+    else
+    {
+        LOG_V("vbp_query succeeded.\n");
+    }
+
+    if (parent->va_initialized == FALSE)
+    {    
+        mix_videofmt_mp42_update_config_params(parent, data);
+        
+        LOG_V("try initializing VA...\n");
+        ret = mix_videofmt_mp42_initialize_va(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_mp42_initialize_va failed.\n");
+            goto cleanup; 
+        }
+    }
+
+    // check if any slice is parsed, we may just receive configuration data
+    if (data->number_picture_data == 0)
+    {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_V("slice is not available.\n");
+        goto cleanup;      
+    }
+
+    guint64 last_ts = parent->current_timestamp;    
+    parent->current_timestamp = ts;
+    parent->discontinuity_frame_in_progress = discontinuity;
+
+    if (last_ts != ts)
+    {
+		// finish decoding the last frame
+		ret = mix_videofmt_mp42_decode_end(parent, FALSE);
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_mp42_decode_end failed.\n");
+            goto cleanup; 
+        }
+
+        // start decoding a new frame
+		ret = mix_videofmt_mp42_decode_begin(parent, data); 
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_mp42_decode_begin failed.\n");
+            goto cleanup; 
+        }        
+    }
+    else
+    {
+        ret = mix_videofmt_mp42_decode_continue(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_mp42_decode_continue failed.\n");
+            goto cleanup; 
+        }
+    }
+
+cleanup:
+
+    LOG_V( "End\n");
+
+	return ret;
+}
+
+
+
+MIX_RESULT mix_videofmt_mp42_initialize(
+    MixVideoFormat *mix, 
+	MixVideoConfigParamsDec * config_params,
+    MixFrameManager * frame_mgr,
+	MixBufferPool * input_buf_pool,
+	MixSurfacePool ** surface_pool,
+	VADisplay va_display ) 
+{
+	uint32 pret = 0;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	enum _vbp_parser_type ptype = VBP_MPEG4;
+	vbp_data_mp42 *data = NULL;
+	MixVideoFormat *parent = NULL;
+	MixIOVec *header = NULL;
+	
+	if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
+	{
+		LOG_E( "NUll pointer passed in\n");
+		return MIX_RESULT_NULL_PTR;
+	}
+
+	LOG_V( "Begin\n");
+
+	/* Chainup parent method. */
+
+	if (parent_class->initialize) {
+		ret = parent_class->initialize(mix, config_params,
+				frame_mgr, input_buf_pool, surface_pool, 
+				va_display);
+	}
+
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+		LOG_E( "Error initializing\n");
+		return ret;
+	}
+
+	if (!MIX_IS_VIDEOFORMAT_MP42(mix))
+		return MIX_RESULT_INVALID_PARAM;
+
+	parent = MIX_VIDEOFORMAT(mix);
+	//MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
+
+	LOG_V( "Locking\n");
+	//From now on, we exit this function through cleanup:
+	g_mutex_lock(parent->objectlock);
+
+	parent->surfacepool = mix_surfacepool_new();
+	*surface_pool = parent->surfacepool;
+
+	if (parent->surfacepool == NULL)
+	{
+		ret = MIX_RESULT_NO_MEMORY;
+		LOG_E( "parent->surfacepool == NULL.\n");
 		goto cleanup;
 	}
 
-	LOG_V("Creating libva VAIQMatrixBufferMPEG4 buffer\n");
+    ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
+            &parent->extra_surfaces);
 
-	if (picture_param->vol_fields.bits.quant_type) {
-		va_ret = vaCreateBuffer(va_display, va_context, VAIQMatrixBufferType,
-				sizeof(VAIQMatrixBufferMPEG4), 1, iq_matrix_buffer,
-				&buffer_ids[buffer_id_cnt]);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+    	ret = MIX_RESULT_FAIL;
+    	LOG_E( "Cannot get extra surface allocation setting\n");
+    	goto cleanup;
+    }    
 
-		if (va_ret != VA_STATUS_SUCCESS) {
-			ret = MIX_RESULT_FAIL;
-			LOG_E("Failed to create va buffer of type VAIQMatrixBufferType!\n");
-			goto cleanup;
-		}
-		buffer_id_cnt++;
+	//Load the bitstream parser
+	pret = vbp_open(ptype, &(parent->parser_handle));
+
+    if (!(pret == VBP_OK))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error opening parser\n");
+		goto cleanup;
+	}
+	LOG_V( "Opened parser\n");
+
+
+    ret = mix_videoconfigparamsdec_get_header(config_params, &header);
+    
+    if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
+    {
+        // Delay initializing VA if codec configuration data is not ready, but don't return an error.
+        ret = MIX_RESULT_SUCCESS;
+        LOG_W( "Codec data is not available in the configuration parameter.\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);
+
+	pret = vbp_parse(parent->parser_handle, header->data, 
+			header->data_size, TRUE);
+
+    if (pret != VBP_OK)
+    {
+    	ret = MIX_RESULT_FAIL;
+    	LOG_E( "Error parsing header data\n");
+    	goto cleanup;
+    }
+
+    LOG_V( "Parsed header\n");
+
+   //Get the header data and save
+    pret = vbp_query(parent->parser_handle, (void *)&data);
+
+	if ((pret != VBP_OK) || (data == NULL))
+	{
+		ret = MIX_RESULT_FAIL;
+		LOG_E( "Error reading parsed header data\n");
+		goto cleanup;
 	}
 
-	/* Now for slices */
-	for (jdx = 0; jdx < picture_data->number_slices; jdx++) {
+	LOG_V( "Queried parser for header data\n");
+	
+    mix_videofmt_mp42_update_config_params(mix, data);
 
-		slice_data = &(picture_data->slice_data[jdx]);
-		slice_param = &(slice_data->slice_param);
+    ret = mix_videofmt_mp42_initialize_va(mix, data);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error initializing va. \n");
+        goto cleanup;
+    }
 
-		LOG_V(
-				"Creating libva slice parameter buffer, for slice %d\n",
-				jdx);
 
-		/* Do slice parameters */
-		va_ret = vaCreateBuffer(va_display, va_context,
-				VASliceParameterBufferType,
-				sizeof(VASliceParameterBufferMPEG4), 1, slice_param,
-				&buffer_ids[buffer_id_cnt]);
-		if (va_ret != VA_STATUS_SUCCESS) {
-			ret = MIX_RESULT_FAIL;
-			LOG_E("Failed to create va buffer of type VASliceParameterBufferMPEG4!\n");
-			goto cleanup;
-		}
-		buffer_id_cnt++;
-
-		/* Do slice data */
-		va_ret = vaCreateBuffer(va_display, va_context, VASliceDataBufferType,
-				slice_data->slice_size, 1, slice_data->buffer_addr
-						+ slice_data->slice_offset, &buffer_ids[buffer_id_cnt]);
-		if (va_ret != VA_STATUS_SUCCESS) {
-			ret = MIX_RESULT_FAIL;
-			LOG_E("Failed to create va buffer of type VASliceDataBufferType!\n");
-			goto cleanup;
-		}
-		buffer_id_cnt++;
-	}
-
-	/* Get our surface ID from the frame object */
-	ret = mix_videoframe_get_frame_id(frame, &surface);
+cleanup:
 	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to get frame id: ret = 0x%x\n", ret);
-		goto cleanup;
+	    if (parent->parser_handle)
+	    {
+            pret = vbp_close(parent->parser_handle);
+            parent->parser_handle = NULL;
+    	}
+        parent->initialized = FALSE;
+
+	} else {
+        parent->initialized = TRUE;
+	}
+    
+	if (header != NULL)
+	{
+		if (header->data != NULL)
+			g_free(header->data);
+		g_free(header);
+		header = NULL;
 	}
 
-	LOG_V("Calling vaBeginPicture\n");
 
-	/* Now we can begin the picture */
-	va_ret = vaBeginPicture(va_display, va_context, surface);
-	if (va_ret != VA_STATUS_SUCCESS) {
-		ret = MIX_RESULT_FAIL;
-		LOG_E("Failed to vaBeginPicture(): va_ret = 0x%x\n", va_ret);
-		goto cleanup;
+	LOG_V( "Unlocking\n");
+    g_mutex_unlock(parent->objectlock);
+
+
+	return ret;
+}
+
+
+MIX_RESULT mix_videofmt_mp42_decode(MixVideoFormat *mix, MixBuffer * bufin[],
+                gint bufincnt, MixVideoDecodeParams * decode_params) {
+
+  	int i = 0;
+    MixVideoFormat *parent = NULL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	guint64 ts = 0;
+	gboolean discontinuity = FALSE;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL || bufin == NULL || decode_params == NULL )
+	{
+		LOG_E( "NUll pointer passed in\n");
+        return MIX_RESULT_NULL_PTR;
 	}
 
-	LOG_V("Calling vaRenderPicture\n");
+	/* Chainup parent method.
+		We are not chaining up to parent method for now.
+	 */
 
-	/* Render the picture */
-	va_ret = vaRenderPicture(va_display, va_context, buffer_ids, buffer_id_cnt);
-	if (va_ret != VA_STATUS_SUCCESS) {
-		ret = MIX_RESULT_FAIL;
-		LOG_E("Failed to vaRenderPicture(): va_ret = 0x%x\n", va_ret);
-		goto cleanup;
+#if 0
+    if (parent_class->decode) {
+        return parent_class->decode(mix, bufin, bufincnt, decode_params);
+	}
+#endif
+
+	if (!MIX_IS_VIDEOFORMAT_MP42(mix))
+		return MIX_RESULT_INVALID_PARAM;
+
+	parent = MIX_VIDEOFORMAT(mix);
+
+	ret = mix_videodecodeparams_get_timestamp(decode_params, &ts);
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+	    // never happen
+		return MIX_RESULT_FAIL;
 	}
 
-	LOG_V("Calling vaEndPicture\n");
-
-	/* End picture */
-	va_ret = vaEndPicture(va_display, va_context);
-	if (va_ret != VA_STATUS_SUCCESS) {
-		ret = MIX_RESULT_FAIL;
-		LOG_E("Failed to vaEndPicture(): va_ret = 0x%x\n", va_ret);
-		goto cleanup;
+	ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity);
+	if (ret != MIX_RESULT_SUCCESS)
+	{
+	    // never happen
+		return MIX_RESULT_FAIL;
 	}
 
-#if 0   /* we don't call vaSyncSurface here, the call is moved to mix_video_render() */
-	LOG_V("Calling vaSyncSurface\n");
+	//From now on, we exit this function through cleanup:
 
-	/* Decode the picture */
-	va_ret = vaSyncSurface(va_display, surface);
-	if (va_ret != VA_STATUS_SUCCESS) {
-		ret = MIX_RESULT_FAIL;
-		LOG_E("Failed to vaSyncSurface(): va_ret = 0x%x\n", va_ret);
-		goto cleanup;
-	}
-#endif 
+	LOG_V( "Locking\n");
+    g_mutex_lock(parent->objectlock);
 
-	/* Set the discontinuity flag */
-	mix_videoframe_set_discontinuity(frame, discontinuity);
+	LOG_I("ts after mix_videodecodeparams_get_timestamp() = %"G_GINT64_FORMAT"\n", ts);
 
-	/* Set the timestamp */
-	mix_videoframe_set_timestamp(frame, timestamp);
+	for (i = 0; i < bufincnt; i++)
+	{
+	    // decode a buffer at a time
+        ret = mix_videofmt_mp42_decode_a_buffer(
+            mix, 
+            bufin[i],
+            ts,
+            discontinuity);
+        
+		if (ret != MIX_RESULT_SUCCESS)
+		{
+			LOG_E("mix_videofmt_mp42_decode_a_buffer failed.\n");
+   			goto cleanup;
+		}        
+    }
 
-	LOG_V("Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp);
 
-	/* Enqueue the decoded frame using frame manager */
-	ret = mix_framemanager_enqueue(mix->framemgr, frame);
-	if (ret != MIX_RESULT_SUCCESS) {
-		LOG_E("Failed to mix_framemanager_enqueue()!\n");
-		goto cleanup;
-	}
+cleanup:
 
-	/* For I or P frames, save this frame off for skipped frame handling */
-	if ((frame_type == MP4_VOP_TYPE_I) || (frame_type == MP4_VOP_TYPE_P)) {
-		if (self->last_frame != NULL) {
-			mix_videoframe_unref(self->last_frame);
-		}
-		self->last_frame = frame;
-		mix_videoframe_ref(frame);
-	}
+	LOG_V( "Unlocking\n");
+ 	g_mutex_unlock(parent->objectlock);
 
-	ret = MIX_RESULT_SUCCESS;
-
-	cleanup:
-
-	if (ret != MIX_RESULT_SUCCESS && frame != NULL) {
-		mix_videoframe_unref(frame);
-	}
-
-	if (ret != MIX_RESULT_SUCCESS) {
-		mix_videoformat_mp42_flush_packed_stream_queue(
-				self->packed_stream_queue);
-	}
-
-	g_free(buffer_ids);
-	mix_videofmt_mp42_release_input_buffers(mix, timestamp);
-
-	if (is_from_queued_data) {
-		if (mix_buffer) {
-			mix_buffer_unref(mix_buffer);
-		}
-		mix_videoformat_mp42_free_picture_data(picture_data);
-	}
-
-	LOG_V("End\n");
+    LOG_V( "End\n");
 
 	return ret;
 }
@@ -1123,26 +1268,13 @@
 
 	MIX_RESULT ret = MIX_RESULT_SUCCESS;
 	MixVideoFormat_MP42 *self = MIX_VIDEOFORMAT_MP42(mix);
-	MixInputBufferEntry *bufentry = NULL;
 
 	LOG_V("Begin\n");
 
 	g_mutex_lock(mix->objectlock);
 
-	mix_videoformat_mp42_flush_packed_stream_queue(self->packed_stream_queue);
-
-	/*
-	 * Clear the contents of inputbufqueue
-	 */
-	while (!g_queue_is_empty(mix->inputbufqueue)) {
-		bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue);
-		if (bufentry == NULL) {
-			continue;
-		}
-
-		mix_buffer_unref(bufentry->buf);
-		g_free(bufentry);
-	}
+    // drop any decode-pending picture, and ignore return value
+     mix_videofmt_mp42_decode_end(mix, TRUE);
 
 	/*
 	 * Clear parse_in_progress flag and current timestamp
@@ -1150,16 +1282,21 @@
 	mix->parse_in_progress = FALSE;
 	mix->discontinuity_frame_in_progress = FALSE;
 	mix->current_timestamp = (guint64)-1;
+	self->next_nvop_for_PB_frame = FALSE;
 
-	{
-		gint idx = 0;
-		for (idx = 0; idx < 2; idx++) {
-			if (self->reference_frames[idx] != NULL) {
-				mix_videoframe_unref(self->reference_frames[idx]);
-				self->reference_frames[idx] = NULL;
-			}
+	gint idx = 0;
+	for (idx = 0; idx < 2; idx++) {
+		if (self->reference_frames[idx] != NULL) {
+			mix_videoframe_unref(self->reference_frames[idx]);
+			self->reference_frames[idx] = NULL;
 		}
 	}
+	if (self->last_frame)
+	{
+	    mix_videoframe_unref(self->last_frame);
+	    self->last_frame = NULL;
+	}
+	
 
 	/* Call parser flush */
 	vbp_flush(mix->parser_handle);
@@ -1174,8 +1311,6 @@
 MIX_RESULT mix_videofmt_mp42_eos(MixVideoFormat *mix) {
 
 	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	vbp_data_mp42 *data = NULL;
-	uint32 vbp_ret = 0;
 
 	LOG_V("Begin\n");
 
@@ -1189,29 +1324,10 @@
 
 	g_mutex_lock(mix->objectlock);
 
-	/* if a frame is in progress, process the frame */
-	if (mix->parse_in_progress) {
-		/* query for data */
-		vbp_ret = vbp_query(mix->parser_handle, (void *) &data);
-		LOG_V("vbp_query() returns 0x%x\n", vbp_ret);
-
-		if ((vbp_ret != VBP_OK) || (data == NULL)) {
-			ret = MIX_RESULT_FAIL;
-			LOG_E("vbp_ret != VBP_OK || data == NULL\n");
-			goto cleanup;
-		}
-
-		/* process and decode data */
-		ret = mix_videofmt_mp42_process_decode(mix, data,
-				mix->current_timestamp, mix->discontinuity_frame_in_progress);
-		mix->parse_in_progress = FALSE;
-
-	}
-
+    mix_videofmt_mp42_decode_end(mix, FALSE);
+	
 	ret = mix_framemanager_eos(mix->framemgr);
 
-	cleanup:
-
 	g_mutex_unlock(mix->objectlock);
 
 	LOG_V("End\n");
@@ -1294,125 +1410,6 @@
 MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix,
 		guint64 timestamp) {
 
-	MixInputBufferEntry *bufentry = NULL;
-	gboolean done = FALSE;
-
-	LOG_V("Begin\n");
-
-	if (mix == NULL) {
-		return MIX_RESULT_NULL_PTR;
-	}
-
-	/* Dequeue and release all input buffers for this frame */
-	LOG_V("Releasing all the MixBuffers for this frame\n");
-
-	/*
-	 * While the head of the queue has timestamp == current ts
-	 * dequeue the entry, unref the MixBuffer, and free the struct
-	 */
-	done = FALSE;
-	while (!done) {
-		bufentry
-				= (MixInputBufferEntry *) g_queue_peek_head(mix->inputbufqueue);
-		if (bufentry == NULL) {
-			break;
-		}
-
-		LOG_V("head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n",
-				(guint)bufentry->buf, timestamp, bufentry->timestamp);
-
-		if (bufentry->timestamp != timestamp) {
-			LOG_V("buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n",
-					(guint)bufentry->buf, timestamp, bufentry->timestamp);
-
-			done = TRUE;
-			break;
-		}
-
-		bufentry = (MixInputBufferEntry *) g_queue_pop_head(mix->inputbufqueue);
-		LOG_V("Unref this MixBuffers %x\n", (guint) bufentry->buf);
-
-		mix_buffer_unref(bufentry->buf);
-		g_free(bufentry);
-	}
-
-	LOG_V("End\n");
-
+    // not used, to be removed
 	return MIX_RESULT_SUCCESS;
 }
-
-vbp_picture_data_mp42 *mix_videoformat_mp42_clone_picture_data(
-		vbp_picture_data_mp42 *picture_data) {
-
-	gboolean succ = FALSE;
-
-	if (!picture_data) {
-		return NULL;
-	}
-
-	if (picture_data->number_slices == 0) {
-		return NULL;
-	}
-
-	vbp_picture_data_mp42 *cloned_picture_data = g_try_new0(
-			vbp_picture_data_mp42, 1);
-	if (cloned_picture_data == NULL) {
-		goto cleanup;
-	}
-
-	memcpy(cloned_picture_data, picture_data, sizeof(vbp_picture_data_mp42));
-
-	cloned_picture_data->number_slices = picture_data->number_slices;
-	cloned_picture_data->slice_data = g_try_new0(vbp_slice_data_mp42,
-			picture_data->number_slices);
-	if (cloned_picture_data->slice_data == NULL) {
-		goto cleanup;
-	}
-
-	memcpy(cloned_picture_data->slice_data, picture_data->slice_data,
-			sizeof(vbp_slice_data_mp42) * (picture_data->number_slices));
-
-	succ = TRUE;
-
-	cleanup:
-
-	if (!succ) {
-		mix_videoformat_mp42_free_picture_data(cloned_picture_data);
-		return NULL;
-	}
-
-	return cloned_picture_data;
-}
-
-void mix_videoformat_mp42_free_picture_data(vbp_picture_data_mp42 *picture_data) {
-	if (picture_data) {
-		if (picture_data->slice_data) {
-			g_free(picture_data->slice_data);
-		}
-		g_free(picture_data);
-	}
-}
-
-void mix_videoformat_mp42_flush_packed_stream_queue(GQueue *packed_stream_queue) {
-
-	PackedStream *packed_stream = NULL;
-
-	if (packed_stream_queue == NULL) {
-		return;
-	}
-	while (!g_queue_is_empty(packed_stream_queue)) {
-		packed_stream = (PackedStream *) g_queue_pop_head(packed_stream_queue);
-		if (packed_stream == NULL) {
-			continue;
-		}
-
-		if (packed_stream->picture_data) {
-			mix_videoformat_mp42_free_picture_data(packed_stream->picture_data);
-		}
-
-		if (packed_stream->mix_buffer) {
-			mix_buffer_unref(packed_stream->mix_buffer);
-		}
-		g_free(packed_stream);
-	}
-}
diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h
index 49a1299..fc80c95 100644
--- a/mix_video/src/mixvideoformat_mp42.h
+++ b/mix_video/src/mixvideoformat_mp42.h
@@ -40,8 +40,13 @@
 	MixVideoFrame * reference_frames[2];
 	MixVideoFrame * last_frame;
 	gint last_vop_coding_type;
+	guint last_vop_time_increment;
 
-	GQueue *packed_stream_queue;
+    /* indicate if future n-vop is a placeholder of a packed frame */
+    gboolean next_nvop_for_PB_frame;
+    
+    /* indicate if iq_matrix_buffer is sent to driver */
+    gboolean iq_matrix_buf_sent;
 };
 
 /**
diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c
index bf4d1f4..9f21a5a 100644
--- a/mix_video/src/mixvideoformat_vc1.c
+++ b/mix_video/src/mixvideoformat_vc1.c
@@ -178,7 +178,7 @@
 	guint height = 0;
 
 	guint i = 0;
-	guchar* p = header->data;
+	guchar* p = NULL;
 	MIX_RESULT res = MIX_RESULT_SUCCESS;
 
 	if (!config_params || !header)
@@ -187,6 +187,8 @@
 		return (MIX_RESULT_NULL_PTR);
 	}
 
+    p = header->data;
+
 	res = mix_videoconfigparamsdec_get_picture_res(
 		config_params,
 		&width,
@@ -240,6 +242,57 @@
 }
 
 
+MIX_RESULT mix_videofmt_vc1_update_config_params(
+    MixVideoFormat *mix,
+    vbp_data_vc1 *data)
+{
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+
+    if (parent->picture_width == 0 || parent->picture_height == 0)
+    {
+        parent->picture_width = data->se_data->CODED_WIDTH;
+        parent->picture_height = data->se_data->CODED_HEIGHT;
+
+        mix_videoconfigparamsdec_set_picture_res(
+            mix->config_params, 
+            parent->picture_width, 
+            parent->picture_height);        
+    }
+    
+
+    // scaling has been performed on the decoded image.
+    mix_videoconfigparamsdec_set_video_range(mix->config_params, 1);
+
+    uint8 color_matrix;
+    
+    switch (data->se_data->MATRIX_COEF)
+    {
+        case 1:
+            color_matrix = VA_SRC_BT709;
+            break;
+
+        // ITU-R BT.1700, ITU-R BT.601-5, and SMPTE 293M-1996.
+        case 6:
+            color_matrix = VA_SRC_BT601;
+            break;
+
+        default:
+            // unknown color matrix, set to 0 so color space flag will not be set.
+            color_matrix = 0;
+            break;        
+    }   
+    mix_videoconfigparamsdec_set_color_matrix(mix->config_params, color_matrix);
+
+    mix_videoconfigparamsdec_set_pixel_aspect_ratio(
+        mix->config_params,
+        data->se_data->ASPECT_HORIZ_SIZE,
+        data->se_data->ASPECT_VERT_SIZE);
+
+    return MIX_RESULT_SUCCESS;
+    
+}
+
+
 
 MIX_RESULT mix_videofmt_vc1_initialize(MixVideoFormat *mix, 
                 MixVideoConfigParamsDec * config_params,
@@ -368,6 +421,8 @@
 
 	LOG_V( "Queried parser for header data\n");
 
+    mix_videofmt_vc1_update_config_params(parent, data);
+
         //Time for libva initialization
 
         vadisplay = parent->va_display;
diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c
index f39f77f..f35fb32 100644
--- a/mix_video/src/mixvideoformatenc.c
+++ b/mix_video/src/mixvideoformatenc.c
@@ -30,8 +30,8 @@
 static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix);
 static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
 	MixVideoFormatEnc *mix, guint *max_size);
-MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, 
-	MixVideoConfigParamsEnc * config_params_enc, 
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix,
+	MixVideoConfigParamsEnc * config_params_enc,
 	MixEncParamsType params_type);
 
 
@@ -57,28 +57,52 @@
 	self->va_config = 0;
 	self->mime_type = NULL;
 	self->frame_rate_num= 0;
-	self->frame_rate_denom = 1;	
+	self->frame_rate_denom = 1;
 	self->picture_width = 0;
 	self->picture_height = 0;
+
+	/*
+	* bitrate control
+	*/
 	self->initial_qp = 0;
 	self->min_qp = 0;
-	self->intra_period = 0;
+	self->target_percentage = 95;
+	self->window_size = 500;
 	self->bitrate = 0;
+
+	self->intra_period = 0;
 	self->share_buf_mode = FALSE;
 	self->ci_frame_id = NULL;
 	self->ci_frame_num = 0;
        self->drawable = 0x0;
-       self->need_display = TRUE;	   
+       self->need_display = TRUE;
 
       self->va_rcmode = VA_RC_NONE;
       self->va_format = VA_RT_FORMAT_YUV420;
       self->va_entrypoint = VAEntrypointEncSlice;
-      self->va_profile = VAProfileH264Baseline;	   
+      self->va_profile = VAProfileH264Baseline;
       self->level = 30;
+
+      self->refresh_type = MIX_VIDEO_NONIR;
       self->CIR_frame_cnt = 15; //default value
+
+      /*
+	  * Parameters for AIR intra refresh mode
+	  */
+      self->air_params.air_MBs = 0;
+      self->air_params.air_threshold = 0;
+      self->air_params.air_auto = 0;
+
+      self->max_slice_size = 0;
+
       self->force_key_frame = FALSE;
       self->new_header_required = FALSE;
-	
+      self->render_mss_required = FALSE;
+      self->render_QP_required = FALSE;
+      self->render_AIR_required = FALSE;
+      self->render_framerate_required = FALSE;
+      self->render_bitrate_required = FALSE;
+
 	//add more properties here
 }
 
@@ -112,13 +136,13 @@
 	/* clean up here. */
 
     if (obj == NULL) {
-        LOG_E( "obj == NULL\n");				
-        return;	
+        LOG_E( "obj == NULL\n");
+        return;
     }
-	
-    MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj); 
-    
-    LOG_V( "\n");		
+
+    MixVideoFormatEnc *mix = MIX_VIDEOFORMATENC(obj);
+
+    LOG_V( "\n");
 
     if(mix->objectlock) {
         g_mutex_free(mix->objectlock);
@@ -128,9 +152,9 @@
 	//MiVideo object calls the _deinitialize() for frame manager
 	if (mix->framemgr)
 	{
-	  mix_framemanager_unref(mix->framemgr);  
+	  mix_framemanager_unref(mix->framemgr);
 	  mix->framemgr = NULL;
-	}	
+	}
 
 	if (mix->mime_type)
     {
@@ -139,10 +163,10 @@
         else
             g_string_free(mix->mime_type, FALSE);
     }
-    
+
 	if (mix->ci_frame_id)
         g_free (mix->ci_frame_id);
-	
+
 
 	if (mix->surfacepool)
 	{
@@ -168,7 +192,7 @@
 /* Default vmethods implementation */
 static MIX_RESULT mix_videofmtenc_getcaps_default(MixVideoFormatEnc *mix,
         GString *msg) {
-    LOG_V( "Begin\n");	
+    LOG_V( "Begin\n");
     return MIX_RESULT_SUCCESS;
 }
 
@@ -178,16 +202,16 @@
         MixBufferPool * input_buf_pool,
         MixSurfacePool ** surface_pool,
         VADisplay va_display) {
-    
-    LOG_V( "Begin\n");	
-	
+
+    LOG_V( "Begin\n");
+
     if (mix == NULL ||config_params_enc == NULL) {
-        LOG_E( 
-                "!mix || config_params_enc == NULL\n");				
+        LOG_E(
+                "!mix || config_params_enc == NULL\n");
         return MIX_RESULT_NULL_PTR;
     }
-	
-    
+
+
     MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
 	//TODO check return values of getter fns for config_params
@@ -195,233 +219,298 @@
 	g_mutex_lock(mix->objectlock);
 
 	mix->framemgr = frame_mgr;
-	mix_framemanager_ref(mix->framemgr);	
+	mix_framemanager_ref(mix->framemgr);
 
 	mix->va_display = va_display;
-	
-    LOG_V( 
+
+    LOG_V(
             "Start to get properities from parent params\n");
-    
+
     /* get properties from param (parent) Object*/
-    ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, 
+    ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc,
             &(mix->bitrate));
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_bps\n");			            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_bps\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
     }
-    
+
     ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc,
             &(mix->frame_rate_num), &(mix->frame_rate_denom));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_frame_rate\n");            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_frame_rate\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
-    
+    }
+
     ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc,
             &(mix->initial_qp));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
-        
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_init_qp\n");               
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_init_qp\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
-    
-    
+    }
+
+
     ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc,
             &(mix->min_qp));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_min_qp\n");             
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_min_qp\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }				  
-    
+    }
+
+    ret = mix_videoconfigparamsenc_get_target_percentage(config_params_enc,
+            &(mix->target_percentage));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_target_percentage\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_window_size (config_params_enc,
+            &(mix->window_size));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_window_size\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
     ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc,
             &(mix->intra_period));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
-        
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_intra_period\n");               
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_intra_period\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }				  
-    
+    }
+
     ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc,
             &(mix->picture_width), &(mix->picture_height));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_picture_res\n");              
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_picture_res\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }	
-    
+    }
+
     ret = mix_videoconfigparamsenc_get_share_buf_mode (config_params_enc,
             &(mix->share_buf_mode));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n");                
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_share_buf_mode\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
-    
-    
+    }
+
+
     ret = mix_videoconfigparamsenc_get_ci_frame_info (config_params_enc,
             &(mix->ci_frame_id),  &(mix->ci_frame_num));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_ci_frame_info\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }			
-    
-    
+    }
+
+
     ret = mix_videoconfigparamsenc_get_drawable (config_params_enc,
             &(mix->drawable));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_drawable\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_drawable\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }	
+    }
 
     ret = mix_videoconfigparamsenc_get_need_display (config_params_enc,
             &(mix->need_display));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_drawable\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_drawable\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
+    }
 
     ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc,
             &(mix->va_rcmode));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_rc_mode\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_rc_mode\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
+    }
 
     ret = mix_videoconfigparamsenc_get_raw_format (config_params_enc,
             &(mix->va_format));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_format\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_format\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
+    }
 
     ret = mix_videoconfigparamsenc_get_profile (config_params_enc,
             (MixProfile *) &(mix->va_profile));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_profile\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_profile\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }			
+    }
 
     ret = mix_videoconfigparamsenc_get_level (config_params_enc,
             &(mix->level));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_level\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_level\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }			
+    }
 
-    ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, 
+    ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc,
             &(mix->CIR_frame_cnt));
-    
+
     if (ret != MIX_RESULT_SUCCESS) {
         //TODO cleanup
 
-        LOG_E( 
-                "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");                            
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
-    }		
+    }
 
-    
-    LOG_V( 
+
+    ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc,
+            &(mix->max_slice_size));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_max_slice_size\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+
+    ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc,
+            &(mix->refresh_type));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_refresh_type\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc,
+            &(mix->air_params));
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_get_AIR_params\n");
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_V(
             "======Video Encode Parent Object properities======:\n");
-    
-    LOG_I( "mix->bitrate = %d\n", 
+
+    LOG_I( "mix->bitrate = %d\n",
             mix->bitrate);
-    LOG_I( "mix->frame_rate = %d\n", 
-            mix->frame_rate_denom / mix->frame_rate_denom);		
-    LOG_I( "mix->initial_qp = %d\n", 
-            mix->initial_qp);		
-    LOG_I( "mix->min_qp = %d\n", 
-            mix->min_qp);		
-    LOG_I( "mix->intra_period = %d\n", 
-            mix->intra_period);		
-    LOG_I( "mix->picture_width = %d\n", 
-            mix->picture_width);		
-    LOG_I( "mix->picture_height = %d\n", 
-            mix->picture_height);	
-    LOG_I( "mix->share_buf_mode = %d\n", 
-            mix->share_buf_mode);		
-    LOG_I( "mix->ci_frame_id = 0x%08x\n", 
-            mix->ci_frame_id);		
-    LOG_I( "mix->ci_frame_num = %d\n", 
-            mix->ci_frame_num);	
-    LOG_I( "mix->drawable = 0x%08x\n", 
-            mix->drawable);	
-    LOG_I( "mix->need_display = %d\n", 
-            mix->need_display);	
-    LOG_I( "mix->va_format = %d\n", 
-            mix->va_format);	
-    LOG_I( "mix->va_profile = %d\n", 
-            mix->va_profile);	
-    LOG_I( "mix->va_rcmode = %d\n\n", 
-            mix->va_rcmode);		
-    
+    LOG_I( "mix->frame_rate = %d\n",
+            mix->frame_rate_denom / mix->frame_rate_denom);
+    LOG_I( "mix->initial_qp = %d\n",
+            mix->initial_qp);
+    LOG_I( "mix->min_qp = %d\n",
+            mix->min_qp);
+    LOG_I( "mix->intra_period = %d\n",
+            mix->intra_period);
+    LOG_I( "mix->picture_width = %d\n",
+            mix->picture_width);
+    LOG_I( "mix->picture_height = %d\n",
+            mix->picture_height);
+    LOG_I( "mix->share_buf_mode = %d\n",
+            mix->share_buf_mode);
+    LOG_I( "mix->ci_frame_id = 0x%08x\n",
+            mix->ci_frame_id);
+    LOG_I( "mix->ci_frame_num = %d\n",
+            mix->ci_frame_num);
+    LOG_I( "mix->drawable = 0x%08x\n",
+            mix->drawable);
+    LOG_I( "mix->need_display = %d\n",
+            mix->need_display);
+    LOG_I( "mix->va_format = %d\n",
+            mix->va_format);
+    LOG_I( "mix->va_profile = %d\n",
+            mix->va_profile);
+    LOG_I( "mix->va_rcmode = %d\n\n",
+            mix->va_rcmode);
+    LOG_I( "mix->CIR_frame_cnt = %d\n\n",
+            mix->CIR_frame_cnt);
+    LOG_I( "mix->max_slice_size = %d\n\n",
+            mix->max_slice_size);
+
     g_mutex_unlock(mix->objectlock);
-    
-    LOG_V( "end\n");	
-    
+
+    LOG_V( "end\n");
+
     return MIX_RESULT_SUCCESS;
 }
 
@@ -450,24 +539,24 @@
 	MixVideoFormatEnc *mix, guint *max_size) {
 
 
-	return MIX_RESULT_SUCCESS;	
+	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, 
-	MixVideoConfigParamsEnc * config_params_enc, 
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix,
+	MixVideoConfigParamsEnc * config_params_enc,
 	MixEncParamsType params_type) {
 
-	MIX_RESULT ret = MIX_RESULT_SUCCESS;	
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
 	if (mix == NULL ||config_params_enc == NULL) {
-		LOG_E( 
-			"!mix || config_params_enc == NULL\n");				
+		LOG_E(
+			"!mix || config_params_enc == NULL\n");
 		return MIX_RESULT_NULL_PTR;
 	}
 
 
 	MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
-	
+
 
 	g_mutex_lock(mix->objectlock);
 
@@ -480,38 +569,119 @@
 			if (ret != MIX_RESULT_SUCCESS) {
 				//TODO cleanup
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_bit_rate\n");                            
+					"Failed to mix_videoconfigparamsenc_get_bit_rate\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}	
+			}
 
-			mix->new_header_required = TRUE;
+			mix->render_bitrate_required = TRUE;
 		}
 			break;
-		case MIX_ENC_PARAMS_SLICE_SIZE:
+
+		case MIX_ENC_PARAMS_INIT_QP:
+		{
+			ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_init_qp\n");
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}
+
+			mix->render_bitrate_required = TRUE;
+		}
+			break;
+
+		case MIX_ENC_PARAMS_MIN_QP:
+		{
+			ret = mix_videoconfigparamsenc_get_min_qp (config_params_enc, &(mix->min_qp));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_min_qp\n");
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}
+
+			mix->render_bitrate_required = TRUE;
+		}
+			break;
+
+		case MIX_ENC_PARAMS_WINDOW_SIZE:
+		{
+			ret = mix_videoconfigparamsenc_get_window_size (config_params_enc, &(mix->window_size));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to MIX_ENC_PARAMS_WINDOW_SIZE\n");
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}
+
+			mix->render_bitrate_required = TRUE;
+		}
+			break;
+
+		case MIX_ENC_PARAMS_TARGET_PERCENTAGE:
+		{
+			ret = mix_videoconfigparamsenc_get_target_percentage (config_params_enc, &(mix->target_percentage));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to MIX_ENC_PARAMS_TARGET_PERCENTAGE\n");
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}
+
+			mix->render_bitrate_required = TRUE;
+		}
+			break;
+
+		case MIX_ENC_PARAMS_MTU_SLICE_SIZE:
+		{
+			ret = mix_videoconfigparamsenc_get_max_slice_size(config_params_enc, &(mix->max_slice_size));
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_get_max_slice_size\n");
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}
+
+			mix->render_mss_required = TRUE;
+
+		}
+			break;
+		case MIX_ENC_PARAMS_SLICE_NUM:
 		{
 			/*
 			* This type of dynamic control will be handled in H.264 override method
 			*/
 		}
 			break;
-			
-		case MIX_ENC_PARAMS_RC_MODE:	
+
+		case MIX_ENC_PARAMS_RC_MODE:
 		{
 			ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, &(mix->va_rcmode));
 			if (ret != MIX_RESULT_SUCCESS) {
 				//TODO cleanup
 
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_rate_control\n");                            
+					"Failed to mix_videoconfigparamsenc_get_rate_control\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}	
+			}
 
-			mix->new_header_required = TRUE;					
+			/*
+			* We only can change the RC mode to re-start encoding session
+			*/
+
 		}
 			break;
-			
+
 		case MIX_ENC_PARAMS_RESOLUTION:
 		{
 
@@ -520,12 +690,12 @@
 				//TODO cleanup
 
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_picture_res\n");                            
+					"Failed to mix_videoconfigparamsenc_get_picture_res\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}	
+			}
 
-			mix->new_header_required = TRUE;			
+			mix->new_header_required = TRUE;
 		}
 			break;
 		case MIX_ENC_PARAMS_GOP_SIZE:
@@ -536,12 +706,12 @@
 				//TODO cleanup
 
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_intra_period\n");                            
+					"Failed to mix_videoconfigparamsenc_get_intra_period\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}				
+			}
 
-			mix->new_header_required = TRUE;						
+			mix->new_header_required = TRUE;
 
 		}
 			break;
@@ -552,35 +722,52 @@
 				//TODO cleanup
 
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_frame_rate\n");                            
+					"Failed to mix_videoconfigparamsenc_get_frame_rate\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}				
-			
-			mix->new_header_required = TRUE;			
+			}
+
+			mix->render_framerate_required = TRUE;
 		}
 			break;
+
 		case MIX_ENC_PARAMS_FORCE_KEY_FRAME:
 		{
-			mix->new_header_required = TRUE;			
-			
+			mix->new_header_required = TRUE;
+
 		}
 			break;
-		case MIX_ENC_PARAMS_QP:
+
+		case MIX_ENC_PARAMS_REFRESH_TYPE:
 		{
-			ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp));
+			ret = mix_videoconfigparamsenc_get_refresh_type(config_params_enc, &(mix->refresh_type));
 			if (ret != MIX_RESULT_SUCCESS) {
 				//TODO cleanup
 
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_init_qp\n");                            
+					"Failed to mix_videoconfigparamsenc_get_refresh_type\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}			
-
-			mix->new_header_required = TRUE;			
+			}
 		}
 			break;
+
+		case MIX_ENC_PARAMS_AIR:
+		{
+			ret = mix_videoconfigparamsenc_get_AIR_params(config_params_enc, &(mix->air_params));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_AIR_params\n");
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}
+
+			mix->render_AIR_required = TRUE;
+		}
+			break;
+
 		case MIX_ENC_PARAMS_CIR_FRAME_CNT:
 		{
 			ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt));
@@ -588,29 +775,29 @@
 				//TODO cleanup
 
 				LOG_E(
-					"Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");                            
+					"Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");
 				g_mutex_unlock(mix->objectlock);
 				return MIX_RESULT_FAIL;
-			}			
+			}
 		}
 			break;
-			
+
 		default:
-			break;
-	}
+            break;
+    }
 
- 	g_mutex_unlock(mix->objectlock);	
+    g_mutex_unlock(mix->objectlock);
 
-    	return MIX_RESULT_SUCCESS;	
+    return MIX_RESULT_SUCCESS;
 }
 
 /* mixvideoformatenc class methods implementation */
 
 MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) {
     MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
-    
-    LOG_V( "Begin\n");	
-    
+
+    LOG_V( "Begin\n");
+
     if (klass->getcaps) {
         return klass->getcaps(mix, msg);
     }
@@ -624,26 +811,26 @@
         MixSurfacePool ** surface_pool,
         VADisplay va_display) {
     MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
-    
+
     /*frame_mgr and input_buf_pool is reserved for future use*/
 	if (klass->initialize) {
         return klass->initialize(mix, config_params_enc, frame_mgr,
                 input_buf_pool, surface_pool, va_display);
     }
-    
+
     return MIX_RESULT_FAIL;
-    
+
 }
 
 MIX_RESULT mix_videofmtenc_encode(MixVideoFormatEnc *mix, MixBuffer * bufin[],
         gint bufincnt, MixIOVec * iovout[], gint iovoutcnt,
         MixVideoEncodeParams * encode_params) {
-    
+
     MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
     if (klass->encode) {
         return klass->encode(mix, bufin, bufincnt, iovout, iovoutcnt, encode_params);
     }
-    
+
     return MIX_RESULT_FAIL;
 }
 
@@ -652,7 +839,7 @@
     if (klass->flush) {
         return klass->flush(mix);
     }
-    
+
     return MIX_RESULT_FAIL;
 }
 
@@ -661,7 +848,7 @@
     if (klass->eos) {
         return klass->eos(mix);
     }
-    
+
     return MIX_RESULT_FAIL;
 }
 
@@ -670,28 +857,28 @@
     if (klass->deinitialize) {
         return klass->deinitialize(mix);
     }
-    
+
     return MIX_RESULT_FAIL;
 }
 
 MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint * max_size) {
-    
+
     MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
     if (klass->encode) {
         return klass->getmaxencodedbufsize(mix, max_size);
     }
-    
+
     return MIX_RESULT_FAIL;
 }
 
-MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
-	MixVideoConfigParamsEnc * config_params_enc, 
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix,
+	MixVideoConfigParamsEnc * config_params_enc,
 	MixEncParamsType params_type) {
 
-    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);	
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
     if (klass->set_dynamic_config) {
         return klass->set_dynamic_config(mix, config_params_enc, params_type);
     }
-    
-    return MIX_RESULT_FAIL;	
+
+    return MIX_RESULT_FAIL;
 }
diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h
index b66cc6a..3b208b4 100644
--- a/mix_video/src/mixvideoformatenc.h
+++ b/mix_video/src/mixvideoformatenc.h
@@ -54,8 +54,8 @@
 typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix);
 typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix);
 typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size);
-typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, 
-	MixVideoConfigParamsEnc * config_params, 
+typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix,
+	MixVideoConfigParamsEnc * config_params,
 	MixEncParamsType params_type);
 
 struct _MixVideoFormatEnc {
@@ -73,35 +73,54 @@
     VAContextID va_context;
     VAConfigID va_config;
     GString *mime_type;
-    
+
     guint frame_rate_num;
     guint frame_rate_denom;
     guint picture_width;
     guint picture_height;
-    
+
+    guint intra_period;
+
+    /*
+    * Following is for bitrate control
+    */
     guint initial_qp;
     guint min_qp;
-    guint intra_period;
     guint bitrate;
-    
-    gboolean share_buf_mode;	
+    guint target_percentage;
+    guint window_size;
+
+    gboolean share_buf_mode;
     gulong *	ci_frame_id;
-    guint	ci_frame_num;	
+    guint	ci_frame_num;
 
     gboolean force_key_frame;
     gboolean new_header_required;
-    guint 	CIR_frame_cnt;	
-    
+
+    MixVideoIntraRefreshType refresh_type;
+
+    guint 	CIR_frame_cnt;
+
+    MixAIRParams air_params;
+
+    guint	max_slice_size;
+
+    gboolean render_mss_required;
+    gboolean render_QP_required;
+    gboolean render_AIR_required;
+    gboolean render_framerate_required;
+    gboolean render_bitrate_required;
+
     gulong    drawable;
-    gboolean need_display;	
+    gboolean need_display;
 
     VAProfile va_profile;
     VAEntrypoint va_entrypoint;
     guint va_format;
-    guint va_rcmode; 	
-    guint8 level;	
-	
-    
+    guint va_rcmode;
+    guint8 level;
+
+
     MixBufferPool *inputbufpool;
     GQueue *inputbufqueue;
 };
@@ -124,7 +143,7 @@
 	MixVideoFmtEncFlushFunc flush;
 	MixVideoFmtEncEndOfStreamFunc eos;
 	MixVideoFmtEncDeinitializeFunc deinitialize;
-	MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize;	
+	MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize;
 	MixVideoFmtEncSetDynamicEncConfigFunc set_dynamic_config;
 };
 
@@ -166,7 +185,7 @@
 /* TODO: change method parameter list */
 MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg);
 
-MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix, 
+MIX_RESULT mix_videofmtenc_initialize(MixVideoFormatEnc *mix,
         MixVideoConfigParamsEnc * enc_config_params,
         MixFrameManager * frame_mgr,
         MixBufferPool * input_buf_pool,
@@ -183,11 +202,11 @@
 
 MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix);
 
-MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, 
+MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix,
 	guint *max_size);
 
-MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
-	MixVideoConfigParamsEnc * config_params, 
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix,
+	MixVideoConfigParamsEnc * config_params,
 	MixEncParamsType params_type);
 
 G_END_DECLS
diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c
index db532e4..ac45be8 100644
--- a/mix_video/src/mixvideoformatenc_h264.c
+++ b/mix_video/src/mixvideoformatenc_h264.c
@@ -40,6 +40,7 @@
 
     /* member initialization */
     self->encoded_frames = 0;
+    self->frame_num = 0;
     self->pic_skipped = FALSE;
     self->is_intra = TRUE;
     self->cur_frame = NULL;
@@ -227,6 +228,24 @@
         goto cleanup;
     }
 
+    ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264,
+            &self->I_slice_num);
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n");
+        goto cleanup;
+    }
+
+    ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264,
+            &self->P_slice_num);
+
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E(
+                "Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n");
+        goto cleanup;
+    }
+
     ret = mix_videoconfigparamsenc_h264_get_delimiter_type (config_params_enc_h264,
             &self->delimiter_type);
 
@@ -254,6 +273,10 @@
             self->disable_deblocking_filter_idc);
     LOG_I( "self->slice_num = %d\n",
             self->slice_num);
+    LOG_I( "self->I_slice_num = %d\n",
+            self->I_slice_num);
+    LOG_I( "self->P_slice_num = %d\n",
+            self->P_slice_num);
     LOG_I ("self->delimiter_type = %d\n",
             self->delimiter_type);
     LOG_I ("self->idr_interval = %d\n",
@@ -423,6 +446,19 @@
         goto cleanup;
     }
 
+
+    if (parent->va_rcmode == VA_RC_VCM) {
+
+	/*
+	* Following three features are only enabled in VCM mode
+	*/
+	parent->render_mss_required = TRUE;
+	parent->render_AIR_required = TRUE;
+	parent->render_bitrate_required = TRUE;
+	self->slice_num = (parent->picture_height + 15) / 16; //if we are in VCM, we will set slice num to max value
+    }
+
+
     /*TODO: compute the surface number*/
     int numSurfaces;
 
@@ -802,6 +838,7 @@
 #endif
     /*reset the properities*/
     self->encoded_frames = 0;
+    self->frame_num = 0;
     self->pic_skipped = FALSE;
     self->is_intra = TRUE;
 
@@ -1103,6 +1140,8 @@
     guint slice_height;
     guint slice_index;
     guint slice_height_in_mb;
+    guint max_slice_num;
+    guint min_slice_num;
 
     if (mix == NULL) {
         LOG_E("mix == NULL\n");
@@ -1119,7 +1158,26 @@
 
     parent = MIX_VIDEOFORMATENC(&(mix->parent));
 
-    slice_num = mix->slice_num;
+    max_slice_num = (parent->picture_height + 15) / 16;
+    min_slice_num = 1;
+
+    if (mix->is_intra) {
+        slice_num = mix->I_slice_num;
+    }
+    else {
+        slice_num = mix->P_slice_num;
+    }
+
+    if (slice_num < min_slice_num) {
+        LOG_W ("Slice Number is too small");
+        slice_num = min_slice_num;
+    }
+
+    if (slice_num > max_slice_num) {
+        LOG_W ("Slice Number is too big");
+        slice_num = max_slice_num;
+    }
+
     slice_height = parent->picture_height / slice_num;
 
     slice_height += 15;
@@ -1127,18 +1185,6 @@
 
     slice_num = mix->slice_num = (parent->picture_height + 15) / slice_height;
 
-#if 0
-    if (!mix->is_intra){
-	slice_num = 9;
-
-	slice_height = parent->picture_height / slice_num;
-
-	slice_height += 15;
-	slice_height &= (~15);
-
-    }
-#endif
-
 #if 1
     va_status = vaCreateBuffer (parent->va_display, parent->va_context,
             VAEncSliceParameterBufferType,
@@ -1285,13 +1331,20 @@
 
     LOG_I( "encoded_frames = %d\n",
             mix->encoded_frames);
+    LOG_I( "frame_num = %d\n",
+            mix->frame_num);
     LOG_I( "is_intra = %d\n",
             mix->is_intra);
     LOG_I( "ci_frame_id = 0x%08x\n",
             (guint) parent->ci_frame_id);
 
+    if (parent->new_header_required) {
+        mix->frame_num = 0;
+    }
+
     /* determine the picture type*/
-    if ((mix->encoded_frames % parent->intra_period) == 0) {
+    //if ((mix->encoded_frames % parent->intra_period) == 0) {
+    if ((mix->frame_num % parent->intra_period) == 0) {
         mix->is_intra = TRUE;
     } else {
         mix->is_intra = FALSE;
@@ -1648,6 +1701,7 @@
 
     if (mix->encoded_frames == 0) {
         mix->encoded_frames ++;
+        mix->frame_num ++;
         mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index];
         mix->coded_buf_index ++;
         mix->coded_buf_index %=2;
@@ -1656,7 +1710,8 @@
 
 
         /* determine the picture type*/
-        if ((mix->encoded_frames % parent->intra_period) == 0) {
+        //if ((mix->encoded_frames % parent->intra_period) == 0) {
+        if ((mix->frame_num % parent->intra_period) == 0) {
             mix->is_intra = TRUE;
         } else {
             mix->is_intra = FALSE;
@@ -1697,6 +1752,8 @@
     int num_seg = 0;
     guint total_size = 0;
     guint size = 0;
+    guint status = 0;
+    gboolean slice_size_overflow = FALSE;
 
     coded_seg = (VACodedBufferSegment *)buf;
     num_seg = 1;
@@ -1704,6 +1761,13 @@
     while (1) {
         total_size += coded_seg->size;
 
+        status = coded_seg->status;
+
+        if (!slice_size_overflow) {
+
+		slice_size_overflow = status & VA_CODED_BUF_STATUS_SLICE_OVERFLOW_MASK;
+        }
+
         if (coded_seg->next == NULL)
             break;
 
@@ -1887,10 +1951,10 @@
 
     }
 
-    VASurfaceStatus status;
+    VASurfaceStatus va_surface_status;
 
     /*query the status of current surface*/
-    va_status = vaQuerySurfaceStatus(va_display, surface,  &status);
+    va_status = vaQuerySurfaceStatus(va_display, surface,  &va_surface_status);
     if (va_status != VA_STATUS_SUCCESS)
     {
         LOG_E(
@@ -1898,7 +1962,7 @@
         ret = MIX_RESULT_FAIL;
         goto cleanup;
     }
-    mix->pic_skipped = status & VASurfaceSkipped;
+    mix->pic_skipped = va_surface_status & VASurfaceSkipped;
 
     if (parent->need_display) {
 		ret = mix_videoframe_set_sync_flag(mix->cur_frame, TRUE);
@@ -1933,6 +1997,7 @@
 #endif
 
     mix->encoded_frames ++;
+    mix->frame_num ++;
     mix->last_coded_buf = mix->coded_buf[mix->coded_buf_index];
     mix->coded_buf_index ++;
     mix->coded_buf_index %=2;
@@ -1965,6 +2030,14 @@
 
     LOG_V( "end\n");
 
+    /*
+    * The error level of MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW
+    * is lower than other errors, so if any other errors happen, we won't
+    * return slice size overflow
+    */
+    if (ret == MIX_RESULT_SUCCESS && slice_size_overflow)
+        ret = MIX_RESULT_VIDEO_ENC_SLICESIZE_OVERFLOW;
+
     return ret;
 }
 
@@ -2170,7 +2243,8 @@
     if (!MIX_IS_VIDEOFORMATENC_H264(mix))
         return MIX_RESULT_INVALID_PARAM;
 
-    if (mix->encoded_frames == 0 || parent->new_header_required) {
+    //if (mix->encoded_frames == 0 || parent->new_header_required) {
+    if (mix->frame_num == 0 || parent->new_header_required) {
         ret = mix_videofmtenc_h264_send_seq_params (mix);
         if (ret != MIX_RESULT_SUCCESS)
         {
@@ -2182,6 +2256,58 @@
 	 parent->new_header_required = FALSE; //Set to require new header filed to FALSE
     }
 
+    if (parent->render_mss_required && parent->max_slice_size != 0) {
+        ret = mix_videofmtenc_h264_send_max_slice_size(mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_h264_send_max_slice_size\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        parent->render_mss_required = FALSE;
+    }
+
+    if (parent->render_bitrate_required) {
+        ret = mix_videofmtenc_h264_send_dynamic_bitrate(mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_h264_send_dynamic_bitrate\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        parent->render_bitrate_required = FALSE;
+    }
+
+    if (parent->render_AIR_required &&
+       (parent->refresh_type == MIX_VIDEO_AIR || parent->refresh_type == MIX_VIDEO_BOTH))
+    {
+
+        ret = mix_videofmtenc_h264_send_AIR (mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_h264_send_AIR\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        parent->render_AIR_required = FALSE;
+    }
+
+    if (parent->render_framerate_required) {
+
+        ret = mix_videofmtenc_h264_send_dynamic_framerate (mix);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E(
+                    "Failed mix_videofmtenc_h264_send_dynamic_framerate\n");
+            return MIX_RESULT_FAIL;
+        }
+
+        parent->render_framerate_required = FALSE;
+    }
+
     ret = mix_videofmtenc_h264_send_picture_parameter (mix);
 
     if (ret != MIX_RESULT_SUCCESS)
@@ -2234,18 +2360,20 @@
 	}
 
 	/*
-	* For case params_type == MIX_ENC_PARAMS_SLICE_SIZE
+	* For case params_type == MIX_ENC_PARAMS_SLICE_NUM
 	* we don't need to chain up to parent method, as we will handle
 	* dynamic slice height change inside this method, and other dynamic
 	* controls will be handled in parent method.
 	*/
-	if (params_type == MIX_ENC_PARAMS_SLICE_SIZE) {
+	if (params_type == MIX_ENC_PARAMS_SLICE_NUM) {
 
 		g_mutex_lock(parent->objectlock);
 
 		ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264,
 			&self->slice_num);
 
+		self->I_slice_num = self->P_slice_num = self->slice_num;
+
 		if (ret != MIX_RESULT_SUCCESS) {
 			LOG_E(
 				"Failed to mix_videoconfigparamsenc_h264_get_slice_num\n");
@@ -2257,6 +2385,44 @@
 
 		g_mutex_unlock(parent->objectlock);
 
+	}
+	else if (params_type == MIX_ENC_PARAMS_I_SLICE_NUM) {
+
+		g_mutex_lock(parent->objectlock);
+
+		ret = mix_videoconfigparamsenc_h264_get_I_slice_num (config_params_enc_h264,
+			&self->I_slice_num);
+
+		if (ret != MIX_RESULT_SUCCESS) {
+			LOG_E(
+				"Failed to mix_videoconfigparamsenc_h264_get_I_slice_num\n");
+
+			g_mutex_unlock(parent->objectlock);
+
+			return ret;
+		}
+
+		g_mutex_unlock(parent->objectlock);
+
+	}
+	else if (params_type == MIX_ENC_PARAMS_P_SLICE_NUM) {
+
+		g_mutex_lock(parent->objectlock);
+
+		ret = mix_videoconfigparamsenc_h264_get_P_slice_num (config_params_enc_h264,
+			&self->P_slice_num);
+
+		if (ret != MIX_RESULT_SUCCESS) {
+			LOG_E(
+				"Failed to mix_videoconfigparamsenc_h264_get_P_slice_num\n");
+
+			g_mutex_unlock(parent->objectlock);
+
+			return ret;
+		}
+
+		g_mutex_unlock(parent->objectlock);
+
 	} else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) {
 
 		g_mutex_lock(parent->objectlock);
@@ -2300,3 +2466,319 @@
 
 }
 
+MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+
+    if (mix == NULL) {
+        LOG_E("mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n\n");
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+    if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) {
+
+	LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_bitrate\n");
+	return MIX_RESULT_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *  misc_enc_param_buf;
+    VAEncMiscParameterRateControl * bitrate_control_param;
+    VABufferID misc_param_buffer_id;
+
+    va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof (VAEncMiscParameterRateControl),
+            1, NULL,
+            &misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaMapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    misc_enc_param_buf->type = VAEncMiscParameterTypeRateControl;
+    bitrate_control_param = (VAEncMiscParameterRateControl *)misc_enc_param_buf->data;
+
+    bitrate_control_param->bits_per_second = parent->bitrate;
+    bitrate_control_param->initial_qp = parent->initial_qp;
+    bitrate_control_param->min_qp = parent->min_qp;
+    bitrate_control_param->target_percentage = parent->target_percentage;
+    bitrate_control_param->window_size = parent->window_size;
+
+    va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+
+    va_status = vaRenderPicture(parent->va_display, parent->va_context,
+            &misc_param_buffer_id, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    return MIX_RESULT_SUCCESS;
+
+}
+
+MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+
+    if (mix == NULL) {
+        LOG_E("mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n\n");
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+    if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) {
+
+	LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_max_slice_size\n");
+	return MIX_RESULT_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *  misc_enc_param_buf;
+    VAEncMiscParameterMaxSliceSize * max_slice_size_param;
+    VABufferID misc_param_buffer_id;
+
+    va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterMaxSliceSize),
+            1, NULL,
+            &misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+
+    va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaMapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    misc_enc_param_buf->type = VAEncMiscParameterTypeMaxSliceSize;
+    max_slice_size_param = (VAEncMiscParameterMaxSliceSize *)misc_enc_param_buf->data;
+
+    max_slice_size_param->max_slice_size = parent->max_slice_size;
+
+    va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    LOG_I( "max slice size = %d\n",
+            max_slice_size_param->max_slice_size);
+
+    va_status = vaRenderPicture(parent->va_display, parent->va_context,
+            &misc_param_buffer_id, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+
+    if (mix == NULL) {
+        LOG_E("mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n\n");
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+
+    if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) {
+
+	LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_AIR\n");
+	return MIX_RESULT_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *  misc_enc_param_buf;
+    VAEncMiscParameterAIR * air_param;
+    VABufferID misc_param_buffer_id;
+
+    va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterAIR),
+            1, NULL,
+            &misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaMapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    misc_enc_param_buf->type = VAEncMiscParameterTypeAIR;
+    air_param = (VAEncMiscParameterAIR *)misc_enc_param_buf->data;
+
+    air_param->air_auto = parent->air_params.air_auto;
+    air_param->air_num_mbs = parent->air_params.air_MBs;
+    air_param->air_threshold = parent->air_params.air_threshold;
+
+    va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaRenderPicture(parent->va_display, parent->va_context,
+            &misc_param_buffer_id, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+
+    LOG_I( "air_threshold = %d\n",
+            air_param->air_threshold);
+
+    return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix)
+{
+    VAStatus va_status;
+
+    if (mix == NULL) {
+        LOG_E("mix == NULL\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    LOG_V( "Begin\n\n");
+
+
+    MixVideoFormatEnc *parent = NULL;
+
+    if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+    if (parent->va_rcmode != MIX_RATE_CONTROL_VCM) {
+
+	LOG_W ("Not in VCM mode, but call mix_videofmtenc_h264_send_dynamic_framerate\n");
+	return MIX_RESULT_SUCCESS;
+    }
+
+    VAEncMiscParameterBuffer *  misc_enc_param_buf;
+    VAEncMiscParameterFrameRate * framerate_param;
+    VABufferID misc_param_buffer_id;
+
+    va_status = vaCreateBuffer(parent->va_display, parent->va_context,
+            VAEncMiscParameterBufferType,
+            sizeof(VAEncMiscParameterBuffer) + sizeof(VAEncMiscParameterFrameRate),
+            1, NULL,
+            &misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaCreateBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaMapBuffer (parent->va_display, misc_param_buffer_id, (void **)&misc_enc_param_buf);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaMapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    misc_enc_param_buf->type = VAEncMiscParameterTypeFrameRate;
+    framerate_param = (VAEncMiscParameterFrameRate *)misc_enc_param_buf->data;
+    framerate_param->framerate =
+		(unsigned int) (parent->frame_rate_num + parent->frame_rate_denom /2 ) / parent->frame_rate_denom;
+
+    va_status = vaUnmapBuffer(parent->va_display, misc_param_buffer_id);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaUnmapBuffer\n");
+        return MIX_RESULT_FAIL;
+    }
+
+    va_status = vaRenderPicture(parent->va_display, parent->va_context,
+            &misc_param_buffer_id, 1);
+    if (va_status != VA_STATUS_SUCCESS)
+    {
+        LOG_E(
+                "Failed to vaRenderPicture\n");
+        return MIX_RESULT_FAIL;
+    }
+
+
+    LOG_I( "frame rate = %d\n",
+            framerate_param->framerate);
+
+    return MIX_RESULT_SUCCESS;
+
+}
+
diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h
index 2e7b12d..6cd9d83 100644
--- a/mix_video/src/mixvideoformatenc_h264.h
+++ b/mix_video/src/mixvideoformatenc_h264.h
@@ -40,14 +40,14 @@
     VABufferID      last_coded_buf;
     VABufferID      seq_param_buf;
     VABufferID      pic_param_buf;
-    VABufferID      slice_param_buf;	
+    VABufferID      slice_param_buf;
     VASurfaceID *   ci_shared_surfaces;
     VASurfaceID *   surfaces;
-    guint           surface_num;	
+    guint           surface_num;
 
-    MixVideoFrame  *cur_frame;	//current input frame to be encoded;	
+    MixVideoFrame  *cur_frame;	//current input frame to be encoded;
     MixVideoFrame  *ref_frame;  //reference frame
-    MixVideoFrame  *rec_frame;	//reconstructed frame;	
+    MixVideoFrame  *rec_frame;	//reconstructed frame;
     MixVideoFrame  *last_frame;	//last frame;
     MixVideoFrame  *lookup_frame;
 #ifdef ANDROID
@@ -59,9 +59,12 @@
     MixDelimiterType delimiter_type;
     guint idr_interval;
     guint slice_num;
-    guint va_rcmode; 
+    guint I_slice_num;
+    guint P_slice_num;
+    guint va_rcmode;
 
     guint       encoded_frames;
+    guint       frame_num;
     gboolean    pic_skipped;
 
     gboolean    is_intra;
@@ -123,7 +126,7 @@
 
 /* H.264 vmethods */
 MIX_RESULT mix_videofmtenc_h264_getcaps(MixVideoFormatEnc *mix, GString *msg);
-MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix, 
+MIX_RESULT mix_videofmtenc_h264_initialize(MixVideoFormatEnc *mix,
         MixVideoConfigParamsEnc * config_params_enc,
         MixFrameManager * frame_mgr,
         MixBufferPool * input_buf_pool,
@@ -135,20 +138,25 @@
 MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix);
 MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix);
 MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix);
-MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
-	MixVideoConfigParamsEnc * config_params_enc, 
+MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix,
+	MixVideoConfigParamsEnc * config_params_enc,
 	MixEncParamsType params_type);
 
 /* Local Methods */
 
 MIX_RESULT mix_videofmtenc_h264_get_max_encoded_buf_size (MixVideoFormatEnc *mix, guint *max_size);
-MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin, 
+MIX_RESULT mix_videofmtenc_h264_process_encode (MixVideoFormatEnc_H264 *mix, MixBuffer * bufin,
         MixIOVec * iovout);
 MIX_RESULT mix_videofmtenc_h264_AnnexB_to_length_prefixed (
         guint8 * bufin, guint bufin_len, guint8* bufout, guint *bufout_len);
 
 MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix);
 
+MIX_RESULT mix_videofmtenc_h264_send_dynamic_bitrate (MixVideoFormatEnc_H264 *mix);
+MIX_RESULT mix_videofmtenc_h264_send_max_slice_size (MixVideoFormatEnc_H264 *mix);
+MIX_RESULT mix_videofmtenc_h264_send_dynamic_framerate (MixVideoFormatEnc_H264 *mix);
+MIX_RESULT mix_videofmtenc_h264_send_AIR (MixVideoFormatEnc_H264 *mix);
+
 G_END_DECLS
 
 #endif /* __MIX_VIDEOFORMATENC_H264_H__ */