Decoder changes for RTP, partial frame handling, frame manager update for frame gap handling, handling slice loss.

Change-Id: I0a9fd7d7f6a656e8be0f9bb427da25edde1dbe6c
diff --git a/mix_vbp/ChangeLog b/mix_vbp/ChangeLog
index 139597f..98a1ee8 100644
--- a/mix_vbp/ChangeLog
+++ b/mix_vbp/ChangeLog
@@ -1,2 +1,8 @@
+2010-09-15 Tao Tao <tao.q.tao@intel.com>
+	* Merged changes for Android
 
+2010-09-02 Andy Qiu <junhai.qiu@intel.com>
+	* change H.264 parser to support partial frame parsing
+	* change H.264 parser to support byte stream parsing
+	* change version number to 0.1.17
 
diff --git a/mix_vbp/configure.ac b/mix_vbp/configure.ac
index 93a9081..7b9edae 100644
--- a/mix_vbp/configure.ac
+++ b/mix_vbp/configure.ac
@@ -1,8 +1,8 @@
-AC_INIT("", "", [linda.s.cline@intel.com])
+AC_INIT([""],[""],[linda.s.cline@intel.com])
 
 AC_CONFIG_MACRO_DIR(m4)
 
-AS_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 15)
+UMG_MIX_VERSION(mixvbp, MIXVBP, 0, 1, 17)
 
 dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
 AM_MAINTAINER_MODE
@@ -12,11 +12,11 @@
 dnl make aclocal work in maintainer mode
 AC_SUBST(ACLOCAL_AMFLAGS, "-I m4")
 
-AM_CONFIG_HEADER(config.h)
+AC_CONFIG_HEADERS([config.h])
 
 dnl check for tools
 AC_PROG_CC
-AC_PROG_LIBTOOL
+LT_INIT
 
 MIX_CFLAGS="-Wall -Werror"
 
diff --git a/mix_vbp/m4/as-mix-version.m4 b/mix_vbp/m4/as-mix-version.m4
index f0301b1..82f6c95 100644
--- a/mix_vbp/m4/as-mix-version.m4
+++ b/mix_vbp/m4/as-mix-version.m4
@@ -1,9 +1,9 @@
 dnl as-mix-version.m4 
 
-dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
+dnl UMG_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
 
 dnl example
-dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,)
+dnl UMG_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,)
 dnl for a 0.3.2 release version
 
 dnl this macro
@@ -11,7 +11,7 @@
 dnl - defines [$PREFIX], VERSION
 dnl - AC_SUBST's all defined vars
 
-AC_DEFUN([AS_MIX_VERSION],
+AC_DEFUN([UMG_MIX_VERSION],
 [
   PACKAGE=[$1]
   [$2]_MAJOR=[$3]
diff --git a/mix_vbp/mixvbp.spec b/mix_vbp/mixvbp.spec
index da15b9d..77445d3 100644
--- a/mix_vbp/mixvbp.spec
+++ b/mix_vbp/mixvbp.spec
@@ -6,14 +6,15 @@
 
 Summary: MIX Video Bitstream Parser
 Name: mixvbp
-Version: 0.1.15
+Version: 0.1.17
 Release: 1
-Source0: %{name}-%{version}.tar.gz
+Source0: %{name}-%{version}.tar.bz2
 NoSource: 0
 License: Proprietary
 Group: System Environment/Libraries
 BuildRoot: %{_tmppath}/%{name}-root
 ExclusiveArch: i586
+BuildRequires: glib2-devel libva-devel
 
 %description
 MIX Video Bitstream Parser is an user library interface for various video format bitstream parsing
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
index d7be02f..d174f12 100644
--- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/h264parse_dpb.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
index bc301da..c30167b 100644
--- a/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
+++ b/mix_vbp/viddec_fw/fw/codecs/h264/parser/viddec_h264_parse.c
@@ -1,8 +1,4 @@
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/Makefile.am b/mix_vbp/viddec_fw/fw/parser/Makefile.am
index c94b935..89c995c 100644
--- a/mix_vbp/viddec_fw/fw/parser/Makefile.am
+++ b/mix_vbp/viddec_fw/fw/parser/Makefile.am
@@ -168,7 +168,6 @@
 	./include/viddec_pm_utils_bstream.h \
 	./include/viddec_pm_utils_list.h \
 	./include/viddec_vc1_parse.h \
-	../include/stdint.h \
 	../include/viddec_debug.h \
 	../include/viddec_fw_version.h \
 	../../include/viddec_fw_common_defs.h \
@@ -177,7 +176,6 @@
 	../../include/viddec_fw_item_types.h \
 	../../include/viddec_fw_parser_host.h \
 	../../include/viddec_fw_workload.h \
-	../../fw/include/stdint.h \
 	../../fw/include/viddec_debug.h \
 	../../fw/include/viddec_fw_version.h \
 	../../fw/codecs/h264/include/h264.h \
diff --git a/mix_vbp/viddec_fw/fw/parser/main.c b/mix_vbp/viddec_fw/fw/parser/main.c
index 6b5f402..4ba89f7 100644
--- a/mix_vbp/viddec_fw/fw/parser/main.c
+++ b/mix_vbp/viddec_fw/fw/parser/main.c
@@ -1,8 +1,4 @@
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
index 90dea6a..38392e1 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_h264_parser.c
@@ -7,10 +7,6 @@
  */
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
@@ -25,8 +21,23 @@
 #include "vbp_h264_parser.h"
 
 
-/* number of bytes used to encode length of NAL payload. Default is 4 bytes. */
-static int NAL_length_size = 4;
+typedef enum
+{
+    H264_BS_LENGTH_PREFIXED,
+    H264_BS_SC_PREFIXED,
+    H264_BS_SINGLE_NAL
+} H264_BS_PATTERN;
+
+/* number of bytes used to encode length of NAL payload.  If parser does not receive configuration data
+and NAL_length_size is equal to zero when bitstream parsing begins, we assume bitstream is in AnnexB
+byte stream format. */
+static int NAL_length_size = 0;
+
+/* indicate if stream is length prefixed */
+static int length_prefix_verified = 0;
+
+static H264_BS_PATTERN bitstream_pattern = H264_BS_SC_PREFIXED;
+
 
 /* default scaling list table */
 unsigned char Default_4x4_Intra[16] =
@@ -77,7 +88,7 @@
     16,16,16,16
 };
 
-unsigned char quant8_flat[64] = 
+unsigned char quant8_flat[64] =
 { 
     16,16,16,16,16,16,16,16,
     16,16,16,16,16,16,16,16,
@@ -234,6 +245,10 @@
 
 	pcontext->query_data = NULL;
 
+    NAL_length_size = 0;
+    length_prefix_verified = 0;
+    bitstream_pattern = H264_BS_SC_PREFIXED;
+
 	return VBP_OK;
 }
 
@@ -553,9 +568,11 @@
 	VAIQMatrixBufferH264* IQ_matrix_buf)
 {
   	int i;
+    int lists_to_set = 6 + 2 * (parser->info.active_PPS.transform_8x8_mode_flag ? 1 : 0);
+    
   	if (parser->info.active_PPS.pic_scaling_matrix_present_flag)
   	{
-		for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
+		for (i = 0; i < lists_to_set; i++)
     	{
       		if (parser->info.active_PPS.pic_scaling_list_present_flag[i])
       		{
@@ -657,7 +674,7 @@
     	/* PPS matrix not present, use SPS information */
     	if (parser->info.active_SPS.seq_scaling_matrix_present_flag)
     	{
-      		for (i = 0; i < 6 + 2 * parser->info.active_PPS.transform_8x8_mode_flag; i++)
+      		for (i = 0; i < lists_to_set; i++)
       		{
 				if (parser->info.active_SPS.seq_scaling_list_present_flag[i])
 				{
@@ -815,7 +832,12 @@
 		parser->info.active_SPS.sps_disp.vui_seq_parameters.video_format;  			
 	 
 	codec_data->video_format =
-		parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag;  			
+		parser->info.active_SPS.sps_disp.vui_seq_parameters.video_signal_type_present_flag;  
+
+    /* picture order type and count */
+    codec_data->log2_max_pic_order_cnt_lsb_minus4 = parser->info.active_SPS.log2_max_pic_order_cnt_lsb_minus4;
+    codec_data->pic_order_cnt_type = parser->info.active_SPS.pic_order_cnt_type;
+    
 }
 
 
@@ -835,6 +857,12 @@
 		/* a new picture is parsed */
 		query_data->num_pictures++;
 	}
+
+	if (query_data->num_pictures == 0)
+	{
+	    /* partial frame */
+	    query_data->num_pictures = 1;
+        }
 	
 	if (query_data->num_pictures > MAX_NUM_PICTURES)
 	{
@@ -845,14 +873,16 @@
 	int pic_data_index = query_data->num_pictures - 1;
 	if (pic_data_index < 0)
 	{
-		ETRACE("MB address does not start from 0!");
+		WTRACE("MB address does not start from 0!");
 		return VBP_DATA;
 	}
 		
 	pic_data = &(query_data->pic_data[pic_data_index]);	
 	pic_parms = pic_data->pic_parms;
+
+	// relax this condition to support partial frame parsing
 	
-	if (parser->info.SliceHeader.first_mb_in_slice == 0)
+	//if (parser->info.SliceHeader.first_mb_in_slice == 0)
 	{
 		/**
 		* picture parameter only needs to be set once,
@@ -1237,6 +1267,11 @@
 		ETRACE("number of slices per picture exceeds the limit (%d).", MAX_NUM_SLICES);
 		return VBP_DATA;
 	}
+	
+	/*if (pic_data->num_slices > 1)
+	{
+	    ITRACE("number of slices per picture is %d.", pic_data->num_slices);
+	}*/
 	return VBP_OK;
 }
 
@@ -1259,13 +1294,27 @@
   
   	int i = 0;
 	viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
+
+	/* check if configuration data is start code prefix */
+	viddec_sc_parse_cubby_cxt_t cubby = cxt->parse_cubby;
+	viddec_parser_ops_t *ops = pcontext->parser_ops;
+	int ret = ops->parse_sc((void *)&cubby,
+						NULL, /* context, not used */
+						&(cxt->sc_prefix_info));
+	if (ret == 1)
+	{
+		WTRACE("configuration data is start-code prefixed.\n");
+		bitstream_pattern = H264_BS_SC_PREFIXED;
+		return vbp_parse_start_code_h264(pcontext);
+	}
+
+
 	uint8* cur_data = cxt->parse_cubby.buf;
 
 	
 	if (cxt->parse_cubby.size < 6)
 	{
 		/* need at least 6 bytes to start parsing the structure, see spec 15 */
-                ETRACE ("Need at least 6 bytes to start parsing\n" );
 		return VBP_DATA;
 	}
   
@@ -1309,8 +1358,7 @@
 		if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
 		{
 			/* need at least 2 bytes to parse sequence_parameter_set_length */
-
-		        ETRACE ("Need at least 2 bytes to parse sps." );
+			ETRACE("Not enough data to parse SPS length.");
 			return VBP_DATA;
 		}
 
@@ -1323,7 +1371,7 @@
 		if (cur_data - cxt->parse_cubby.buf + sequence_parameter_set_length > cxt->parse_cubby.size)
 		{
 			/* need at least sequence_parameter_set_length bytes for SPS */
-		        ETRACE ("Need at least sequence paramter set length bytes." );
+			ETRACE("Not enough data to parse SPS.");
 			return VBP_DATA;
 		}
 
@@ -1341,7 +1389,7 @@
 	if (cur_data - cxt->parse_cubby.buf + 1 > cxt->parse_cubby.size)
 	{
 		/* need at least one more byte to parse num_of_picture_parameter_sets */
-		ETRACE ("need at least one more byte to parse num_of_picture_parameter_sets." );
+		ETRACE("Not enough data to parse number of PPS.");
 		return VBP_DATA;
 	}
 
@@ -1356,7 +1404,7 @@
 		if (cur_data - cxt->parse_cubby.buf + 2 > cxt->parse_cubby.size)
 		{
 			/* need at least 2 bytes to parse picture_parameter_set_length */
-                        ETRACE ("need at least 2 bytes to parse picture_parameter_set_length.");
+			ETRACE("Not enough data to parse PPS length.");
 			return VBP_DATA;
 		}
 
@@ -1368,7 +1416,7 @@
 		if (cur_data - cxt->parse_cubby.buf + picture_parameter_set_length > cxt->parse_cubby.size)
 		{
 			/* need at least picture_parameter_set_length bytes for PPS */
-                        ETRACE("need at least picture_parameter_set_length bytes for PPS");
+			ETRACE("Not enough data to parse PPS.");
 			return VBP_DATA;
 		}
 
@@ -1388,7 +1436,8 @@
   		WTRACE("Not all initialization data is parsed. Size = %d, parsed = %d.",
   			cxt->parse_cubby.size, (cur_data - cxt->parse_cubby.buf));
   	}
-   
+
+    bitstream_pattern = H264_BS_LENGTH_PREFIXED;
  	return VBP_OK;
 }
 
@@ -1426,77 +1475,157 @@
 */
 uint32 vbp_parse_start_code_h264(vbp_context *pcontext)
 {	
-	viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
-  	int32_t size_left = 0;
-  	int32_t size_parsed = 0;
-  	int32_t NAL_length = 0;
-  	viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+    viddec_pm_cxt_t *cxt = pcontext->parser_cxt;
 
-	/* reset query data for the new sample buffer */
-	vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
-	int i;
+    /* reset query data for the new sample buffer */
+    vbp_data_h264* query_data = (vbp_data_h264*)pcontext->query_data;
+    int i;
 
-#ifndef ANDROID
-	for (i = 0; i < MAX_NUM_PICTURES; i++)
-	{
-		query_data->pic_data[i].num_slices = 0;
-	}
-	query_data->num_pictures = 0;
-#else
-        ITRACE("pcontext->h264_frame_flag = %d\n", pcontext->h264_frame_flag);
- 
-	if(pcontext->h264_frame_flag == 0)
+    for (i = 0; i < MAX_NUM_PICTURES; i++)
+    {
+        query_data->pic_data[i].num_slices = 0;
+    }
+    query_data->num_pictures = 0;
+
+    cxt->list.num_items = 0;
+
+    /* reset start position of first item to 0 in case there is only one item */
+    cxt->list.data[0].stpos = 0;
+
+    /* start code emulation prevention byte is present in NAL */ 
+    cxt->getbits.is_emul_reqd = 1; 
+
+    if (bitstream_pattern == H264_BS_LENGTH_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t* cubby = NULL;
+        int32_t size_left = 0;
+        int32_t size_parsed = 0;
+        int32_t NAL_length = 0;
+
+        cubby = &(cxt->parse_cubby);
+
+        size_left = cubby->size;
+
+        while (size_left >= NAL_length_size)
         {
-             for (i = 0; i < MAX_NUM_PICTURES; i++)
-             {
-                 query_data->pic_data[i].num_slices = 0;
-             }
-             query_data->num_pictures = 0;
+            NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);    	
+
+            size_parsed += NAL_length_size;
+            cxt->list.data[cxt->list.num_items].stpos = size_parsed;
+            size_parsed += NAL_length; /* skip NAL bytes */
+            /* end position is exclusive */
+            cxt->list.data[cxt->list.num_items].edpos = size_parsed; 
+            cxt->list.num_items++;
+            if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+            {
+                ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
+                break;
+            }
+
+            size_left = cubby->size - size_parsed;
         }
 
-        pcontext->h264_frame_flag = 1;
-#endif
-	
-  	cubby = &(cxt->parse_cubby);
-
-  	cxt->list.num_items = 0;
-
-	/* start code emulation prevention byte is present in NAL */ 
-	cxt->getbits.is_emul_reqd = 1;
-
-  	size_left = cubby->size;
-
-#ifndef ANDROID
-  	while (size_left >= NAL_length_size)
-  	{
-             NAL_length = vbp_get_NAL_length_h264(cubby->buf + size_parsed);    	
-
-    	     size_parsed += NAL_length_size;
-#else
-	while (size_left > 0)
-	{
-	     NAL_length = size_left;
-#endif
-    	  
-    	     cxt->list.data[cxt->list.num_items].stpos = size_parsed;
-    	     size_parsed += NAL_length; /* skip NAL bytes */
-    	     /* end position is exclusive */
-    	     cxt->list.data[cxt->list.num_items].edpos = size_parsed; 
-    	     cxt->list.num_items++;
-    	     if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
-      	     {
-      		ETRACE("num of list items exceeds the limit (%d).", MAX_IBUFS_PER_SC);
-      		break;
-             }  
-      
-             size_left = cubby->size - size_parsed;
-        }
-
-        if (size_left != 0)
+        if (size_left != 0 && length_prefix_verified == 0)
         {
-             WTRACE("Elementary stream is not aligned (%d).", size_left);
-        }
-        return VBP_OK;
+            WTRACE("Elementary stream is not aligned (%d).", size_left);
+
+            /* attempt to correct length prefix to start-code prefix only once; if it succeeds, we will
+             * always treat the bit stream as start-code prefixed; otherwise, treat it as length prefixed
+             */
+            length_prefix_verified = 1;
+            viddec_sc_parse_cubby_cxt_t temp_cubby = cxt->parse_cubby;
+
+            viddec_parser_ops_t *ops = pcontext->parser_ops;
+            int ret = ops->parse_sc((void *)&temp_cubby,
+                NULL, /* context, not used */
+                &(cxt->sc_prefix_info));
+                
+            /* found start code */    	            		    
+            if (ret == 1)
+            {
+                WTRACE("Stream was supposed to be length prefixed, but actually is start-code prefixed.");
+                NAL_length_size = 0;
+                bitstream_pattern = H264_BS_SC_PREFIXED;
+                /* reset parsing data */    	    	   
+                for (i = 0; i < MAX_NUM_PICTURES; i++)
+                {
+                   query_data->pic_data[i].num_slices = 0;
+                }
+                query_data->num_pictures = 0;            	
+                cxt->list.num_items = 0;    	    	
+            }        	
+        }   
+    } 
+
+
+    if (bitstream_pattern == H264_BS_SC_PREFIXED)
+    {
+        viddec_sc_parse_cubby_cxt_t cubby;
+        /*  memory copy without updating cxt->parse_cubby */
+        cubby = cxt->parse_cubby;
+        viddec_parser_ops_t *ops = pcontext->parser_ops;
+        int ret = 0;
+
+        while(1)
+        {
+            ret = ops->parse_sc((void *)&cubby, 
+                NULL, /* context, not used */
+                &(cxt->sc_prefix_info));
+            if(ret == 1)
+            {
+                cubby.phase = 0;
+
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.data[0].stpos = cubby.sc_end_pos;
+                }
+                else
+                {
+                    cxt->list.data[cxt->list.num_items - 1].edpos =
+                    cubby.sc_end_pos + cxt->list.data[cxt->list.num_items - 1].stpos;
+
+                    cxt->list.data[cxt->list.num_items].stpos =
+                    cxt->list.data[cxt->list.num_items - 1].edpos;    		
+                }
+
+                cubby.buf = cxt->parse_cubby.buf +
+                cxt->list.data[cxt->list.num_items].stpos;
+
+                cubby.size = cxt->parse_cubby.size -
+                cxt->list.data[cxt->list.num_items].stpos;
+
+                cxt->list.num_items++;
+                if (cxt->list.num_items >= MAX_IBUFS_PER_SC)
+                {
+                    WTRACE("Num items exceeds the limit!");
+                    /* not fatal, just stop parsing */
+                    break;
+                }
+            }
+            else
+            {
+                if (cxt->list.num_items == 0)
+                {
+                    cxt->list.num_items = 1;
+                    bitstream_pattern = H264_BS_SINGLE_NAL;
+                    WTRACE("Stream was supposed to be SC prefixed, but actually contains a single NAL.");
+                }
+                cxt->list.data[cxt->list.num_items - 1].edpos = cxt->parse_cubby.size;
+                break;
+            }
+        }    
+
+    }
+
+    if (bitstream_pattern == H264_BS_SINGLE_NAL)
+    {
+        cxt->list.num_items = 1;
+        cxt->list.data[0].stpos = 0;
+        cxt->list.data[0].edpos = cxt->parse_cubby.size;
+    }
+
+
+    return VBP_OK;
 }
 
 /**
@@ -1515,10 +1644,11 @@
 
   	struct h264_viddec_parser* parser = NULL;
 	parser = (struct h264_viddec_parser *)&( pcontext->parser_cxt->codec_data[0]);
+	vbp_data_h264* query_data = (vbp_data_h264 *)pcontext->query_data;
 	switch (parser->info.nal_unit_type)
     {
 		case h264_NAL_UNIT_TYPE_SLICE:       		
-       	ITRACE("slice header is parsed."); 
+       	//ITRACE("slice header is parsed.");
        	error = vbp_add_pic_data_h264(pcontext, i);
        	if (VBP_OK == error)
        	{
@@ -1527,7 +1657,7 @@
        	break;
        		
        	case  h264_NAL_UNIT_TYPE_IDR:
-       	ITRACE("IDR header is parsed."); 
+       	//ITRACE("IDR header is parsed.");
        	error = vbp_add_pic_data_h264(pcontext, i);
        	if (VBP_OK == error)
        	{
@@ -1536,31 +1666,34 @@
        	break;
        		
        	case h264_NAL_UNIT_TYPE_SEI:
-		/* ITRACE("SEI header is parsed."); */
+		//ITRACE("SEI header is parsed.");
        	break;
        		
      	case h264_NAL_UNIT_TYPE_SPS:
- 		/*ITRACE("SPS header is parsed."); */
+     	query_data->has_sps = 1;
+     	query_data->has_pps = 0;
+        ITRACE("SPS header is parsed.");
  		break;
        		
        	case h264_NAL_UNIT_TYPE_PPS:
-       	/* ITRACE("PPS header is parsed."); */
+       	query_data->has_pps = 1;
+       	ITRACE("PPS header is parsed.");
        	break;
        		
       	case h264_NAL_UNIT_TYPE_Acc_unit_delimiter:
-       	/* ITRACE("ACC unit delimiter is parsed."); */
+       	//ITRACE("ACC unit delimiter is parsed.");
        	break;
        		
       	case h264_NAL_UNIT_TYPE_EOSeq:
-       	/* ITRACE("EOSeq is parsed."); */
+       	ITRACE("EOSeq is parsed.");
       	break;
  
      	case h264_NAL_UNIT_TYPE_EOstream:
-      	/* ITRACE("EOStream is parsed."); */
+      	 ITRACE("EOStream is parsed.");
        	break;
         		
      	default:  	
-	     WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); 
+	    WTRACE("unknown header %d is parsed.", parser->info.nal_unit_type); 
        	break;
 	}  
 	return error;    		    
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
index e7b4cd6..cfcad5b 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.c
@@ -7,10 +7,6 @@
  */
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
index b57821c..00a921c 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_loader.h
@@ -111,7 +111,10 @@
 	
 	/* video fromat */
 	uint8   	video_signal_type_present_flag; 	
-	uint8  		video_format;  		
+	uint8  		video_format;  
+
+    uint8       pic_order_cnt_type;
+    int         log2_max_pic_order_cnt_lsb_minus4;
 		
 } vbp_codec_data_h264;
 
@@ -141,20 +144,26 @@
 
 typedef struct _vbp_data_h264
 {
-     /* rolling counter of buffers sent by vbp_parse */
-     uint32 buf_number;
+    /* rolling counter of buffers sent by vbp_parse */
+    uint32 buf_number;
 
-	 uint32 num_pictures;
-	 
-	 vbp_picture_data_h264* pic_data;
-	      
-     /** 
-	 * do we need to send matrix to VA for each picture? If not, we need
-     * a flag indicating whether it is updated.
-	 */
-     VAIQMatrixBufferH264* IQ_matrix_buf;
+    uint32 num_pictures;
 
-     vbp_codec_data_h264* codec_data;
+    /* if SPS has been received */         
+    uint8  has_sps;
+
+    /* if PPS has been received */
+    uint8  has_pps;
+
+    vbp_picture_data_h264* pic_data;
+
+    /** 
+        * do we need to send matrix to VA for each picture? If not, we need
+        * a flag indicating whether it is updated.
+        */
+    VAIQMatrixBufferH264* IQ_matrix_buf;
+
+    vbp_codec_data_h264* codec_data;
 
 } vbp_data_h264; 
 
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
index 47408dc..85b32e0 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_mp42_parser.c
@@ -7,10 +7,6 @@
  */
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
@@ -632,7 +628,7 @@
 			getbits = viddec_pm_get_bits(parent, &code, length);
 			BREAK_GETBITS_FAIL(getbits, ret);
 
-			length = code;
+			_macroblock_number = code;
 		}
 
 		/* quant_scale */
@@ -955,6 +951,17 @@
 				break;
 			}
 
+			if (bit_offset)
+			{
+				/* byte-align parsing position */
+				getbits = viddec_pm_get_bits(parent, &code, 8 - bit_offset);
+				if (getbits == -1)
+				{
+					ret = MP4_STATUS_PARSE_ERROR;
+					return ret;
+				}
+			}
+
 			picture_data->number_slices = slice_index;
 		}
 	}
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
index c5d378d..275f43c 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.c
@@ -5,18 +5,11 @@
 
  No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
  */
-
-
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
 
-
 #include <glib.h>
 #include <dlfcn.h>
 
@@ -154,10 +147,6 @@
 	*/
 	error = pcontext->func_init_parser_entries(pcontext);
 
-#ifdef ANDROID
-        pcontext->h264_frame_flag = 0;
-#endif
-
 cleanup:
 
 	if (VBP_OK != error)
@@ -515,7 +504,7 @@
 
 	uint32 error = VBP_OK;
 
-	/* ITRACE("buffer counter: %d",buffer_counter);  */
+    //ITRACE("buffer counter: %d",buffer_counter);
 
 	/* set up emitter. */
 	pcontext->parser_cxt->emitter.cur.data = pcontext->workload1;
@@ -559,10 +548,6 @@
 	{
 		*data = NULL;
 	}
-#ifdef ANDROID
-	pcontext->h264_frame_flag = 0;
-#endif
-
 	return error;
 }
 
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
index 69202f8..67ff3e8 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_utils.h
@@ -76,10 +76,6 @@
 	function_process_parsing_result func_process_parsing_result;
 	function_populate_query_data 	func_populate_query_data;
 
-#ifdef ANDROID
-	uint32 h264_frame_flag;
-#endif
-
 };
 
 /**
diff --git a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
index 5c05c83..d43ada6 100644
--- a/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
+++ b/mix_vbp/viddec_fw/fw/parser/vbp_vc1_parser.c
@@ -8,10 +8,6 @@
 
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
index dc52e5c..2bae85b 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_emit.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
index 6e0b66b..8671ef3 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
index fb690a3..4458834 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_flush.c
@@ -1,8 +1,4 @@
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
index 10bb179..06c6cbd 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_parser_ops.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
index eada1fa..df7d502 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_tags.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
index 731a78e..a90242a 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_bstream.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
index 164710e..adacf2c 100644
--- a/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
+++ b/mix_vbp/viddec_fw/fw/parser/viddec_pm_utils_list.c
@@ -1,9 +1,5 @@
 
 #ifdef ANDROID
-//#ifndef NULL
-//#define NULL (void*)0x0
-//#endif
-
 #define true 1
 #define false 0
 #endif
diff --git a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
index e034b86..acca3ce 100644
--- a/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
+++ b/mix_vbp/viddec_fw/include/viddec_fw_common_defs.h
@@ -200,4 +200,18 @@
     VIDDEC_FW_MPEG2_ERR_CORRUPTED_QMAT_EXT      = (1 << 29),/* Parser detected corruption in quantization matrix extension. */
 };
 
+#ifdef VBP
+
+#ifndef NULL
+#define NULL (void*)0x0
+#endif
+
+#ifndef true
+#define true 1
+#define false 0
+#endif
+
+#endif 
+/* end of #ifdef VBP */
+
 #endif
diff --git a/mix_video/ChangeLog b/mix_video/ChangeLog
index 903e037..2f7a06c 100644
--- a/mix_video/ChangeLog
+++ b/mix_video/ChangeLog
@@ -1,3 +1,24 @@
+2010-09-15 Tao Tao <tao.q.tao@intel.com>
+
+		* Merged the changes for Android
+
+2010-09-02 Andy Qiu <junhai.qiu@intel.com>
+		* Refactored h.264 decoder to support partial frame and byte stream handling and delayed decoder configuration.
+		* Supported MPEG-4 video decoding with "video/x-xvid" mime type.
+		* Rolled version number to 0.1.19
+
+2010-07-29 Andy Qiu <junhai.qiu@intel.com>
+		* use high profile for baseline H.264 contents
+		* Rolled version number to 0.1.18
+
+2010-06-30 Tao Tao <tao.q.tao@intel.com>
+
+		* Added G_BEGIN_DECLS and G_END_DECLS to all header files
+
+2010-06-09 Weian Chen <weian.chen@intel.com>
+
+		* convert DOS line end format to Unix
+
 2010-06-04 Tao Tao <tao.q.tao@intel.com>
 
 		* Rolled version number to 0.1.17
diff --git a/mix_video/configure.ac b/mix_video/configure.ac
index 8768cee..14f9ac2 100644
--- a/mix_video/configure.ac
+++ b/mix_video/configure.ac
@@ -1,8 +1,8 @@
-AC_INIT("", "", [linda.s.cline@intel.com])
+AC_INIT([""],[""],[linda.s.cline@intel.com])
 
 AC_CONFIG_MACRO_DIR(m4)
 
-AS_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 17)
+UMG_MIX_VERSION(mixvideo, MIXVIDEO, 0, 1, 19)
 
 dnl AM_MAINTAINER_MODE provides the option to enable maintainer mode
 AM_MAINTAINER_MODE
@@ -13,11 +13,11 @@
 dnl make aclocal work in maintainer mode
 AC_SUBST(ACLOCAL_AMFLAGS, "-I m4")
 
-AM_CONFIG_HEADER(config.h)
+AC_CONFIG_HEADERS([config.h])
 
 dnl check for tools
 AC_PROG_CC
-AC_PROG_LIBTOOL
+LT_INIT
 
 #MIX_CFLAGS="-Wall -Werror"
 MIX_CFLAGS="-Wall"
diff --git a/mix_video/m4/as-mix-version.m4 b/mix_video/m4/as-mix-version.m4
index f0301b1..82f6c95 100644
--- a/mix_video/m4/as-mix-version.m4
+++ b/mix_video/m4/as-mix-version.m4
@@ -1,9 +1,9 @@
 dnl as-mix-version.m4 
 
-dnl AS_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
+dnl UMG_MIX_VERSION(PACKAGE, PREFIX, MAJOR, MINOR, RELEASE)
 
 dnl example
-dnl AS_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,)
+dnl UMG_MIX_VERSION(mixvideo,MIXVIDEO, 0, 3, 2,)
 dnl for a 0.3.2 release version
 
 dnl this macro
@@ -11,7 +11,7 @@
 dnl - defines [$PREFIX], VERSION
 dnl - AC_SUBST's all defined vars
 
-AC_DEFUN([AS_MIX_VERSION],
+AC_DEFUN([UMG_MIX_VERSION],
 [
   PACKAGE=[$1]
   [$2]_MAJOR=[$3]
diff --git a/mix_video/mixvideo.spec b/mix_video/mixvideo.spec
index 50752eb..df31162 100644
--- a/mix_video/mixvideo.spec
+++ b/mix_video/mixvideo.spec
@@ -6,15 +6,16 @@
 
 Summary: MIX Video
 Name: mixvideo
-Version: 0.1.17
+Version: 0.1.19
 Release: 1
-Source0: %{name}-%{version}.tar.gz
+Source0: %{name}-%{version}.tar.bz2
 NoSource: 0
 License: Proprietary
 Group: System Environment/Libraries
 BuildRoot: %{_tmppath}/%{name}-root
 ExclusiveArch: i586
-Requires: glib2 , mixcommon, mixvbp
+Requires: glib2 , mixcommon, mixvbp, libva, libX11
+BuildRequires: glib2-devel mixcommon-devel mixvbp-devel libva-devel libX11-devel
 
 %description
 MIX Video is an user library interface for various video codecs available on the platform.
diff --git a/mix_video/src/mixbuffer.h b/mix_video/src/mixbuffer.h
index 6977e92..0688442 100644
--- a/mix_video/src/mixbuffer.h
+++ b/mix_video/src/mixbuffer.h
@@ -12,6 +12,8 @@
 #include <mixparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_BUFFER:
  *
@@ -154,4 +156,6 @@
 MIX_RESULT mix_buffer_set_data(MixBuffer * obj, guchar *data, guint size,
 		gulong token, MixBufferCallback callback);
 
+G_END_DECLS
+
 #endif /* __MIX_BUFFER_H__ */
diff --git a/mix_video/src/mixbuffer_private.h b/mix_video/src/mixbuffer_private.h
index 87c9c07..43c8347 100644
--- a/mix_video/src/mixbuffer_private.h
+++ b/mix_video/src/mixbuffer_private.h
@@ -12,6 +12,8 @@
 #include "mixbuffer.h"
 #include "mixbufferpool.h"
 
+G_BEGIN_DECLS
+
 typedef struct _MixBufferPrivate MixBufferPrivate;
 
 struct _MixBufferPrivate
@@ -35,5 +37,6 @@
 MIX_RESULT
 mix_buffer_set_pool (MixBuffer *obj, MixBufferPool *pool);
 
+G_END_DECLS
 
 #endif /* __MIX_BUFFER_PRIVATE_H__ */
diff --git a/mix_video/src/mixdisplayx11.h b/mix_video/src/mixdisplayx11.h
index 8b1788d..3b51f5e 100644
--- a/mix_video/src/mixdisplayx11.h
+++ b/mix_video/src/mixdisplayx11.h
@@ -13,6 +13,8 @@
 #include "mixvideodef.h"
 #include <X11/Xlib.h>
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_DISPLAYX11:
 * 
@@ -173,4 +175,6 @@
 MIX_RESULT mix_displayx11_get_drawable (MixDisplayX11 * obj,
 					Drawable * drawable);
 
+G_END_DECLS
+
 #endif /* __MIX_DISPLAYX11_H__ */
diff --git a/mix_video/src/mixframemanager.c b/mix_video/src/mixframemanager.c
index 569dabd..0a843e6 100644
--- a/mix_video/src/mixframemanager.c
+++ b/mix_video/src/mixframemanager.c
@@ -12,6 +12,14 @@
 #include "mixvideoframe_private.h"
 
 #define INITIAL_FRAME_ARRAY_SIZE 	16
+
+// Assume only one backward reference is used. This will hold up to 2 frames before forcing 
+// the earliest frame out of queue.
+#define MIX_MAX_ENQUEUE_SIZE        2
+
+// RTP timestamp is 32 bits long and could roll over in 13 hours (based on a 90 kHz clock)
+#define TS_ROLLOVER_THRESHOLD          (0xFFFFFFFF/2)
+
 #define MIX_SECOND  (G_USEC_PER_SEC * G_GINT64_CONSTANT (1000))
 
 static GObjectClass *parent_class = NULL;
@@ -32,23 +40,19 @@
 
 	self->flushing = FALSE;
 	self->eos = FALSE;
-	self->frame_array = NULL;
-	self->frame_queue = NULL;
+	self->frame_list = NULL;
 	self->initialized = FALSE;
 
-	self->mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+	self->mode = MIX_DISPLAY_ORDER_UNKNOWN;
 	self->framerate_numerator = 30;
 	self->framerate_denominator = 1;
 
 	self->is_first_frame = TRUE;
-
-	/* for vc1 in asf */
-	self->p_frame = NULL;
-	self->prev_timestamp = 0;
-
-#ifdef ANDROID	
-	self->timestamp_storage = NULL;
-#endif	
+	self->next_frame_timestamp = 0;
+	self->last_frame_timestamp = 0;
+	self->next_frame_picnumber = 0;
+	self->max_enqueue_size = MIX_MAX_ENQUEUE_SIZE;
+	self->max_picture_number = (guint32)-1;
 }
 
 static void mix_framemanager_class_init(MixFrameManagerClass * klass) {
@@ -92,14 +96,16 @@
 /* MixFrameManager class methods */
 
 MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm,
-		MixFrameOrderMode mode, gint framerate_numerator,
-		gint framerate_denominator, gboolean timebased_ordering) {
+		MixDisplayOrderMode mode, gint framerate_numerator,
+		gint framerate_denominator) {
 
-	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
-	if (!MIX_IS_FRAMEMANAGER(fm) || (mode != MIX_FRAMEORDER_MODE_DISPLAYORDER
-			&& mode != MIX_FRAMEORDER_MODE_DECODEORDER) || framerate_numerator
-			<= 0 || framerate_denominator <= 0) {
+	if (!MIX_IS_FRAMEMANAGER(fm) || 
+	    mode <= MIX_DISPLAY_ORDER_UNKNOWN ||
+	    mode >= MIX_DISPLAY_ORDER_LAST || 
+	    framerate_numerator <= 0 || 
+	    framerate_denominator <= 0) {
 		return MIX_RESULT_INVALID_PARAM;
 	}
 
@@ -111,35 +117,15 @@
 		g_thread_init(NULL);
 	}
 
-	ret = MIX_RESULT_NO_MEMORY;
 	if (!fm->lock) {
 		fm->lock = g_mutex_new();
 		if (!fm->lock) {
+            ret = MIX_RESULT_NO_MEMORY;
 			goto cleanup;
 		}
 	}
 
-	if (mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) {
-		fm->frame_array = g_ptr_array_sized_new(INITIAL_FRAME_ARRAY_SIZE);
-		if (!fm->frame_array) {
-			goto cleanup;
-		}
-		
-
-#ifdef ANDROID		
-		fm->timestamp_storage = g_array_sized_new(FALSE, TRUE,
-				sizeof(guint64), INITIAL_FRAME_ARRAY_SIZE);
-		if (!fm->timestamp_storage) {
-			goto cleanup;
-		}
-#endif		
-	}
-
-	fm->frame_queue = g_queue_new();
-	if (!fm->frame_queue) {
-		goto cleanup;
-	}
-
+    fm->frame_list = NULL;
 	fm->framerate_numerator = framerate_numerator;
 	fm->framerate_denominator = framerate_denominator;
 	fm->frame_timestamp_delta = fm->framerate_denominator * MIX_SECOND
@@ -147,26 +133,20 @@
 
 	fm->mode = mode;
 
-	fm->timebased_ordering = timebased_ordering;
+    LOG_V("fm->mode = %d\n",  fm->mode);
+
+	fm->is_first_frame = TRUE;
+	fm->next_frame_timestamp = 0;
+	fm->last_frame_timestamp = 0;
+	fm->next_frame_picnumber = 0;
 
 	fm->initialized = TRUE;
 
-	ret = MIX_RESULT_SUCCESS;
+cleanup:
 
-	cleanup:
-
-	if (ret != MIX_RESULT_SUCCESS) {
-		if (fm->frame_array) {
-			g_ptr_array_free(fm->frame_array, TRUE);
-			fm->frame_array = NULL;
-		}
-		if (fm->frame_queue) {
-			g_queue_free(fm->frame_queue);
-			fm->frame_queue = NULL;
-		}
-	}
 	return ret;
 }
+
 MIX_RESULT mix_framemanager_deinitialize(MixFrameManager *fm) {
 
 	if (!MIX_IS_FRAMEMANAGER(fm)) {
@@ -185,21 +165,6 @@
 
 	g_mutex_lock(fm->lock);
 
-	if (fm->frame_array) {
-		g_ptr_array_free(fm->frame_array, TRUE);
-		fm->frame_array = NULL;
-	}
-	if (fm->frame_queue) {
-		g_queue_free(fm->frame_queue);
-		fm->frame_queue = NULL;
-	}
-#ifdef ANDROID	
-	if (fm->timestamp_storage) {
-		g_array_free(fm->timestamp_storage, TRUE);
-		fm->timestamp_storage = NULL;
-	}
-#endif	
-
 	fm->initialized = FALSE;
 
 	g_mutex_unlock(fm->lock);
@@ -259,8 +224,8 @@
 	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm,
-		MixFrameOrderMode *mode) {
+MIX_RESULT mix_framemanager_get_display_order_mode(MixFrameManager *fm,
+		MixDisplayOrderMode *mode) {
 
 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
@@ -280,798 +245,435 @@
 	return MIX_RESULT_SUCCESS;
 }
 
-MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) {
-
+MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size)
+{
 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
 	}
 
-	if (!fm->initialized) {
-		return MIX_RESULT_NOT_INIT;
+	if (!fm->lock) {
+		return MIX_RESULT_FAIL;
 	}
 
+    if (size <= 0)
+    {
+		return MIX_RESULT_FAIL;
+    }
+    
 	g_mutex_lock(fm->lock);
 
-	/* flush frame_array */
-	if (fm->frame_array) {
-		guint len = fm->frame_array->len;
-		if (len) {
-			guint idx = 0;
-			MixVideoFrame *frame = NULL;
-			for (idx = 0; idx < len; idx++) {
-				frame = (MixVideoFrame *) g_ptr_array_index(fm->frame_array,
-						idx);
-				if (frame) {
-					mix_videoframe_unref(frame);
-					g_ptr_array_index(fm->frame_array, idx) = NULL;
-				}
-			}
-			/* g_ptr_array_remove_range(fm->frame_array, 0, len); */
-		}
-	}
-
-#ifdef ANDROID	
-	if(fm->timestamp_storage) {
-		g_array_remove_range(fm->timestamp_storage, 0, fm->timestamp_storage->len);
-	}
-#endif
-
-	if (fm->frame_queue) {
-		guint len = fm->frame_queue->length;
-		if (len) {
-			MixVideoFrame *frame = NULL;
-			while ((frame = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue))) {
-				mix_videoframe_unref(frame);
-			}
-		}
-	}
-
-	if(fm->p_frame) {
-		mix_videoframe_unref(fm->p_frame);
-		fm->p_frame = NULL;
-	}
-	fm->prev_timestamp = 0;
-
-	fm->eos = FALSE;
-
-	fm->is_first_frame = TRUE;
+	fm->max_enqueue_size = size;
+	LOG_V("max enqueue size is %d\n", size);
 
 	g_mutex_unlock(fm->lock);
 
 	return MIX_RESULT_SUCCESS;
 }
 
-MixVideoFrame *get_expected_frame_from_array(GPtrArray *array,
-		guint64 expected, guint64 tolerance, guint64 *frametimestamp) {
-
-	guint idx = 0;
-	guint len = 0;
-	guint64 timestamp = 0;
-	guint64 lowest_timestamp = (guint64)-1;
-	guint lowest_timestamp_idx = -1;
-	
-	MixVideoFrame *frame = NULL;
-
-	if (!array || !expected || !tolerance || !frametimestamp || expected < tolerance) {
-
-		return NULL;
-	}
-
-	len = array->len;
-	if (!len) {
-		return NULL;
-	}
-
-	for (idx = 0; idx < len; idx++) {
-		MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx);
-		if (_frame) {
-
-			if (mix_videoframe_get_timestamp(_frame, &timestamp)
-					!= MIX_RESULT_SUCCESS) {
-
-				/*
-				 * Oops, this shall never happen!
-				 * In case it heppens, release the frame!
-				 */
-
-				mix_videoframe_unref(_frame);
-
-				/* make an available slot */
-				g_ptr_array_index(array, idx) = NULL;
-
-				break;
-			}
-			
-			if (lowest_timestamp > timestamp)
-			{
-				lowest_timestamp = timestamp;
-				lowest_timestamp_idx = idx;
-			}
-		}
-	}
-	
-	if (lowest_timestamp == (guint64)-1)
-	{
-		return NULL;
-	}
-		
-
-	/* check if this is the expected next frame */
-	if (lowest_timestamp <= expected + tolerance)
-	{
-		MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_timestamp_idx);
-		/* make this slot available */
-		g_ptr_array_index(array, lowest_timestamp_idx) = NULL;
-
-		*frametimestamp = lowest_timestamp;
-		frame = _frame;
-	}
-	
-	return frame;
-}
-
-#ifdef ANDROID
-MixVideoFrame *get_expected_frame_from_array_DO(GPtrArray *array,
-	guint32 expected, guint32 *framedisplayorder) {
-
-    guint idx = 0;
-    guint len = 0;
-    guint32 displayorder = 0;
-    guint32 lowest_displayorder = (guint32)-1;
-    guint lowest_displayorder_idx = -1;
-
-    MixVideoFrame *frame = NULL;
-
-    if (!array || !expected || !framedisplayorder) {
-
-	return NULL;
-    }
-
-    len = array->len;
-    if (!len) {
-	return NULL;
-    }
-
-    for (idx = 0; idx < len; idx++) {
-	MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, idx);
-	if (_frame) {
-
-	    if (mix_videoframe_get_displayorder(_frame, &displayorder)
-		    != MIX_RESULT_SUCCESS) {
-
-		/*
-		 * Oops, this shall never happen!
-		 * In case it heppens, release the frame!
-		 */
-
-		mix_videoframe_unref(_frame);
-
-		/* make an available slot */
-		g_ptr_array_index(array, idx) = NULL;
-
-		break;
-	    }
-
-	    if (lowest_displayorder > displayorder)
-	    {
-		lowest_displayorder = displayorder;
-		lowest_displayorder_idx = idx;
-	    }
-	}
-    }
-
-    if (lowest_displayorder == (guint32)-1)
-    {
-	return NULL;
-    }
-
-
-    /* check if this is the expected next frame */
-    if (lowest_displayorder <= expected)
-    {
-	MixVideoFrame *_frame = (MixVideoFrame *) g_ptr_array_index(array, lowest_displayorder_idx);
-	/* make this slot available */
-	g_ptr_array_index(array, lowest_displayorder_idx) = NULL;
-
-	*framedisplayorder = lowest_displayorder;
-	frame = _frame;
-    }
-
-    return frame;
-}
-#endif /* ANDROID */
-
-void add_frame_into_array(GPtrArray *array, MixVideoFrame *mvf) {
-
-	gboolean found_slot = FALSE;
-	guint len = 0;
-
-	if (!array || !mvf) {
-		return;
-	}
-
-	/* do we have slot for this frame? */
-	len = array->len;
-	if (len) {
-		guint idx = 0;
-		gpointer frame = NULL;
-		for (idx = 0; idx < len; idx++) {
-			frame = g_ptr_array_index(array, idx);
-			if (!frame) {
-				found_slot = TRUE;
-				g_ptr_array_index(array, idx) = (gpointer) mvf;
-				break;
-			}
-		}
-	}
-
-	if (!found_slot) {
-		g_ptr_array_add(array, (gpointer) mvf);
-	}
-
-}
-
-#ifdef ANDROID
-gint frame_sorting_func_DO(gconstpointer a, gconstpointer b) {
-
-    MixVideoFrame *fa = *((MixVideoFrame **) a);
-    MixVideoFrame *fb = *((MixVideoFrame **) b);
-
-    guint32 ta, tb;
-
-    if (!fa && !fb) {
-	return 0;
-    }
-
-    if (fa && !fb) {
-	return 1;
-    }
-
-    if (!fa && fb) {
-	return -1;
-    }
-
-    mix_videoframe_get_displayorder(fa, &ta);
-    mix_videoframe_get_displayorder(fb, &tb);
-
-    if (ta > tb) {
-	return 1;
-    }
-
-    if (ta == tb) {
-	return 0;
-    }
-
-    return -1;
-}
-
-MIX_RESULT mix_framemanager_displayorder_based_enqueue(MixFrameManager *fm,
-	MixVideoFrame *mvf) {
-    /*
-     * display order mode.
-     *
-     * if this is the first frame, we always push it into
-     * output queue, if it is not, check if it is the one
-     * expected, if yes, push it into the output queue.
-     * if not, put it into waiting list.
-     *
-     * while the expected frame is pushed into output queue,
-     * the expected next timestamp is also updated. with this
-     * updated expected next timestamp, we search for expected
-     * frame from the waiting list, if found, repeat the process.
-     *
-     */
-
-    MIX_RESULT ret = MIX_RESULT_FAIL;
-    guint32 displayorder = 0;
-
-
-first_frame:
-
-    ret = mix_videoframe_get_displayorder(mvf, &displayorder);
-    if (ret != MIX_RESULT_SUCCESS) {
-	goto cleanup;
-    }
-
-    if (fm->is_first_frame) {
-
-	/*
-	 * for the first frame, we can always put it into the output queue
-	 */
-	g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-	/*
-	 * what displayorder of next frame shall be?
-	 */
-	fm->next_displayorder = displayorder + 1;
-
-	fm->is_first_frame = FALSE;
-
-    } else {
-
-	/*
-	 * If displayorder is 0, send all the frames in the array to the queue
-	 */ 
-	if(displayorder == 0) {
-	    if (fm->frame_array) {
-		guint len = fm->frame_array->len;
-		if (len) {
-
-		    /* sorting frames in the array by displayorder */
-		    g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO);
-
-		    guint idx = 0;
-		    MixVideoFrame *frame = NULL;
-		    for (idx = 0; idx < len; idx++) {
-			frame = (MixVideoFrame *) g_ptr_array_index(
-				fm->frame_array, idx);
-			if (frame) {
-			    g_ptr_array_index(fm->frame_array, idx) = NULL;
-			    g_queue_push_tail(fm->frame_queue, (gpointer) frame);
-			}
-		    }
-		}
-	    }
-
-	    g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-	    /*
-	     * what displayorder of next frame shall be?
-	     */
-	    fm->next_displayorder = displayorder + 1;
-
-	} else {
-
-	    /*
-	     * is this the next frame expected?
-	     */
-
-	    /* calculate tolerance */
-	    MixVideoFrame *frame_from_array = NULL;
-	    guint32 displayorder_frame_array = 0;
-
-	    /*
-	     * timestamp may be associated with the second field, which
-	     * will not fall between the tolerance range. 
-	     */
-
-	    if (displayorder <= fm->next_displayorder) {
-
-		/*
-		 * ok, this is the frame expected, push it into output queue
-		 */
-		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-		/*
-		 * update next_frame_timestamp only if it falls within the tolerance range
-		 */
-		if (displayorder == fm->next_displayorder)
-		{ 
-		    fm->next_displayorder = displayorder + 1;
-		}
-
-		/*
-		 * since we updated next_displayorder, there might be a frame
-		 * in the frame_array that satisfying this new next_displayorder
-		 */
-
-		while ((frame_from_array = get_expected_frame_from_array_DO(
-				fm->frame_array, fm->next_displayorder, 
-				&displayorder_frame_array))) {
-
-		    g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array);
-
-		    /*
-		     * update next_frame_timestamp only if it falls within the tolerance range
-		     */				
-		    if (displayorder_frame_array >= fm->next_displayorder)
-		    {
-			fm->next_displayorder = displayorder_frame_array + 1;
-		    }
-		}
-
-	    } else {
-
-		/*
-		 * is discontinuity flag set for this frame ?
-		 */
-		gboolean discontinuity = FALSE;
-		ret = mix_videoframe_get_discontinuity(mvf, &discontinuity);
-		if (ret != MIX_RESULT_SUCCESS) {
-		    goto cleanup;
-		}
-
-		/*
-		 * If this is a frame with discontinuity flag set, clear frame_array
-		 * and treat the frame as the first frame.
-		 */
-		if (discontinuity) {
-
-		    guint len = fm->frame_array->len;
-		    if (len) {
-			guint idx = 0;
-			MixVideoFrame *frame = NULL;
-			for (idx = 0; idx < len; idx++) {
-			    frame = (MixVideoFrame *) g_ptr_array_index(
-				    fm->frame_array, idx);
-			    if (frame) {
-				mix_videoframe_unref(frame);
-				g_ptr_array_index(fm->frame_array, idx) = NULL;
-			    }
-			}
-		    }
-
-		    fm->is_first_frame = TRUE;
-		    goto first_frame;
-		}
-
-		/*
-		 * this is not the expected frame, put it into frame_array
-		 */					
-		add_frame_into_array(fm->frame_array, mvf);
-	    }
-	}
-    }
-cleanup:
-
-    return ret;
-}
-#endif /* ANDROID */
-
-
-MIX_RESULT mix_framemanager_timestamp_based_enqueue(MixFrameManager *fm,
-		MixVideoFrame *mvf) {
-	/*
-	 * display order mode.
-	 *
-	 * if this is the first frame, we always push it into
-	 * output queue, if it is not, check if it is the one
-	 * expected, if yes, push it into the output queue.
-	 * if not, put it into waiting list.
-	 *
-	 * while the expected frame is pushed into output queue,
-	 * the expected next timestamp is also updated. with this
-	 * updated expected next timestamp, we search for expected
-	 * frame from the waiting list, if found, repeat the process.
-	 *
-	 */
-
-	MIX_RESULT ret = MIX_RESULT_FAIL;
-	guint64 timestamp = 0;
-
-	first_frame:
-
-	ret = mix_videoframe_get_timestamp(mvf, &timestamp);
-	if (ret != MIX_RESULT_SUCCESS) {
-		goto cleanup;
-	}
-
-	if (fm->is_first_frame) {
-
-		/*
-		 * for the first frame, we can always put it into the output queue
-		 */
-		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-		/*
-		 * what timestamp of next frame shall be?
-		 */
-		fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta;
-
-		fm->is_first_frame = FALSE;
-
-	} else {
-
-		/*
-		 * is this the next frame expected?
-		 */
-
-		/* calculate tolerance */
-		guint64 tolerance = fm->frame_timestamp_delta / 4;
-		MixVideoFrame *frame_from_array = NULL;
-		guint64 timestamp_frame_array = 0;
-
-		/*
-		* timestamp may be associated with the second field, which
-		* will not fall between the tolerance range. 
-		*/
-
-		if (timestamp <= fm->next_frame_timestamp + tolerance) {
-
-			/*
-			 * ok, this is the frame expected, push it into output queue
-			 */
-			g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-			/*
-			 * update next_frame_timestamp only if it falls within the tolerance range
-			 */
-			if (timestamp >= fm->next_frame_timestamp - tolerance)
-			{ 
-				fm->next_frame_timestamp = timestamp + fm->frame_timestamp_delta;
-			}
-			
-			/*
-			 * since we updated next_frame_timestamp, there might be a frame
-			 * in the frame_array that satisfying this new next_frame_timestamp
-			 */
-
-			while ((frame_from_array = get_expected_frame_from_array(
-					fm->frame_array, fm->next_frame_timestamp, tolerance,
-					&timestamp_frame_array))) {
-
-				g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array);
-				
-				/*
-			 	* update next_frame_timestamp only if it falls within the tolerance range
-			 	*/				
-				if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance)
-				{
-					fm->next_frame_timestamp = timestamp_frame_array
-							+ fm->frame_timestamp_delta;
-				}
-			}
-
-		} else {
-
-			/*
-			 * is discontinuity flag set for this frame ?
-			 */
-			gboolean discontinuity = FALSE;
-			ret = mix_videoframe_get_discontinuity(mvf, &discontinuity);
-			if (ret != MIX_RESULT_SUCCESS) {
-				goto cleanup;
-			}
-
-			/*
-			 * If this is a frame with discontinuity flag set, clear frame_array
-			 * and treat the frame as the first frame.
-			 */
-			if (discontinuity) {
-
-				guint len = fm->frame_array->len;
-				if (len) {
-					guint idx = 0;
-					MixVideoFrame *frame = NULL;
-					for (idx = 0; idx < len; idx++) {
-						frame = (MixVideoFrame *) g_ptr_array_index(
-								fm->frame_array, idx);
-						if (frame) {
-							mix_videoframe_unref(frame);
-							g_ptr_array_index(fm->frame_array, idx) = NULL;
-						}
-					}
-				}
-
-				fm->is_first_frame = TRUE;
-				goto first_frame;
-			}
-
-			/*
-			 * handle variable frame rate:
-			 * display any frame which time stamp is less than current one. 
-			 * 
-			 */
-			guint64 tolerance = fm->frame_timestamp_delta / 4;
-			MixVideoFrame *frame_from_array = NULL;
-			guint64 timestamp_frame_array = 0;
-
-			while ((frame_from_array = get_expected_frame_from_array(
-					fm->frame_array, timestamp, tolerance,
-					&timestamp_frame_array)))
-			{
-				g_queue_push_tail(fm->frame_queue, (gpointer) frame_from_array);
-				
-				/*
-			 	* update next_frame_timestamp only if it falls within the tolerance range
-			 	*/				
-				if (timestamp_frame_array >= fm->next_frame_timestamp - tolerance)
-				{
-					fm->next_frame_timestamp = timestamp_frame_array
-							+ fm->frame_timestamp_delta;
-				}
-			}
-			/*
-			 * this is not the expected frame, put it into frame_array
-			 */					
-
-			add_frame_into_array(fm->frame_array, mvf);
-		}
-	}
-	cleanup:
-
-	return ret;
-}
-
-MIX_RESULT mix_framemanager_frametype_based_enqueue(MixFrameManager *fm,
-		MixVideoFrame *mvf) {
-
-	MIX_RESULT ret = MIX_RESULT_FAIL;
-	MixFrameType frame_type;
-	guint64 timestamp = 0;
-
-	ret = mix_videoframe_get_frame_type(mvf, &frame_type);
-	if (ret != MIX_RESULT_SUCCESS) {
-		goto cleanup;
-	}
-
-	ret = mix_videoframe_get_timestamp(mvf, &timestamp);
-	if (ret != MIX_RESULT_SUCCESS) {
-		goto cleanup;
-	}
-
-#ifdef MIX_LOG_ENABLE
-	if (frame_type == TYPE_I) {
-		LOG_I( "TYPE_I %"G_GINT64_FORMAT"\n", timestamp);
-	} else if (frame_type == TYPE_P) {
-		LOG_I( "TYPE_P %"G_GINT64_FORMAT"\n", timestamp);
-	} else if (frame_type == TYPE_B) {
-		LOG_I( "TYPE_B %"G_GINT64_FORMAT"\n", timestamp);
-	} else {
-		LOG_I( "TYPE_UNKNOWN %"G_GINT64_FORMAT"\n", timestamp);
-	}
-#endif
-
-	if (fm->is_first_frame) {
-		/*
-		 * The first frame is not a I frame, unexpected!
-		 */
-		if (frame_type != TYPE_I) {
-			goto cleanup;
-		}
-
-		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-		fm->is_first_frame = FALSE;
-	} else {
-
-		/*
-		 * I P B B P B B ...
-		 */
-		if (frame_type == TYPE_I || frame_type == TYPE_P) {
-
-			if (fm->p_frame) {
-
-				ret = mix_videoframe_set_timestamp(fm->p_frame,
-						fm->prev_timestamp);
-				if (ret != MIX_RESULT_SUCCESS) {
-					goto cleanup;
-				}
-
-				g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame);
-				fm->p_frame = NULL;
-			}
-
-			/* it is an I frame, push it into the out queue */
-			/*if (frame_type == TYPE_I) {
-
-			 g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-			 } else*/
-			{
-				/* it is a P frame, we can not push it to the out queue yet, save it */
-				fm->p_frame = mvf;
-				fm->prev_timestamp = timestamp;
-			}
-
-			ret = MIX_RESULT_SUCCESS;
-
-		} else {
-			/* it is a B frame, replace the timestamp with the previous one */
-			if (timestamp > fm->prev_timestamp) {
-				ret = mix_videoframe_set_timestamp(mvf, fm->prev_timestamp);
-				if (ret != MIX_RESULT_SUCCESS) {
-					goto cleanup;
-				}
-
-				/* save the timestamp */
-				fm->prev_timestamp = timestamp;
-			}
-			g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-			ret = MIX_RESULT_SUCCESS;
-		}
-	}
-
-	cleanup:
-
-	return ret;
-}
-
-MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) {
-
-	MIX_RESULT ret = MIX_RESULT_FAIL;
-
-	/*fm->mode = MIX_FRAMEORDER_MODE_DECODEORDER;*/
-
-	if (!mvf) {
-		return MIX_RESULT_INVALID_PARAM;
-	}
-
+MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 num)
+{
+    // NOTE: set maximum picture order number only if pic_order_cnt_type is 0  (see H.264 spec)
 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
 	}
 
-	if (!fm->initialized) {
-		return MIX_RESULT_NOT_INIT;
-	}
-
-	/*
-	 * This should never happen!
-	 */
-	if (fm->mode != MIX_FRAMEORDER_MODE_DISPLAYORDER && fm->mode
-			!= MIX_FRAMEORDER_MODE_DECODEORDER) {
+	if (!fm->lock) {
 		return MIX_RESULT_FAIL;
 	}
 
+    if (num < 16)
+    {
+        // Refer to H.264 spec: log2_max_pic_order_cnt_lsb_minus4. Max pic order will never be less than 16.
+		return MIX_RESULT_INVALID_PARAM;
+    }
+    
 	g_mutex_lock(fm->lock);
 
-	ret = MIX_RESULT_SUCCESS;
-	if (fm->mode == MIX_FRAMEORDER_MODE_DECODEORDER) {
-		/*
-		 * decode order mode, push the frame into output queue
-		 */
-		g_queue_push_tail(fm->frame_queue, (gpointer) mvf);
-
-	} else {
-
-#ifdef ANDROID
-		guint64 timestamp = 0;
-		mix_videoframe_get_timestamp(mvf, &timestamp);
-
-		/* add timestamp into timestamp storage */
-		if(fm->timestamp_storage) {
-			gint idx = 0;
-			gboolean found = FALSE;
-
-			if(fm->timestamp_storage->len) {
-				for(idx = 0; idx < fm->timestamp_storage->len; idx ++) {
-					if(timestamp == g_array_index(fm->timestamp_storage, guint64, idx)) {
-						found = TRUE;
-						break;
-					}
-				}
-			}
-
-			if(!found) {
-				g_array_append_val(fm->timestamp_storage, timestamp);
-			}
-		}
-#endif
-
-		if (fm->timebased_ordering) {
-#ifndef ANDROID
-			ret = mix_framemanager_timestamp_based_enqueue(fm, mvf);
-#else
-			ret = mix_framemanager_displayorder_based_enqueue(fm, mvf);
-#endif
-
-		} else {
-			ret = mix_framemanager_frametype_based_enqueue(fm, mvf);
-		}
-	}
+    // max_picture_number is exclusive (range from 0 to num - 1).
+    // Note that this number may not be reliable if the encoder does not conform to the spec; in that case, the
+    // implementation will not automatically roll over fm->next_frame_picnumber when it reaches 
+    // fm->max_picture_number.
+	fm->max_picture_number = num;
+	LOG_V("max picture number is %d\n", num);
 
 	g_mutex_unlock(fm->lock);
 
+	return MIX_RESULT_SUCCESS;
+
+}
+
+
+MIX_RESULT mix_framemanager_flush(MixFrameManager *fm) {
+
+    MixVideoFrame *frame = NULL;
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+	g_mutex_lock(fm->lock);
+
+	while (fm->frame_list)
+	{
+	    frame = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
+        fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)frame);
+        mix_videoframe_unref(frame);
+	    LOG_V("one frame is flushed\n");
+    };     
+
+	fm->eos = FALSE;
+	fm->is_first_frame = TRUE;
+	fm->next_frame_timestamp = 0;
+	fm->last_frame_timestamp = 0;
+	fm->next_frame_picnumber = 0;
+
+	g_mutex_unlock(fm->lock);
+
+	return MIX_RESULT_SUCCESS;
+}
+
+
+MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf) {
+
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    LOG_V("Begin fm->mode = %d\n", fm->mode);
+
+	if (!mvf) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	if (!MIX_IS_FRAMEMANAGER(fm)) {
+		return MIX_RESULT_INVALID_PARAM;
+	}
+
+	if (!fm->initialized) {
+		return MIX_RESULT_NOT_INIT;
+	}
+
+    gboolean discontinuity = FALSE;
+    mix_videoframe_get_discontinuity(mvf, &discontinuity);
+    if (discontinuity)
+    {
+        LOG_V("current frame has discontinuity!\n");
+        mix_framemanager_flush(fm);
+    }
+#ifdef MIX_LOG_ENABLE
+    if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER)
+    {
+        guint32 num;
+        mix_videoframe_get_displayorder(mvf, &num);
+        LOG_V("pic %d is enqueued.\n", num);
+    }
+
+    if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP)
+    {
+        guint64 ts;
+        mix_videoframe_get_timestamp(mvf, &ts);
+        LOG_V("ts %"G_GINT64_FORMAT" is enqueued.\n", ts);
+    }
+#endif
+
+	g_mutex_lock(fm->lock);	
+    fm->frame_list = g_slist_append(fm->frame_list, (gpointer)mvf);
+	g_mutex_unlock(fm->lock);
+	
+    LOG_V("End\n");
+
 	return ret;
 }
 
-#ifdef ANDROID
-gint timestamp_storage_sorting_func(gconstpointer a, gconstpointer b) {
+void mix_framemanager_update_timestamp(MixFrameManager *fm, MixVideoFrame *mvf) 
+{
+    // this function finds the lowest timestamp in the list and assigns it to the dequeued video frame,
+    // if that timestamp is smaller than the timestamp of the dequeued video frame.
+    int i;
+    guint64 ts, min_ts;
+    MixVideoFrame *p, *min_p;
+    int len = g_slist_length(fm->frame_list);
+    if (len == 0)
+    {
+        // nothing to update
+        return;
+    }
+    
+    // find video frame with the smallest timestamp, take rollover into account when
+    // comparing timestamp.
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_timestamp(p, &ts);
+        if (i == 0 ||
+            (ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
+            (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD))        
+        {
+            min_ts = ts;
+            min_p = p;
+        }       
+    }
 
-	guint64 ta = *((guint64 *)a);
-	guint64 tb = *((guint64 *)b);
-
-	if(ta > tb) {
-		return +1;
-	} else if(ta == tb) {
-		return 0;
-	}
-	return -1;
+    mix_videoframe_get_timestamp(mvf, &ts);
+    if ((ts < min_ts && min_ts - ts < TS_ROLLOVER_THRESHOLD) ||
+        (ts > min_ts && ts - min_ts > TS_ROLLOVER_THRESHOLD)) 
+    {
+        // frame to be updated has smaller time stamp
+    }  
+    else
+    {
+        // time stamp needs to be monotonically non-decreasing so swap timestamp.
+        mix_videoframe_set_timestamp(mvf, min_ts);
+        mix_videoframe_set_timestamp(min_p, ts);
+        LOG_V("timestamp for current frame is updated from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT"\n",
+            ts, min_ts);
+    }
 }
+
+
+MIX_RESULT mix_framemanager_pictype_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) 
+{
+    int i, num_i_or_p;
+    MixVideoFrame *p, *first_i_or_p;
+    MixFrameType type;
+    int len = g_slist_length(fm->frame_list);
+
+    num_i_or_p = 0;
+    first_i_or_p = NULL;
+    
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_frame_type(p, &type);
+        if (type == TYPE_B)
+        {
+            // A B frame has higher display priority, as only one reference frame is kept in the list
+            // and it should be the backward reference frame for the B frame.
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            mix_framemanager_update_timestamp(fm, p);
+            *mvf = p;
+            LOG_V("B frame is dequeued.\n");
+            return MIX_RESULT_SUCCESS;
+        }  
+        
+        if (type != TYPE_I && type != TYPE_P)
+        {
+            // this should never happen 
+            LOG_E("Frame typs is invalid!!!\n");
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            mix_videoframe_unref(p);
+            return MIX_RESULT_FRAME_NOTAVAIL;                       
+        }
+        num_i_or_p++;
+        if (first_i_or_p == NULL)
+        {
+            first_i_or_p = p;
+        }
+    }
+
+    // if there are more than one reference frame in the list, the first one is dequeued. 
+    if (num_i_or_p > 1 || fm->eos)
+    {
+        if (first_i_or_p == NULL)
+        {
+            // this should never happen!
+            LOG_E("first_i_or_p frame is NULL!\n");
+            return MIX_RESULT_FAIL;
+        }
+        fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)first_i_or_p);
+        mix_framemanager_update_timestamp(fm, first_i_or_p);
+        *mvf = first_i_or_p;
+#ifdef MIX_LOG_ENABLE
+        mix_videoframe_get_frame_type(first_i_or_p, &type);
+        if (type == TYPE_I)
+        {
+            LOG_V("I frame is dequeued.\n");
+        }
+        else
+        {
+            LOG_V("P frame is dequeued.\n");
+        }                    
 #endif
+        return MIX_RESULT_SUCCESS;            
+    }
+    
+    return MIX_RESULT_FRAME_NOTAVAIL;   
+}
+
+MIX_RESULT mix_framemanager_timestamp_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) 
+{
+    int i, len;
+    MixVideoFrame *p, *p_out_of_dated;
+    guint64 ts, ts_next_pending, ts_out_of_dated;
+    guint64 tolerance = fm->frame_timestamp_delta/4;
+
+retry:    
+    // len may be changed during retry!
+    len = g_slist_length(fm->frame_list);
+    ts_next_pending = (guint64)-1; 
+    ts_out_of_dated = 0;
+    p_out_of_dated = NULL;
+    
+    
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_timestamp(p, &ts);
+        if (ts >= fm->last_frame_timestamp && 
+            ts <= fm->next_frame_timestamp + tolerance)
+        {
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            *mvf = p;
+            mix_videoframe_get_timestamp(p, &(fm->last_frame_timestamp));
+            fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;            
+            LOG_V("frame is dequeud, ts = %"G_GINT64_FORMAT".\n", ts);
+            return MIX_RESULT_SUCCESS;
+        }
+
+        if (ts > fm->next_frame_timestamp + tolerance &&
+            ts < ts_next_pending)
+        {
+            ts_next_pending = ts;
+        }
+        if (ts < fm->last_frame_timestamp && 
+            ts >= ts_out_of_dated)
+        {
+            // video frame that has most recently become outdated.
+            // this may happen in a variable frame rate scenario where two adjacent frames both meet
+            // the "next frame" criteria, and the one with the larger timestamp is dequeued first.
+            ts_out_of_dated = ts;
+            p_out_of_dated = p;
+        }        
+    }
+
+    if (p_out_of_dated && 
+        fm->last_frame_timestamp - ts_out_of_dated < TS_ROLLOVER_THRESHOLD)
+    {
+        fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p_out_of_dated);
+        mix_videoframe_unref(p_out_of_dated);
+        LOG_W("video frame is out of dated. ts = %"G_GINT64_FORMAT" compared to last ts =  %"G_GINT64_FORMAT".\n",
+            ts_out_of_dated, fm->last_frame_timestamp);
+        return MIX_RESULT_FRAME_NOTAVAIL;
+    }
+    
+    if (len <= fm->max_enqueue_size && fm->eos == FALSE)
+    {
+        LOG_V("no frame is dequeued, expected ts = %"G_GINT64_FORMAT", next pending ts = %"G_GINT64_FORMAT".(List size = %d)\n", 
+            fm->next_frame_timestamp, ts_next_pending, len);
+        return MIX_RESULT_FRAME_NOTAVAIL;
+    }
+
+    // timestamp has gap
+    if (ts_next_pending != -1)
+    {
+        LOG_V("timestamp has gap, jumping from %"G_GINT64_FORMAT" to %"G_GINT64_FORMAT".\n",
+                fm->next_frame_timestamp, ts_next_pending);
+                
+        fm->next_frame_timestamp = ts_next_pending;
+        goto retry;
+    }
+
+    // time stamp roll-over
+    LOG_V("time stamp is rolled over, resetting next frame timestamp from %"G_GINT64_FORMAT" to 0.\n", 
+        fm->next_frame_timestamp);
+
+    fm->next_frame_timestamp = 0;
+    fm->last_frame_timestamp = 0;
+    goto retry;
+
+    // should never be reached (both branches above end in goto retry)
+    LOG_E("Error in timestamp-based dequeue implementation!\n");
+    return MIX_RESULT_FAIL;
+}
+
+MIX_RESULT mix_framemanager_picnumber_based_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) 
+{
+    int i, len;
+    MixVideoFrame* p;
+    guint32 picnum, smallest_picnum;
+    guint32 next_picnum_pending;
+
+    len = g_slist_length(fm->frame_list);
+
+retry:    
+    next_picnum_pending = (guint32)-1;
+    smallest_picnum = (guint32)-1;
+    
+    for (i = 0; i < len; i++)
+    {
+        p = (MixVideoFrame*)g_slist_nth_data(fm->frame_list, i);
+        mix_videoframe_get_displayorder(p, &picnum);
+        if (picnum == fm->next_frame_picnumber)
+        {
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)p);
+            mix_framemanager_update_timestamp(fm, p);
+            *mvf = p;           
+            LOG_V("frame is dequeued, poc = %d.\n", fm->next_frame_picnumber);
+            fm->next_frame_picnumber++;
+            //if (fm->next_frame_picnumber == fm->max_picture_number)
+            //    fm->next_frame_picnumber = 0;            
+            return MIX_RESULT_SUCCESS;
+        }
+
+        if (picnum > fm->next_frame_picnumber &&
+            picnum < next_picnum_pending)
+        {
+            next_picnum_pending = picnum;
+        }
+
+        if (picnum < fm->next_frame_picnumber &&
+            picnum < smallest_picnum)
+        {
+            smallest_picnum = picnum;
+        }
+    }
+
+    if (smallest_picnum != (guint32)-1 && fm->next_frame_picnumber - smallest_picnum < 8)
+    {
+        // the smallest possible value of MaxPicOrderCntLsb is 16 (when "max_pic_order_cnt_lsb_minus4" is 0).
+        // If the distance from "next frame pic number" to the smallest pic number in the list is less than
+        // half of 16, it is safe to assume the pic number was reset by a new IDR (where the pic number of a top or bottom field must be 0, subclause 8.2.1).
+        LOG_V("next frame number is reset from %d to 0, smallest picnumber in list (size = %d) is %d.\n",
+            fm->next_frame_picnumber, len, smallest_picnum);
+        fm->next_frame_picnumber = 0;
+        goto retry;
+    }
+    
+    if (len <= fm->max_enqueue_size && fm->eos == FALSE)
+    {
+        LOG_V("No frame is dequeued. Expected POC = %d, next pending POC = %d. (List size = %d)\n", 
+                fm->next_frame_picnumber, next_picnum_pending, len);
+        return MIX_RESULT_FRAME_NOTAVAIL;
+    }
+
+    // picture number  has gap
+    if (next_picnum_pending != -1)
+    {
+        LOG_V("picture number has gap, jumping from %d to %d.\n",
+                fm->next_frame_picnumber, next_picnum_pending);
+                
+        fm->next_frame_picnumber = next_picnum_pending;
+        goto retry;
+    }
+
+    // picture number roll-over
+    LOG_V("picture number is rolled over, resetting next picnum from %d to 0.\n", 
+        fm->next_frame_picnumber);
+
+    fm->next_frame_picnumber = 0;
+    goto retry;
+
+    // should never be reached (both branches above end in goto retry)
+    LOG_E("Error in picnumber-based dequeue implementation!\n");
+    return MIX_RESULT_FAIL;
+}
 
 MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf) {
 
-	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    LOG_V("Begin\n");
 
 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
@@ -1087,66 +689,99 @@
 
 	g_mutex_lock(fm->lock);
 
-	ret = MIX_RESULT_FRAME_NOTAVAIL;
-	*mvf = (MixVideoFrame *) g_queue_pop_head(fm->frame_queue);
-	if (*mvf) {
-#ifdef ANDROID
-		if(fm->timestamp_storage && fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) {
-			if(fm->timestamp_storage->len) {
-				guint64 ts;
-				g_array_sort(fm->timestamp_storage, timestamp_storage_sorting_func);
-				ts = g_array_index(fm->timestamp_storage, guint64, 0);
-				mix_videoframe_set_timestamp(*mvf, ts);
-				g_array_remove_index_fast(fm->timestamp_storage, 0);
-			}
-		}
-#endif	
-		ret = MIX_RESULT_SUCCESS;
-	} else if (fm->eos) {
-		ret = MIX_RESULT_EOS;
+	if (fm->frame_list == NULL)
+	{
+	    if (fm->eos)
+	    {
+	        LOG_V("No frame is dequeued (eos)!\n");
+	        ret = MIX_RESULT_EOS;
+        }
+        else
+        {
+            LOG_V("No frame is dequeued as queue is empty!\n");
+            ret = MIX_RESULT_FRAME_NOTAVAIL;
+        }            
+	}
+	else if (fm->is_first_frame)
+	{
+	    // dequeue the first entry in the list. No need to update the timestamp as
+	    // the list should contain only one frame.
+#ifdef MIX_LOG_ENABLE	    
+    	if (g_slist_length(fm->frame_list) != 1)
+    	{
+    	    LOG_W("length of list is not equal to 1 for the first frame.\n");    	    
+    	}
+#endif    	
+        *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
+        fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf));
+
+        if (fm->mode == MIX_DISPLAY_ORDER_TIMESTAMP)
+        {            
+            mix_videoframe_get_timestamp(*mvf, &(fm->last_frame_timestamp));             
+            fm->next_frame_timestamp = fm->last_frame_timestamp + fm->frame_timestamp_delta;
+            LOG_V("The first frame is dequeued, ts = %"G_GINT64_FORMAT"\n", fm->last_frame_timestamp);
+        }
+        else if (fm->mode == MIX_DISPLAY_ORDER_PICNUMBER)
+        {            
+            mix_videoframe_get_displayorder(*mvf, &(fm->next_frame_picnumber));
+            LOG_V("The first frame is dequeued, POC = %d\n", fm->next_frame_picnumber);
+            fm->next_frame_picnumber++;
+            //if (fm->next_frame_picnumber == fm->max_picture_number)
+             //   fm->next_frame_picnumber = 0;
+        }
+        else
+        {
+#ifdef MIX_LOG_ENABLE     
+            MixFrameType type;
+            mix_videoframe_get_frame_type(*mvf, &type);
+            LOG_V("The first frame is dequeud, frame type is %d.\n", type);
+#endif            
+        }
+	    fm->is_first_frame = FALSE;
+	    
+        ret = MIX_RESULT_SUCCESS;	       
+	}
+	else
+	{
+	    // not the first frame and list is not empty
+        switch(fm->mode)
+        {
+        case MIX_DISPLAY_ORDER_TIMESTAMP:
+            ret = mix_framemanager_timestamp_based_dequeue(fm, mvf);
+            break;
+
+        case MIX_DISPLAY_ORDER_PICNUMBER:
+            ret = mix_framemanager_picnumber_based_dequeue(fm, mvf);
+            break;
+
+        case MIX_DISPLAY_ORDER_PICTYPE:
+            ret = mix_framemanager_pictype_based_dequeue(fm, mvf);
+            break;
+
+        case MIX_DISPLAY_ORDER_FIFO:        
+            *mvf = (MixVideoFrame*) g_slist_nth_data(fm->frame_list, 0);
+            fm->frame_list = g_slist_remove(fm->frame_list, (gconstpointer)(*mvf));
+            ret = MIX_RESULT_SUCCESS;          
+            LOG_V("One frame is dequeued.\n");
+            break;
+            
+        default:         
+            LOG_E("Invalid frame order mode\n");
+            ret = MIX_RESULT_FAIL;
+            break;
+	    }
 	}
 
 	g_mutex_unlock(fm->lock);
 
+    LOG_V("End\n");
+
 	return ret;
 }
 
-gint frame_sorting_func(gconstpointer a, gconstpointer b) {
-
-	MixVideoFrame *fa = *((MixVideoFrame **) a);
-	MixVideoFrame *fb = *((MixVideoFrame **) b);
-
-	guint64 ta, tb;
-
-	if (!fa && !fb) {
-		return 0;
-	}
-
-	if (fa && !fb) {
-		return 1;
-	}
-
-	if (!fa && fb) {
-		return -1;
-	}
-
-	mix_videoframe_get_timestamp(fa, &ta);
-	mix_videoframe_get_timestamp(fb, &tb);
-
-	if (ta > tb) {
-		return 1;
-	}
-
-	if (ta == tb) {
-		return 0;
-	}
-
-	return -1;
-}
-
 MIX_RESULT mix_framemanager_eos(MixFrameManager *fm) {
 
-	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
 	if (!MIX_IS_FRAMEMANAGER(fm)) {
 		return MIX_RESULT_INVALID_PARAM;
@@ -1156,52 +791,9 @@
 		return MIX_RESULT_NOT_INIT;
 	}
 
-	g_mutex_lock(fm->lock);
-	
-	if (fm->mode == MIX_FRAMEORDER_MODE_DISPLAYORDER) {
-
-		/* Do we have frames that are not in the output queue?
-		 * MixVideoFormat* must guarantee that when this 
-		 * function called, the last frame is already enqueued! 
-		 */
-
-		/* In case it is frame type based enqueue, p_frame is the
-		 * only frame that is not in the output queue
-		 */
-		if (fm->p_frame && fm->frame_queue) {
-			g_queue_push_tail(fm->frame_queue, (gpointer) fm->p_frame);
-			fm->p_frame = NULL;
-		}		
-		
-		/* In case it is timestamp based enqueue, throw all the frames
-		 * in the array into the output queue by the order of timestamp
-		 */		
-		if (fm->frame_array) {
-			guint len = fm->frame_array->len;
-			if (len) {
-#ifndef ANDROID				
-				/* sorting frames in the array by timestamp */
-				g_ptr_array_sort(fm->frame_array, frame_sorting_func);
-#else
-                /* sorting frames is the array by displayorder */  
-				g_ptr_array_sort(fm->frame_array, frame_sorting_func_DO);
-#endif
-				
-				guint idx = 0;
-				MixVideoFrame *frame = NULL;
-				for (idx = 0; idx < len; idx++) {
-					frame = (MixVideoFrame *) g_ptr_array_index(
-							fm->frame_array, idx);
-					if (frame) {
-						g_ptr_array_index(fm->frame_array, idx) = NULL;
-						g_queue_push_tail(fm->frame_queue, (gpointer) frame);
-					}
-				}
-			}
-		}
-	}
-	
+	g_mutex_lock(fm->lock);		
 	fm->eos = TRUE;
+	LOG_V("EOS is received.\n");
 	g_mutex_unlock(fm->lock);
 
 	return ret;
diff --git a/mix_video/src/mixframemanager.h b/mix_video/src/mixframemanager.h
index bde47ee..fae5948 100644
--- a/mix_video/src/mixframemanager.h
+++ b/mix_video/src/mixframemanager.h
@@ -13,6 +13,7 @@
 #include "mixvideodef.h"
 #include "mixvideoframe.h"
 
+G_BEGIN_DECLS
 /*
  * Type macros.
  */
@@ -26,6 +27,21 @@
 typedef struct _MixFrameManager MixFrameManager;
 typedef struct _MixFrameManagerClass MixFrameManagerClass;
 
+/* 
+* MIX_FRAMEORDER_MODE_DECODEORDER is here interpreted as 
+* MIX_DISPLAY_ORDER_FIFO,  a special case of display order mode. 
+*/
+typedef enum
+{
+    MIX_DISPLAY_ORDER_UNKNOWN,
+    MIX_DISPLAY_ORDER_FIFO,
+    MIX_DISPLAY_ORDER_TIMESTAMP,
+    MIX_DISPLAY_ORDER_PICNUMBER,
+    MIX_DISPLAY_ORDER_PICTYPE,
+    MIX_DISPLAY_ORDER_LAST
+} MixDisplayOrderMode;
+
+
 struct _MixFrameManager {
 	/*< public > */
 	GObject parent;
@@ -38,30 +54,20 @@
 	gboolean eos;
 
 	GMutex *lock;
-	GPtrArray *frame_array;
-	GQueue *frame_queue;
+	GSList* frame_list;
 
 	gint framerate_numerator;
 	gint framerate_denominator;
 	guint64 frame_timestamp_delta;
 
-	MixFrameOrderMode mode;
+	MixDisplayOrderMode mode;
 
 	gboolean is_first_frame;
+	guint64 last_frame_timestamp;
 	guint64 next_frame_timestamp;
-
-	/*
-	 * For VC-1 in ASF.
-	 */
-
-	MixVideoFrame *p_frame;
-	guint64 prev_timestamp;
-
-	gboolean timebased_ordering;
-#ifdef ANDROID
-	guint32 next_displayorder;
-	GArray *timestamp_storage;
-#endif	
+	guint32 next_frame_picnumber;
+	gint    max_enqueue_size;
+	guint32 max_picture_number;
 };
 
 /**
@@ -117,8 +123,8 @@
  * Initialize FM
  */
 MIX_RESULT mix_framemanager_initialize(MixFrameManager *fm,
-		MixFrameOrderMode mode, gint framerate_numerator,
-		gint framerate_denominator, gboolean timebased_ordering);
+		MixDisplayOrderMode mode, gint framerate_numerator,
+		gint framerate_denominator);
 /*
  * Deinitialize FM
  */
@@ -138,10 +144,22 @@
 
 
 /*
- * Get Frame Order Mode
+ * Set maximum size of queue
  */
-MIX_RESULT mix_framemanager_get_frame_order_mode(MixFrameManager *fm,
-													MixFrameOrderMode *mode);
+MIX_RESULT mix_framemanager_set_max_enqueue_size(MixFrameManager *fm, gint size);
+						
+
+/*
+ * Set maximum picture number
+ */
+MIX_RESULT mix_framemanager_set_max_picture_number(MixFrameManager *fm, guint32 num);
+
+
+/*
+ * Get Display Order Mode
+ */
+MIX_RESULT mix_framemanager_get_display_order_mode(MixFrameManager *fm,
+													MixDisplayOrderMode *mode);
 
 /*
  * For discontiunity, reset FM
@@ -154,7 +172,7 @@
 MIX_RESULT mix_framemanager_enqueue(MixFrameManager *fm, MixVideoFrame *mvf);
 
 /*
- * Dequeue MixVideoFrame in proper order depends on MixFrameOrderMode value
+ * Dequeue MixVideoFrame in proper order depends on MixDisplayOrderMode value
  * during initialization.
  */
 MIX_RESULT mix_framemanager_dequeue(MixFrameManager *fm, MixVideoFrame **mvf);
@@ -164,5 +182,5 @@
  */
 MIX_RESULT mix_framemanager_eos(MixFrameManager *fm);
 
-
+G_END_DECLS
 #endif /* __MIX_FRAMEMANAGER_H__ */
diff --git a/mix_video/src/mixsurfacepool.c b/mix_video/src/mixsurfacepool.c
index 0c778af..9f92ae1 100644
--- a/mix_video/src/mixsurfacepool.c
+++ b/mix_video/src/mixsurfacepool.c
@@ -42,6 +42,7 @@
 	self->free_list_max_size = 0;
 	self->free_list_cur_size = 0;
 	self->high_water_mark = 0;
+	self->initialized = FALSE;
 
 	self->reserved1 = NULL;
 	self->reserved2 = NULL;
@@ -256,6 +257,9 @@
 
 		obj->high_water_mark = 0;
 
+        /* assume it is initialized */
+        obj->initialized = TRUE;
+        
 		MIX_UNLOCK(obj->objectlock);
 
 		return MIX_RESULT_SUCCESS;
@@ -302,6 +306,8 @@
 
 	obj->high_water_mark = 0;
 
+    obj->initialized = TRUE;
+
 	MIX_UNLOCK(obj->objectlock);
 
 	LOG_V( "End\n");
@@ -536,6 +542,14 @@
 
 	MIX_LOCK(obj->objectlock);
 
+    if (obj->initialized == FALSE)
+    {
+        LOG_W("surface pool is not initialized, probably configuration data has not been received yet.\n");
+        MIX_UNLOCK(obj->objectlock);
+        return MIX_RESULT_NOT_INIT;
+    }
+
+    
 #if 0
 	if (obj->free_list == NULL) {
 #else
diff --git a/mix_video/src/mixsurfacepool.h b/mix_video/src/mixsurfacepool.h
index d475792..0639fbc 100644
--- a/mix_video/src/mixsurfacepool.h
+++ b/mix_video/src/mixsurfacepool.h
@@ -79,6 +79,7 @@
   gulong free_list_max_size;	/* initial size of the free list */
   gulong free_list_cur_size;	/* current size of the free list */
   gulong high_water_mark;	/* most surfaces in use at one time */
+  gboolean initialized;
 //  guint64 timestamp;
 
   void *reserved1;
diff --git a/mix_video/src/mixvideo.c b/mix_video/src/mixvideo.c
index 063f641..718d355 100644
--- a/mix_video/src/mixvideo.c
+++ b/mix_video/src/mixvideo.c
@@ -177,6 +177,8 @@
 
 MIX_RESULT mix_video_get_max_coded_buffer_size_default (MixVideo * mix, guint *max_size);
 
+MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix,
+	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
 
 static void mix_video_finalize(GObject * obj);
 MIX_RESULT mix_video_configure_decode(MixVideo * mix,
@@ -228,6 +230,7 @@
 	klass->get_mix_buffer_func = mix_video_get_mixbuffer_default;
 	klass->release_mix_buffer_func = mix_video_release_mixbuffer_default;
 	klass->get_max_coded_buffer_size_func = mix_video_get_max_coded_buffer_size_default;
+	klass->set_dynamic_enc_config_func = mix_video_set_dynamic_enc_config_default;
 }
 
 MixVideo *mix_video_new(void) {
@@ -529,6 +532,7 @@
 	guint bufpoolsize = 0;
 
 	MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DISPLAYORDER;
+	MixDisplayOrderMode display_order_mode = MIX_DISPLAY_ORDER_UNKNOWN;
 
 	LOG_V( "Begin\n");
 
@@ -626,18 +630,28 @@
 		goto cleanup;
 	}
 
-	/* initialize frame manager */
-
-	if (mix_strcmp(mime_type, "video/x-wmv") == 0 || mix_strcmp(mime_type,
-			"video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-divx") == 0
-                        || mix_strcmp(mime_type, "video/x-h263") == 0) {
-		ret = mix_framemanager_initialize(priv->frame_manager,
-				frame_order_mode, fps_n, fps_d, FALSE);
-	} else {
-		ret = mix_framemanager_initialize(priv->frame_manager,
-				frame_order_mode, fps_n, fps_d, TRUE);
+	if (frame_order_mode == MIX_FRAMEORDER_MODE_DECODEORDER)
+	{
+    	display_order_mode = MIX_DISPLAY_ORDER_FIFO;	
+    }
+	else if (mix_strcmp(mime_type, "video/x-wmv")  == 0 || 
+            mix_strcmp(mime_type, "video/mpeg")   == 0 ||
+            mix_strcmp(mime_type, "video/x-divx") == 0 || 
+            mix_strcmp(mime_type, "video/x-h263") == 0 ||
+            mix_strcmp(mime_type, "video/x-xvid") == 0 ) 
+    {
+        display_order_mode = MIX_DISPLAY_ORDER_PICTYPE;           
+	} 
+	else 
+	{
+        //display_order_mode = MIX_DISPLAY_ORDER_TIMESTAMP;           
+        display_order_mode = MIX_DISPLAY_ORDER_PICNUMBER;           
 	}
 
+	/* initialize frame manager */
+    ret = mix_framemanager_initialize(priv->frame_manager,
+            display_order_mode, fps_n, fps_d);
+
 	if (ret != MIX_RESULT_SUCCESS) {
 		LOG_E("Failed to initialize frame manager\n");
 		goto cleanup;
@@ -688,13 +702,16 @@
 
 		priv->video_format = MIX_VIDEOFORMAT(video_format);
 
-	} else if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type,
-			"video/x-divx") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) {
+	} else if (mix_strcmp(mime_type, "video/mpeg")   == 0 || 
+                   mix_strcmp(mime_type, "video/x-divx") == 0 || 
+                   mix_strcmp(mime_type, "video/x-h263") == 0 ||
+                   mix_strcmp(mime_type, "video/x-xvid") == 0) {
 
 		guint version = 0;
 
 		/* Is this mpeg4:2 ? */
-		if (mix_strcmp(mime_type, "video/mpeg") == 0 || mix_strcmp(mime_type, "video/x-h263") == 0 ) {
+		if (mix_strcmp(mime_type, "video/mpeg") == 0 || 
+                    mix_strcmp(mime_type, "video/x-h263") == 0 ) {
 
 			/*
 			 *  we don't support mpeg other than mpeg verion 4
@@ -722,6 +739,7 @@
 
 			/* config_param shall be MixVideoConfigParamsDecMP42 */
 			if (!MIX_IS_VIDEOCONFIGPARAMSDEC_MP42(priv_config_params_dec)) {
+                                LOG_E("MIX_IS_VIDEOCONFIGPARAMSDEC_MP42 failed.\n");
 				ret = MIX_RESULT_NOT_SUPPORTED;
 				goto cleanup;
 			}
@@ -736,6 +754,7 @@
 
 			/* if it is not divx 4 or 5 */
 			if (version != 4 && version != 5) {
+                                LOG_E("Invalid divx version.\n");
 				ret = MIX_RESULT_NOT_SUPPORTED;
 				goto cleanup;
 			}
@@ -811,9 +830,6 @@
 	MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264;
 	guint bufpoolsize = 0;
 
-	MixFrameOrderMode frame_order_mode = MIX_FRAMEORDER_MODE_DECODEORDER;
-
-
 	LOG_V( "Begin\n");
 
 	CHECK_INIT(mix, priv);
@@ -890,8 +906,8 @@
 
 	/* initialize frame manager */
 	/* frame rate can be any value for encoding. */
-	ret = mix_framemanager_initialize(priv->frame_manager, frame_order_mode,
-			1, 1, FALSE);
+	ret = mix_framemanager_initialize(priv->frame_manager, MIX_DISPLAY_ORDER_FIFO,
+			1, 1);
 
 	if (ret != MIX_RESULT_SUCCESS) {
 		LOG_E("Failed to initialize frame manager\n");
@@ -1052,7 +1068,7 @@
 MIX_RESULT mix_video_get_config_default(MixVideo * mix,
 		MixVideoConfigParams ** config_params) {
 
-	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
 	MixVideoPrivate *priv = NULL;
 
 	CHECK_INIT_CONFIG(mix, priv);
@@ -1564,6 +1580,293 @@
 	return ret;
 }
 
+
+MIX_RESULT mix_video_set_dynamic_enc_config_default (MixVideo * mix,  
+	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params)
+{
+	MIX_RESULT ret = MIX_RESULT_FAIL;
+	MixVideoPrivate *priv = NULL;
+
+	LOG_V( "Begin\n");
+
+	CHECK_INIT_CONFIG(mix, priv);
+
+	if (dynamic_params == NULL) {
+		LOG_E(
+			"dynamic_params == NULL\n");
+		return MIX_RESULT_FAIL;
+	}
+
+	MixVideoConfigParamsEnc *priv_config_params_enc = NULL;
+	if (priv->config_params) {
+		/*
+		 * FIXME: It would be better to use ref/unref
+		 */
+		priv_config_params_enc = (MixVideoConfigParamsEnc *)priv->config_params;
+		//priv_config_params_enc = mix_videoconfigparamsenc_ref (priv->config_params);
+	}
+	else {
+		LOG_E(
+			"priv->config_params is invalid\n");
+		return MIX_RESULT_FAIL;		
+	}
+
+	g_mutex_lock(priv->objlock);
+
+	switch (params_type) {
+		case MIX_ENC_PARAMS_BITRATE:
+		{
+			ret = mix_videoconfigparamsenc_set_bit_rate (priv_config_params_enc, dynamic_params->bitrate);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_bit_rate\n");
+				goto cleanup;
+			}				
+		}
+			break;
+		case MIX_ENC_PARAMS_SLICE_SIZE:
+		{
+			/*
+			*/
+			MixVideoConfigParamsEncH264 * config_params_enc_h264 = 
+				MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+			ret = mix_videoconfigparamsenc_h264_set_slice_num (config_params_enc_h264, dynamic_params->slice_num);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_h264_set_slice_num\n");
+				goto cleanup;
+			}				
+		}
+			break;
+			
+		case MIX_ENC_PARAMS_IDR_INTERVAL:
+		{
+			MixVideoConfigParamsEncH264 * config_params_enc_h264 = 
+				MIX_VIDEOCONFIGPARAMSENC_H264 (priv->config_params);
+
+			ret = mix_videoconfigparamsenc_h264_set_IDR_interval(config_params_enc_h264, dynamic_params->idr_interval);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_h264_set_IDR_interval\n");
+				goto cleanup;
+			}				
+		}
+			break;
+
+		case MIX_ENC_PARAMS_RC_MODE:			
+		case MIX_ENC_PARAMS_RESOLUTION:
+		{
+			/*
+			 * Step 1: Release videofmtenc Object
+			 */
+			if (priv->video_format_enc) {
+				mix_videofmtenc_deinitialize(priv->video_format_enc);
+			}
+			
+			MIXUNREF(priv->video_format_enc, mix_videoformatenc_unref)	
+
+			//priv->alloc_surface_cnt = 0; //Surfaces are also released, we need to set alloc_surface_cnt to 0
+
+			/*
+			* Please note there may be an issue here for usrptr shared buffer mode
+			*/
+
+			/*
+			 * Step 2: Change configuration parameters (frame size)
+			 */			
+
+			if (params_type == MIX_ENC_PARAMS_RESOLUTION) {
+				ret = mix_videoconfigparamsenc_set_picture_res (priv_config_params_enc, dynamic_params->width, dynamic_params->height);
+				if (ret != MIX_RESULT_SUCCESS) {
+					LOG_E("Failed mix_videoconfigparamsenc_set_picture_res\n");
+					goto cleanup;
+				}			
+			}
+			else if (params_type == MIX_ENC_PARAMS_RC_MODE) {
+				ret = mix_videoconfigparamsenc_set_rate_control(priv_config_params_enc, dynamic_params->rc_mode);
+				if (ret != MIX_RESULT_SUCCESS) {
+					LOG_E("Failed mix_videoconfigparamsenc_set_rate_control\n");
+					goto cleanup;
+				}					
+			}
+
+
+			/*
+			 * Step 3: Renew mixvideofmtenc object
+			 */	
+
+			MixEncodeTargetFormat encode_format = MIX_ENCODE_TARGET_FORMAT_H264;
+			
+			ret = mix_videoconfigparamsenc_get_encode_format(priv_config_params_enc,
+				&encode_format);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed to get target format\n");
+				goto cleanup;
+			}
+
+			if (encode_format == MIX_ENCODE_TARGET_FORMAT_H264
+				&& MIX_IS_VIDEOCONFIGPARAMSENC_H264(priv_config_params_enc)) {
+
+				MixVideoFormatEnc_H264 *video_format_enc =
+					mix_videoformatenc_h264_new();
+
+				if (!video_format_enc) {
+					ret = MIX_RESULT_NO_MEMORY;
+					LOG_E("mix_video_configure_encode: Failed to create h264 video enc format\n");
+					goto cleanup;
+				}
+
+				/* work specific to h264 encode */
+
+				priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+			}
+			else if (encode_format == MIX_ENCODE_TARGET_FORMAT_MPEG4
+				&& MIX_IS_VIDEOCONFIGPARAMSENC_MPEG4(priv_config_params_enc)) {
+
+				MixVideoFormatEnc_MPEG4 *video_format_enc = mix_videoformatenc_mpeg4_new();
+				if (!video_format_enc) {
+					ret = MIX_RESULT_NO_MEMORY;
+					LOG_E("mix_video_configure_encode: Failed to create mpeg-4:2 video format\n");
+					goto cleanup;
+				}
+
+				/* work specific to mpeg4 */
+
+				priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+			}
+        
+        		else if (encode_format == MIX_ENCODE_TARGET_FORMAT_H263
+				&& MIX_IS_VIDEOCONFIGPARAMSENC_H263(priv_config_params_enc)) {
+
+				MixVideoFormatEnc_H263 *video_format_enc = mix_videoformatenc_h263_new();
+				if (!video_format_enc) {
+					ret = MIX_RESULT_NO_MEMORY;
+					LOG_E("mix_video_configure_encode: Failed to create h.263 video format\n");
+					goto cleanup;
+				}
+
+				/* work specific to h.263 */
+
+				priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+			}
+
+			else if (encode_format == MIX_ENCODE_TARGET_FORMAT_PREVIEW
+				&& MIX_IS_VIDEOCONFIGPARAMSENC_PREVIEW(priv_config_params_enc)) {
+
+        			MixVideoFormatEnc_Preview *video_format_enc = mix_videoformatenc_preview_new();
+        			if (!video_format_enc) {
+			            	ret = MIX_RESULT_NO_MEMORY;
+			            	LOG_E( "mix_video_configure_encode: Failed to create preview video format\n");
+					goto cleanup;
+		       	 }
+
+				priv->video_format_enc = MIX_VIDEOFORMATENC(video_format_enc);
+
+			}
+			else {
+
+				/*unsupported format */
+				ret = MIX_RESULT_NOT_SUPPORTED;
+				LOG_E("Unknown format, we can't handle it\n");
+				goto cleanup;
+			}
+
+
+			/*
+			 * Step 4: Re-initialize and start a new encode session, of course with new resolution value
+			 */				
+
+			/* 
+			  * Initialize MixVideoEncFormat 
+			  */
+
+			/*
+			* If we are using usrptr shared buffer mode, alloc_surfaces/usrptr/alloc_surface_cnt
+			* will be re-requested by v4l2camsrc, how to differetiate old surface pools and new one
+			* is a problem.
+			*/
+
+			/*
+			* priv->alloc_surface_cnt already been reset to 0 after calling mix_videofmtenc_initialize
+			* For dynamic frame size change, upstream element need to re-call buffer allocation method
+			* and priv->alloc_surface_cnt will get a new value.
+			*/
+			//priv->alloc_surface_cnt = 5;
+			ret = mix_videofmtenc_initialize(priv->video_format_enc,
+            			priv_config_params_enc, priv->frame_manager, NULL, &priv->surface_pool,
+            			priv->va_display/*, priv->alloc_surfaces, priv->usrptr, priv->alloc_surface_cnt*/);
+
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed initialize video format\n");
+				goto cleanup;
+			}				
+
+			mix_surfacepool_ref(priv->surface_pool);
+			
+			
+		}
+			break;
+		case MIX_ENC_PARAMS_GOP_SIZE:
+		{
+			ret = mix_videoconfigparamsenc_set_intra_period (priv_config_params_enc, dynamic_params->intra_period);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_intra_period\n");
+				goto cleanup;
+			}						
+
+		}
+			break;
+		case MIX_ENC_PARAMS_FRAME_RATE:
+		{
+			ret = mix_videoconfigparamsenc_set_frame_rate (priv_config_params_enc, dynamic_params->frame_rate_num, dynamic_params->frame_rate_denom);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_frame_rate\n");
+				goto cleanup;
+			}				
+		}
+			break;
+		case MIX_ENC_PARAMS_FORCE_KEY_FRAME:
+		{
+			/*
+			 * nothing to be done now.
+			 */
+		}
+			break;
+		case MIX_ENC_PARAMS_QP:
+		{
+			ret = mix_videoconfigparamsenc_set_init_qp (priv_config_params_enc, dynamic_params->QP);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_init_qp\n");
+				goto cleanup;
+			}				
+		}
+			break;
+		case MIX_ENC_PARAMS_CIR_FRAME_CNT:
+		{
+			ret = mix_videoconfigparamsenc_set_CIR_frame_cnt (priv_config_params_enc, dynamic_params->CIR_frame_cnt);
+			if (ret != MIX_RESULT_SUCCESS) {
+				LOG_E("Failed mix_videoconfigparamsenc_set_CIR_frame_cnt\n");
+				goto cleanup;
+			}				
+			
+		}
+			break;
+			
+		default:
+			break;
+	}
+
+	ret = mix_videofmtenc_set_dynamic_enc_config (priv->video_format_enc, priv_config_params_enc, params_type);
+
+cleanup:	
+
+	g_mutex_unlock(priv->objlock);
+
+	LOG_V( "End ret = 0x%x\n", ret);
+
+	return ret;	
+}
 /*
  * API functions
  */
@@ -1770,3 +2073,14 @@
 	}
 	return MIX_RESULT_NOTIMPL;
 }
+
+MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, 
+	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params)
+{
+       MixVideoClass *klass = MIX_VIDEO_GET_CLASS(mix);
+	if (klass->set_dynamic_enc_config_func) {
+		return klass->set_dynamic_enc_config_func(mix, params_type, dynamic_params);
+	}	   
+	return MIX_RESULT_NOTIMPL;	
+
+}
diff --git a/mix_video/src/mixvideo.h b/mix_video/src/mixvideo.h
index 1184d05..f8e4828 100644
--- a/mix_video/src/mixvideo.h
+++ b/mix_video/src/mixvideo.h
@@ -21,6 +21,8 @@
 #include "mixvideocaps.h"
 #include "mixbuffer.h"
 
+G_BEGIN_DECLS
+
 /*
  * Type macros.
  */
@@ -83,6 +85,9 @@
 typedef MIX_RESULT (*MixVideoGetMaxCodedBufferSizeFunc) (MixVideo * mix,
 	      guint *max_size);
 
+
+typedef MIX_RESULT (*MixVideoSetDynamicEncConfigFunc) (MixVideo * mix,
+	      MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
 /**
  * MixVideo:
  * @parent: Parent object.
@@ -123,6 +128,7 @@
 	MixVideoGetMixBufferFunc get_mix_buffer_func;
 	MixVideoReleaseMixBufferFunc release_mix_buffer_func;
 	MixVideoGetMaxCodedBufferSizeFunc get_max_coded_buffer_size_func;
+	MixVideoSetDynamicEncConfigFunc set_dynamic_enc_config_func;
 };
 
 /**
@@ -542,4 +548,24 @@
  */
 MIX_RESULT mix_video_get_max_coded_buffer_size(MixVideo * mix, guint *bufsize);
 
+
+/**
+ * mix_video_set_dynamic_enc_config:
+ * @mix: #MixVideo object.
+ * @params_type: Dynamic encoder configuration type
+ * @dynamic_params: Pointer to the dynamic control data structure which includes the new value to be changed to
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ * 
+ * <para>
+ * This function can be used to change the encoder parameters at run-time
+ * </para>
+ * <para>
+ * Usually this function is called after the encoding session is started.
+ * </para>
+ */
+MIX_RESULT mix_video_set_dynamic_enc_config (MixVideo * mix, 
+	MixEncParamsType params_type, MixEncDynamicParams * dynamic_params);
+
+G_END_DECLS
+
 #endif /* __MIX_VIDEO_H__ */
diff --git a/mix_video/src/mixvideo_private.h b/mix_video/src/mixvideo_private.h
index 806d249..ffa403f 100644
--- a/mix_video/src/mixvideo_private.h
+++ b/mix_video/src/mixvideo_private.h
@@ -9,6 +9,7 @@
 #ifndef __MIX_VIDEO_PRIVATE_H__
 #define __MIX_VIDEO_PRIVATE_H__
 
+G_BEGIN_DECLS
 
 typedef struct _MixVideoPrivate MixVideoPrivate;
 
@@ -53,5 +54,6 @@
 void mix_video_private_initialize(MixVideoPrivate* priv);
 void mix_video_private_cleanup(MixVideoPrivate* priv);
 
+G_END_DECLS
 
 #endif /* __MIX_VIDEO_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideocaps.h b/mix_video/src/mixvideocaps.h
index 6630c19..ff50647 100644
--- a/mix_video/src/mixvideocaps.h
+++ b/mix_video/src/mixvideocaps.h
@@ -12,6 +12,8 @@
 #include <mixparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCAPS:
 * 
@@ -134,4 +136,6 @@
 MIX_RESULT mix_videocaps_get_video_hw_caps (MixVideoCaps * obj,
 					    gchar ** video_hw_caps);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCAPS_H__ */
diff --git a/mix_video/src/mixvideoconfigparams.h b/mix_video/src/mixvideoconfigparams.h
index acfa595..d3939af 100644
--- a/mix_video/src/mixvideoconfigparams.h
+++ b/mix_video/src/mixvideoconfigparams.h
@@ -12,6 +12,8 @@
 #include <mixparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOCONFIGPARAMS:
  *
@@ -124,4 +126,6 @@
 
 /* TODO: Add getters and setters for other properties */
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMS_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec.c b/mix_video/src/mixvideoconfigparamsdec.c
index 5491f00..7ad334f 100644
--- a/mix_video/src/mixvideoconfigparamsdec.c
+++ b/mix_video/src/mixvideoconfigparamsdec.c
@@ -156,6 +156,7 @@
 
 		/* copy properties of primitive type */
 
+		this_target->frame_order_mode = this_src->frame_order_mode;
 		this_target->frame_rate_num = this_src->frame_rate_num;
 		this_target->frame_rate_denom = this_src->frame_rate_denom;
 		this_target->picture_width = this_src->picture_width;
@@ -332,6 +333,7 @@
 		MixVideoConfigParamsDec * obj, MixFrameOrderMode frame_order_mode) {
 	MIX_VIDEOCONFIGPARAMSDEC_SETTER_CHECK_INPUT (obj);
 	obj->frame_order_mode = frame_order_mode;
+        LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode);
 	return MIX_RESULT_SUCCESS;
 }
 
@@ -339,6 +341,7 @@
 		MixVideoConfigParamsDec * obj, MixFrameOrderMode * frame_order_mode) {
 	MIX_VIDEOCONFIGPARAMSDEC_GETTER_CHECK_INPUT (obj, frame_order_mode);
 	*frame_order_mode = obj->frame_order_mode;
+        LOG_V("obj->frame_order_mode = %d", obj->frame_order_mode);
 	return MIX_RESULT_SUCCESS;
 }
 
diff --git a/mix_video/src/mixvideoconfigparamsdec.h b/mix_video/src/mixvideoconfigparamsdec.h
index fba4b78..809eb1e 100644
--- a/mix_video/src/mixvideoconfigparamsdec.h
+++ b/mix_video/src/mixvideoconfigparamsdec.h
@@ -12,6 +12,8 @@
 #include <mixvideoconfigparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOCONFIGPARAMSDEC:
  *
@@ -375,4 +377,6 @@
 
 /* TODO: Add getters and setters for other properties */
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSDEC_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_h264.h b/mix_video/src/mixvideoconfigparamsdec_h264.h
index f1d6e16..8a99313 100644
--- a/mix_video/src/mixvideoconfigparamsdec_h264.h
+++ b/mix_video/src/mixvideoconfigparamsdec_h264.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsdec.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSDEC_H264:
 * 
@@ -135,4 +137,6 @@
 
 /* TODO: Add getters and setters for other properties */
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSDEC_H264_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_mp42.h b/mix_video/src/mixvideoconfigparamsdec_mp42.h
index 3ac3b8c..6e3d84c 100644
--- a/mix_video/src/mixvideoconfigparamsdec_mp42.h
+++ b/mix_video/src/mixvideoconfigparamsdec_mp42.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsdec.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOCONFIGPARAMSDEC_MP42:
  *
@@ -182,4 +184,6 @@
 MIX_RESULT mix_videoconfigparamsdec_mp42_get_divxversion(
 		MixVideoConfigParamsDecMP42 *obj, guint *version);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSDEC_MP42_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsdec_vc1.h b/mix_video/src/mixvideoconfigparamsdec_vc1.h
index 9d0744b..d38f975 100644
--- a/mix_video/src/mixvideoconfigparamsdec_vc1.h
+++ b/mix_video/src/mixvideoconfigparamsdec_vc1.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsdec.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSDEC_VC1:
 * 
@@ -142,4 +144,6 @@
 
 /* TODO: Add getters and setters for other properties */
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSDECDEC_VC1_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc.c b/mix_video/src/mixvideoconfigparamsenc.c
index 28cd288..040b612 100644
--- a/mix_video/src/mixvideoconfigparamsenc.c
+++ b/mix_video/src/mixvideoconfigparamsenc.c
@@ -60,6 +60,9 @@
 	self->rate_control = MIX_RATE_CONTROL_NONE;
 	self->raw_format = MIX_RAW_TARGET_FORMAT_YUV420;
 	self->profile = MIX_PROFILE_H264BASELINE;	
+	self->level = 30;
+
+	self->CIR_frame_cnt = 15;
 
 	/* TODO: initialize other properties */
 	self->reserved1 = NULL;
@@ -180,6 +183,8 @@
 	       this_target->rate_control = this_src->rate_control;
 	       this_target->raw_format = this_src->raw_format;
 	       this_target->profile = this_src->profile;		
+	       this_target->level = this_src->level;			   
+	       this_target->CIR_frame_cnt = this_src->CIR_frame_cnt;	
 		
 		/* copy properties of non-primitive */
 
@@ -317,7 +322,13 @@
 	      if (this_first->profile != this_second->profile) {
 		  	goto not_equal;
 		}	  	
+	      if (this_first->level != this_second->level) {
+		  	goto not_equal;
+		}		
 
+	      if (this_first->CIR_frame_cnt != this_second->CIR_frame_cnt) {
+		  	goto not_equal;
+		}	
 		/* check the equalitiy of the none-primitive type properties */
 
 		/* compare mime_type */
@@ -687,3 +698,32 @@
 	return MIX_RESULT_SUCCESS;			
 }
 
+MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, 
+		guint8 level) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->level = level;
+	return MIX_RESULT_SUCCESS;			
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, 
+		guint8 * level) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, level);
+	*level = obj->level;
+	return MIX_RESULT_SUCCESS;			
+}
+
+
+MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+		guint CIR_frame_cnt) {
+	MIX_VIDEOCONFIGPARAMSENC_SETTER_CHECK_INPUT (obj);
+	obj->CIR_frame_cnt = CIR_frame_cnt;
+	return MIX_RESULT_SUCCESS;			
+}
+
+MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+		guint * CIR_frame_cnt) {
+	MIX_VIDEOCONFIGPARAMSENC_GETTER_CHECK_INPUT (obj, CIR_frame_cnt);
+	*CIR_frame_cnt = obj->CIR_frame_cnt;
+	return MIX_RESULT_SUCCESS;			
+}
+
diff --git a/mix_video/src/mixvideoconfigparamsenc.h b/mix_video/src/mixvideoconfigparamsenc.h
index be7ec9f..adb5f25 100644
--- a/mix_video/src/mixvideoconfigparamsenc.h
+++ b/mix_video/src/mixvideoconfigparamsenc.h
@@ -12,6 +12,8 @@
 #include <mixvideoconfigparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOCONFIGPARAMSENC:
  *
@@ -75,6 +77,8 @@
 	/* Encoding profile */
 	MixProfile profile;
 
+	guint8 level;
+
 	/* Raw format to be encoded */
 	MixRawTargetFormat raw_format;
 
@@ -123,6 +127,8 @@
 	/* Size of the array ci_frame_id */
 	guint	ci_frame_num;
 	
+	guint 	CIR_frame_cnt;
+	
 	/* < private > */
 	gulong draw;
 	
@@ -565,7 +571,58 @@
 MIX_RESULT mix_videoconfigparamsenc_get_profile (MixVideoConfigParamsEnc * obj,
 		MixProfile * profile);
 
+
+/**
+ * mix_videoconfigparamsenc_set_level:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @level: Encoding level  
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set Encoding level 
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_level (MixVideoConfigParamsEnc * obj, 
+		guint8 level);
+
+
+/**
+ * mix_videoconfigparamsenc_get_level:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @level: Encoding level to be returned  
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get Encoding level 
+ */
+
+MIX_RESULT mix_videoconfigparamsenc_get_level (MixVideoConfigParamsEnc * obj, 
+		guint8 * level);
+
+
+/**
+ * mix_videoconfigparamsenc_set_CIR_frame_cnt:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @CIR_frame_cnt: Encoding CIR frame count  
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set Encoding CIR frame count
+ */
+MIX_RESULT mix_videoconfigparamsenc_set_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+		guint CIR_frame_cnt);
+
+/**
+ * mix_videoconfigparamsenc_get_CIR_frame_cnt:
+ * @obj: #MixVideoConfigParamsEnc object
+ * @CIR_frame_cnt: Encoding CIR frame count to be returned  
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get Encoding CIR frame count 
+ */
+
+MIX_RESULT mix_videoconfigparamsenc_get_CIR_frame_cnt (MixVideoConfigParamsEnc * obj, 
+		guint * CIR_frame_cnt);
+
 /* TODO: Add getters and setters for other properties */
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_H__ */
 
diff --git a/mix_video/src/mixvideoconfigparamsenc_h263.h b/mix_video/src/mixvideoconfigparamsenc_h263.h
index 8355207..097041c 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h263.h
+++ b/mix_video/src/mixvideoconfigparamsenc_h263.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsenc.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSENC_H263:
 * 
@@ -185,4 +187,6 @@
 MIX_RESULT mix_videoconfigparamsenc_h263_get_slice_num (MixVideoConfigParamsEncH263 * obj,
 		guint * slice_num);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_H263_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.c b/mix_video/src/mixvideoconfigparamsenc_h264.c
index 69b6b0c..620093d 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h264.c
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.c
@@ -56,6 +56,7 @@
   self->disable_deblocking_filter_idc = 0;
 
   self->delimiter_type = MIX_DELIMITER_LENGTHPREFIX;
+  self->idr_interval = 2;
 
   self->reserved1 = NULL;
   self->reserved2 = NULL;
@@ -170,6 +171,8 @@
       this_target->slice_num = this_src->slice_num;
       this_target->disable_deblocking_filter_idc = this_src->disable_deblocking_filter_idc;
       this_target->delimiter_type = this_src->delimiter_type;
+      this_target->idr_interval = this_src->idr_interval;	  
+
 	  
 
       // Now chainup base class
@@ -225,7 +228,11 @@
       if (this_first->delimiter_type != this_second->delimiter_type) {
 	  	goto not_equal;
 	}  	  
-	  
+
+      if (this_first->idr_interval != this_second->idr_interval) {
+	  	goto not_equal;
+	}  	  
+	  	  
 
 	ret = TRUE;
 
@@ -321,3 +328,17 @@
 	*delimiter_type = obj->delimiter_type;
 	return MIX_RESULT_SUCCESS;
 }
+
+MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+		guint idr_interval) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_SETTER_CHECK_INPUT (obj);
+	obj->idr_interval = idr_interval;
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+		guint * idr_interval) {
+	MIX_VIDEOCONFIGPARAMSENC_H264_GETTER_CHECK_INPUT (obj, idr_interval);
+	*idr_interval = obj->idr_interval;
+	return MIX_RESULT_SUCCESS;
+}
diff --git a/mix_video/src/mixvideoconfigparamsenc_h264.h b/mix_video/src/mixvideoconfigparamsenc_h264.h
index b1334aa..1885846 100644
--- a/mix_video/src/mixvideoconfigparamsenc_h264.h
+++ b/mix_video/src/mixvideoconfigparamsenc_h264.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsenc.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSENC_H264:
 * 
@@ -83,6 +85,8 @@
 
   /* delimiter_type */
   MixDelimiterType delimiter_type;
+
+  guint idr_interval;
   
   /* Reserved for future use */  
   void *reserved1;
@@ -236,5 +240,32 @@
 MIX_RESULT mix_videoconfigparamsenc_h264_get_delimiter_type (MixVideoConfigParamsEncH264 * obj,
 		MixDelimiterType * delimiter_type);
 
+
+/**
+ * mix_videoconfigparamsenc_h264_set_IDR_interval:
+ * @obj: #MixVideoConfigParamsEncH264 object
+ * @idr_interval: IDR interval
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Set IDR interval
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_set_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+		guint idr_interval);
+
+
+/**
+ * mix_videoconfigparamsenc_h264_get_IDR_interval:
+ * @obj: #MixVideoConfigParamsEncH264 object
+ * @idr_interval: IDR interval to be returned
+ * @returns: <link linkend="MixVideo-mixvideodef">Common Video Error Return Codes</link>
+ *
+ * Get IDR interval
+ */
+MIX_RESULT mix_videoconfigparamsenc_h264_get_IDR_interval (MixVideoConfigParamsEncH264 * obj,
+		guint * idr_interval);
+
+
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_H264_H__ */
 
diff --git a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h
index 123a6ae..7ff32bc 100644
--- a/mix_video/src/mixvideoconfigparamsenc_mpeg4.h
+++ b/mix_video/src/mixvideoconfigparamsenc_mpeg4.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsenc.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSENC_MPEG4:
 * 
@@ -220,4 +222,6 @@
 MIX_RESULT mix_videoconfigparamsenc_mpeg4_get_fixed_vti (MixVideoConfigParamsEncMPEG4 * obj,
 		guint * fixed_vop_time_increment);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_MPEG4_H__ */
diff --git a/mix_video/src/mixvideoconfigparamsenc_preview.h b/mix_video/src/mixvideoconfigparamsenc_preview.h
index 8e57952..ddfe075 100644
--- a/mix_video/src/mixvideoconfigparamsenc_preview.h
+++ b/mix_video/src/mixvideoconfigparamsenc_preview.h
@@ -12,6 +12,8 @@
 #include "mixvideoconfigparamsenc.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
 * MIX_TYPE_VIDEOCONFIGPARAMSENC_PREVIEW:
 * 
@@ -116,5 +118,7 @@
 */
 #define mix_videoconfigparamsenc_preview_unref(obj) mix_params_unref(MIX_PARAMS(obj))
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOCONFIGPARAMSENC_PREVIEW_H__ */
 
diff --git a/mix_video/src/mixvideodecodeparams.h b/mix_video/src/mixvideodecodeparams.h
index 00cfa98..dfd614b 100644
--- a/mix_video/src/mixvideodecodeparams.h
+++ b/mix_video/src/mixvideodecodeparams.h
@@ -12,6 +12,8 @@
 #include <mixparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEODECODEPARAMS:
  *
@@ -185,4 +187,6 @@
 MIX_RESULT mix_videodecodeparams_get_discontinuity(MixVideoDecodeParams * obj,
 		gboolean *discontinuity);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEODECODEPARAMS_H__ */
diff --git a/mix_video/src/mixvideodef.h b/mix_video/src/mixvideodef.h
index d3adcd8..9f8651a 100644
--- a/mix_video/src/mixvideodef.h
+++ b/mix_video/src/mixvideodef.h
@@ -29,8 +29,11 @@
 
 #ifndef __MIX_VIDEO_DEF_H__
 #define __MIX_VIDEO_DEF_H__
+
 #include <mixresult.h>
 
+G_BEGIN_DECLS
+
 /*
  * MI-X video error code
  */
@@ -132,4 +135,39 @@
 } MixDelimiterType;
 
 
+typedef enum {
+	MIX_ENC_PARAMS_START_UNUSED = 0x01000000,
+	MIX_ENC_PARAMS_BITRATE,
+	MIX_ENC_PARAMS_SLICE_SIZE,
+	MIX_ENC_PARAMS_RESOLUTION,
+	MIX_ENC_PARAMS_GOP_SIZE,
+	MIX_ENC_PARAMS_FRAME_RATE,
+	MIX_ENC_PARAMS_FORCE_KEY_FRAME,
+	MIX_ENC_PARAMS_IDR_INTERVAL,
+	MIX_ENC_PARAMS_RC_MODE,
+	MIX_ENC_PARAMS_MAX_ENCODED_SLICE_SIZE,
+	MIX_ENC_PARAMS_QP,
+	MIX_ENC_PARAMS_CIR_FRAME_CNT,
+	MIX_ENC_PARAMS_LAST
+} MixEncParamsType;
+
+typedef struct _MixEncDynamicParams {
+	guint bitrate;
+	guint slice_num;
+	guint width;
+	guint height;
+	guint frame_rate_num;
+	guint frame_rate_denom;	
+	guint intra_period;
+	guint idr_interval;	
+	guint QP;
+	guint CIR_frame_cnt;
+	guint max_slice_size;
+	gboolean force_idr;
+	MixRateControl rc_mode;
+	
+} MixEncDynamicParams;
+
+G_END_DECLS
+
 #endif /*  __MIX_VIDEO_DEF_H__ */
diff --git a/mix_video/src/mixvideoencodeparams.h b/mix_video/src/mixvideoencodeparams.h
index 54804dd..ac8e6c2 100644
--- a/mix_video/src/mixvideoencodeparams.h
+++ b/mix_video/src/mixvideoencodeparams.h
@@ -12,6 +12,8 @@
 #include <mixparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOENCODEPARAMS:
  *
@@ -147,5 +149,7 @@
 MIX_RESULT mix_videoencodeparams_get_discontinuity(MixVideoEncodeParams * obj,
 		gboolean *discontinuity);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOENCODEPARAMS_H__ */
 
diff --git a/mix_video/src/mixvideoformat.c b/mix_video/src/mixvideoformat.c
index f446651..c78423f 100644
--- a/mix_video/src/mixvideoformat.c
+++ b/mix_video/src/mixvideoformat.c
@@ -41,23 +41,27 @@
 	/* These are all public because MixVideoFormat objects are completely internal to MixVideo,
 		no need for private members  */
 
-	self->initialized = FALSE;
-	self->framemgr = NULL;
-	self->surfacepool = NULL;
-	self->inputbufpool = NULL;
-	self->inputbufqueue = NULL;
-	self->va_display = NULL;
-	self->va_context = VA_INVALID_ID;
-	self->va_config = VA_INVALID_ID;
-	self->va_surfaces = NULL;
-	self->va_num_surfaces = 0;
-	self->mime_type = NULL;
-	self->frame_rate_num = 0;
-	self->frame_rate_denom = 0;
-	self->picture_width = 0;
-	self->picture_height = 0;
-	self->parse_in_progress = FALSE;
-	self->current_timestamp = 0;
+    self->initialized = FALSE;
+    self->va_initialized = FALSE;
+    self->framemgr = NULL;
+    self->surfacepool = NULL;
+    self->inputbufpool = NULL;
+    self->inputbufqueue = NULL;
+    self->va_display = NULL;
+    self->va_context = VA_INVALID_ID;
+    self->va_config = VA_INVALID_ID;
+    self->va_surfaces = NULL;
+    self->va_num_surfaces = 0;
+    self->mime_type = NULL;
+    self->frame_rate_num = 0;
+    self->frame_rate_denom = 0;
+    self->picture_width = 0;
+    self->picture_height = 0;
+    self->parse_in_progress = FALSE;
+    self->current_timestamp = (guint64)-1;
+    self->end_picture_pending = FALSE;
+    self->video_frame = NULL;
+    self->extra_surfaces = 0;
 }
 
 static void mix_videoformat_class_init(MixVideoFormatClass * klass) {
@@ -143,6 +147,11 @@
 		}
 	}
 
+    if (mix->video_frame)
+    {
+        mix_videoframe_unref(mix->video_frame);
+        mix->video_frame = NULL;
+    }
 
 	//Deinit input buffer queue 
 
diff --git a/mix_video/src/mixvideoformat.h b/mix_video/src/mixvideoformat.h
index 2553667..2499934 100644
--- a/mix_video/src/mixvideoformat.h
+++ b/mix_video/src/mixvideoformat.h
@@ -23,6 +23,8 @@
 #include "mixbufferpool.h"
 #include "mixvideoformatqueue.h"
 
+G_BEGIN_DECLS
+
 // Redefine the Handle defined in vbp_loader.h
 #define	VBPhandle	Handle
 
@@ -56,32 +58,36 @@
 typedef MIX_RESULT (*MixVideoFmtDeinitializeFunc)(MixVideoFormat *mix);
 
 struct _MixVideoFormat {
-	/*< public > */
-	GObject parent;
+    /*< public > */
+    GObject parent;
 
-	/*< public > */
+    /*< public > */
 
-	/*< private > */
-        GMutex *objectlock;
-	gboolean initialized;
-	MixFrameManager *framemgr;
-	MixSurfacePool *surfacepool;
-	VADisplay va_display;
-	VAContextID va_context;
-	VAConfigID va_config;
-	VASurfaceID *va_surfaces;
-	guint va_num_surfaces;
-	VBPhandle parser_handle;
-	GString *mime_type;
-	guint frame_rate_num;
-	guint frame_rate_denom;
-	guint picture_width;
-	guint picture_height;
-	gboolean parse_in_progress;
-	gboolean discontinuity_frame_in_progress;
-	guint64 current_timestamp;
-	MixBufferPool *inputbufpool;
-	GQueue *inputbufqueue;
+    /*< private > */
+    GMutex *objectlock;
+    gboolean initialized;
+    MixFrameManager *framemgr;
+    MixSurfacePool *surfacepool;
+    VADisplay va_display;
+    VAContextID va_context;
+    VAConfigID va_config;
+    VASurfaceID *va_surfaces;
+    guint va_num_surfaces;
+    VBPhandle parser_handle;
+    GString *mime_type;
+    guint frame_rate_num;
+    guint frame_rate_denom;
+    guint picture_width;
+    guint picture_height;
+    gboolean parse_in_progress;
+    gboolean discontinuity_frame_in_progress;
+    guint64 current_timestamp;
+    MixBufferPool *inputbufpool;
+    GQueue *inputbufqueue;    
+    gboolean va_initialized;
+    gboolean end_picture_pending;
+    MixVideoFrame* video_frame;    
+    guint extra_surfaces;
 };
 
 /**
@@ -157,4 +163,6 @@
 
 MIX_RESULT mix_videofmt_deinitialize(MixVideoFormat *mix);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMAT_H__ */
diff --git a/mix_video/src/mixvideoformat_h264.c b/mix_video/src/mixvideoformat_h264.c
index 18015fc..6464177 100644
--- a/mix_video/src/mixvideoformat_h264.c
+++ b/mix_video/src/mixvideoformat_h264.c
@@ -6,6 +6,7 @@
  No license under any patent, copyright, trade secret or other intellectual property right is granted to or conferred upon you by disclosure or delivery of the Materials, either expressly, by implication, inducement, estoppel or otherwise. Any license under such intellectual property rights must be express and approved by Intel in writing.
  */
 #include <glib.h>
+#include <math.h>
 #ifndef ANDROID
 #include <va/va_x11.h>
 #endif
@@ -17,14 +18,7 @@
 static int mix_video_h264_counter = 0;
 #endif /* MIX_LOG_ENABLE */
 
-#ifdef ANDROID
-typedef struct _NalBuffer {
-	unsigned char *buffer;
-	unsigned int offset;
-	unsigned int length;
-	void *appdata;
-} NalBuffer;
-#endif
+#define DECODER_ROBUSTNESS
 
 /* The parent class. The pointer will be saved
  * in this class's initialization. The pointer
@@ -115,10 +109,9 @@
 	g_mutex_lock(parent->objectlock);
 	parent->initialized = TRUE;
 	parent->parse_in_progress = FALSE;
-	parent->current_timestamp = 0;
 
 	//Close the parser
-        pret = vbp_close(parent->parser_handle);
+    pret = vbp_close(parent->parser_handle);
 	parent->parser_handle = NULL;
 	if (pret != VBP_OK)
 	{
@@ -141,7 +134,7 @@
 /*  H.264 vmethods implementation */
 MIX_RESULT mix_videofmt_h264_getcaps(MixVideoFormat *mix, GString *msg) {
 
-MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
 	if (mix == NULL || msg == NULL)
 	{
@@ -163,665 +156,1119 @@
 	return ret;
 }
 
+MIX_RESULT mix_videofmt_h264_initialize_va(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VAConfigAttrib attrib;
+
+    MixVideoFormat *parent = MIX_VIDEOFORMAT(mix);
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);  
+
+    if (parent->va_initialized)
+    {
+        LOG_W("va already initialized.\n");
+        return MIX_RESULT_SUCCESS;
+    }
+
+
+    LOG_V( "Begin\n");
+
+    //We are requesting RT attributes
+    attrib.type = VAConfigAttribRTFormat;
+    attrib.value = VA_RT_FORMAT_YUV420;
+
+    //Initialize and save the VA config ID
+    //We use high profile for all kinds of H.264 profiles (baseline, main and high)
+    vret = vaCreateConfig(
+        parent->va_display, 
+        VAProfileH264High, 
+        VAEntrypointVLD, 
+        &attrib, 
+        1, 
+        &(parent->va_config));
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("vaCreateConfig failed\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames);
+
+
+    // handle both frame and field coding for interlaced content
+    int num_ref_pictures = data->codec_data->num_ref_frames;
+
+
+    //Adding 1 to work around VBLANK issue, and another 1 to compensate cached frame that
+    // will not start decoding until a new frame is received.
+    parent->va_num_surfaces = 1 + 1 + parent->extra_surfaces + (((num_ref_pictures + 3) <
+        MIX_VIDEO_H264_SURFACE_NUM) ? 
+        (num_ref_pictures + 3)
+        : MIX_VIDEO_H264_SURFACE_NUM);
+
+    parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*parent->va_num_surfaces);
+    if (parent->va_surfaces == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "parent->va_surfaces == NULL. \n");
+        goto cleanup;
+    }
+
+    LOG_V( "Codec data says picture size is %d x %d\n", 
+        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, 
+        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16);
+    LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height);
+
+    vret = vaCreateSurfaces(
+        parent->va_display, 
+        (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, 
+        (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, 
+        VA_RT_FORMAT_YUV420,
+        parent->va_num_surfaces, 
+        parent->va_surfaces);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error allocating surfaces\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Created %d libva surfaces\n", parent->va_num_surfaces);
+
+    //Initialize the surface pool
+    ret = mix_surfacepool_initialize(
+        parent->surfacepool,
+        parent->va_surfaces, 
+        parent->va_num_surfaces, 
+        parent->va_display);
+
+    switch (ret)
+    {
+    case MIX_RESULT_SUCCESS:
+        break;
+    case MIX_RESULT_ALREADY_INIT:  //This case is for future use when we can be  initialized multiple times.  It is to detect when we have not been reset before re-initializing.
+    default:
+        ret = MIX_RESULT_ALREADY_INIT;
+        LOG_E( "Error init surface pool\n");
+        goto cleanup;
+        break;
+    }
+
+    if (data->codec_data->pic_order_cnt_type == 0)
+    {
+        int max = (int)pow(2, data->codec_data->log2_max_pic_order_cnt_lsb_minus4 + 4);
+        mix_framemanager_set_max_picture_number(parent->framemgr, max);
+    }
+    
+    //Initialize and save the VA context ID
+    //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
+    vret = vaCreateContext(
+        parent->va_display, 
+        parent->va_config,
+        parent->picture_width, 
+        parent->picture_height,
+        0,  // no flag set
+        parent->va_surfaces, 
+        parent->va_num_surfaces,
+        &(parent->va_context));
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error initializing video driver\n");
+        goto cleanup;
+    }
+
+    parent->va_initialized = TRUE;
+
+cleanup:
+    /* nothing to clean up */      
+
+    return ret;
+
+}
+
+
+MIX_RESULT mix_videofmt_h264_update_ref_pic_list(
+    MixVideoFormat *mix, 
+    VAPictureParameterBufferH264* picture_params,
+    VASliceParameterBufferH264* slice_params)
+{
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);  
+    
+    //Do slice parameters
+    
+    //First patch up the List0 and List1 surface IDs
+    int j = 0;
+    guint poc = 0;   
+    gpointer video_frame = NULL;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    
+    for (; j <= slice_params->num_ref_idx_l0_active_minus1; j++)
+    {
+        if (!(slice_params->RefPicList0[j].flags & VA_PICTURE_H264_INVALID))
+        {
+            poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList0[j]));
+            video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+            if (video_frame == NULL)
+            {
+                LOG_E("unable to find surface of picture %d (current picture %d).", 
+                    poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic)));
+                ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
+                goto cleanup;
+            }
+            else
+            {
+                slice_params->RefPicList0[j].picture_id = 
+                    ((MixVideoFrame *)video_frame)->frame_id;
+            }
+        }
+    
+    }
+    
+    if ((slice_params->slice_type == 1) || (slice_params->slice_type == 6))
+    {
+        for (j = 0; j <= slice_params->num_ref_idx_l1_active_minus1; j++)
+        {
+            if (!(slice_params->RefPicList1[j].flags & VA_PICTURE_H264_INVALID))
+            {
+                poc = mix_videofmt_h264_get_poc(&(slice_params->RefPicList1[j]));
+                video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
+                if (video_frame == NULL)
+                {
+                    LOG_E("unable to find surface of picture %d (current picture %d).", 
+                    poc, mix_videofmt_h264_get_poc(&(picture_params->CurrPic)));
+                    ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
+                    goto cleanup;
+                }
+                else
+                {                       
+                    slice_params->RefPicList1[j].picture_id = 
+                        ((MixVideoFrame *)video_frame)->frame_id;
+                }
+            }
+        }
+    }
+
+cleanup:
+    // do nothing
+
+    return ret;
+}
+
+
+MIX_RESULT mix_videofmt_h264_decode_a_slice(
+    MixVideoFormat *mix,
+    vbp_data_h264 *data,
+    int picture_index,
+    int slice_index)
+{  
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    VAStatus vret = VA_STATUS_SUCCESS;
+    VADisplay vadisplay = NULL;
+    VAContextID vacontext;
+    guint buffer_id_cnt = 0;
+
+    // maximum 4 buffers to render a slice: picture parameter, IQMatrix, slice parameter, slice data
+    VABufferID buffer_ids[4];
+
+
+    LOG_V( "Begin\n");
+
+    //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+    vbp_picture_data_h264* pic_data = &(data->pic_data[picture_index]);
+    vbp_slice_data_h264* slice_data = &(pic_data->slc_data[slice_index]);
+    VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
+    VASliceParameterBufferH264* slice_params = &(slice_data->slc_parms);
+    vadisplay = mix->va_display;
+    vacontext = mix->va_context;
+
+#ifdef DECODER_ROBUSTNESS
+    if ((slice_params->first_mb_in_slice == 0) || (!mix->end_picture_pending))
+#else
+    if (slice_params->first_mb_in_slice == 0)
+#endif
+    {
+        // this is the first slice of the picture
+        if (mix->end_picture_pending)
+        {
+            // interlace content, decoding the first field
+            vret = vaEndPicture(vadisplay, vacontext);
+            if (vret != VA_STATUS_SUCCESS)
+            {
+                ret = MIX_RESULT_FAIL;
+                LOG_E("vaEndPicture failed.\n");
+                goto cleanup;
+            }
+
+            // for interlace content, top field may be valid only after the second field is parsed
+            mix_videoframe_set_displayorder(mix->video_frame, pic_params->CurrPic.TopFieldOrderCnt);            
+        }
+
+        gulong surface = 0;
+
+        LOG_V("mix->video_frame = 0x%x\n", mix->video_frame);
+
+        //Get our surface ID from the frame object
+        ret = mix_videoframe_get_frame_id(mix->video_frame, &surface);    
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+            LOG_E( "Error getting surface ID from frame object\n");
+            goto cleanup;
+        }
+
+#ifdef DECODER_ROBUSTNESS
+        LOG_V( "Updating DPB for libva\n");
+
+        //Now handle the reference frames and surface IDs for DPB and current frame
+        mix_videofmt_h264_handle_ref_frames(mix, pic_params, mix->video_frame);
+
+#ifdef HACK_DPB
+        //We have to provide a hacked DPB rather than complete DPB for libva as workaround
+        ret = mix_videofmt_h264_hack_dpb(mix, pic_data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+		LOG_E( "Error reference frame not found\n");
+		//Need to remove the frame we inserted in _handle_ref_frames above, since we are not going to decode it
+		mix_videofmt_h264_cleanup_ref_frame(mix, pic_params, mix->video_frame);
+		goto cleanup;
+        }
+#endif
+
+        LOG_V( "Calling vaBeginPicture\n");
+
+        //Now we can begin the picture
+        vret = vaBeginPicture(vadisplay, vacontext, surface);
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaBeginPicture\n");
+            goto cleanup;
+        }
+
+        // vaBeginPicture needs a matching vaEndPicture 
+        mix->end_picture_pending = TRUE;
+
+#else
+        LOG_V( "Calling vaBeginPicture\n");
+
+        //Now we can begin the picture
+        vret = vaBeginPicture(vadisplay, vacontext, surface);
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaBeginPicture\n");
+            goto cleanup;
+        }
+
+        // vaBeginPicture needs a matching vaEndPicture 
+        mix->end_picture_pending = TRUE;
+
+        LOG_V( "Updating DPB for libva\n");
+
+        //Now handle the reference frames and surface IDs for DPB and current frame
+        mix_videofmt_h264_handle_ref_frames(mix, pic_params, mix->video_frame);
+
+#ifdef HACK_DPB
+        //We have to provide a hacked DPB rather than complete DPB for libva as workaround
+        ret = mix_videofmt_h264_hack_dpb(mix, pic_data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {
+		LOG_E( "Error reference frame not found\n");
+		goto cleanup;
+        }
+#endif
+
+#endif
+
+        //Libva buffer set up
+
+
+        LOG_V( "Creating libva picture parameter buffer\n");
+        LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames);
+
+        //First the picture parameter buffer
+        vret = vaCreateBuffer(
+            vadisplay, 
+            vacontext,
+            VAPictureParameterBufferType,
+            sizeof(VAPictureParameterBufferH264),
+            1,
+            pic_params,
+            &buffer_ids[buffer_id_cnt]);
+
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaCreateBuffer\n");
+            goto cleanup;
+        }
+
+        buffer_id_cnt++;
+
+        LOG_V( "Creating libva IQMatrix buffer\n");
+
+
+        //Then the IQ matrix buffer
+        vret = vaCreateBuffer(
+            vadisplay,
+            vacontext,
+            VAIQMatrixBufferType,
+            sizeof(VAIQMatrixBufferH264),
+            1,
+            data->IQ_matrix_buf,
+            &buffer_ids[buffer_id_cnt]);
+
+        if (vret != VA_STATUS_SUCCESS)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E( "Video driver returned error from vaCreateBuffer\n");
+            goto cleanup;
+        }
+        buffer_id_cnt++;           
+    }
+
+#ifndef DECODER_ROBUSTNESS
+    if (!mix->end_picture_pending)
+    {
+        LOG_E("first slice is lost??????????\n");
+        ret = MIX_RESULT_DROPFRAME;
+        goto cleanup;
+    }
+#endif
+
+    //Now for slices
+
+    ret = mix_videofmt_h264_update_ref_pic_list(mix, pic_params, slice_params);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E("mix_videofmt_h264_update_ref_pic_list failed.\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Creating libva slice parameter buffer\n");
+
+    vret = vaCreateBuffer(
+        vadisplay, 
+        vacontext,
+        VASliceParameterBufferType,
+        sizeof(VASliceParameterBufferH264),
+        1,
+        slice_params,
+        &buffer_ids[buffer_id_cnt]);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
+
+    buffer_id_cnt++;
+
+
+    //Do slice data
+
+    //slice data buffer pointer
+    //Note that this is the original data buffer ptr;
+    // offset to the actual slice data is provided in
+    // slice_data_offset in VASliceParameterBufferH264
+
+    LOG_V( "Creating libva slice data buffer, using slice address %x, with offset %d and size %u\n", 
+        (guint)slice_data->buffer_addr, slice_params->slice_data_offset, slice_data->slice_size);
+
+    vret = vaCreateBuffer(
+        vadisplay, 
+        vacontext,
+        VASliceDataBufferType,
+        slice_data->slice_size, //size
+        1,        //num_elements
+        slice_data->buffer_addr + slice_data->slice_offset,
+        &buffer_ids[buffer_id_cnt]);
+
+    buffer_id_cnt++;
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaCreateBuffer\n");
+        goto cleanup;
+    }
+
+
+    LOG_V( "Calling vaRenderPicture\n");
+
+    //Render the picture
+    vret = vaRenderPicture(
+        vadisplay,
+        vacontext,
+        buffer_ids,
+        buffer_id_cnt);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaRenderPicture\n");
+        goto cleanup;
+    }
+
+
+cleanup:
+    LOG_V( "End\n");
+
+    return ret;
+
+}
+
+
+//
+// mix_videofmt_h264_decode_end:
+// Finish decoding of the picture in progress: issue the vaEndPicture that
+// matches the earlier vaBeginPicture, then either drop the decoded frame
+// (drop_picture == TRUE) or enqueue it with the frame manager for
+// reordering/display.  On every exit path end_picture_pending is cleared,
+// and any frame still held in parent->video_frame is unreferenced (holding
+// one at cleanup always indicates an error or an intentional drop).
+//
+MIX_RESULT mix_videofmt_h264_decode_end(
+    MixVideoFormat *mix, 
+    gboolean drop_picture)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;    
+    VAStatus vret = VA_STATUS_SUCCESS;
+    MixVideoFormat* parent = MIX_VIDEOFORMAT(mix);
+    //MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);  
+
+    LOG_V("Begin\n");
+
+    // No vaBeginPicture is outstanding -- nothing to end.  A non-NULL
+    // video_frame in this state is an internal inconsistency.
+    if (!parent->end_picture_pending)
+    {
+        if (parent->video_frame)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("Unexpected: video_frame is not unreferenced.\n");
+        }
+        goto cleanup;
+    }    
+
+    if (parent->video_frame == NULL)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E("Unexpected: video_frame has been unreferenced.\n");
+        goto cleanup;
+    }
+    
+	LOG_V( "Calling vaEndPicture\n");    
+    vret = vaEndPicture(parent->va_display, parent->va_context);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaEndPicture\n");
+        goto cleanup;
+    }
+
+#if 0	/* we don't call vaSyncSurface here, the call is moved to mix_video_render() */
+
+    LOG_V( "Calling vaSyncSurface\n");
+
+    //Decode the picture
+    vret = vaSyncSurface(parent->va_display, parent->video_frame->frame_id);
+
+    if (vret != VA_STATUS_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Video driver returned error from vaSyncSurface\n");
+        goto cleanup;
+    }
+#endif
+
+    if (drop_picture)
+    {
+        // we are asked to drop this decoded picture; the cleanup block
+        // below would release it as well, but do it explicitly here
+        mix_videoframe_unref(parent->video_frame);
+        parent->video_frame = NULL;
+        goto cleanup;
+    }
+
+	LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", 
+	    parent->current_timestamp);
+
+    //Enqueue the decoded frame using frame manager
+    ret = mix_framemanager_enqueue(parent->framemgr, parent->video_frame);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error enqueuing frame object\n");
+        goto cleanup;
+    }
+    else
+    {
+        // video frame is passed to frame manager (ownership transferred)
+        parent->video_frame = NULL;
+        LOG_V("video_frame is assigned to be NULL !\n");
+    }
+
+cleanup:
+    if (parent->video_frame)
+    {
+        /* this always indicates an error */        
+        mix_videoframe_unref(parent->video_frame);
+        parent->video_frame = NULL;
+    }
+    parent->end_picture_pending = FALSE;
+    LOG_V("End\n");
+    return ret;
+
+}
+
+
+//
+// mix_videofmt_h264_decode_continue:
+// Submit all pictures/slices parsed from the current buffer to the picture
+// decode in progress.  Each picture's parameter buffer, slice-data array and
+// slice count are validated first -- they can legitimately be missing when
+// input is damaged or partially received (e.g. RTP slice loss).
+//
+MIX_RESULT mix_videofmt_h264_decode_continue(
+    MixVideoFormat *mix, 
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    int i, j;
+    vbp_picture_data_h264* pic_data = NULL;
+
+    LOG_V("Begin\n");
+
+    for (i = 0; i < data->num_pictures; i++)
+    {
+        pic_data = &(data->pic_data[i]);
+        // Guard against partially parsed picture data before touching it.
+        if (pic_data->pic_parms == NULL)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("pic_data->pic_parms is NULL.\n");
+            goto cleanup;
+        }
+
+        if (pic_data->slc_data == NULL)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("pic_data->slc_data is NULL.\n");
+            goto cleanup;
+        }	
+
+        if (pic_data->num_slices == 0)
+        {
+            ret = MIX_RESULT_FAIL;
+            LOG_E("pic_data->num_slices == 0.\n");
+            goto cleanup;
+        }	    
+
+	    LOG_V( "num_slices is %d\n", pic_data->num_slices);        
+        // Decode every slice of picture i; stop on the first failure.
+        for (j = 0; j < pic_data->num_slices; j++)
+        {
+    	    LOG_V( "Decoding slice %d\n", j);        
+	        ret = mix_videofmt_h264_decode_a_slice(mix, data, i, j);
+            if (ret != 	MIX_RESULT_SUCCESS)
+            {
+                LOG_E( "mix_videofmt_h264_decode_a_slice failed, error =  %#X.", ret);
+                goto cleanup;
+           }			  
+        }		
+    }
+
+cleanup:
+    // nothing to cleanup;
+
+    LOG_V("End\n");
+    return ret;
+}
+
+
+//
+// mix_videofmt_h264_set_frame_type:
+// Derive the frame type (I/P/B) from the slice_type of the first slice of
+// the first picture and record it on the current video frame, where the
+// frame manager uses it for display reordering.
+//
+MIX_RESULT mix_videofmt_h264_set_frame_type(
+    MixVideoFormat *mix, 
+    vbp_data_h264 *data)
+{    
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    //Set the picture type (I, B or P frame)
+    //For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type)
+    MixFrameType frame_type = TYPE_INVALID;
+
+    // slice_type values per H.264 Table 7-6: 0/5 = P, 3/8 = SP (decoded as
+    // P), 1/6 = B, 2/7 = I, 4/9 = SI (decoded as I).  Values >= 5 declare
+    // that all slices of the picture share the same type.
+    switch (data->pic_data[0].slc_data[0].slc_parms.slice_type)
+    {
+    case 0:
+    case 3:
+    case 5:
+    case 8:
+        frame_type = TYPE_P;
+        break;
+    case 1:
+    case 6:
+        frame_type = TYPE_B;
+        break;
+    case 2:
+    case 4:
+    case 7:
+    case 9:
+        frame_type = TYPE_I;
+        break;
+    default:
+        // out-of-range slice_type: frame_type stays TYPE_INVALID
+        break;
+    }
+
+    //Do not have to check for B frames after a seek
+    //Note:  Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise
+    //  DPB will not be correct and frames may come in with invalid references
+    //  This will be detected when DPB is checked for valid mapped surfaces and 
+    //  error returned from there.
+
+	LOG_V( "frame type is %d\n", frame_type);
+
+    //Set the frame type for the frame object (used in reordering by frame manager)
+    ret = mix_videoframe_set_frame_type(mix->video_frame, frame_type);
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error setting frame type on frame\n");
+    }
+
+    return ret;
+}
+
+
+//
+// mix_videofmt_h264_set_frame_structure:
+// Mark the current video frame as field-coded or as a progressive frame
+// picture, based on the field flags of the first picture's CurrPic.
+//
+MIX_RESULT mix_videofmt_h264_set_frame_structure(
+    MixVideoFormat *mix, 
+    vbp_data_h264 *data)
+{    
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    // A field picture sets VA_PICTURE_H264_TOP_FIELD and/or
+    // VA_PICTURE_H264_BOTTOM_FIELD in CurrPic.flags.  Testing both flags
+    // also catches bottom-field-first content, which the previous
+    // TOP_FIELD-only test misclassified as a frame picture.
+    if (data->pic_data[0].pic_parms->CurrPic.flags & 
+        (VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD))
+    {
+        mix_videoframe_set_frame_structure(mix->video_frame, VA_BOTTOM_FIELD | VA_TOP_FIELD); 
+    }
+    else
+    {
+        mix_videoframe_set_frame_structure(mix->video_frame, VA_FRAME_PICTURE);
+    }
+
+    return ret;
+}
+
+
+//
+// mix_videofmt_h264_decode_begin:
+// Start decoding a new frame: obtain an output frame from the surface pool,
+// tag it (frame type, frame structure, discontinuity flag, timestamp,
+// display order), then decode the already-parsed data via decode_continue.
+// The frame is left in mix->video_frame; decode_end() later enqueues or
+// releases it.
+//
+MIX_RESULT mix_videofmt_h264_decode_begin(
+    MixVideoFormat *mix, 
+    vbp_data_h264 *data)
+{
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+
+    //Get a frame from the surface pool
+    LOG_V("Begin\n");
+    ret = mix_surfacepool_get(mix->surfacepool, &(mix->video_frame));
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error getting frame from surfacepool\n");
+        return ret; 
+    }
+
+    /* the following calls will always succeed */
+    // NOTE(review): the return values of the next two calls are assigned to
+    // 'ret' but overwritten without being checked -- confirm they really
+    // cannot fail here, or add explicit error handling.
+
+    // set frame type
+    ret = mix_videofmt_h264_set_frame_type(mix, data);
+
+    // set frame structure
+    ret = mix_videofmt_h264_set_frame_structure(mix, data);
+
+    //Set the discontinuity flag
+    mix_videoframe_set_discontinuity(mix->video_frame, mix->discontinuity_frame_in_progress);
+
+    //Set the timestamp
+    mix_videoframe_set_timestamp(mix->video_frame, mix->current_timestamp);	
+
+    // Set displayorder
+    ret = mix_videoframe_set_displayorder(mix->video_frame, 
+        data->pic_data[0].pic_parms->CurrPic.TopFieldOrderCnt);
+    if(ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E("Error setting displayorder\n");
+        return ret;
+    }
+
+    // decode the data parsed so far into the freshly started frame
+    ret = mix_videofmt_h264_decode_continue(mix, data);
+
+    LOG_V("End\n");
+    return ret;
+
+}
+
+
+//
+// mix_videofmt_h264_decode_a_buffer:
+// Parse and decode a single input buffer.  A timestamp change marks a frame
+// boundary: the pending frame is completed (decode_end) and a new one is
+// started (decode_begin).  An unchanged timestamp means the buffer carries
+// more data for the frame in progress (partial frame, e.g. RTP delivery),
+// so decoding simply continues.
+//
+// Returns MIX_RESULT_SUCCESS (also when SPS/PPS or slice data is not yet
+// available -- the decoder just waits for more input),
+// MIX_RESULT_DROPFRAME when parsing fails, or another error code.
+//
+MIX_RESULT mix_videofmt_h264_decode_a_buffer(
+    MixVideoFormat *mix, 
+    MixBuffer * bufin,
+    guint64 ts,
+    gboolean discontinuity) 
+{
+    uint32 pret = 0;
+    MixVideoFormat *parent = NULL;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    vbp_data_h264 *data = NULL;
+
+    LOG_V( "Begin\n");
+
+    parent = MIX_VIDEOFORMAT(mix);
+
+    LOG_V( "Calling parse for current frame, parse handle %d\n", (int)parent->parser_handle);
+    pret = vbp_parse(parent->parser_handle, 
+        bufin->data, 
+        bufin->size,
+        FALSE);
+
+    LOG_V( "Called parse for current frame\n");
+    if ((pret != VBP_DONE) && (pret != VBP_OK))
+    {
+        ret = MIX_RESULT_DROPFRAME;
+        LOG_E( "vbp_parse failed.\n");
+        goto cleanup;
+    }
+
+    //query for data
+    pret = vbp_query(parent->parser_handle, (void *) &data);
+
+    if ((pret != VBP_OK) || (data == NULL))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "vbp_query failed.\n");
+        goto cleanup;
+    }
+    LOG_V( "Called query for current frame\n");
+
+    // Without SPS/PPS the stream cannot be decoded yet; not an error.
+    if (data->has_sps == 0 || data->has_pps == 0)
+    {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_V("SPS or PPS is not available.\n");
+        goto cleanup;      
+    }
+
+    // VA initialization may have been deferred until configuration data
+    // (SPS/PPS) arrived in-band (see mix_videofmt_h264_initialize).
+    if (parent->va_initialized == FALSE)
+    {
+        LOG_V("try initializing VA...\n");
+        ret = mix_videofmt_h264_initialize_va(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_h264_initialize_va failed.\n");
+            goto cleanup; 
+        }
+    }
+
+    // first pic_data always exists, check if any slice is parsed
+    if (data->pic_data[0].num_slices == 0)
+    {
+        ret = MIX_RESULT_SUCCESS;
+        LOG_V("slice is not available.\n");
+        goto cleanup;      
+    }
+    
+    guint64 last_ts = parent->current_timestamp;    
+    parent->current_timestamp = ts;
+    parent->discontinuity_frame_in_progress = discontinuity;
+
+    // Fixed: use the glib 64-bit format macro (as elsewhere in this file)
+    // instead of "%lli", which does not portably match guint64.
+    LOG_V("ts = %"G_GINT64_FORMAT" last_ts = %"G_GINT64_FORMAT"\n", ts, last_ts);
+
+    if (last_ts != ts)
+    {
+        // timestamp changed: frame boundary.
+        // finish decoding the last frame
+        ret = mix_videofmt_h264_decode_end(parent, FALSE);
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_h264_decode_end failed.\n");
+            goto cleanup; 
+        }
+
+        // start decoding a new frame
+        ret = mix_videofmt_h264_decode_begin(parent, data); 
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_h264_decode_begin failed.\n");
+            goto cleanup; 
+        }        
+    }
+    else
+    {
+        // partial frame
+        LOG_V("partial frame handling...\n");
+        ret = mix_videofmt_h264_decode_continue(parent, data);
+        if (ret != MIX_RESULT_SUCCESS)
+        {         
+            LOG_V("mix_videofmt_h264_decode_continue failed.\n");
+            goto cleanup; 
+        }        
+    }
+
+    cleanup:
+
+    LOG_V( "End\n");
+
+    return ret;
+}
+
+
 MIX_RESULT mix_videofmt_h264_initialize(MixVideoFormat *mix, 
 		MixVideoConfigParamsDec * config_params,
-                MixFrameManager * frame_mgr,
+        MixFrameManager * frame_mgr,
 		MixBufferPool * input_buf_pool,
 		MixSurfacePool ** surface_pool,
 		VADisplay va_display ) {
 
-	uint32 pret = 0;
-	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	enum _vbp_parser_type ptype = VBP_H264;
-	vbp_data_h264 *data = NULL;
-	MixVideoFormat *parent = NULL;
-	MixIOVec *header = NULL;
-	gint numprofs = 0, numactualprofs = 0;
-	gint numentrypts = 0, numactualentrypts = 0;
-	VADisplay vadisplay = NULL;
-	VAProfile *profiles = NULL;
-	VAEntrypoint *entrypts = NULL;
-	VAConfigAttrib attrib;
-	VAStatus vret = VA_STATUS_SUCCESS;
-	guint extra_surfaces = 0;
-	VASurfaceID *surfaces = NULL;
-	guint numSurfaces = 0;
+    uint32 pret = 0;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    enum _vbp_parser_type ptype = VBP_H264;
+    vbp_data_h264 *data = NULL;
+    MixVideoFormat *parent = NULL;
+    MixIOVec *header = NULL;
+    guint pic_width_in_codec_data = 0;
+    guint pic_height_in_codec_data = 0;
 
-	//TODO Partition this method into smaller methods
+    if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
+    {
+        LOG_E( "NUll pointer passed in\n");
+        return MIX_RESULT_NULL_PTR;
+    }
 
-	if (mix == NULL || config_params == NULL || frame_mgr == NULL || input_buf_pool == NULL || va_display == NULL)
-	{
-		LOG_E( "NUll pointer passed in\n");
-		return MIX_RESULT_NULL_PTR;
-	}
+    LOG_V( "Begin\n");
 
-	LOG_V( "Begin\n");
+    /* Chainup parent method. */
 
-	/* Chainup parent method. */
+    if (parent_class->initialize) {
+        ret = parent_class->initialize(mix, config_params,
+            frame_mgr, input_buf_pool, surface_pool, 
+            va_display);
+    }
 
-	if (parent_class->initialize) {
-		ret = parent_class->initialize(mix, config_params,
-				frame_mgr, input_buf_pool, surface_pool, 
-				va_display);
-	}
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error initializing\n");
+        return ret;
+    }
 
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		LOG_E( "Error initializing\n");
-		return ret;
-	}
+    if (!MIX_IS_VIDEOFORMAT_H264(mix))
+       return MIX_RESULT_INVALID_PARAM;
 
-	if (!MIX_IS_VIDEOFORMAT_H264(mix))
-		return MIX_RESULT_INVALID_PARAM;
+    parent = MIX_VIDEOFORMAT(mix);
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
 
-	parent = MIX_VIDEOFORMAT(mix);
-	MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+    LOG_V( "Locking\n");
+    //From now on, we exit this function through the cleanup: label below
+    g_mutex_lock(parent->objectlock);
 
-	LOG_V( "Locking\n");
-	//From now on, we exit this function through cleanup:
-	g_mutex_lock(parent->objectlock);
+    // Create the surface pool and DPB table up front; actual VA surface
+    // allocation is deferred to mix_videofmt_h264_initialize_va(), which may
+    // run later when SPS/PPS arrives in-band.
+    parent->surfacepool = mix_surfacepool_new();
+    *surface_pool = parent->surfacepool;
+
+    if (parent->surfacepool == NULL)
+    {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E( "parent->surfacepool == NULL.\n");
+        goto cleanup;
+    }
+
+    //Create our table of Decoded Picture Buffer "in use" surfaces
+    self->dpb_surface_table = g_hash_table_new_full(
+        NULL, 
+        NULL, 
+        mix_videofmt_h264_destroy_DPB_key, 
+        mix_videofmt_h264_destroy_DPB_value);
+
+    if (self->dpb_surface_table == NULL)
+    {
+        ret = MIX_RESULT_NO_MEMORY;
+        LOG_E( "Error allocating dbp surface table\n");
+        goto cleanup;  //leave this goto here in case other code is added between here and cleanup label
+    }
+
+    ret = mix_videoconfigparamsdec_get_extra_surface_allocation(
+        config_params,
+        &parent->extra_surfaces);
+
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Cannot get extra surface allocation setting\n");
+        goto cleanup;
+    }    
 
 	LOG_V( "Before vbp_open\n");
-	//Load the bitstream parser
-	pret = vbp_open(ptype, &(parent->parser_handle));
-
+    //Load the bitstream parser
+    pret = vbp_open(ptype, &(parent->parser_handle));
 	LOG_V( "After vbp_open\n");
-        if (!(pret == VBP_OK))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error opening parser\n");
-		goto cleanup;
-	}
-	LOG_V( "Opened parser\n");
 
-	ret = mix_videoconfigparamsdec_get_header(config_params, 
-		&header);
+    if (!(pret == VBP_OK))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error opening parser\n");
+        goto cleanup;
+    }
+    LOG_V( "Opened parser\n");
 
-        if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
-        {
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Cannot get header data\n");
-		goto cleanup;
-        }
 
-        ret = mix_videoconfigparamsdec_get_extra_surface_allocation(config_params,
-                &extra_surfaces);
+    ret = mix_videoconfigparamsdec_get_header(config_params, &header);
+
+    if ((ret != MIX_RESULT_SUCCESS) || (header == NULL))
+    {
+        // Delay initializing VA if codec configuration data is not ready, but don't return an error.
+        ret = MIX_RESULT_SUCCESS;
+        LOG_W( "Codec data is not available in the configuration parameter.\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);
+
+    pret = vbp_parse(
+        parent->parser_handle,
+        header->data, 
+        header->data_size,
+        TRUE);
+
+    if (!((pret == VBP_OK) || (pret == VBP_DONE)))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error parsing header data\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Parsed header\n");
+
+    //Query the parser for the parsed header (codec) data
+    pret = vbp_query(parent->parser_handle, (void *)&data);
+
+    if ((pret != VBP_OK) || (data == NULL))
+    {
+        ret = MIX_RESULT_FAIL;
+        LOG_E( "Error reading parsed header data\n");
+        goto cleanup;
+    }
+
+    LOG_V( "Queried parser for header data\n");
+
+    // Update the pic size according to the parsed codec_data
+    pic_width_in_codec_data  = (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16;
+    pic_height_in_codec_data = (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16;
+    mix_videoconfigparamsdec_set_picture_res (config_params, pic_width_in_codec_data, pic_height_in_codec_data);
+    parent->picture_width  = pic_width_in_codec_data;
+    parent->picture_height = pic_height_in_codec_data;
+
+    ret = mix_videofmt_h264_initialize_va(mix, data);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        LOG_E( "Error initializing va. \n");
+        goto cleanup;
+    }
+
+
+    // cleanup: on failure, close the parser and mark the decoder
+    // uninitialized; in all cases free the header copy and drop the lock.
+    cleanup:
+    if (ret != MIX_RESULT_SUCCESS) {
+        pret = vbp_close(parent->parser_handle);
+        parent->parser_handle = NULL;
+        parent->initialized = FALSE;
+
+    } else {
+        parent->initialized = TRUE;
+    }
+
+    if (header != NULL)
+    {
+        if (header->data != NULL)
+            g_free(header->data);
+        g_free(header);
+        header = NULL;
+    }
+
+
+    LOG_V( "Unlocking\n");
+    g_mutex_unlock(parent->objectlock);
+
+
+    return ret;
+}
+
+MIX_RESULT mix_videofmt_h264_decode(
+    MixVideoFormat *mix, 
+    MixBuffer * bufin[],
+    gint bufincnt, 
+    MixVideoDecodeParams * decode_params) {
+
+    int i = 0;
+    MixVideoFormat *parent = NULL;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    guint64 ts = 0;
+    gboolean discontinuity = FALSE;
+
+    LOG_V( "Begin\n");
+
+    if (mix == NULL || bufin == NULL || decode_params == NULL || bufincnt == 0)
+    {
+        LOG_E( "NUll pointer passed in\n");
+        return MIX_RESULT_NULL_PTR;
+    }
+
+    /* Chainup parent method.
+        We are not chaining up to parent method for now.
+       */
+
+#if 0
+    if (parent_class->decode) {
+       return parent_class->decode(mix, bufin, bufincnt, decode_params);
+    }
+#endif
+
+    if (!MIX_IS_VIDEOFORMAT_H264(mix))
+        return MIX_RESULT_INVALID_PARAM;
+
+    parent = MIX_VIDEOFORMAT(mix);
+
+    ret = mix_videodecodeparams_get_timestamp(decode_params, &ts);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        // never happen
+        return MIX_RESULT_FAIL;
+    }
+
+    ret = mix_videodecodeparams_get_discontinuity(decode_params, &discontinuity);
+    if (ret != MIX_RESULT_SUCCESS)
+    {
+        // never happen
+        return MIX_RESULT_FAIL;
+    }
+
+    //From now on, we exit this function through cleanup:
+
+    LOG_V( "Locking\n");
+    g_mutex_lock(parent->objectlock);
+
+    LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts);
+
+    for (i = 0; i < bufincnt; i++)
+    {
+		LOG_V( "Decoding a buf %x, size %d\n", (guint)bufin[i]->data, bufin[i]->size);  
+		    
+        // decode a buffer at a time
+        ret = mix_videofmt_h264_decode_a_buffer(
+            mix, 
+            bufin[i],
+            ts,
+            discontinuity);
 
         if (ret != MIX_RESULT_SUCCESS)
         {
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Cannot get extra surface allocation setting\n");
-		goto cleanup;
-        }
+            LOG_E("mix_videofmt_h264_decode_a_buffer failed.\n");
+            goto cleanup;
+        }        
+    }
 
-        LOG_V( "Calling parse on header data, handle %d\n", (int)parent->parser_handle);
 
-	pret = vbp_parse(parent->parser_handle, header->data, 
-			header->data_size, TRUE);
+cleanup:
 
-        if (!((pret == VBP_OK) || (pret == VBP_DONE)))
-        {
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error parsing header data\n");
-		goto cleanup;
-        }
+    LOG_V( "Unlocking\n");
+    g_mutex_unlock(parent->objectlock);
 
-        LOG_V( "Parsed header\n");
+    LOG_V( "End\n");
 
-       //Get the header data and save
-        pret = vbp_query(parent->parser_handle, (void *)&data);
-
-	if ((pret != VBP_OK) || (data == NULL))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error reading parsed header data\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Queried parser for header data\n");
-
-	//Time for libva initialization
-
-	vadisplay = parent->va_display;
-
-	numprofs = vaMaxNumProfiles(vadisplay);
-	profiles = g_malloc(numprofs*sizeof(VAProfile));
-
-	if (!profiles)
-	{
-		ret = MIX_RESULT_NO_MEMORY;
-		LOG_E( "Error allocating memory\n");
-		goto cleanup;
-	}
-
-	vret = vaQueryConfigProfiles(vadisplay, profiles, 
-		&numactualprofs);
-	if (!(vret == VA_STATUS_SUCCESS))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error initializing video driver\n");
-		goto cleanup;
-	}
-
-        //check the desired profile support
-        gint vaprof = 0;
-
-	//TODO Need to cover more cases
-	switch (data->codec_data->profile_idc)
-	{
-#if 1
-//TODO Reinstate this once constraint_set1 flag has been added to codec_data
-	case 66: //Baseline profile
-
-	LOG_V( "mix_videofmt_h264_initialize:  Baseline profile\n");
-		if (data->codec_data->constraint_set1_flag == 0)
-		{
-        		for (; vaprof < numactualprofs; vaprof++)
-        		{
-               			if (profiles[vaprof] == VAProfileH264Baseline)
-               	       	 	break;
-        		}
-		} else
-		{
-        		for (; vaprof < numactualprofs; vaprof++)
-        		{
-               			if (profiles[vaprof] == VAProfileH264High)
-               	       	 	break;
-        		}
-		}
-		if ((vaprof >= numprofs) || ((profiles[vaprof] != VAProfileH264Baseline) && (profiles[vaprof] != VAProfileH264High)))
-		//Did not get the profile we wanted
-		{
-			ret = MIX_RESULT_FAIL;
-			LOG_E( "Profile not supported by driver\n");
-			goto cleanup;
-		}
-		break;
-#endif
-
-#if 0
-//Code left in place in case bug is fixed in libva
-	case 77: //Main profile (need to set to High for libva bug)
-	LOG_V( "mix_videofmt_h264_initialize:  Main profile\n");
-
-        	for (; vaprof < numactualprofs; vaprof++)
-        	{
-               		if (profiles[vaprof] == VAProfileH264Main)
-               	        	break;
-        	}
-		if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264Main)  
-		//Did not get the profile we wanted
-		{
-			ret = MIX_RESULT_FAIL;
-			LOG_E( "Profile not supported by driver\n");
-			goto cleanup;
-		}
-		break;
-#endif
-
-	case 100: //High profile
-	default:  //Set to High as default
-
-	LOG_V( "High profile\n");
-
-        	for (; vaprof < numactualprofs; vaprof++)
-        	{
-               		if (profiles[vaprof] == VAProfileH264High)
-               	        	break;
-        	}
-		if (vaprof >= numprofs || profiles[vaprof] != VAProfileH264High)
-		//Did not get the profile we wanted
-		{
-			ret = MIX_RESULT_FAIL;
-			LOG_E( "Profile not supported by driver\n");
-			goto cleanup;
-		}
-		break;
-
-
-	}
-
-	numentrypts = vaMaxNumEntrypoints(vadisplay);
-	entrypts = g_malloc(numentrypts*sizeof(VAEntrypoint));
-
-	if (!entrypts)
-	{
-		ret = MIX_RESULT_NO_MEMORY;
-		LOG_E( "Error allocating memory\n");
-		goto cleanup;
-	}
-
-	vret = vaQueryConfigEntrypoints(vadisplay, profiles[vaprof], 
-		entrypts, &numactualentrypts);
-	if (!(vret == VA_STATUS_SUCCESS))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error initializing driver\n");
-		goto cleanup;
-	}
-
-	gint vaentrypt = 0;
-	for (; vaentrypt < numactualentrypts; vaentrypt++)
-	{
-		if (entrypts[vaentrypt] == VAEntrypointVLD)
-			break;
-	}
-	if (vaentrypt >= numentrypts || entrypts[vaentrypt] != VAEntrypointVLD)  
-	//Did not get the entrypt we wanted
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Entry point not supported by driver\n");
-		goto cleanup;
-	}
-
-	//We are requesting RT attributes
-	attrib.type = VAConfigAttribRTFormat;
-
-	vret = vaGetConfigAttributes(vadisplay, profiles[vaprof], 
-		entrypts[vaentrypt], &attrib, 1);
-
-        //TODO Handle other values returned for RT format
-        // and check with requested format provided in config params
-        //Right now only YUV 4:2:0 is supported by libva
-        // and this is our default
-        if (((attrib.value & VA_RT_FORMAT_YUV420) == 0) ||
-                vret != VA_STATUS_SUCCESS)
-        {
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error initializing driver\n");
-		goto cleanup;
-        }
-
-	//Initialize and save the VA config ID
-	vret = vaCreateConfig(vadisplay, profiles[vaprof], 
-		entrypts[vaentrypt], &attrib, 1, &(parent->va_config));
-
-	if (!(vret == VA_STATUS_SUCCESS))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error initializing driver\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Created libva config with profile %d\n", vaprof);
-
-
-	//Initialize the surface pool
-
-	LOG_V( "Codec data says num_ref_frames is %d\n", data->codec_data->num_ref_frames);
-
-
-	// handle both frame and field coding for interlaced content
-	int num_ref_pictures = data->codec_data->num_ref_frames;
-	if (!data->codec_data->frame_mbs_only_flag &&
-		!data->codec_data->mb_adaptive_frame_field_flag)
-	{
-		
-		// field coding, two fields share the same surface.	
-		//num_ref_pictures *= 2;			
-	}
-
-	//Adding 1 to work around VBLANK issue
-	parent->va_num_surfaces = 1 + extra_surfaces + (((num_ref_pictures + 3) <
-		MIX_VIDEO_H264_SURFACE_NUM) ? 
-		(num_ref_pictures + 3)
-		: MIX_VIDEO_H264_SURFACE_NUM);
-		
-	numSurfaces = parent->va_num_surfaces;
-	
-	parent->va_surfaces = g_malloc(sizeof(VASurfaceID)*numSurfaces);
-
-	surfaces = parent->va_surfaces;
-
-	if (surfaces == NULL)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Cannot allocate temporary data\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Codec data says picture size is %d x %d\n", (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, (data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16);
-	LOG_V( "getcaps says picture size is %d x %d\n", parent->picture_width, parent->picture_height);
-
-	vret = vaCreateSurfaces(vadisplay, (data->pic_data[0].pic_parms->picture_width_in_mbs_minus1 + 1) * 16, 
-		(data->pic_data[0].pic_parms->picture_height_in_mbs_minus1 + 1) * 16, entrypts[vaentrypt],
-		numSurfaces, surfaces);
-
-	if (!(vret == VA_STATUS_SUCCESS))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error allocating surfaces\n");
-		goto cleanup;
-	}
-
-	parent->surfacepool = mix_surfacepool_new();
-	*surface_pool = parent->surfacepool;
-
-	if (parent->surfacepool == NULL)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error initializing surface pool\n");
-		goto cleanup;
-	}
-	
-
-	ret = mix_surfacepool_initialize(parent->surfacepool,
-		surfaces, numSurfaces, vadisplay);
-
-	switch (ret)
-	{
-		case MIX_RESULT_SUCCESS:
-			break;
-		case MIX_RESULT_ALREADY_INIT:  //This case is for future use when we can be  initialized multiple times.  It is to detect when we have not been reset before re-initializing.
-		default:
-			ret = MIX_RESULT_ALREADY_INIT;
-			LOG_E( "Error init failure\n");
-			goto cleanup;
-                        break;
-	}
-
-	LOG_V( "Created %d libva surfaces\n", numSurfaces);
-
-        //Initialize and save the VA context ID
-        //Note: VA_PROGRESSIVE libva flag is only relevant to MPEG2
-        vret = vaCreateContext(vadisplay, parent->va_config,
-                parent->picture_width, parent->picture_height,
-                0, surfaces, numSurfaces,
-                &(parent->va_context));
-	if (!(vret == VA_STATUS_SUCCESS))
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Error initializing video driver\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Created libva context width %d, height %d\n", parent->picture_width, parent->picture_height);
-
-	//Create our table of Decoded Picture Buffer "in use" surfaces
-	self->dpb_surface_table = g_hash_table_new_full(NULL, NULL, mix_videofmt_h264_destroy_DPB_key, mix_videofmt_h264_destroy_DPB_value);
-
-	if (self->dpb_surface_table == NULL)
-	{
-		ret = MIX_RESULT_NO_MEMORY;
-		LOG_E( "Error allocating dbp surface table\n");
-		goto cleanup;  //leave this goto here in case other code is added between here and cleanup label
-	}
-
-	cleanup:
-	if (ret != MIX_RESULT_SUCCESS) {
-		pret = vbp_close(parent->parser_handle);
-		parent->parser_handle = NULL;
-       		parent->initialized = FALSE;
-
-	} else {
-	         parent->initialized = TRUE;
-	}
-
-	if (header != NULL)
-	{
-		if (header->data != NULL)
-			g_free(header->data);
-		g_free(header);
-		header = NULL;
-	}
-
-	g_free(profiles);
-        g_free(entrypts);
-
-	LOG_V( "Unlocking\n");
-        g_mutex_unlock(parent->objectlock);
-
-
-	return ret;
-}
-
-MIX_RESULT mix_videofmt_h264_decode(MixVideoFormat *mix, MixBuffer * bufin[],
-                gint bufincnt, MixVideoDecodeParams * decode_params) {
-
-        uint32 pret = 0;
-	int i = 0;
-        MixVideoFormat *parent = NULL;
-	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	guint64 ts = 0;
-	vbp_data_h264 *data = NULL;
-	gboolean discontinuity = FALSE;
-	MixInputBufferEntry *bufentry = NULL;
-
-        LOG_V( "Begin\n");
-
-        if (mix == NULL || bufin == NULL || decode_params == NULL )
-	{
-		LOG_E( "NUll pointer passed in\n");
-                return MIX_RESULT_NULL_PTR;
-	}
-
-	/* Chainup parent method.
-		We are not chaining up to parent method for now.
-	 */
-
-#if 0
-        if (parent_class->decode) {
-                return parent_class->decode(mix, bufin, bufincnt,
-                                        decode_params);
-	}
-#endif
-
-	if (!MIX_IS_VIDEOFORMAT_H264(mix))
-		return MIX_RESULT_INVALID_PARAM;
-
-	parent = MIX_VIDEOFORMAT(mix);
-
-
-	ret = mix_videodecodeparams_get_timestamp(decode_params, 
-			&ts);
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		return MIX_RESULT_FAIL;
-	}
-
-	ret = mix_videodecodeparams_get_discontinuity(decode_params, 
-			&discontinuity);
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		return MIX_RESULT_FAIL;
-	}
-
-	//From now on, we exit this function through cleanup:
-
-	LOG_V( "Locking\n");
-        g_mutex_lock(parent->objectlock);
-
-	LOG_V( "parse in progress is %d\n", parent->parse_in_progress);
-	//If this is a new frame and we haven't retrieved parser
-	//  workload data from previous frame yet, do so
-	if ((ts != parent->current_timestamp) && 
-			(parent->parse_in_progress))
-	{
-
-		//query for data
-		pret = vbp_query(parent->parser_handle,
-			(void *) &data);
-
-		if ((pret != VBP_OK) || (data == NULL))
-        	{
-			ret = MIX_RESULT_FAIL;
-			LOG_E( "Error initializing parser\n");
-               		goto cleanup;
-        	}
-	
-		LOG_V( "Queried for last frame data\n");
-
-		//process and decode data
-		ret = mix_videofmt_h264_process_decode(mix,
-			data, parent->current_timestamp, 
-			parent->discontinuity_frame_in_progress);
-
-		if (ret != MIX_RESULT_SUCCESS)
-        	{
-			//We log this but need to process the new frame data, so do not return
-			LOG_E( "Process_decode failed.\n");
-        	}
-
-		LOG_V( "Called process and decode for last frame\n");
-
-		parent->parse_in_progress = FALSE;
-
-	}
-
-	parent->current_timestamp = ts;
-	parent->discontinuity_frame_in_progress = discontinuity;
-
-	LOG_V( "Starting current frame %d, timestamp %"G_GINT64_FORMAT"\n", mix_video_h264_counter++, ts);
-
-	for (i = 0; i < bufincnt; i++)
-	{
-
-		LOG_V( "Calling parse for current frame, parse handle %d, buf %x, size %d\n", (int)parent->parser_handle, (guint)bufin[i]->data, bufin[i]->size);
-
-#ifndef ANDROID		
-		pret = vbp_parse(parent->parser_handle, 
-			bufin[i]->data, 
-			bufin[i]->size,
-			FALSE);
-#else
-		/* we got an array of NALs for a frame */
-		{
-			gint nal_index = 0;
-			NalBuffer *nals = (NalBuffer *)bufin[i]->data;
-			gint nal_count = bufin[i]->size;
-			
-                        LOG_V("nal_count = %d\n", nal_count);
-			for(nal_index = 0; nal_index < nal_count; nal_index ++) {
-
-                                LOG_V("nals[%d].offset = 0x%x nals[nal_index].length = %d\n",
-                                                  nal_index, nals[nal_index].offset, nals[nal_index].length);
-
-				pret = vbp_parse(parent->parser_handle, 
-						nals[nal_index].buffer + nals[nal_index].offset, 
-						nals[nal_index].length, 
-						FALSE);                       
-                               
-                                LOG_V("nal_index = %d pret = 0x%x\n", nal_index, pret);
-
-				if(pret != VBP_OK && pret != VBP_DONE) {
-					ret = MIX_RESULT_FAIL;
-                                        LOG_E( "Error parsing data : pret = 0x%x\n", pret);
-					goto cleanup;
-				}
-			}
-		}
-#endif		
-
-		LOG_V( "Called parse for current frame\n");
-
-		if ((pret == VBP_DONE) || (pret == VBP_OK))
-		{
-			//query for data
-			pret = vbp_query(parent->parser_handle,
-				(void *) &data);
-
-			if ((pret != VBP_OK) || (data == NULL))
-        		{
-				ret = MIX_RESULT_FAIL;
-				LOG_E( "Error getting parser data\n");
-               			goto cleanup;
-        		}
-
-			LOG_V( "Called query for current frame\n");
-
-			//Increase the ref count of this input buffer
-			mix_buffer_ref(bufin[i]);
-
-			//Create a new MixInputBufferEntry
-			//TODO make this from a pool to optimize
-			bufentry = g_malloc(sizeof(
-				MixInputBufferEntry));
-			if (bufentry == NULL)
-        		{
-				ret = MIX_RESULT_NO_MEMORY;
-				LOG_E( "Error allocating bufentry\n");
-               			goto cleanup;
-        		}
-
-			bufentry->buf = bufin[i];
-	LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
-			bufentry->timestamp = ts;
-
-			LOG_V( "Enqueue this input buffer for current frame\n");
-			LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);
-
-			//Enqueue this input buffer
-			g_queue_push_tail(parent->inputbufqueue, 
-				(gpointer)bufentry);
-
-			//process and decode data
-			ret = mix_videofmt_h264_process_decode(mix,
-				data, ts, discontinuity);
-
-			if (ret != MIX_RESULT_SUCCESS)
-                	{
-				//We log this but continue since we need to complete our processing of input buffers
-				LOG_E( "Process_decode failed.\n");
-                	}
-
-			LOG_V( "Called process and decode for current frame\n");
-
-			parent->parse_in_progress = FALSE;
-		}
-		else if (pret != VBP_OK)
-        	{
-			//We log this but continue since we need to complete our processing of input buffers
-			LOG_E( "Parsing failed.\n");
-			ret = MIX_RESULT_FAIL;
-        	}
-		else
-		{
-
-			LOG_V( "Enqueuing buffer and going on to next (if any) for this frame\n");
-
-			//Increase the ref count of this input buffer
-			mix_buffer_ref(bufin[i]);
-
-			//Create a new MixInputBufferEntry
-			//TODO make this from a pool to optimize
-			bufentry = g_malloc(sizeof
-				(MixInputBufferEntry));
-			if (bufentry == NULL)
-        		{
-				ret = MIX_RESULT_NO_MEMORY;
-				LOG_E( "Error allocating bufentry\n");
-               			goto cleanup;
-        		}
-			bufentry->buf = bufin[i];
-	LOG_V( "Setting bufentry %x for mixbuffer %x ts to %"G_GINT64_FORMAT"\n", (guint)bufentry, (guint)bufentry->buf, ts);
-			bufentry->timestamp = ts;
-
-			LOG_V( "Enqueue this input buffer for current frame\n");
-			LOG_V( "bufentry->timestamp %"G_GINT64_FORMAT"\n", bufentry->timestamp);
-
-			//Enqueue this input buffer
-			g_queue_push_tail(parent->inputbufqueue, 
-				(gpointer)bufentry);
-	LOG_V( "Setting parse_in_progress to TRUE\n");
-			parent->parse_in_progress = TRUE;
-		}
-
-	}
-
-
-	cleanup:
-
-	LOG_V( "Unlocking\n");
- 	g_mutex_unlock(parent->objectlock);
-
-        LOG_V( "End\n");
-
-	return ret;
+    return ret;
 }
 
 MIX_RESULT mix_videofmt_h264_flush(MixVideoFormat *mix) {
 
-MIX_RESULT ret = MIX_RESULT_SUCCESS;
+    MIX_RESULT ret = MIX_RESULT_SUCCESS;
 
 	LOG_V( "Begin\n");
 
@@ -831,9 +1278,7 @@
 		return MIX_RESULT_NULL_PTR;
 	}
 
-        uint32 pret = 0;
-	MixInputBufferEntry *bufentry = NULL;
-
+    uint32 pret = 0;
 
 	/* Chainup parent method.
 		We are not chaining up to parent method for now.
@@ -847,23 +1292,15 @@
 
 	MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
 
-        g_mutex_lock(mix->objectlock);
+    g_mutex_lock(mix->objectlock);
 
-	//Clear the contents of inputbufqueue
-	while (!g_queue_is_empty(mix->inputbufqueue))
-	{
-		bufentry = (MixInputBufferEntry *) g_queue_pop_head(
-				mix->inputbufqueue);
-		if (bufentry == NULL) continue;
-
-		mix_buffer_unref(bufentry->buf);
-		g_free(bufentry);
-	}
-
+    // drop any decode-pending picture, and ignore return value
+    mix_videofmt_h264_decode_end(mix, TRUE);
+	
 	//Clear parse_in_progress flag and current timestamp
-        mix->parse_in_progress = FALSE;
+    mix->parse_in_progress = FALSE;
 	mix->discontinuity_frame_in_progress = FALSE;
-	mix->current_timestamp = 0;
+	mix->current_timestamp = (guint64)-1;
 
 	//Clear the DPB surface table
 	g_hash_table_remove_all(self->dpb_surface_table);
@@ -873,7 +1310,7 @@
 	if (pret != VBP_OK)
 		ret = MIX_RESULT_FAIL;
 
-        g_mutex_unlock(mix->objectlock);
+    g_mutex_unlock(mix->objectlock);
 
 	LOG_V( "End\n");
 
@@ -883,10 +1320,8 @@
 MIX_RESULT mix_videofmt_h264_eos(MixVideoFormat *mix) {
 
 	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	vbp_data_h264 *data = NULL;
-        uint32 pret = 0;
 
-        LOG_V( "Begin\n");
+    LOG_V( "Begin\n");
 
 	if (mix == NULL)
 	{
@@ -904,38 +1339,12 @@
 	}
 #endif
 
-        g_mutex_lock(mix->objectlock);
+    g_mutex_lock(mix->objectlock);
 
-	//if a frame is in progress, process the frame
-	if (mix->parse_in_progress)
-	{
-		//query for data
-		pret = vbp_query(mix->parser_handle,
-			(void *) &data);
-
-		if ((pret != VBP_OK) || (data == NULL))
-               	{
-               		ret = MIX_RESULT_FAIL;
- 			LOG_E( "Error getting last parse data\n");
-			goto cleanup;
-               	}
-
-		//process and decode data
-		ret = mix_videofmt_h264_process_decode(mix,
-			data, mix->current_timestamp, 
-			mix->discontinuity_frame_in_progress);
-		mix->parse_in_progress = FALSE;
-		if (ret != MIX_RESULT_SUCCESS)
-		{
- 			LOG_E( "Error processing last frame\n");
-			goto cleanup;
-		}
-
-	}
-
-cleanup:
-
-        g_mutex_unlock(mix->objectlock);
+    // finished decoding the pending frame
+    mix_videofmt_h264_decode_end(mix, FALSE);
+    
+    g_mutex_unlock(mix->objectlock);
 
 	//Call Frame Manager with _eos()
 	ret = mix_framemanager_eos(mix->framemgr);
@@ -966,12 +1375,14 @@
 		return parent_class->deinitialize(mix);
 	}
 
-        //Most stuff is cleaned up in parent_class->finalize() and in _finalize
+    //Most stuff is cleaned up in parent_class->finalize() and in _finalize
 
-        LOG_V( "End\n");
+    LOG_V( "End\n");
 
 	return MIX_RESULT_SUCCESS;
 }
+
+
 #define HACK_DPB
 #ifdef HACK_DPB
 static inline MIX_RESULT mix_videofmt_h264_hack_dpb(MixVideoFormat *mix, 
@@ -984,7 +1395,6 @@
 	VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
 	VAPictureH264 *pRefList = NULL;
 	int i = 0, j = 0, k = 0, list = 0;
-	MIX_RESULT ret = MIX_RESULT_FAIL;
 
 	MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
 
@@ -1038,7 +1448,7 @@
 					pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id = 
 						((MixVideoFrame *)video_frame)->frame_id;
 
-        				LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);
+        			LOG_V( "Inserting frame id %d into DPB\n", pic_params->ReferenceFrames[pic_params->num_ref_frames].picture_id);
 
 					pic_params->ReferenceFrames[pic_params->num_ref_frames].flags = 
 						pRefList[j].flags;
@@ -1051,7 +1461,7 @@
 				}
 
 			}
-		pRefList = pic_data->slc_data[i].slc_parms.RefPicList1;
+		    pRefList = pic_data->slc_data[i].slc_parms.RefPicList1;
 		}
 
 	}
@@ -1059,456 +1469,7 @@
 }
 #endif
 
-					
-MIX_RESULT mix_videofmt_h264_process_decode_picture(MixVideoFormat *mix,
-					vbp_data_h264 *data, 
-					guint64 timestamp,
-					gboolean discontinuity,
-					int pic_index,
-					MixVideoFrame *frame)
-{
-
-	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	VAStatus vret = VA_STATUS_SUCCESS;
-	VADisplay vadisplay = NULL;
-	VAContextID vacontext;
-	guint buffer_id_cnt = 0;
-	VABufferID *buffer_ids = NULL;
-
-	//TODO Partition this method into smaller methods
-
-	LOG_V( "Begin\n");
-
-	if ((mix == NULL) || (data == NULL) || (data->pic_data == NULL) || (frame == NULL))
-	{
-		LOG_E( "Null pointer passed in\n");
-		return MIX_RESULT_NULL_PTR;
-	}
-
-	vbp_picture_data_h264* pic_data = &(data->pic_data[pic_index]);
 	
-	
-	//After this point, all exits from this function are through cleanup:
-
-	if (!MIX_IS_VIDEOFORMAT_H264(mix))
-		return MIX_RESULT_INVALID_PARAM;
-
-	MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
-
-	VAPictureParameterBufferH264 *pic_params = pic_data->pic_parms;
-
-	if (pic_params == NULL) 
-	{
-		ret = MIX_RESULT_NULL_PTR;
-		LOG_E( "Error reading parser data\n");
-		goto cleanup;
-	}
-
-	//TODO
-	//Check for frame gaps and repeat frames if necessary
-
-	LOG_V( "num_slices is %d, allocating %d buffer_ids\n", pic_data->num_slices, (pic_data->num_slices * 2) + 2);
-
-	buffer_ids = g_malloc(sizeof(VABufferID) * 
-					((pic_data->num_slices * 2) + 2));
-
-	if (buffer_ids == NULL) 
-	{
-		LOG_E( "Cannot allocate buffer IDs\n");
-		ret = MIX_RESULT_NO_MEMORY;
-		goto cleanup;
-	}
-
-	//Set up reference frames for the picture parameter buffer
-
-	//Set the picture type (I, B or P frame)
-	//For H.264 we use the first encountered slice type for this (check - may need to change later to search all slices for B type)
-	MixFrameType frame_type = TYPE_INVALID;
-
-	switch (pic_data->slc_data->slc_parms.slice_type)
-	{
-		case 0:
-		case 3:
-		case 5:
-		case 8:
-			frame_type = TYPE_P;
-			break;
-		case 1:
-		case 6:
-			frame_type = TYPE_B;
-			break;
-		case 2:
-		case 4:
-		case 7:
-		case 9:
-			frame_type = TYPE_I;
-			break;
-		default:
-			break;
-	}
-
-	//Do not have to check for B frames after a seek
-	//Note:  Demux should seek to IDR (instantaneous decoding refresh) frame, otherwise
-	//  DPB will not be correct and frames may come in with invalid references
-	//  This will be detected when DPB is checked for valid mapped surfaces and 
-	//  error returned from there.
-
-	LOG_V( "Getting a new surface for frame_num %d\n", pic_params->frame_num);
-	LOG_V( "frame type is %d\n", frame_type);
-
-        // Set displayorder
-	ret = mix_videoframe_set_displayorder(frame, pic_params->CurrPic.TopFieldOrderCnt / 2);
-	if(ret != MIX_RESULT_SUCCESS) 
-	{
-                LOG_E("Error setting displayorder\n");
-		goto cleanup;
-	}
-
-	//Set the frame type for the frame object (used in reordering by frame manager)
-	ret = mix_videoframe_set_frame_type(frame, frame_type);
-
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		LOG_E( "Error setting frame type on frame\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Updating DPB for libva\n");
-
-	//Now handle the reference frames and surface IDs for DPB and current frame
-	mix_videofmt_h264_handle_ref_frames(mix, pic_params, frame);
-
-#ifdef HACK_DPB
-	//We have to provide a hacked DPB rather than complete DPB for libva as workaround
-	ret = mix_videofmt_h264_hack_dpb(mix, pic_data);
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		LOG_E( "Error reference frame not found\n");
-		goto cleanup;
-	}
-#endif
-
-	//Libva buffer set up
-
-	vadisplay = mix->va_display;
-	vacontext = mix->va_context;
-
-	LOG_V( "Creating libva picture parameter buffer\n");
-	LOG_V( "picture parameter buffer shows num_ref_frames is %d\n", pic_params->num_ref_frames);
-
-	//First the picture parameter buffer
-	vret = vaCreateBuffer(vadisplay, vacontext,
-			VAPictureParameterBufferType,
-			sizeof(VAPictureParameterBufferH264),
-			1,
-			pic_params,
-			&buffer_ids[buffer_id_cnt]);
-	buffer_id_cnt++;
-
-	if (vret != VA_STATUS_SUCCESS)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Video driver returned error from vaCreateBuffer\n");
-		goto cleanup;
-	}
-			
-	LOG_V( "Creating libva IQMatrix buffer\n");
-
-
-	//Then the IQ matrix buffer
-    	vret = vaCreateBuffer(vadisplay, vacontext,
-                    VAIQMatrixBufferType,
-                    sizeof(VAIQMatrixBufferH264),
-                    1,
-                    data->IQ_matrix_buf,
-                    &buffer_ids[buffer_id_cnt]);
-	buffer_id_cnt++;
-
-	if (vret != VA_STATUS_SUCCESS)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Video driver returned error from vaCreateBuffer\n");
-		goto cleanup;
-	}
-
-
-	//Now for slices
-	int i = 0;
-	gpointer video_frame;
-	for (;i < pic_data->num_slices; i++)
-	{
-	
-		LOG_V( "Creating libva slice parameter buffer, for slice %d\n", i);
-
-		//Do slice parameters
-
-		//First patch up the List0 and List1 surface IDs
-		int j = 0;
-		guint poc = 0;
-		for (; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l0_active_minus1; j++)
-		{
-			if (!(pic_data->slc_data[i].slc_parms.RefPicList0[j].flags & VA_PICTURE_H264_INVALID))
-			{
-				poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList0[j]));
-				video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
-				if (video_frame == NULL)
-				{
-					LOG_E(  "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic));
-					ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
-					goto cleanup;
-				}
-				else
-				{
-					pic_data->slc_data[i].slc_parms.RefPicList0[j].picture_id = 
-						((MixVideoFrame *)video_frame)->frame_id;
-				}
-			}
-
-		}
-
-		if ((pic_data->slc_data->slc_parms.slice_type == 1) || (pic_data->slc_data->slc_parms.slice_type == 6))
-		{
-			for (j = 0; j <= pic_data->slc_data[i].slc_parms.num_ref_idx_l1_active_minus1; j++)
-			{
-				if (!(pic_data->slc_data[i].slc_parms.RefPicList1[j].flags & VA_PICTURE_H264_INVALID))
-				{
-					poc = mix_videofmt_h264_get_poc(&(pic_data->slc_data[i].slc_parms.RefPicList1[j]));
-					video_frame = g_hash_table_lookup(self->dpb_surface_table, (gpointer)poc);
-					if (video_frame == NULL)
-					{
-						LOG_E(  "unable to find surface of picture %d (current picture %d).", poc, mix_videofmt_h264_get_poc(&pic_params->CurrPic));
-						ret = MIX_RESULT_DROPFRAME;  //return non-fatal error
-						goto cleanup;
-					}
-					else
-					{						
-						pic_data->slc_data[i].slc_parms.RefPicList1[j].picture_id = 
-							((MixVideoFrame *)video_frame)->frame_id;
-					}
-				}
-			}
-		}
-
-
-		//Then do the libva setup
-
-	       	vret = vaCreateBuffer(vadisplay, vacontext,
-			 VASliceParameterBufferType,
-			 sizeof(VASliceParameterBufferH264),
-			 1,
-	       	         &(pic_data->slc_data[i].slc_parms),
-	       	         &buffer_ids[buffer_id_cnt]);
-
-		if (vret != VA_STATUS_SUCCESS)
-		{
-			ret = MIX_RESULT_FAIL;
-			LOG_E( "Video driver returned error from vaCreateBuffer\n");
-			goto cleanup;
-		}
-
-	    	buffer_id_cnt++;
-
-
-		LOG_V( "Creating libva slice data buffer for slice %d, using slice address %x, with offset %d and size %u\n", i, (guint)pic_data->slc_data[i].buffer_addr, pic_data->slc_data[i].slc_parms.slice_data_offset, pic_data->slc_data[i].slice_size);
-
-
-		//Do slice data
-
-      		vret = vaCreateBuffer(vadisplay, vacontext,
-       	       	  VASliceDataBufferType,
-		  //size
-		  pic_data->slc_data[i].slice_size,
-		  //num_elements
-       	       	  1,
-		  //slice data buffer pointer
-		  //Note that this is the original data buffer ptr;
-		  // offset to the actual slice data is provided in
-		  // slice_data_offset in VASliceParameterBufferH264
-		  pic_data->slc_data[i].buffer_addr + pic_data->slc_data[i].slice_offset,
-      	       	  &buffer_ids[buffer_id_cnt]);
-
-       	 	buffer_id_cnt++;
-
-       	 	if (vret != VA_STATUS_SUCCESS)
-		{
-			ret = MIX_RESULT_FAIL;
- 			LOG_E( "Video driver returned error from vaCreateBuffer\n");
-			goto cleanup;
-		}
-
-	}
-
-	gulong surface = 0;
-
-	//Get our surface ID from the frame object
-	ret = mix_videoframe_get_frame_id(frame, &surface);
-
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		LOG_E( "Error getting surface ID from frame object\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Calling vaBeginPicture\n");
-
-	//Now we can begin the picture
-      	vret = vaBeginPicture(vadisplay, vacontext, surface);
-
-       	if (vret != VA_STATUS_SUCCESS)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Video driver returned error from vaBeginPicture\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Calling vaRenderPicture\n");
-
-	//Render the picture
-      	vret = vaRenderPicture(vadisplay, vacontext,
-      	     		buffer_ids,
-			buffer_id_cnt);
-
-
-       	if (vret != VA_STATUS_SUCCESS)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Video driver returned error from vaRenderPicture\n");
-		goto cleanup;
-	}
-
-	LOG_V( "Calling vaEndPicture\n");
-
-	//End picture
-	vret = vaEndPicture(vadisplay, vacontext);
-
-       	if (vret != VA_STATUS_SUCCESS)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Video driver returned error from vaEndPicture\n");
-		goto cleanup;
-	}
-
-#if 0	/* we don't call vaSyncSurface here, the call is moved to mix_video_render() */
-
-	LOG_V( "Calling vaSyncSurface\n");
-
-	//Decode the picture
-      	vret = vaSyncSurface(vadisplay, surface);
-
-       	if (vret != VA_STATUS_SUCCESS)
-	{
-		ret = MIX_RESULT_FAIL;
-		LOG_E( "Video driver returned error from vaSyncSurface\n");
-		goto cleanup;
-	}
-#endif
-
-	if (pic_index == 0)
-	{
-		//Set the discontinuity flag
-		mix_videoframe_set_discontinuity(frame, discontinuity);
-
-		//Set the timestamp
-		mix_videoframe_set_timestamp(frame, timestamp);
-		
-		guint32 frame_structure = VA_FRAME_PICTURE;
-		if (pic_params->CurrPic.flags & VA_PICTURE_H264_TOP_FIELD)
-		{
-			frame_structure =  VA_TOP_FIELD;
-		}
-		else if (pic_params->CurrPic.flags & VA_PICTURE_H264_BOTTOM_FIELD)
-		{
-			frame_structure = VA_BOTTOM_FIELD;
-		}		
-		mix_videoframe_set_frame_structure(frame, frame_structure);	
-	}
-	else
-	{
-		// frame must be field-coded, no need to set
-		// discontinuity falg and time stamp again
-		mix_videoframe_set_frame_structure(frame, VA_BOTTOM_FIELD | VA_TOP_FIELD);
-	}
-	
-	//TODO need to save off frame when handling is added for repeat frames?
-
-//TODO Complete YUVDUMP code and move into base class
-#ifdef YUVDUMP
-	if (mix_video_h264_counter < 10)
-		ret = GetImageFromSurface (mix, frame);
-//		g_usleep(5000000);
-#endif  /* YUVDUMP */
-
-	LOG_V( "Enqueueing the frame with frame manager, timestamp %"G_GINT64_FORMAT"\n", timestamp);
-
-
-	cleanup:
-
-	if (NULL != buffer_ids)
-		g_free(buffer_ids);
-
-
-	LOG_V( "End\n");
-
-	return ret;
-
-}
-
-
-MIX_RESULT mix_videofmt_h264_process_decode(MixVideoFormat *mix,
-					vbp_data_h264 *data, 
-					guint64 timestamp,
-					gboolean discontinuity)
-{
-	MIX_RESULT ret = MIX_RESULT_SUCCESS;
-	int i = 0;	
-	
-	if ((mix == NULL) || (data == NULL))
-	{
-		LOG_E( "Null pointer passed in\n");
-		return MIX_RESULT_NULL_PTR;
-	}
-
-	//Get a frame from the surface pool
-	MixVideoFrame *frame = NULL;
-
-	ret = mix_surfacepool_get(mix->surfacepool, &frame);
-
-	if (ret != MIX_RESULT_SUCCESS)
-	{
-		LOG_E( "Error getting frame from surfacepool\n");
-		return MIX_RESULT_FAIL;
-	}
-
-	
-	for (i = 0; i < data->num_pictures; i++)
-	{
-		ret = mix_videofmt_h264_process_decode_picture(mix, data, timestamp, discontinuity, i, frame);
-		if (ret != 	MIX_RESULT_SUCCESS)
-		{
-			LOG_E( "Failed to process decode picture %d, error =  %#X.", data->buf_number, ret);
-			break;
-		}		
-	}
-	
-	if (ret == MIX_RESULT_SUCCESS)
-	{
-		//Enqueue the decoded frame using frame manager
-		ret = mix_framemanager_enqueue(mix->framemgr, frame);
-		if (ret != MIX_RESULT_SUCCESS)
-               	{
- 			LOG_E( "Error enqueuing frame object\n");
-			mix_videoframe_unref(frame);
-               	}
-		
-	}
-	else
-	{
-		mix_videoframe_unref(frame);
-	}
-	mix_videofmt_h264_release_input_buffers(mix, timestamp);
-	
-	return ret;
-}
 
 MIX_RESULT mix_videofmt_h264_handle_ref_frames(MixVideoFormat *mix, 
 					VAPictureParameterBufferH264* pic_params,
@@ -1519,7 +1480,7 @@
 
 	LOG_V( "Begin\n");
 
-        if (mix == NULL || current_frame == NULL || pic_params == NULL)
+    if (mix == NULL || current_frame == NULL || pic_params == NULL)
 	{
 		LOG_E( "Null pointer passed in\n");
 		return MIX_RESULT_NULL_PTR;
@@ -1545,7 +1506,7 @@
 		LOG_V( "Flags show VA_PICTURE_H264_LONG_TERM_REFERENCE\n");
 #endif
 
-        MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+    MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
 
 
 	//First we need to check the parser DBP against our DPB table
@@ -1556,7 +1517,7 @@
 #endif
 	g_hash_table_foreach_remove(self->dpb_surface_table, mix_videofmt_h264_check_in_DPB, pic_params);
 
-		LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed);
+    LOG_V( "%d entries removed from DPB surface table at this frame\n", num_removed);
 
 
 	MixVideoFrame *mvf = NULL;
@@ -1569,17 +1530,17 @@
 		{
 
 			poc = mix_videofmt_h264_get_poc(&(pic_params->ReferenceFrames[i]));
-		LOG_V( "Looking up poc %d in dpb table\n", poc);
+		    LOG_V( "Looking up poc %d in dpb table\n", poc);
 			found = g_hash_table_lookup_extended(self->dpb_surface_table, (gpointer)poc, NULL, (gpointer)&mvf);
 
 			if (found)
 			{
 				pic_params->ReferenceFrames[i].picture_id = mvf->frame_id;
-		LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id);
+		        LOG_V( "Looked up poc %d in dpb table found frame ID %d\n", poc, (gint)mvf->frame_id);
 			} else {
-		LOG_V( "Looking up poc %d in dpb table did not find value\n", poc);
+		        LOG_V( "Looking up poc %d in dpb table did not find value\n", poc);
 			}
-		LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id);
+		    LOG_V( "For poc %d, set surface id for DPB index %d to %d\n", poc, i, (gint)pic_params->ReferenceFrames[i].picture_id);
 		}
 
 	}
@@ -1589,7 +1550,8 @@
 	pic_params->CurrPic.picture_id = current_frame->frame_id;
 
 	//Check to see if current frame is a reference frame
-	if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
+	if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || 
+        (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
 	{
 		//Get current frame's POC
 		poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic));	
@@ -1602,6 +1564,44 @@
 		g_hash_table_insert(self->dpb_surface_table, (gpointer)poc, current_frame);
 	}
 
+	LOG_V( "End\n");
+
+	return MIX_RESULT_SUCCESS;
+}
+
+MIX_RESULT mix_videofmt_h264_cleanup_ref_frame(MixVideoFormat *mix, 
+					VAPictureParameterBufferH264* pic_params,
+					MixVideoFrame * current_frame
+					) {
+
+	guint poc = 0;
+
+	LOG_V( "Begin\n");
+
+        if (mix == NULL || current_frame == NULL || pic_params == NULL)
+	{
+		LOG_E( "Null pointer passed in\n");
+		return MIX_RESULT_NULL_PTR;
+	}
+
+
+	LOG_V( "Pic_params has flags %d, topfieldcnt %d, bottomfieldcnt %d.  Surface ID is %d\n", pic_params->CurrPic.flags, pic_params->CurrPic.TopFieldOrderCnt, pic_params->CurrPic.BottomFieldOrderCnt, (gint) current_frame->frame_id);
+
+        MixVideoFormat_H264 *self = MIX_VIDEOFORMAT_H264(mix);
+
+	//Check to see if current frame is a reference frame
+	if ((pic_params->CurrPic.flags & VA_PICTURE_H264_SHORT_TERM_REFERENCE) || (pic_params->CurrPic.flags & VA_PICTURE_H264_LONG_TERM_REFERENCE))
+	{
+		//Get current frame's POC
+		poc = mix_videofmt_h264_get_poc(&(pic_params->CurrPic));	
+
+		//Decrement the reference count for this frame
+//		mix_videoframe_unref(current_frame);
+
+		LOG_V( "Removing poc %d, surfaceID %d\n", poc, (gint)current_frame->frame_id);
+		//Remove this frame from the DPB surface table
+		g_hash_table_remove(self->dpb_surface_table, (gpointer)poc);
+	}
 
 
 	LOG_V( "End\n");
@@ -1611,14 +1611,12 @@
 
 guint mix_videofmt_h264_get_poc(VAPictureH264 *pic)
 {
-
-        if (pic == NULL)
-                return 0;
+    if (pic == NULL)
+        return 0;
 
 	if (pic->flags & VA_PICTURE_H264_BOTTOM_FIELD)
 		return pic->BottomFieldOrderCnt;
 
-	
 	if (pic->flags & VA_PICTURE_H264_TOP_FIELD)
 		return pic->TopFieldOrderCnt;
 
@@ -1631,8 +1629,8 @@
 {
 	gboolean ret = TRUE;
 
-        if ((value == NULL) || (user_data == NULL))  //Note that 0 is valid value for key
-                return FALSE;
+    if ((value == NULL) || (user_data == NULL))  //Note that 0 is valid value for key
+        return FALSE;
 
 	VAPictureH264* vaPic = NULL;
 	int i = 0;
@@ -1653,9 +1651,10 @@
 	return ret;
 }
 
+
 void mix_videofmt_h264_destroy_DPB_key(gpointer data)
 {
-//TODO remove this method and don't register it with the hash table foreach call; it is no longer needed
+    //TODO remove this method and don't register it with the hash table foreach call; it is no longer needed
 	LOG_V( "Begin, poc of %d\n", (guint)data);
 	LOG_V( "End\n");
 
@@ -1665,10 +1664,11 @@
 void mix_videofmt_h264_destroy_DPB_value(gpointer data)
 {
 	LOG_V( "Begin\n");
-        if (data == NULL)
-        	return ;
-	mix_videoframe_unref((MixVideoFrame *)data);
-
+    if (data != NULL)
+    {
+    	mix_videoframe_unref((MixVideoFrame *)data);
+    }
+	LOG_V( "End\n");
 	return;
 }
 
@@ -1677,47 +1677,8 @@
 					guint64 timestamp
 					) {
 
-	MixInputBufferEntry *bufentry = NULL;
-	gboolean done = FALSE;
-
-	LOG_V( "Begin\n");
-
-        if (mix == NULL)
-                return MIX_RESULT_NULL_PTR;
-
-	//Dequeue and release all input buffers for this frame
-		
-	LOG_V( "Releasing all the MixBuffers for this frame\n");
-
-	//While the head of the queue has timestamp == current ts
-	//dequeue the entry, unref the MixBuffer, and free the struct
-	done = FALSE;
-	while (!done)
-	{
-		bufentry = (MixInputBufferEntry *) g_queue_peek_head(
-				mix->inputbufqueue);
-		if (bufentry == NULL) break;
-	LOG_V( "head of queue buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp);
-
-		if (bufentry->timestamp != timestamp)
-		{
-	LOG_V( "buf %x, timestamp %"G_GINT64_FORMAT", buffer timestamp %"G_GINT64_FORMAT"\n", (guint)bufentry->buf, timestamp, bufentry->timestamp);
-			done = TRUE;
-			break;
-		}
-
-		bufentry = (MixInputBufferEntry *) g_queue_pop_head(
-				mix->inputbufqueue);
-		LOG_V( "Unref this MixBuffers %x\n", (guint)bufentry->buf);
-		mix_buffer_unref(bufentry->buf);
-		g_free(bufentry);
-	}
-	
-
-	LOG_V( "End\n");
-
+    // no longer used, need to be removed.
 	return MIX_RESULT_SUCCESS;
 }
 
 
-
diff --git a/mix_video/src/mixvideoformat_h264.h b/mix_video/src/mixvideoformat_h264.h
index a04048c..b6d062e 100644
--- a/mix_video/src/mixvideoformat_h264.h
+++ b/mix_video/src/mixvideoformat_h264.h
@@ -12,6 +12,8 @@
 #include "mixvideoformat.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 #define MIX_VIDEO_H264_SURFACE_NUM       20
 
 /*
@@ -123,7 +125,7 @@
 void mix_videofmt_h264_destroy_DPB_value(gpointer data);
 guint mix_videofmt_h264_get_poc(VAPictureH264 *pic);
 
-
+G_END_DECLS
 
 
 #endif /* __MIX_VIDEOFORMAT_H264_H__ */
diff --git a/mix_video/src/mixvideoformat_mp42.c b/mix_video/src/mixvideoformat_mp42.c
index c6c7b30..b11ab88 100644
--- a/mix_video/src/mixvideoformat_mp42.c
+++ b/mix_video/src/mixvideoformat_mp42.c
@@ -142,7 +142,7 @@
 	parent->initialized = TRUE;
 	parent->parse_in_progress = FALSE;
 	parent->discontinuity_frame_in_progress = FALSE;
-	parent->current_timestamp = 0;
+	parent->current_timestamp = (guint64)-1;
 
 	/* Close the parser */
 	vbp_ret = vbp_close(parent->parser_handle);
@@ -1149,7 +1149,7 @@
 	 */
 	mix->parse_in_progress = FALSE;
 	mix->discontinuity_frame_in_progress = FALSE;
-	mix->current_timestamp = 0;
+	mix->current_timestamp = (guint64)-1;
 
 	{
 		gint idx = 0;
diff --git a/mix_video/src/mixvideoformat_mp42.h b/mix_video/src/mixvideoformat_mp42.h
index 67ee210..49a1299 100644
--- a/mix_video/src/mixvideoformat_mp42.h
+++ b/mix_video/src/mixvideoformat_mp42.h
@@ -12,6 +12,8 @@
 #include "mixvideoformat.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 //Note: this is only a max limit.  Real number of surfaces allocated is calculated in mix_videoformat_mp42_initialize()
 #define MIX_VIDEO_MP42_SURFACE_NUM	8
 
@@ -114,4 +116,6 @@
 MIX_RESULT mix_videofmt_mp42_release_input_buffers(MixVideoFormat *mix,
 		guint64 timestamp);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMAT_MP42_H__ */
diff --git a/mix_video/src/mixvideoformat_vc1.c b/mix_video/src/mixvideoformat_vc1.c
index 151cff3..bf4d1f4 100644
--- a/mix_video/src/mixvideoformat_vc1.c
+++ b/mix_video/src/mixvideoformat_vc1.c
@@ -120,7 +120,7 @@
         parent->initialized = TRUE;
         parent->parse_in_progress = FALSE;
 	parent->discontinuity_frame_in_progress = FALSE;
-	parent->current_timestamp = 0;
+	parent->current_timestamp = (guint64)-1;
 
 	//Close the parser
         pret = vbp_close(parent->parser_handle);
@@ -1526,7 +1526,7 @@
 	//Clear parse_in_progress flag and current timestamp
 	mix->parse_in_progress = FALSE;
 	mix->discontinuity_frame_in_progress = FALSE;
-	mix->current_timestamp = 0;
+	mix->current_timestamp = (guint64)-1;
 
 	int i = 0;
 	for (; i < 2; i++)
diff --git a/mix_video/src/mixvideoformat_vc1.h b/mix_video/src/mixvideoformat_vc1.h
index 9af8a8d..366428e 100644
--- a/mix_video/src/mixvideoformat_vc1.h
+++ b/mix_video/src/mixvideoformat_vc1.h
@@ -12,6 +12,8 @@
 #include "mixvideoformat.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 //Note: this is only a max limit.  Actual number of surfaces allocated is calculated in mix_videoformat_vc1_initialize()
 #define MIX_VIDEO_VC1_SURFACE_NUM	8
 
@@ -120,4 +122,6 @@
 MIX_RESULT mix_videofmt_vc1_release_input_buffers(MixVideoFormat *mix, 
 					guint64 timestamp);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMAT_VC1_H__ */
diff --git a/mix_video/src/mixvideoformatenc.c b/mix_video/src/mixvideoformatenc.c
index e7d1e8e..f39f77f 100644
--- a/mix_video/src/mixvideoformatenc.c
+++ b/mix_video/src/mixvideoformatenc.c
@@ -30,6 +30,9 @@
 static MIX_RESULT mix_videofmtenc_deinitialize_default(MixVideoFormatEnc *mix);
 static MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size_default(
 	MixVideoFormatEnc *mix, guint *max_size);
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params_enc, 
+	MixEncParamsType params_type);
 
 
 static GObjectClass *parent_class = NULL;
@@ -71,6 +74,10 @@
       self->va_format = VA_RT_FORMAT_YUV420;
       self->va_entrypoint = VAEntrypointEncSlice;
       self->va_profile = VAProfileH264Baseline;	   
+      self->level = 30;
+      self->CIR_frame_cnt = 15; //default value
+      self->force_key_frame = FALSE;
+      self->new_header_required = FALSE;
 	
 	//add more properties here
 }
@@ -91,6 +98,7 @@
 	klass->eos = mix_videofmtenc_eos_default;
 	klass->deinitialize = mix_videofmtenc_deinitialize_default;
 	klass->getmaxencodedbufsize = mix_videofmtenc_get_max_coded_buffer_size_default;
+	klass->set_dynamic_config = mix_videofmtenc_set_dynamic_enc_config_default;
 }
 
 MixVideoFormatEnc *
@@ -350,7 +358,31 @@
         g_mutex_unlock(mix->objectlock);
         return MIX_RESULT_FAIL;
     }			
+
+    ret = mix_videoconfigparamsenc_get_level (config_params_enc,
+            &(mix->level));
     
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E( 
+                "Failed to mix_videoconfigparamsenc_get_level\n");                            
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }			
+
+    ret = mix_videoconfigparamsenc_get_CIR_frame_cnt(config_params_enc, 
+            &(mix->CIR_frame_cnt));
+    
+    if (ret != MIX_RESULT_SUCCESS) {
+        //TODO cleanup
+
+        LOG_E( 
+                "Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");                            
+        g_mutex_unlock(mix->objectlock);
+        return MIX_RESULT_FAIL;
+    }		
+
     
     LOG_V( 
             "======Video Encode Parent Object properities======:\n");
@@ -421,6 +453,157 @@
 	return MIX_RESULT_SUCCESS;	
 }
 
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config_default (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params_enc, 
+	MixEncParamsType params_type) {
+
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;	
+
+	if (mix == NULL ||config_params_enc == NULL) {
+		LOG_E( 
+			"!mix || config_params_enc == NULL\n");				
+		return MIX_RESULT_NULL_PTR;
+	}
+
+
+	MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);
+	
+
+	g_mutex_lock(mix->objectlock);
+
+	mix->new_header_required = FALSE;
+
+	switch (params_type) {
+		case MIX_ENC_PARAMS_BITRATE:
+		{
+			ret = mix_videoconfigparamsenc_get_bit_rate (config_params_enc, &(mix->bitrate));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_bit_rate\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}	
+
+			mix->new_header_required = TRUE;
+		}
+			break;
+		case MIX_ENC_PARAMS_SLICE_SIZE:
+		{
+			/*
+			* This type of dynamic control will be handled in H.264 override method
+			*/
+		}
+			break;
+			
+		case MIX_ENC_PARAMS_RC_MODE:	
+		{
+			ret = mix_videoconfigparamsenc_get_rate_control (config_params_enc, &(mix->va_rcmode));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_rate_control\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}	
+
+			mix->new_header_required = TRUE;					
+		}
+			break;
+			
+		case MIX_ENC_PARAMS_RESOLUTION:
+		{
+
+			ret = mix_videoconfigparamsenc_get_picture_res (config_params_enc, &(mix->picture_width), &(mix->picture_height));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_picture_res\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}	
+
+			mix->new_header_required = TRUE;			
+		}
+			break;
+		case MIX_ENC_PARAMS_GOP_SIZE:
+		{
+
+			ret = mix_videoconfigparamsenc_get_intra_period (config_params_enc, &(mix->intra_period));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_intra_period\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}				
+
+			mix->new_header_required = TRUE;						
+
+		}
+			break;
+		case MIX_ENC_PARAMS_FRAME_RATE:
+		{
+			ret = mix_videoconfigparamsenc_get_frame_rate (config_params_enc, &(mix->frame_rate_num),  &(mix->frame_rate_denom));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_frame_rate\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}				
+			
+			mix->new_header_required = TRUE;			
+		}
+			break;
+		case MIX_ENC_PARAMS_FORCE_KEY_FRAME:
+		{
+			mix->new_header_required = TRUE;			
+			
+		}
+			break;
+		case MIX_ENC_PARAMS_QP:
+		{
+			ret = mix_videoconfigparamsenc_get_init_qp (config_params_enc, &(mix->initial_qp));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_init_qp\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}			
+
+			mix->new_header_required = TRUE;			
+		}
+			break;
+		case MIX_ENC_PARAMS_CIR_FRAME_CNT:
+		{
+			ret = mix_videoconfigparamsenc_get_CIR_frame_cnt (config_params_enc, &(mix->CIR_frame_cnt));
+			if (ret != MIX_RESULT_SUCCESS) {
+				//TODO cleanup
+
+				LOG_E(
+					"Failed to mix_videoconfigparamsenc_get_CIR_frame_cnt\n");                            
+				g_mutex_unlock(mix->objectlock);
+				return MIX_RESULT_FAIL;
+			}			
+		}
+			break;
+			
+		default:
+			break;
+	}
+
+ 	g_mutex_unlock(mix->objectlock);	
+
+    	return MIX_RESULT_SUCCESS;	
+}
+
 /* mixvideoformatenc class methods implementation */
 
 MIX_RESULT mix_videofmtenc_getcaps(MixVideoFormatEnc *mix, GString *msg) {
@@ -500,3 +683,15 @@
     
     return MIX_RESULT_FAIL;
 }
+
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params_enc, 
+	MixEncParamsType params_type) {
+
+    MixVideoFormatEncClass *klass = MIX_VIDEOFORMATENC_GET_CLASS(mix);	
+    if (klass->set_dynamic_config) {
+        return klass->set_dynamic_config(mix, config_params_enc, params_type);
+    }
+    
+    return MIX_RESULT_FAIL;	
+}
diff --git a/mix_video/src/mixvideoformatenc.h b/mix_video/src/mixvideoformatenc.h
index c0da910..b66cc6a 100644
--- a/mix_video/src/mixvideoformatenc.h
+++ b/mix_video/src/mixvideoformatenc.h
@@ -22,6 +22,8 @@
 #include "mixvideoformatqueue.h"
 #include "mixvideoencodeparams.h"
 
+G_BEGIN_DECLS
+
 /*
  * Type macros.
  */
@@ -52,6 +54,9 @@
 typedef MIX_RESULT (*MixVideoFmtEncEndOfStreamFunc)(MixVideoFormatEnc *mix);
 typedef MIX_RESULT (*MixVideoFmtEncDeinitializeFunc)(MixVideoFormatEnc *mix);
 typedef MIX_RESULT (*MixVideoFmtEncGetMaxEncodedBufSizeFunc) (MixVideoFormatEnc *mix, guint *max_size);
+typedef MIX_RESULT (*MixVideoFmtEncSetDynamicEncConfigFunc) (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params, 
+	MixEncParamsType params_type);
 
 struct _MixVideoFormatEnc {
     /*< public > */
@@ -82,6 +87,10 @@
     gboolean share_buf_mode;	
     gulong *	ci_frame_id;
     guint	ci_frame_num;	
+
+    gboolean force_key_frame;
+    gboolean new_header_required;
+    guint 	CIR_frame_cnt;	
     
     gulong    drawable;
     gboolean need_display;	
@@ -90,6 +99,7 @@
     VAEntrypoint va_entrypoint;
     guint va_format;
     guint va_rcmode; 	
+    guint8 level;	
 	
     
     MixBufferPool *inputbufpool;
@@ -115,6 +125,7 @@
 	MixVideoFmtEncEndOfStreamFunc eos;
 	MixVideoFmtEncDeinitializeFunc deinitialize;
 	MixVideoFmtEncGetMaxEncodedBufSizeFunc getmaxencodedbufsize;	
+	MixVideoFmtEncSetDynamicEncConfigFunc set_dynamic_config;
 };
 
 /**
@@ -172,7 +183,12 @@
 
 MIX_RESULT mix_videofmtenc_deinitialize(MixVideoFormatEnc *mix);
 
-MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, guint *max_size);
+MIX_RESULT mix_videofmtenc_get_max_coded_buffer_size(MixVideoFormatEnc *mix, 
+	guint *max_size);
 
+MIX_RESULT mix_videofmtenc_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params, 
+	MixEncParamsType params_type);
 
+G_END_DECLS
 #endif /* __MIX_VIDEOFORMATENC_H__ */
diff --git a/mix_video/src/mixvideoformatenc_h263.c b/mix_video/src/mixvideoformatenc_h263.c
index 5f28682..809332e 100644
--- a/mix_video/src/mixvideoformatenc_h263.c
+++ b/mix_video/src/mixvideoformatenc_h263.c
@@ -13,6 +13,7 @@
 
 #include "mixvideoformatenc_h263.h"
 #include "mixvideoconfigparamsenc_h263.h"
+#include <va/va_tpi.h>
 
 #undef SHOW_SRC
 
@@ -43,9 +44,10 @@
     self->is_intra = TRUE;
     self->cur_frame = NULL;
     self->ref_frame = NULL;
-    self->rec_frame = NULL;	
+    self->rec_frame = NULL;
+#ifdef ANDROID	
     self->last_mix_buffer = NULL;
-
+#endif
     self->ci_shared_surfaces = NULL;
     self->surfaces= NULL;
     self->surface_num = 0;
@@ -749,12 +751,12 @@
         mix_videoframe_unref (self->ref_frame);
         self->ref_frame = NULL;       
     }
-
+#ifdef ANDROID
     if(self->last_mix_buffer) {
        mix_buffer_unref(self->last_mix_buffer);
        self->last_mix_buffer = NULL;
     }
-    
+#endif    
     /*reset the properities*/    
     self->encoded_frames = 0;
     self->pic_skipped = FALSE;
@@ -1322,6 +1324,7 @@
 #else
 #define USE_SRC_FMT_NV21
 #endif
+
 #ifdef USE_SRC_FMT_YUV420
             /*need to convert YUV420 to NV12*/
             dst_y = pvbuf +image->offsets[0];
@@ -1448,8 +1451,11 @@
             if (mix->cur_frame == NULL)
             {
                 guint ci_idx;
-//                memcpy (&ci_idx, bufin->data, bufin->size);
+#ifndef ANDROID
+                memcpy (&ci_idx, bufin->data, bufin->size);
+#else
                 memcpy (&ci_idx, bufin->data, sizeof(unsigned int));
+#endif
  
                 LOG_I( 
                         "surface_num = %d\n", mix->surface_num);			 
@@ -1721,7 +1727,8 @@
         mix->coded_buf_index ++; 
         mix->coded_buf_index %=2;
         mix->last_frame = mix->cur_frame;
-        
+
+#ifdef ANDROID        
         if(mix->last_mix_buffer) {
            LOG_V("calls to mix_buffer_unref \n");
            LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount);
@@ -1730,6 +1737,7 @@
 
         LOG_V("ref the current bufin\n");
         mix->last_mix_buffer = mix_buffer_ref(bufin);
+#endif
 
         if (!(parent->need_display)) {
              mix_videoframe_unref (mix->cur_frame);
diff --git a/mix_video/src/mixvideoformatenc_h263.h b/mix_video/src/mixvideoformatenc_h263.h
index ece5fc8..f13db38 100644
--- a/mix_video/src/mixvideoformatenc_h263.h
+++ b/mix_video/src/mixvideoformatenc_h263.h
@@ -12,6 +12,8 @@
 #include "mixvideoformatenc.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 #define MIX_VIDEO_ENC_H263_SURFACE_NUM       20
 
 #define min(X,Y) (((X) < (Y)) ? (X) : (Y))
@@ -48,7 +50,9 @@
 	MixVideoFrame  *ref_frame;  //reference frame
 	MixVideoFrame  *rec_frame;	//reconstructed frame;	
 	MixVideoFrame  *last_frame;	//last frame;
+#ifdef ANDROID
         MixBuffer      *last_mix_buffer;
+#endif
 
 	guint disable_deblocking_filter_idc;
 	guint slice_num;
@@ -137,5 +141,7 @@
 	MixIOVec * iovout);
 MIX_RESULT mix_videofmtenc_h263_send_encode_command (MixVideoFormatEnc_H263 *mix);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMATENC_H263_H__ */
 
diff --git a/mix_video/src/mixvideoformatenc_h264.c b/mix_video/src/mixvideoformatenc_h264.c
index 82fad06..336b4d8 100644
--- a/mix_video/src/mixvideoformatenc_h264.c
+++ b/mix_video/src/mixvideoformatenc_h264.c
@@ -13,6 +13,7 @@
 
 #include "mixvideoformatenc_h264.h"
 #include "mixvideoconfigparamsenc_h264.h"
+#include <va/va_tpi.h>
 
 #undef SHOW_SRC
 
@@ -44,7 +45,9 @@
     self->cur_frame = NULL;
     self->ref_frame = NULL;
     self->rec_frame = NULL;
+#ifdef ANDROID
     self->last_mix_buffer = NULL;
+#endif
 
     self->ci_shared_surfaces = NULL;
     self->surfaces= NULL;
@@ -78,6 +81,7 @@
     video_formatenc_class->eos = mix_videofmtenc_h264_eos;
     video_formatenc_class->deinitialize = mix_videofmtenc_h264_deinitialize;
     video_formatenc_class->getmaxencodedbufsize = mix_videofmtenc_h264_get_max_encoded_buf_size;
+    video_formatenc_class->set_dynamic_config = mix_videofmtenc_h264_set_dynamic_enc_config;
 }
 
 MixVideoFormatEnc_H264 *
@@ -230,7 +234,16 @@
                 "Failed to mix_videoconfigparamsenc_h264_get_delimiter_type\n");                 
         goto cleanup;
     }			
+
+    ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264,
+            &self->idr_interval);
     
+    if (ret != MIX_RESULT_SUCCESS) {
+        LOG_E ( 
+                "Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n");                 
+        goto cleanup;
+    }		
+	
     LOG_V( 
             "======H264 Encode Object properities======:\n");
     
@@ -241,7 +254,9 @@
     LOG_I( "self->slice_num = %d\n", 
             self->slice_num);			
     LOG_I ("self->delimiter_type = %d\n", 
-            self->delimiter_type);				
+            self->delimiter_type);		
+    LOG_I ("self->idr_interval = %d\n", 
+            self->idr_interval);		
     
     LOG_V( 
             "Get properities from params done\n");
@@ -251,6 +266,7 @@
     LOG_V( "Get Display\n");
     LOG_I( "Display = 0x%08x\n", 
             (guint)va_display);			
+	
     
 #if 0
     /* query the vender information, can ignore*/
@@ -776,12 +792,12 @@
         mix_videoframe_unref (self->ref_frame);
         self->ref_frame = NULL;       
     }
-
+#ifdef ANDROID
     if(self->last_mix_buffer) {
        mix_buffer_unref(self->last_mix_buffer);
        self->last_mix_buffer = NULL;
     }
-   
+#endif   
     /*reset the properities*/    
     self->encoded_frames = 0;
     self->pic_skipped = FALSE;
@@ -934,6 +950,7 @@
     /*set up the sequence params for HW*/
     h264_seq_param.level_idc = 30;  //TODO, hard code now
     h264_seq_param.intra_period = parent->intra_period;
+    h264_seq_param.intra_idr_period = mix->idr_interval;
     h264_seq_param.picture_width_in_mbs = parent->picture_width / 16;
     h264_seq_param.picture_height_in_mbs = parent->picture_height/ 16;
     h264_seq_param.bits_per_second = parent->bitrate;
@@ -954,7 +971,9 @@
     LOG_I( "level_idc = %d\n", 
             (guint)h264_seq_param.level_idc);	
     LOG_I( "intra_period = %d\n", 
-            h264_seq_param.intra_period);			
+            h264_seq_param.intra_period);		
+    LOG_I( "idr_interval = %d\n", 
+            h264_seq_param.intra_idr_period);		
     LOG_I( "picture_width_in_mbs = %d\n", 
             h264_seq_param.picture_width_in_mbs);	 
     LOG_I( "picture_height_in_mbs = %d\n", 
@@ -1510,8 +1529,11 @@
         if (mix->cur_frame == NULL)
         {
             guint ci_idx;
-            //memcpy (&ci_idx, bufin->data, bufin->size);
+#ifndef ANDROID
+            memcpy (&ci_idx, bufin->data, bufin->size);
+#else
             memcpy (&ci_idx, bufin->data, sizeof(unsigned int));
+#endif
 
             LOG_I( 
                     "surface_num = %d\n", mix->surface_num);			 
@@ -1551,6 +1573,10 @@
         ret = mix_videoframe_get_frame_id(mix->cur_frame, &surface);
         
     }
+
+    /**
+     * Start encoding process
+     **/
     
     LOG_V( "vaBeginPicture\n");	
     LOG_I( "va_context = 0x%08x\n",(guint)va_context);
@@ -1585,6 +1611,8 @@
             goto cleanup;
         }
     }
+
+    LOG_V( "vaEndPicture\n");		
     
     if (mix->encoded_frames == 0) {
         mix->encoded_frames ++;
@@ -1651,6 +1679,8 @@
         num_seg ++;
     }
 
+    LOG_I ("segment number = %d\n", num_seg);
+
 #if 0
     // first 4 bytes is the size of the buffer
     memcpy (&(iovout->data_size), (void*)buf, 4); 
@@ -1876,6 +1906,7 @@
     mix->coded_buf_index %=2;
     mix->last_frame = mix->cur_frame;
 
+#ifdef ANDROID
     if(mix->last_mix_buffer) {       
        LOG_V("calls to mix_buffer_unref \n");
        LOG_V("refcount = %d\n", MIX_PARAMS(mix->last_mix_buffer)->refcount);
@@ -1884,6 +1915,7 @@
 
     LOG_V("ref the current bufin\n");
     mix->last_mix_buffer = mix_buffer_ref(bufin);
+#endif
 
     if (!(parent->need_display)) {
         mix_videoframe_unref (mix->cur_frame);
@@ -2101,10 +2133,12 @@
 
     LOG_V( "Begin\n");		
 
+    MixVideoFormatEnc *parent = MIX_VIDEOFORMATENC(&(mix->parent));
+	
     if (!MIX_IS_VIDEOFORMATENC_H264(mix))
         return MIX_RESULT_INVALID_PARAM;	
 	
-    if (mix->encoded_frames == 0) {
+    if (mix->encoded_frames == 0 || parent->new_header_required) {
         ret = mix_videofmtenc_h264_send_seq_params (mix);
         if (ret != MIX_RESULT_SUCCESS)
         {
@@ -2112,6 +2146,8 @@
                     "Failed mix_videofmtenc_h264_send_seq_params\n");
             return MIX_RESULT_FAIL;
         }
+
+	 parent->new_header_required = FALSE; //Reset the new_header_required field to FALSE once the new header has been sent
     }
     
     ret = mix_videofmtenc_h264_send_picture_parameter (mix);	
@@ -2138,3 +2174,97 @@
     return MIX_RESULT_SUCCESS;
 }
 
+MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params_enc, 
+	MixEncParamsType params_type) {
+
+
+	MIX_RESULT ret = MIX_RESULT_SUCCESS;
+	MixVideoFormatEnc *parent = NULL;
+	MixVideoConfigParamsEncH264 * config_params_enc_h264;
+
+	LOG_V( "Begin\n");		
+
+	if (!MIX_IS_VIDEOFORMATENC_H264(mix))
+		return MIX_RESULT_INVALID_PARAM;	
+
+	MixVideoFormatEnc_H264 *self = MIX_VIDEOFORMATENC_H264(mix);
+
+	parent = MIX_VIDEOFORMATENC(&(mix->parent));
+
+	if (MIX_IS_VIDEOCONFIGPARAMSENC_H264 (config_params_enc)) {
+		config_params_enc_h264 = 
+			MIX_VIDEOCONFIGPARAMSENC_H264 (config_params_enc);
+	} else {
+		LOG_V( 
+			"mix_videofmtenc_h264_set_dynamic_enc_config: no h264 config params found\n");
+		return MIX_RESULT_FAIL;
+	}	
+
+	/*
+	* For case params_type == MIX_ENC_PARAMS_SLICE_SIZE
+	* we don't need to chain up to parent method, as we will handle
+	* dynamic slice height change inside this method, and other dynamic
+	* controls will be handled in parent method.
+	*/
+	if (params_type == MIX_ENC_PARAMS_SLICE_SIZE) {
+
+		g_mutex_lock(parent->objectlock);     	
+		
+		ret = mix_videoconfigparamsenc_h264_get_slice_num (config_params_enc_h264,
+			&self->slice_num);
+
+		if (ret != MIX_RESULT_SUCCESS) {
+			LOG_E( 
+				"Failed to mix_videoconfigparamsenc_h264_get_slice_num\n");  
+
+			g_mutex_unlock(parent->objectlock);       
+
+			return ret;
+		}			
+
+		g_mutex_unlock(parent->objectlock);       
+		
+	} else if (params_type == MIX_ENC_PARAMS_IDR_INTERVAL) {
+
+		g_mutex_lock(parent->objectlock);     	
+		
+		ret = mix_videoconfigparamsenc_h264_get_IDR_interval(config_params_enc_h264,
+			&self->idr_interval);
+
+		if (ret != MIX_RESULT_SUCCESS) {
+			LOG_E( 
+				"Failed to mix_videoconfigparamsenc_h264_get_IDR_interval\n");  
+
+			g_mutex_unlock(parent->objectlock);       
+
+			return ret;
+		}			
+
+		parent->new_header_required = TRUE;
+
+		g_mutex_unlock(parent->objectlock);    
+		
+	} else{
+
+		/* Chainup parent method. */
+		if (parent_class->set_dynamic_config) {
+			ret = parent_class->set_dynamic_config(mix, config_params_enc,
+				params_type);
+		}
+
+		if (ret != MIX_RESULT_SUCCESS)
+		{
+			LOG_V( 
+				"chainup parent method (set_dynamic_config) failed \n");
+			return ret;
+		}	
+	}
+
+
+	LOG_V( "End\n");		
+
+	return MIX_RESULT_SUCCESS;
+	
+}
+
diff --git a/mix_video/src/mixvideoformatenc_h264.h b/mix_video/src/mixvideoformatenc_h264.h
index 6f470af..a8f813a 100644
--- a/mix_video/src/mixvideoformatenc_h264.h
+++ b/mix_video/src/mixvideoformatenc_h264.h
@@ -12,6 +12,8 @@
 #include "mixvideoformatenc.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 #define MIX_VIDEO_ENC_H264_SURFACE_NUM       20
 
 #define min(X,Y) (((X) < (Y)) ? (X) : (Y))
@@ -46,12 +48,15 @@
     MixVideoFrame  *cur_frame;	//current input frame to be encoded;	
     MixVideoFrame  *ref_frame;  //reference frame
     MixVideoFrame  *rec_frame;	//reconstructed frame;	
-    MixVideoFrame  *last_frame;	//last frame;	
+    MixVideoFrame  *last_frame;	//last frame;
+#ifdef ANDROID	
     MixBuffer      *last_mix_buffer;
+#endif
 
     guint basic_unit_size;  //for rate control
     guint disable_deblocking_filter_idc;
     MixDelimiterType delimiter_type;
+    guint idr_interval;
     guint slice_num;
     guint va_rcmode; 
 
@@ -129,6 +134,9 @@
 MIX_RESULT mix_videofmtenc_h264_flush(MixVideoFormatEnc *mix);
 MIX_RESULT mix_videofmtenc_h264_eos(MixVideoFormatEnc *mix);
 MIX_RESULT mix_videofmtenc_h264_deinitialize(MixVideoFormatEnc *mix);
+MIX_RESULT mix_videofmtenc_h264_set_dynamic_enc_config (MixVideoFormatEnc * mix, 
+	MixVideoConfigParamsEnc * config_params_enc, 
+	MixEncParamsType params_type);
 
 /* Local Methods */
 
@@ -140,4 +148,6 @@
 
 MIX_RESULT mix_videofmtenc_h264_send_encode_command (MixVideoFormatEnc_H264 *mix);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMATENC_H264_H__ */
diff --git a/mix_video/src/mixvideoformatenc_mpeg4.c b/mix_video/src/mixvideoformatenc_mpeg4.c
index 5e95d64..18c0b16 100644
--- a/mix_video/src/mixvideoformatenc_mpeg4.c
+++ b/mix_video/src/mixvideoformatenc_mpeg4.c
@@ -13,6 +13,7 @@
 
 #include "mixvideoformatenc_mpeg4.h"
 #include "mixvideoconfigparamsenc_mpeg4.h"
+#include <va/va_tpi.h>
 
 #undef SHOW_SRC
 
@@ -1552,7 +1553,7 @@
             if (coded_seg->next == NULL)	
                 break;		
             
-            coded_seg ++;
+            coded_seg = coded_seg->next;
             num_seg ++;
         }
         
@@ -1588,7 +1589,7 @@
             if (coded_seg->next == NULL)	
                 break;		
             
-            coded_seg ++;
+            coded_seg = coded_seg->next;
         }        
         
         iovout->buffer_size = iovout->data_size;
diff --git a/mix_video/src/mixvideoformatenc_mpeg4.h b/mix_video/src/mixvideoformatenc_mpeg4.h
index fc83d95..7e8e29b 100644
--- a/mix_video/src/mixvideoformatenc_mpeg4.h
+++ b/mix_video/src/mixvideoformatenc_mpeg4.h
@@ -12,6 +12,8 @@
 #include "mixvideoformatenc.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 #define MIX_VIDEO_ENC_MPEG4_SURFACE_NUM       20
 
 #define min(X,Y) (((X) < (Y)) ? (X) : (Y))
@@ -139,5 +141,7 @@
 	MixIOVec * iovout);
 MIX_RESULT mix_videofmtenc_mpeg4_send_encode_command (MixVideoFormatEnc_MPEG4 *mix);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMATENC_MPEG4_H__ */
 
diff --git a/mix_video/src/mixvideoformatenc_preview.c b/mix_video/src/mixvideoformatenc_preview.c
index 34aa89b..649ad49 100644
--- a/mix_video/src/mixvideoformatenc_preview.c
+++ b/mix_video/src/mixvideoformatenc_preview.c
@@ -13,6 +13,7 @@
 
 #include "mixvideoformatenc_preview.h"
 #include "mixvideoconfigparamsenc_preview.h"
+#include <va/va_tpi.h>
 
 #undef SHOW_SRC
 
diff --git a/mix_video/src/mixvideoformatenc_preview.h b/mix_video/src/mixvideoformatenc_preview.h
index 45ae101..09bc149 100644
--- a/mix_video/src/mixvideoformatenc_preview.h
+++ b/mix_video/src/mixvideoformatenc_preview.h
@@ -12,6 +12,8 @@
 #include "mixvideoformatenc.h"
 #include "mixvideoframe_private.h"
 
+G_BEGIN_DECLS
+
 #define MIX_VIDEO_ENC_PREVIEW_SURFACE_NUM       20
 
 #define min(X,Y) (((X) < (Y)) ? (X) : (Y))
@@ -130,4 +132,6 @@
 MIX_RESULT mix_videofmtenc_preview_process_encode (MixVideoFormatEnc_Preview *mix, MixBuffer * bufin, 
         MixIOVec * iovout);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMATENC_PREVIEW_H__ */
diff --git a/mix_video/src/mixvideoformatqueue.h b/mix_video/src/mixvideoformatqueue.h
index 5594aba..f21edfb 100644
--- a/mix_video/src/mixvideoformatqueue.h
+++ b/mix_video/src/mixvideoformatqueue.h
@@ -11,6 +11,8 @@
 
 #include "mixbuffer.h"
 
+G_BEGIN_DECLS
+
 typedef struct _MixInputBufferEntry MixInputBufferEntry;
 
 struct _MixInputBufferEntry
@@ -21,4 +23,6 @@
 
 };
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFORMATQUEUE_H__ */
diff --git a/mix_video/src/mixvideoframe.c b/mix_video/src/mixvideoframe.c
index 4ea0252..2891cf0 100644
--- a/mix_video/src/mixvideoframe.c
+++ b/mix_video/src/mixvideoframe.c
@@ -492,10 +492,10 @@
 MIX_RESULT mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder) {
 
 	MIX_VIDEOFRAME_GETTER_CHECK_INPUT(obj, displayorder);
-        MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj);
+    MixVideoFramePrivate *priv = VIDEOFRAME_PRIVATE(obj);
 
-        *displayorder = priv -> displayorder;
-	return MIX_RESULT_SUCCESS;
+    *displayorder = priv -> displayorder;
+    return MIX_RESULT_SUCCESS;
 }
 
 
diff --git a/mix_video/src/mixvideoframe.h b/mix_video/src/mixvideoframe.h
index 626e2b1..c3089a9 100644
--- a/mix_video/src/mixvideoframe.h
+++ b/mix_video/src/mixvideoframe.h
@@ -12,6 +12,8 @@
 #include <mixparams.h>
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOFRAME:
  *
@@ -235,4 +237,6 @@
 MIX_RESULT mix_videoframe_get_vadisplay(MixVideoFrame * obj, void **va_display);
 MIX_RESULT mix_videoframe_get_frame_structure(MixVideoFrame * obj, guint32* frame_structure);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFRAME_H__ */
diff --git a/mix_video/src/mixvideoframe_private.h b/mix_video/src/mixvideoframe_private.h
index f337417..bb8bd1e 100644
--- a/mix_video/src/mixvideoframe_private.h
+++ b/mix_video/src/mixvideoframe_private.h
@@ -12,6 +12,8 @@
 #include "mixvideoframe.h"
 #include "mixsurfacepool.h"
 
+G_BEGIN_DECLS
+
 typedef enum _MixFrameType
 {
   TYPE_I,
@@ -33,7 +35,7 @@
   gboolean sync_flag;
   guint32 frame_structure; // 0: frame, 1: top field, 2: bottom field
   void *va_display;
-  guint32 displayorder;  	
+  guint32 displayorder; 
 };
 
 /**
@@ -86,4 +88,7 @@
 MIX_RESULT
 mix_videoframe_get_displayorder(MixVideoFrame *obj, guint32 *displayorder);
 
+
+G_END_DECLS
+
 #endif /* __MIX_VIDEOFRAME_PRIVATE_H__ */
diff --git a/mix_video/src/mixvideoinitparams.h b/mix_video/src/mixvideoinitparams.h
index e234999..bd83224 100644
--- a/mix_video/src/mixvideoinitparams.h
+++ b/mix_video/src/mixvideoinitparams.h
@@ -13,6 +13,8 @@
 #include "mixdisplay.h"
 #include "mixvideodef.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEOINITPARAMS:
  * 
@@ -158,4 +160,6 @@
 MIX_RESULT mix_videoinitparams_get_display (MixVideoInitParams * obj,
 					    MixDisplay ** dislay);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEOINITPARAMS_H__ */
diff --git a/mix_video/src/mixvideolog.h b/mix_video/src/mixvideolog.h
index 89a8827..7bb9ace 100644
--- a/mix_video/src/mixvideolog.h
+++ b/mix_video/src/mixvideolog.h
@@ -10,6 +10,7 @@
 #define __MIX_VIDEO_LOG_H__
 #include <mixlog.h>
 
+G_BEGIN_DECLS
 #ifdef MIX_LOG_ENABLE
 #define LOG_V(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_VERBOSE, format, ##__VA_ARGS__)
 #define LOG_I(format, ...) mix_log(MIX_VIDEO_COMP, MIX_LOG_LEVEL_INFO, format, ##__VA_ARGS__)
@@ -22,4 +23,6 @@
 #define LOG_E(format, ...)
 #endif
 
+G_END_DECLS
+
 #endif /*  __MIX_VIDEO_LOG_H__ */
diff --git a/mix_video/src/mixvideorenderparams.h b/mix_video/src/mixvideorenderparams.h
index e17136d..b377136 100644
--- a/mix_video/src/mixvideorenderparams.h
+++ b/mix_video/src/mixvideorenderparams.h
@@ -14,6 +14,8 @@
 #include "mixdisplay.h"
 #include "mixvideoframe.h"
 
+G_BEGIN_DECLS
+
 /**
  * MIX_TYPE_VIDEORENDERPARAMS:
  *
@@ -253,4 +255,6 @@
 
 /* TODO: Add getters and setters for other properties */
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEORENDERPARAMS_H__ */
diff --git a/mix_video/src/mixvideorenderparams_internal.h b/mix_video/src/mixvideorenderparams_internal.h
index 8619173..c7d1fe4 100644
--- a/mix_video/src/mixvideorenderparams_internal.h
+++ b/mix_video/src/mixvideorenderparams_internal.h
@@ -9,6 +9,9 @@
 #ifndef __MIX_VIDEORENDERPARAMS_PRIVATE_H__
 #define __MIX_VIDEORENDERPARAMS_PRIVATE_H__
 
+G_BEGIN_DECLS
+
+
 typedef struct _MixVideoRenderParamsPrivate MixVideoRenderParamsPrivate;
 
 struct _MixVideoRenderParamsPrivate {
@@ -33,4 +36,6 @@
 		VARectangle ** va_cliprects,
 		guint* number_of_cliprects);
 
+G_END_DECLS
+
 #endif /* __MIX_VIDEORENDERPARAMS_PRIVATE_H__ */
diff --git a/mix_video/test/configure.ac b/mix_video/test/configure.ac
index 4e3a279..82235ad 100644
--- a/mix_video/test/configure.ac
+++ b/mix_video/test/configure.ac
@@ -1,5 +1,5 @@
 
-AC_INIT([testmixvideo], [0.1], [tao.q.tao@intel.com])
+AC_INIT([testmixvideo],[0.1],[tao.q.tao@intel.com])
 
 dnl AC_CONFIG_MACRO_DIR([m4])
 
@@ -35,8 +35,8 @@
   AC_MSG_ERROR(You need mixvideo development packages installed !)
 fi
 
-AC_ARG_ENABLE(optimization,     AC_HELP_STRING([  --disable-optimization], [Do not optimize the library for speed. Might be required for debugging.]))
-AC_ARG_ENABLE(debuginfo,        AC_HELP_STRING([  --enable-debuginfo    ],  [add -g to the compiler flags (to create debug information)]))
+AC_ARG_ENABLE(optimization,     AS_HELP_STRING([  --disable-optimization],[Do not optimize the library for speed. Might be required for debugging.]))
+AC_ARG_ENABLE(debuginfo,        AS_HELP_STRING([  --enable-debuginfo    ],[add -g to the compiler flags (to create debug information)]))
 
 if test "$enable_optimization" = "no" ; then
         DEBUG=true