Add H.264 HW encoder and decoder support for Android.

- Allow the MediaCodec Java wrapper to be configured for either the VP8
or the H.264 codec.
- Save H.264 config frames containing SPS and PPS NALUs and prepend them
to every key frame.
- Correctly handle the case where one encoded frame generates several
output NALUs.
- Add code to find H.264 start codes (sketched below).
- Add a flag (not yet configurable) to use H.264 in AppRTCDemo.
- Improve MediaCodec logging.
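
The start code scan added in androidmediaencoder_jni.cc
(NextNaluPosition()) splits an Annex B frame at every four-byte
00 00 00 01 start code; each position + 4 becomes a fragmentationOffset[]
entry and the distance to the next start code becomes the matching
fragmentationLength[]. A minimal Java transliteration of that scan, for
reference only (the H264AnnexB class and its method names are illustrative
and not part of this CL):

  import java.util.ArrayList;
  import java.util.List;

  public class H264AnnexB {
    private static final int SC_LENGTH = 4;  // Annex B start code 00 00 00 01.

    // Return the index of the first start code at or after |offset|, or -1.
    public static int nextNaluPosition(byte[] buffer, int offset) {
      // Stop SC_LENGTH bytes early so buffer[i + 3] stays in range.
      for (int i = offset; i <= buffer.length - SC_LENGTH; ) {
        if (buffer[i] != 0) { i += 1; continue; }      // Need 00.
        if (buffer[i + 1] != 0) { i += 2; continue; }  // Need 00 00.
        if (buffer[i + 2] != 0) { i += 3; continue; }  // Need 00 00 00.
        if (buffer[i + 3] != 1) {
          // Another zero may still start a code one byte later.
          i += (buffer[i + 3] == 0) ? 1 : 4;
          continue;
        }
        return i;
      }
      return -1;
    }

    // Collect all start code positions in |frame|, as DeliverPendingOutputs()
    // does before filling the RTP fragmentation header.
    public static List<Integer> findStartCodes(byte[] frame) {
      List<Integer> positions = new ArrayList<Integer>();
      int pos = 0;
      while (pos >= 0 && pos <= frame.length - SC_LENGTH) {
        pos = nextNaluPosition(frame, pos);
        if (pos < 0) {
          break;
        }
        positions.add(pos);
        pos += SC_LENGTH;
      }
      return positions;
    }
  }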

R=wzh@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/43379004

Cr-Commit-Position: refs/heads/master@{#8465}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8465 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/app/webrtc/java/jni/androidmediacodeccommon.h b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
index b92091f..d19d1a4 100644
--- a/talk/app/webrtc/java/jni/androidmediacodeccommon.h
+++ b/talk/app/webrtc/java/jni/androidmediacodeccommon.h
@@ -30,6 +30,7 @@
 #define TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
 
 #include <android/log.h>
+#include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "webrtc/base/thread.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 
@@ -37,7 +38,7 @@
 
 // Uncomment this define to enable verbose logging for every encoded/decoded
 // video frame.
-// #define TRACK_BUFFER_TIMING
+//#define TRACK_BUFFER_TIMING
 
 #define TAG "MediaCodecVideo"
 #ifdef TRACK_BUFFER_TIMING
@@ -78,6 +79,15 @@
     current_thread->SetAllowBlockingCalls(true);
 }
 
+// Return the (singleton) Java Enum object corresponding to |index|;
+// |state_class_fragment| is something like "MediaSource$State".
+static inline jobject JavaEnumFromIndex(
+    JNIEnv* jni, const std::string& state_class_fragment, int index) {
+  const std::string state_class = "org/webrtc/" + state_class_fragment;
+  return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
+                           state_class, index);
+}
+
 }  // namespace webrtc_jni
 
 #endif  // TALK_APP_WEBRTC_JAVA_JNI_ANDROIDMEDIACODECCOMMON_H_
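
JavaEnumFromIndex() resolves a native enum value to the Java enum constant
with the same ordinal, so the VideoCodecType enums added in this CL must
track the order of webrtc::VideoCodecType (which is why VIDEO_CODEC_VP9 is
declared even though no VP9 MediaCodec path exists yet). A hedged Java-side
sketch of that contract; fromNativeIndex() is illustrative and not part of
this CL:

  public enum VideoCodecType {
    VIDEO_CODEC_VP8,   // Must match kVideoCodecVP8.
    VIDEO_CODEC_VP9,   // Must match kVideoCodecVP9.
    VIDEO_CODEC_H264;  // Must match kVideoCodecH264.

    // What the JNI helper effectively computes on the Java side.
    public static VideoCodecType fromNativeIndex(int nativeIndex) {
      return values()[nativeIndex];
    }
  }
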
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
index 008bbab..db0c49a 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.cc
@@ -57,6 +57,8 @@
 using webrtc::TextureVideoFrame;
 using webrtc::TickTime;
 using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
 using webrtc::kVideoCodecVP8;
 
 namespace webrtc_jni {
@@ -66,7 +68,7 @@
 class MediaCodecVideoDecoder : public webrtc::VideoDecoder,
                                public rtc::MessageHandler {
  public:
-  explicit MediaCodecVideoDecoder(JNIEnv* jni);
+  explicit MediaCodecVideoDecoder(JNIEnv* jni, VideoCodecType codecType);
   virtual ~MediaCodecVideoDecoder();
 
   static int SetAndroidObjects(JNIEnv* jni, jobject render_egl_context);
@@ -100,6 +102,9 @@
   // true on success.
   bool DeliverPendingOutputs(JNIEnv* jni, int dequeue_timeout_us);
 
+  // Type of video codec.
+  VideoCodecType codecType_;
+
   bool key_frame_required_;
   bool inited_;
   bool use_surface_;
@@ -109,7 +114,7 @@
   NativeHandleImpl native_handle_;
   DecodedImageCallback* callback_;
   int frames_received_;  // Number of frames received by decoder.
-  int frames_decoded_;  // Number of frames decoded by decoder
+  int frames_decoded_;  // Number of frames decoded by decoder.
   int64_t start_time_ms_;  // Start time for statistics.
   int current_frames_;  // Number of frames in the current statistics interval.
   int current_bytes_;  // Encoded bytes in the current statistics interval.
@@ -119,6 +124,9 @@
   std::vector<int64_t> ntp_times_ms_;
   std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                              // decoder input.
+  int32_t output_timestamp_;  // Last output frame timestamp from
+                              // |timestamps_| queue.
+  int64_t output_ntp_time_ms_;  // Last output frame NTP time from
+                                // |ntp_times_ms_| queue.
 
   // State that is constant for the lifetime of this object once the ctor
   // returns.
@@ -153,8 +161,10 @@
   jobject previous_surface_texture_;
 };
 
-MediaCodecVideoDecoder::MediaCodecVideoDecoder(JNIEnv* jni)
-  : key_frame_required_(true),
+MediaCodecVideoDecoder::MediaCodecVideoDecoder(
+    JNIEnv* jni, VideoCodecType codecType) :
+    codecType_(codecType),
+    key_frame_required_(true),
     inited_(false),
     error_count_(0),
     surface_texture_(NULL),
@@ -176,7 +186,8 @@
 
   j_init_decode_method_ = GetMethodID(
       jni, *j_media_codec_video_decoder_class_, "initDecode",
-      "(IIZZLandroid/opengl/EGLContext;)Z");
+      "(Lorg/webrtc/MediaCodecVideoDecoder$VideoCodecType;"
+      "IIZZLandroid/opengl/EGLContext;)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_decoder_class_, "release", "()V");
   j_dequeue_input_buffer_method_ = GetMethodID(
@@ -224,8 +235,9 @@
 
   CHECK_EXCEPTION(jni) << "MediaCodecVideoDecoder ctor failed";
   use_surface_ = true;
-  if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL)
+  if (MediaCodecVideoDecoderFactory::render_egl_context_ == NULL) {
     use_surface_ = false;
+  }
   memset(&codec_, 0, sizeof(codec_));
   AllowBlockingCalls();
 }
@@ -235,17 +247,24 @@
   Release();
   // Delete global references.
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  if (previous_surface_texture_ != NULL)
+  if (previous_surface_texture_ != NULL) {
     jni->DeleteGlobalRef(previous_surface_texture_);
-  if (surface_texture_ != NULL)
+  }
+  if (surface_texture_ != NULL) {
     jni->DeleteGlobalRef(surface_texture_);
+  }
 }
 
 int32_t MediaCodecVideoDecoder::InitDecode(const VideoCodec* inst,
     int32_t numberOfCores) {
   if (inst == NULL) {
+    ALOGE("NULL VideoCodec instance");
     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
   }
+  // Factory should guard against other codecs being used with us.
+  CHECK(inst->codecType == codecType_) << "Unsupported codec " <<
+      inst->codecType << " for " << codecType_;
+
   int ret_val = Release();
   if (ret_val < 0) {
     return ret_val;
@@ -270,17 +289,21 @@
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
-  ALOGD("InitDecodeOnCodecThread: %d x %d. Fps: %d. Errors: %d",
-      codec_.width, codec_.height, codec_.maxFramerate, error_count_);
+  ALOGD("InitDecodeOnCodecThread Type: %d. %d x %d. Fps: %d. Errors: %d",
+      (int)codecType_, codec_.width, codec_.height,
+      codec_.maxFramerate, error_count_);
   bool use_sw_codec = false;
   if (error_count_ > 1) {
     // If more than one critical errors happen for HW codec, switch to SW codec.
     use_sw_codec = true;
   }
 
+  jobject j_video_codec_enum = JavaEnumFromIndex(
+      jni, "MediaCodecVideoDecoder$VideoCodecType", codecType_);
   bool success = jni->CallBooleanMethod(
       *j_media_codec_video_decoder_,
       j_init_decode_method_,
+      j_video_codec_enum,
       codec_.width,
       codec_.height,
       use_sw_codec,
@@ -300,6 +323,8 @@
   current_frames_ = 0;
   current_bytes_ = 0;
   current_decoding_time_ms_ = 0;
+  output_timestamp_ = 0;
+  output_ntp_time_ms_ = 0;
   timestamps_.clear();
   ntp_times_ms_.clear();
   frame_rtc_times_ms_.clear();
@@ -329,6 +354,7 @@
 }
 
 int32_t MediaCodecVideoDecoder::Release() {
+  ALOGD("DecoderRelease request");
   return codec_thread_->Invoke<int32_t>(
         Bind(&MediaCodecVideoDecoder::ReleaseOnCodecThread, this));
 }
@@ -339,7 +365,7 @@
   }
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  ALOGD("DecoderRelease: Frames received: %d.", frames_received_);
+  ALOGD("DecoderReleaseOnCodecThread: Frames received: %d.", frames_received_);
   ScopedLocalRefFrame local_ref_frame(jni);
   for (size_t i = 0; i < input_buffers_.size(); i++) {
     jni->DeleteGlobalRef(input_buffers_[i]);
@@ -384,9 +410,11 @@
   // Always start with a complete key frame.
   if (key_frame_required_) {
     if (inputImage._frameType != webrtc::kKeyFrame) {
+      ALOGE("Key frame is required");
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
     if (!inputImage._completeFrame) {
+      ALOGE("Complete frame is required");
       return WEBRTC_VIDEO_CODEC_ERROR;
     }
     key_frame_required_ = false;
@@ -401,8 +429,6 @@
 
 int32_t MediaCodecVideoDecoder::DecodeOnCodecThread(
     const EncodedImage& inputImage) {
-  static uint8_t yVal_ = 0x7f;
-
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
@@ -449,8 +475,9 @@
     Reset();
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
-  ALOGV("Decoder frame in # %d. Buffer # %d. Size: %d",
-      frames_received_, j_input_buffer_index, inputImage._length);
+  ALOGV("Decoder frame in # %d. Type: %d. Buffer # %d. Size: %d",
+      frames_received_, inputImage._frameType,
+      j_input_buffer_index, inputImage._length);
   memcpy(buffer, inputImage._buffer, inputImage._length);
 
   // Save input image timestamps for later output.
@@ -570,14 +597,19 @@
   }
 
   // Get frame timestamps from a queue.
-  int32_t timestamp = timestamps_.front();
-  timestamps_.erase(timestamps_.begin());
-  int64_t ntp_time_ms = ntp_times_ms_.front();
-  ntp_times_ms_.erase(ntp_times_ms_.begin());
-  int64_t frame_decoding_time_ms = GetCurrentTimeMs() -
-      frame_rtc_times_ms_.front();
-  frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
-
+  if (timestamps_.size() > 0) {
+    output_timestamp_ = timestamps_.front();
+    timestamps_.erase(timestamps_.begin());
+  }
+  if (ntp_times_ms_.size() > 0) {
+    output_ntp_time_ms_ = ntp_times_ms_.front();
+    ntp_times_ms_.erase(ntp_times_ms_.begin());
+  }
+  int64_t frame_decoding_time_ms = 0;
+  if (frame_rtc_times_ms_.size() > 0) {
+    frame_decoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+  }
   ALOGV("Decoder frame out # %d. %d x %d. %d x %d. Color: 0x%x. Size: %d."
       " DecTime: %lld", frames_decoded_, width, height, stride, slice_height,
       color_format, output_buffer_size, frame_decoding_time_ms);
@@ -616,12 +648,12 @@
   if (use_surface_) {
     native_handle_.SetTextureObject(surface_texture_, texture_id);
     TextureVideoFrame texture_image(
-        &native_handle_, width, height, timestamp, 0);
-    texture_image.set_ntp_time_ms(ntp_time_ms);
+        &native_handle_, width, height, output_timestamp_, 0);
+    texture_image.set_ntp_time_ms(output_ntp_time_ms_);
     callback_status = callback_->Decoded(texture_image);
   } else {
-    decoded_image_.set_timestamp(timestamp);
-    decoded_image_.set_ntp_time_ms(ntp_time_ms);
+    decoded_image_.set_timestamp(output_timestamp_);
+    decoded_image_.set_ntp_time_ms(output_ntp_time_ms_);
     callback_status = callback_->Decoded(decoded_image_);
   }
   if (callback_status > 0) {
@@ -692,20 +724,43 @@
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_decoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoDecoder");
-  is_platform_supported_ = jni->CallStaticBooleanMethod(
+  supported_codec_types_.clear();
+
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
       j_decoder_class,
-      GetStaticMethodID(jni, j_decoder_class, "isPlatformSupported", "()Z"));
+      GetStaticMethodID(jni, j_decoder_class, "isVp8HwSupported", "()Z"));
   CHECK_EXCEPTION(jni);
+  if (is_vp8_hw_supported) {
+    ALOGD("VP8 HW Decoder supported.");
+    supported_codec_types_.push_back(kVideoCodecVP8);
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_decoder_class,
+      GetStaticMethodID(jni, j_decoder_class, "isH264HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_h264_hw_supported) {
+    ALOGD("H264 HW Decoder supported.");
+    supported_codec_types_.push_back(kVideoCodecH264);
+  }
 }
 
 MediaCodecVideoDecoderFactory::~MediaCodecVideoDecoderFactory() {}
 
 webrtc::VideoDecoder* MediaCodecVideoDecoderFactory::CreateVideoDecoder(
-    webrtc::VideoCodecType type) {
-  if (type != kVideoCodecVP8 || !is_platform_supported_) {
+    VideoCodecType type) {
+  if (supported_codec_types_.empty()) {
     return NULL;
   }
-  return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded());
+  for (std::vector<VideoCodecType>::const_iterator it =
+      supported_codec_types_.begin(); it != supported_codec_types_.end();
+      ++it) {
+    if (*it == type) {
+      ALOGD("Create HW video decoder for type %d.", (int)type);
+      return new MediaCodecVideoDecoder(AttachCurrentThreadIfNeeded(), type);
+    }
+  }
+  return NULL;
 }
 
 void MediaCodecVideoDecoderFactory::DestroyVideoDecoder(
diff --git a/talk/app/webrtc/java/jni/androidmediadecoder_jni.h b/talk/app/webrtc/java/jni/androidmediadecoder_jni.h
index c6b4288..9e59936 100644
--- a/talk/app/webrtc/java/jni/androidmediadecoder_jni.h
+++ b/talk/app/webrtc/java/jni/androidmediadecoder_jni.h
@@ -51,7 +51,7 @@
   static jobject render_egl_context_;
 
  private:
-  bool is_platform_supported_;
+  std::vector<webrtc::VideoCodecType> supported_codec_types_;
 };
 
 }  // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index 7028431..edefe20 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -49,10 +49,22 @@
 using webrtc::I420VideoFrame;
 using webrtc::RTPFragmentationHeader;
 using webrtc::VideoCodec;
+using webrtc::VideoCodecType;
+using webrtc::kVideoCodecH264;
 using webrtc::kVideoCodecVP8;
 
 namespace webrtc_jni {
 
+// H.264 start code length.
+#define H264_SC_LENGTH 4
+// Maximum allowed NALUs in one output frame.
+#define MAX_NALUS_PERFRAME 32
+// Maximum supported HW video encoder resolution.
+#define MAX_VIDEO_WIDTH 1280
+#define MAX_VIDEO_HEIGHT 1280
+// Maximum supported HW video encoder fps.
+#define MAX_VIDEO_FPS 30
+
 // MediaCodecVideoEncoder is a webrtc::VideoEncoder implementation that uses
 // Android's MediaCodec SDK API behind the scenes to implement (hopefully)
 // HW-backed video encode.  This C++ class is implemented as a very thin shim,
@@ -63,7 +75,7 @@
                                public rtc::MessageHandler {
  public:
   virtual ~MediaCodecVideoEncoder();
-  explicit MediaCodecVideoEncoder(JNIEnv* jni);
+  explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);
 
   // webrtc::VideoEncoder implementation.  Everything trampolines to
   // |codec_thread_| for execution.
@@ -112,13 +124,18 @@
   jobject GetOutputBufferInfoBuffer(JNIEnv* jni, jobject j_output_buffer_info);
   bool GetOutputBufferInfoIsKeyFrame(JNIEnv* jni, jobject j_output_buffer_info);
   jlong GetOutputBufferInfoPresentationTimestampUs(
-      JNIEnv* jni,
-      jobject j_output_buffer_info);
+      JNIEnv* jni, jobject j_output_buffer_info);
 
   // Deliver any outputs pending in the MediaCodec to our |callback_| and return
   // true on success.
   bool DeliverPendingOutputs(JNIEnv* jni);
 
+  // Search for the next H.264 start code in |buffer| and return its offset,
+  // or -1 if no start code is found.
+  int32_t NextNaluPosition(uint8_t* buffer, size_t buffer_size);
+
+  // Type of video codec.
+  VideoCodecType codecType_;
+
   // Valid all the time since RegisterEncodeCompleteCallback() Invoke()s to
   // |codec_thread_| synchronously.
   webrtc::EncodedImageCallback* callback_;
@@ -152,6 +169,7 @@
   int last_set_fps_;  // Last-requested frame rate.
   int64_t current_timestamp_us_;  // Current frame timestamps in us.
   int frames_received_;  // Number of frames received by encoder.
+  int frames_encoded_;  // Number of frames encoded by encoder.
   int frames_dropped_;  // Number of frames dropped by encoder.
   int frames_resolution_update_;  // Number of frames with new codec resolution.
   int frames_in_queue_;  // Number of frames in encoder queue.
@@ -165,6 +183,9 @@
   std::vector<int64_t> render_times_ms_;  // Video frames render time queue.
   std::vector<int64_t> frame_rtc_times_ms_;  // Time when video frame is sent to
                                              // encoder input.
+  int32_t output_timestamp_;  // Last output frame timestamp from
+                              // |timestamps_| queue.
+  int64_t output_render_time_ms_;  // Last output frame render time from
+                                   // |render_times_ms_| queue.
   // Frame size in bytes fed to MediaCodec.
   int yuv_size_;
   // True only when between a callback_->Encoded() call return a positive value
@@ -179,8 +200,10 @@
   Release();
 }
 
-MediaCodecVideoEncoder::MediaCodecVideoEncoder(JNIEnv* jni)
-  : callback_(NULL),
+MediaCodecVideoEncoder::MediaCodecVideoEncoder(
+    JNIEnv* jni, VideoCodecType codecType) :
+    codecType_(codecType),
+    callback_(NULL),
     inited_(false),
     picture_id_(0),
     codec_thread_(new Thread()),
@@ -207,10 +230,12 @@
 
   jclass j_output_buffer_info_class =
       FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
-  j_init_encode_method_ = GetMethodID(jni,
-                                      *j_media_codec_video_encoder_class_,
-                                      "initEncode",
-                                      "(IIII)[Ljava/nio/ByteBuffer;");
+  j_init_encode_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "initEncode",
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
+      "[Ljava/nio/ByteBuffer;");
   j_dequeue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
   j_encode_method_ = GetMethodID(
@@ -219,11 +244,11 @@
       GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
   j_set_rates_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "setRates", "(II)Z");
-  j_dequeue_output_buffer_method_ =
-      GetMethodID(jni,
-                  *j_media_codec_video_encoder_class_,
-                  "dequeueOutputBuffer",
-                  "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
+  j_dequeue_output_buffer_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "dequeueOutputBuffer",
+      "()Lorg/webrtc/MediaCodecVideoEncoder$OutputBufferInfo;");
   j_release_output_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "releaseOutputBuffer", "(I)Z");
 
@@ -245,9 +270,15 @@
     const webrtc::VideoCodec* codec_settings,
     int32_t /* number_of_cores */,
     size_t /* max_payload_size */) {
+  if (codec_settings == NULL) {
+    ALOGE("NULL VideoCodec instance");
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
   // Factory should guard against other codecs being used with us.
-  CHECK(codec_settings->codecType == kVideoCodecVP8) << "Unsupported codec";
+  CHECK(codec_settings->codecType == codecType_) << "Unsupported codec " <<
+      codec_settings->codecType << " for " << codecType_;
 
+  ALOGD("InitEncode request");
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoEncoder::InitEncodeOnCodecThread,
            this,
@@ -274,6 +305,7 @@
 }
 
 int32_t MediaCodecVideoEncoder::Release() {
+  ALOGD("EncoderRelease request");
   return codec_thread_->Invoke<int32_t>(
       Bind(&MediaCodecVideoEncoder::ReleaseOnCodecThread, this));
 }
@@ -335,8 +367,8 @@
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
-  ALOGD("InitEncodeOnCodecThread %d x %d. Bitrate: %d kbps. Fps: %d",
-      width, height, kbps, fps);
+  ALOGD("InitEncodeOnCodecThread Type: %d. %d x %d. Bitrate: %d kbps. Fps: %d",
+      (int)codecType_, width, height, kbps, fps);
   if (kbps == 0) {
     kbps = last_set_bitrate_kbps_;
   }
@@ -350,6 +382,7 @@
   last_set_fps_ = fps;
   yuv_size_ = width_ * height_ * 3 / 2;
   frames_received_ = 0;
+  frames_encoded_ = 0;
   frames_dropped_ = 0;
   frames_resolution_update_ = 0;
   frames_in_queue_ = 0;
@@ -360,22 +393,28 @@
   current_encoding_time_ms_ = 0;
   last_input_timestamp_ms_ = -1;
   last_output_timestamp_ms_ = -1;
+  output_timestamp_ = 0;
+  output_render_time_ms_ = 0;
   timestamps_.clear();
   render_times_ms_.clear();
   frame_rtc_times_ms_.clear();
   drop_next_input_frame_ = false;
   picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
   // We enforce no extra stride/padding in the format creation step.
+  jobject j_video_codec_enum = JavaEnumFromIndex(
+      jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
   jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
       jni->CallObjectMethod(*j_media_codec_video_encoder_,
                             j_init_encode_method_,
+                            j_video_codec_enum,
                             width_,
                             height_,
                             kbps,
                             fps));
   CHECK_EXCEPTION(jni);
-  if (IsNull(jni, input_buffers))
+  if (IsNull(jni, input_buffers)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
+  }
 
   inited_ = true;
   switch (GetIntField(jni, *j_media_codec_video_encoder_,
@@ -477,8 +516,8 @@
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  ALOGV("Encode frame # %d. Buffer # %d. TS: %lld.",
-      frames_received_, j_input_buffer_index, current_timestamp_us_ / 1000);
+  ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
+      frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
 
   jobject j_input_buffer = input_buffers_[j_input_buffer_index];
   uint8* yuv_buffer =
@@ -533,8 +572,8 @@
   }
   CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
-  ALOGD("EncoderRelease: Frames received: %d. Frames dropped: %d.",
-      frames_received_, frames_dropped_);
+  ALOGD("EncoderReleaseOnCodecThread: Frames received: %d. Encoded: %d. "
+      "Dropped: %d.", frames_received_, frames_encoded_, frames_dropped_);
   ScopedLocalRefFrame local_ref_frame(jni);
   for (size_t i = 0; i < input_buffers_.size(); ++i)
     jni->DeleteGlobalRef(input_buffers_[i]);
@@ -614,36 +653,41 @@
       return false;
     }
 
-    // Get frame timestamps from a queue.
-    last_output_timestamp_ms_ =
-        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
-        1000;
-    int32_t timestamp = timestamps_.front();
-    timestamps_.erase(timestamps_.begin());
-    int64_t render_time_ms = render_times_ms_.front();
-    render_times_ms_.erase(render_times_ms_.begin());
-    int64_t frame_encoding_time_ms = GetCurrentTimeMs() -
-        frame_rtc_times_ms_.front();
-    frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
-    frames_in_queue_--;
-
-    // Extract payload and key frame flag.
-    int32_t callback_status = 0;
+    // Get the output buffer and its key frame flag.
     jobject j_output_buffer =
         GetOutputBufferInfoBuffer(jni, j_output_buffer_info);
     bool key_frame = GetOutputBufferInfoIsKeyFrame(jni, j_output_buffer_info);
+
+    // Get frame timestamps from a queue - for non-config frames only.
+    int64_t frame_encoding_time_ms = 0;
+    last_output_timestamp_ms_ =
+        GetOutputBufferInfoPresentationTimestampUs(jni, j_output_buffer_info) /
+        1000;
+    if (frames_in_queue_ > 0) {
+      output_timestamp_ = timestamps_.front();
+      timestamps_.erase(timestamps_.begin());
+      output_render_time_ms_ = render_times_ms_.front();
+      render_times_ms_.erase(render_times_ms_.begin());
+      frame_encoding_time_ms = GetCurrentTimeMs() - frame_rtc_times_ms_.front();
+      frame_rtc_times_ms_.erase(frame_rtc_times_ms_.begin());
+      frames_in_queue_--;
+    }
+
+    // Extract payload.
     size_t payload_size = jni->GetDirectBufferCapacity(j_output_buffer);
     uint8* payload = reinterpret_cast<uint8_t*>(
         jni->GetDirectBufferAddress(j_output_buffer));
     CHECK_EXCEPTION(jni);
 
-    ALOGV("Encoder got output buffer # %d. Size: %d. TS: %lld. Latency: %lld."
-        " EncTime: %lld",
-        output_buffer_index, payload_size, last_output_timestamp_ms_,
+    ALOGV("Encoder frame out # %d. Key: %d. Size: %d. TS: %lld."
+        " Latency: %lld. EncTime: %lld",
+        frames_encoded_, key_frame, payload_size,
+        last_output_timestamp_ms_,
         last_input_timestamp_ms_ - last_output_timestamp_ms_,
         frame_encoding_time_ms);
 
     // Calculate and print encoding statistics - every 3 seconds.
+    frames_encoded_++;
     current_frames_++;
     current_bytes_ += payload_size;
     current_encoding_time_ms_ += frame_encoding_time_ms;
@@ -663,36 +707,73 @@
     }
 
     // Callback - return encoded frame.
+    int32_t callback_status = 0;
     if (callback_) {
       scoped_ptr<webrtc::EncodedImage> image(
           new webrtc::EncodedImage(payload, payload_size, payload_size));
       image->_encodedWidth = width_;
       image->_encodedHeight = height_;
-      image->_timeStamp = timestamp;
-      image->capture_time_ms_ = render_time_ms;
+      image->_timeStamp = output_timestamp_;
+      image->capture_time_ms_ = output_render_time_ms_;
       image->_frameType = (key_frame ? webrtc::kKeyFrame : webrtc::kDeltaFrame);
       image->_completeFrame = true;
 
       webrtc::CodecSpecificInfo info;
       memset(&info, 0, sizeof(info));
-      info.codecType = kVideoCodecVP8;
-      info.codecSpecific.VP8.pictureId = picture_id_;
-      info.codecSpecific.VP8.nonReference = false;
-      info.codecSpecific.VP8.simulcastIdx = 0;
-      info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
-      info.codecSpecific.VP8.layerSync = false;
-      info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
-      info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
-      picture_id_ = (picture_id_ + 1) & 0x7FFF;
+      info.codecType = codecType_;
+      if (codecType_ == kVideoCodecVP8) {
+        info.codecSpecific.VP8.pictureId = picture_id_;
+        info.codecSpecific.VP8.nonReference = false;
+        info.codecSpecific.VP8.simulcastIdx = 0;
+        info.codecSpecific.VP8.temporalIdx = webrtc::kNoTemporalIdx;
+        info.codecSpecific.VP8.layerSync = false;
+        info.codecSpecific.VP8.tl0PicIdx = webrtc::kNoTl0PicIdx;
+        info.codecSpecific.VP8.keyIdx = webrtc::kNoKeyIdx;
+        picture_id_ = (picture_id_ + 1) & 0x7FFF;
+      }
 
-      // Generate a header describing a single fragment.
+      // Generate a header describing the payload fragmentation - a single
+      // fragment for VP8, or one fragment per NALU for H.264.
       webrtc::RTPFragmentationHeader header;
       memset(&header, 0, sizeof(header));
-      header.VerifyAndAllocateFragmentationHeader(1);
-      header.fragmentationOffset[0] = 0;
-      header.fragmentationLength[0] = image->_length;
-      header.fragmentationPlType[0] = 0;
-      header.fragmentationTimeDiff[0] = 0;
+      if (codecType_ == kVideoCodecVP8) {
+        header.VerifyAndAllocateFragmentationHeader(1);
+        header.fragmentationOffset[0] = 0;
+        header.fragmentationLength[0] = image->_length;
+        header.fragmentationPlType[0] = 0;
+        header.fragmentationTimeDiff[0] = 0;
+      } else if (codecType_ == kVideoCodecH264) {
+        // For H.264, search for start codes.
+        int32_t scPositions[MAX_NALUS_PERFRAME + 1] = {};
+        int32_t scPositionsLength = 0;
+        int32_t scPosition = 0;
+        while (scPositionsLength < MAX_NALUS_PERFRAME) {
+          int32_t naluPosition = NextNaluPosition(
+              payload + scPosition, payload_size - scPosition);
+          if (naluPosition < 0) {
+            break;
+          }
+          scPosition += naluPosition;
+          scPositions[scPositionsLength++] = scPosition;
+          scPosition += H264_SC_LENGTH;
+        }
+        if (scPositionsLength == 0) {
+          ALOGE("Start code not found!");
+          ALOGE("Data 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x",
+              image->_buffer[0], image->_buffer[1], image->_buffer[2],
+              image->_buffer[3], image->_buffer[4], image->_buffer[5]);
+          ResetCodec();
+          return false;
+        }
+        scPositions[scPositionsLength] = payload_size;
+        header.VerifyAndAllocateFragmentationHeader(scPositionsLength);
+        for (int32_t i = 0; i < scPositionsLength; i++) {
+          header.fragmentationOffset[i] = scPositions[i] + H264_SC_LENGTH;
+          header.fragmentationLength[i] =
+              scPositions[i + 1] - header.fragmentationOffset[i];
+          header.fragmentationPlType[i] = 0;
+          header.fragmentationTimeDiff[i] = 0;
+        }
+      }
 
       callback_status = callback_->Encoded(*image, &info, &header);
     }
@@ -709,39 +790,90 @@
 
     if (callback_status > 0) {
       drop_next_input_frame_ = true;
-    // Theoretically could handle callback_status<0 here, but unclear what that
-    // would mean for us.
+      // Theoretically could handle callback_status<0 here, but unclear what
+      // that would mean for us.
     }
   }
 
   return true;
 }
 
+int32_t MediaCodecVideoEncoder::NextNaluPosition(
+    uint8_t* buffer, size_t buffer_size) {
+  if (buffer_size < H264_SC_LENGTH) {
+    return -1;
+  }
+  uint8_t* head = buffer;
+  // Set end buffer pointer to 4 bytes before actual buffer end so we can
+  // access head[1], head[2] and head[3] in a loop without buffer overrun.
+  uint8_t* end = buffer + buffer_size - H264_SC_LENGTH;
+
+  while (head < end) {
+    if (head[0]) {
+      head++;
+      continue;
+    }
+    if (head[1]) {  // got 00xx
+      head += 2;
+      continue;
+    }
+    if (head[2]) {  // got 0000xx
+      head += 3;
+      continue;
+    }
+    if (head[3] != 0x01) {  // got 000000xx
+      // If the fourth byte is another zero, a start code may still begin at
+      // head + 1; otherwise it is safe to skip all four bytes.
+      head += (head[3] == 0x00) ? 1 : 4;
+      continue;
+    }
+    return static_cast<int32_t>(head - buffer);
+  }
+  return -1;
+}
+
 MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
-  bool is_platform_supported = jni->CallStaticBooleanMethod(
-      j_encoder_class,
-      GetStaticMethodID(jni, j_encoder_class, "isPlatformSupported", "()Z"));
-  CHECK_EXCEPTION(jni);
-  if (!is_platform_supported)
-    return;
+  supported_codecs_.clear();
 
-  // Wouldn't it be nice if MediaCodec exposed the maximum capabilities of the
-  // encoder?  Sure would be. Too bad it doesn't.  So we hard-code some
-  // reasonable defaults.
-  supported_codecs_.push_back(
-      VideoCodec(kVideoCodecVP8, "VP8", 1280, 1280, 30));
+  bool is_vp8_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isVp8HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_vp8_hw_supported) {
+    ALOGD("VP8 HW Encoder supported.");
+    supported_codecs_.push_back(VideoCodec(kVideoCodecVP8, "VP8",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
+
+  bool is_h264_hw_supported = jni->CallStaticBooleanMethod(
+      j_encoder_class,
+      GetStaticMethodID(jni, j_encoder_class, "isH264HwSupported", "()Z"));
+  CHECK_EXCEPTION(jni);
+  if (is_h264_hw_supported) {
+    ALOGD("H.264 HW Encoder supported.");
+    supported_codecs_.push_back(VideoCodec(kVideoCodecH264, "H264",
+        MAX_VIDEO_WIDTH, MAX_VIDEO_HEIGHT, MAX_VIDEO_FPS));
+  }
 }
 
 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
 
 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
-    webrtc::VideoCodecType type) {
-  if (type != kVideoCodecVP8 || supported_codecs_.empty())
+    VideoCodecType type) {
+  if (supported_codecs_.empty()) {
     return NULL;
-  return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded());
+  }
+  for (std::vector<VideoCodec>::const_iterator it = supported_codecs_.begin();
+       it != supported_codecs_.end(); ++it) {
+    if (it->type == type) {
+      ALOGD("Create HW video encoder for type %d (%s).",
+          (int)type, it->name.c_str());
+      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
+    }
+  }
+  return NULL;
 }
 
 const std::vector<MediaCodecVideoEncoderFactory::VideoCodec>&
@@ -751,6 +883,7 @@
 
 void MediaCodecVideoEncoderFactory::DestroyVideoEncoder(
     webrtc::VideoEncoder* encoder) {
+  ALOGD("Destroy video encoder.");
   delete encoder;
 }
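
To make the fragmentation math in DeliverPendingOutputs() concrete,
consider a 1000-byte H.264 key frame (sizes illustrative):

  [SC][SPS, 20 bytes][SC][PPS, 8 bytes][SC][IDR slice, 960 bytes]

The scan records scPositions = {0, 24, 36} plus the sentinel 1000, so the
fragmentation header gets (offset, length) pairs (4, 20), (28, 8) and
(40, 960), i.e. the NALU bodies without their start codes.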
 
diff --git a/talk/app/webrtc/java/jni/classreferenceholder.cc b/talk/app/webrtc/java/jni/classreferenceholder.cc
index 8c30a0a..0c25575 100644
--- a/talk/app/webrtc/java/jni/classreferenceholder.cc
+++ b/talk/app/webrtc/java/jni/classreferenceholder.cc
@@ -75,8 +75,10 @@
   LoadClass(jni, "org/webrtc/VideoCapturerAndroid$NativeObserver");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoEncoder$VideoCodecType");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder");
   LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$DecoderOutputBufferInfo");
+  LoadClass(jni, "org/webrtc/MediaCodecVideoDecoder$VideoCodecType");
   jclass j_decoder_class = GetClass("org/webrtc/MediaCodecVideoDecoder");
   jmethodID j_is_egl14_supported_method = jni->GetStaticMethodID(
       j_decoder_class, "isEGL14Supported", "()Z");
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index b8d1311..fedc33d 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -153,7 +153,7 @@
 
 // Return the (singleton) Java Enum object corresponding to |index|;
 // |state_class_fragment| is something like "MediaSource$State".
-jobject JavaEnumFromIndex(
+static jobject JavaEnumFromIndex(
     JNIEnv* jni, const std::string& state_class_fragment, int index) {
   const std::string state_class = "org/webrtc/" + state_class_fragment;
   return JavaEnumFromIndex(jni, FindClass(jni, state_class.c_str()),
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
index ac4ee34..7ac88ed 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoDecoder.java
@@ -59,16 +59,27 @@
 
   private static final String TAG = "MediaCodecVideoDecoder";
 
+  // Tracks webrtc::VideoCodecType.
+  public enum VideoCodecType {
+    VIDEO_CODEC_VP8,
+    VIDEO_CODEC_VP9,
+    VIDEO_CODEC_H264
+  }
+
   private static final int DEQUEUE_INPUT_TIMEOUT = 500000;  // 500 ms timeout.
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] inputBuffers;
   private ByteBuffer[] outputBuffers;
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+  private static final String H264_MIME_TYPE = "video/avc";
   // List of supported HW VP8 decoders.
-  private static final String[] supportedHwCodecPrefixes =
+  private static final String[] supportedVp8HwCodecPrefixes =
     {"OMX.qcom.", "OMX.Nvidia.", "OMX.Exynos." };
-  // List of supported SW VP8 decoders.
+  // List of supported HW H.264 decoders.
+  private static final String[] supportedH264HwCodecPrefixes =
+    {"OMX.qcom." };
+  // List of supported SW decoders.
   private static final String[] supportedSwCodecPrefixes =
     {"OMX.google."};
   // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
@@ -113,14 +124,11 @@
     public final int colorFormat;  // Color format supported by codec.
   }
 
-  private static DecoderProperties findVp8Decoder(boolean useSwCodec) {
+  private static DecoderProperties findDecoder(
+      String mime, String[] supportedCodecPrefixes) {
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
       return null; // MediaCodec.setParameters is missing.
     }
-    String[] supportedCodecPrefixes = supportedHwCodecPrefixes;
-    if (useSwCodec) {
-      supportedCodecPrefixes = supportedSwCodecPrefixes;
-    }
     for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
       MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
       if (info.isEncoder()) {
@@ -128,15 +136,15 @@
       }
       String name = null;
       for (String mimeType : info.getSupportedTypes()) {
-        if (mimeType.equals(VP8_MIME_TYPE)) {
+        if (mimeType.equals(mime)) {
           name = info.getName();
           break;
         }
       }
       if (name == null) {
-        continue;  // No VP8 support in this codec; try the next one.
+        continue;  // No HW support in this codec; try the next one.
       }
-      Log.d(TAG, "Found candidate decoder " + name);
+      Log.v(TAG, "Found candidate decoder " + name);
 
       // Check if this is supported decoder.
       boolean supportedCodec = false;
@@ -152,14 +160,14 @@
 
       // Check if codec supports either yuv420 or nv12.
       CodecCapabilities capabilities =
-          info.getCapabilitiesForType(VP8_MIME_TYPE);
+          info.getCapabilitiesForType(mime);
       for (int colorFormat : capabilities.colorFormats) {
-        Log.d(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
+        Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
       }
       for (int supportedColorFormat : supportedColorList) {
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
-            // Found supported HW VP8 decoder.
+            // Found supported HW decoder.
             Log.d(TAG, "Found target decoder " + name +
                 ". Color: 0x" + Integer.toHexString(codecColorFormat));
             return new DecoderProperties(name, codecColorFormat);
@@ -167,7 +175,7 @@
         }
       }
     }
-    return null;  // No HW VP8 decoder.
+    return null;  // No HW decoder.
   }
 
   private static boolean isEGL14Supported() {
@@ -175,8 +183,12 @@
     return (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION);
   }
 
-  private static boolean isPlatformSupported() {
-    return findVp8Decoder(false) != null;
+  private static boolean isVp8HwSupported() {
+    return findDecoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+  }
+
+  private static boolean isH264HwSupported() {
+    return findDecoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
   }
 
   private void checkOnMediaCodecThread() {
@@ -281,7 +293,8 @@
     }
   }
 
-  private boolean initDecode(int width, int height, boolean useSwCodec,
+  private boolean initDecode(
+      VideoCodecType type, int width, int height, boolean useSwCodec,
       boolean useSurface, EGLContext sharedContext) {
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
@@ -289,11 +302,25 @@
     if (useSurface && sharedContext == null) {
       throw new RuntimeException("No shared EGL context.");
     }
-    DecoderProperties properties = findVp8Decoder(useSwCodec);
-    if (properties == null) {
-      throw new RuntimeException("Cannot find HW VP8 decoder");
+    String mime = null;
+    String[] supportedCodecPrefixes = null;
+    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+      mime = VP8_MIME_TYPE;
+      supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
+    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+      mime = H264_MIME_TYPE;
+      supportedCodecPrefixes = supportedH264HwCodecPrefixes;
+    } else {
+      throw new RuntimeException("Unsupported codec " + type);
     }
-    Log.d(TAG, "Java initDecode: " + width + " x " + height +
+    if (useSwCodec) {
+      supportedCodecPrefixes = supportedSwCodecPrefixes;
+    }
+    DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
+    if (properties == null) {
+      throw new RuntimeException("Cannot find HW decoder for " + type);
+    }
+    Log.d(TAG, "Java initDecode: " + type + " : "+ width + " x " + height +
         ". Color: 0x" + Integer.toHexString(properties.colorFormat) +
         ". Use Surface: " + useSurface + ". Use SW codec: " + useSwCodec);
     if (sharedContext != null) {
@@ -336,8 +363,7 @@
         decodeSurface = surface;
      }
 
-      MediaFormat format =
-          MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
+      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
       if (!useSurface) {
         format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
       }
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index 21dd0fa..54fd4ec 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -51,15 +51,28 @@
 
   private static final String TAG = "MediaCodecVideoEncoder";
 
+  // Tracks webrtc::VideoCodecType.
+  public enum VideoCodecType {
+    VIDEO_CODEC_VP8,
+    VIDEO_CODEC_VP9,
+    VIDEO_CODEC_H264
+  }
+
   private static final int DEQUEUE_TIMEOUT = 0;  // Non-blocking, no wait.
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
+  private static final String H264_MIME_TYPE = "video/avc";
   // List of supported HW VP8 codecs.
-  private static final String[] supportedHwCodecPrefixes =
+  private static final String[] supportedVp8HwCodecPrefixes =
     {"OMX.qcom.", "OMX.Nvidia." };
-  // Bitrate mode
+  // List of supported HW H.264 codecs.
+  private static final String[] supportedH264HwCodecPrefixes =
+    {"OMX.qcom." };
+  // Bitrate modes - should be in sync with OMX_VIDEO_CONTROLRATETYPE defined
+  // in OMX_Video.h
+  private static final int VIDEO_ControlRateVariable = 1;
   private static final int VIDEO_ControlRateConstant = 2;
   // NV12 color format supported by QCOM codec, but not declared in MediaCodec -
   // see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
@@ -73,20 +86,25 @@
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
   };
   private int colorFormat;
+  // Video encoder type.
+  private VideoCodecType type;
+  // SPS and PPS NALs (Config frame) for H.264.
+  private ByteBuffer configData = null;
 
   private MediaCodecVideoEncoder() {}
 
-  // Helper struct for findVp8HwEncoder() below.
+  // Helper struct for findHwEncoder() below.
   private static class EncoderProperties {
     public EncoderProperties(String codecName, int colorFormat) {
       this.codecName = codecName;
       this.colorFormat = colorFormat;
     }
-    public final String codecName; // OpenMax component name for VP8 codec.
+    public final String codecName; // OpenMax component name for HW codec.
     public final int colorFormat;  // Color format supported by codec.
   }
 
-  private static EncoderProperties findVp8HwEncoder() {
+  private static EncoderProperties findHwEncoder(
+      String mime, String[] supportedHwCodecPrefixes) {
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT)
       return null; // MediaCodec.setParameters is missing.
 
@@ -97,15 +115,15 @@
       }
       String name = null;
       for (String mimeType : info.getSupportedTypes()) {
-        if (mimeType.equals(VP8_MIME_TYPE)) {
+        if (mimeType.equals(mime)) {
           name = info.getName();
           break;
         }
       }
       if (name == null) {
-        continue;  // No VP8 support in this codec; try the next one.
+        continue;  // No HW support in this codec; try the next one.
       }
-      Log.d(TAG, "Found candidate encoder " + name);
+      Log.v(TAG, "Found candidate encoder " + name);
 
       // Check if this is supported HW encoder.
       boolean supportedCodec = false;
@@ -119,18 +137,17 @@
         continue;
       }
 
-      CodecCapabilities capabilities =
-          info.getCapabilitiesForType(VP8_MIME_TYPE);
+      CodecCapabilities capabilities = info.getCapabilitiesForType(mime);
       for (int colorFormat : capabilities.colorFormats) {
-        Log.d(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
+        Log.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
       }
 
       // Check if codec supports either yuv420 or nv12.
       for (int supportedColorFormat : supportedColorList) {
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
-            // Found supported HW VP8 encoder.
-            Log.d(TAG, "Found target encoder " + name +
+            // Found supported HW encoder.
+            Log.d(TAG, "Found target encoder for mime " + mime + " : " + name +
                 ". Color: 0x" + Integer.toHexString(codecColorFormat));
             return new EncoderProperties(name, codecColorFormat);
           }
@@ -140,8 +157,12 @@
     return null;  // No HW VP8 encoder.
   }
 
-  public static boolean isPlatformSupported() {
-    return findVp8HwEncoder() != null;
+  public static boolean isVp8HwSupported() {
+    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+  }
+
+  public static boolean isH264HwSupported() {
+    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
   }
 
   private void checkOnMediaCodecThread() {
@@ -163,27 +184,38 @@
   }
 
   // Return the array of input buffers, or null on failure.
-  private ByteBuffer[] initEncode(int width, int height, int kbps, int fps) {
-    Log.d(TAG, "Java initEncode: " + width + " x " + height +
+  private ByteBuffer[] initEncode(
+      VideoCodecType type, int width, int height, int kbps, int fps) {
+    Log.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
         ". @ " + kbps + " kbps. Fps: " + fps +
         ". Color: 0x" + Integer.toHexString(colorFormat));
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    EncoderProperties properties = findVp8HwEncoder();
+    this.type = type;
+    EncoderProperties properties = null;
+    String mime = null;
+    int keyFrameIntervalSec = 0;
+    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
+      mime = VP8_MIME_TYPE;
+      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes);
+      keyFrameIntervalSec = 100;
+    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
+      mime = H264_MIME_TYPE;
+      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes);
+      keyFrameIntervalSec = 20;
+    }
     if (properties == null) {
-      throw new RuntimeException("Can not find HW VP8 encoder");
+      throw new RuntimeException("Can not find HW encoder for " + type);
     }
     mediaCodecThread = Thread.currentThread();
     try {
-      MediaFormat format =
-          MediaFormat.createVideoFormat(VP8_MIME_TYPE, width, height);
+      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
       format.setInteger(MediaFormat.KEY_BIT_RATE, 1000 * kbps);
       format.setInteger("bitrate-mode", VIDEO_ControlRateConstant);
       format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
-      // Default WebRTC settings
       format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
-      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 100);
+      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
       Log.d(TAG, "  Format: " + format);
       mediaCodec = createByCodecName(properties.codecName);
       if (mediaCodec == null) {
@@ -273,8 +305,8 @@
   // Helper struct for dequeueOutputBuffer() below.
   private static class OutputBufferInfo {
     public OutputBufferInfo(
-        int index, ByteBuffer buffer, boolean isKeyFrame,
-        long presentationTimestampUs) {
+        int index, ByteBuffer buffer,
+        boolean isKeyFrame, long presentationTimestampUs) {
       this.index = index;
       this.buffer = buffer;
       this.isKeyFrame = isKeyFrame;
@@ -294,6 +326,23 @@
     try {
       MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
       int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+      // Check if this is a config frame and save configuration data.
+      if (result >= 0) {
+        boolean isConfigFrame =
+            (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
+        if (isConfigFrame) {
+          Log.d(TAG, "Config frame generated. Offset: " + info.offset +
+              ". Size: " + info.size);
+          configData = ByteBuffer.allocateDirect(info.size);
+          outputBuffers[result].position(info.offset);
+          outputBuffers[result].limit(info.offset + info.size);
+          configData.put(outputBuffers[result]);
+          // Release buffer back.
+          mediaCodec.releaseOutputBuffer(result, false);
+          // Query next output.
+          result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
+        }
+      }
       if (result >= 0) {
         // MediaCodec doesn't care about Buffer position/remaining/etc so we can
         // mess with them to get a slice and avoid having to pass extra
@@ -301,13 +350,29 @@
         ByteBuffer outputBuffer = outputBuffers[result].duplicate();
         outputBuffer.position(info.offset);
         outputBuffer.limit(info.offset + info.size);
+        // Check key frame flag.
         boolean isKeyFrame =
             (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
         if (isKeyFrame) {
           Log.d(TAG, "Sync frame generated");
         }
-        return new OutputBufferInfo(
-            result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
+        if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
+          Log.d(TAG, "Appending config frame of size " + configData.capacity() +
+              " to output buffer with offset " + info.offset + ", size " +
+              info.size);
+          // For an H.264 key frame, prepend the saved SPS and PPS NALs.
+          ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(
+              configData.capacity() + info.size);
+          configData.rewind();
+          keyFrameBuffer.put(configData);
+          keyFrameBuffer.put(outputBuffer);
+          keyFrameBuffer.position(0);
+          return new OutputBufferInfo(result, keyFrameBuffer,
+              isKeyFrame, info.presentationTimeUs);
+        } else {
+          return new OutputBufferInfo(result, outputBuffer.slice(),
+              isKeyFrame, info.presentationTimeUs);
+        }
       } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
         outputBuffers = mediaCodec.getOutputBuffers();
         return dequeueOutputBuffer();
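
After the config data is prepended, an H.264 key frame handed back to the
native wrapper is laid out as

  [00 00 00 01][SPS][00 00 00 01][PPS][00 00 00 01][IDR slice]

which is exactly what the start code scan in androidmediaencoder_jni.cc
then splits into fragments. A hedged sketch of inspecting that layout,
reusing the illustrative H264AnnexB helper from the commit message (not
part of this CL):

  ByteBuffer buf = outputBufferInfo.buffer;  // SPS + PPS + IDR, Annex B.
  byte[] keyFrame = new byte[buf.remaining()];
  buf.duplicate().get(keyFrame);
  for (int scPosition : H264AnnexB.findStartCodes(keyFrame)) {
    // NALU type is the low 5 bits of the byte after the start code:
    // 7 = SPS, 8 = PPS, 5 = IDR slice.
    int naluType = keyFrame[scPosition + 4] & 0x1F;
    Log.d(TAG, "NALU type " + naluType + " at offset " + scPosition);
  }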
diff --git a/talk/examples/android/src/org/appspot/apprtc/PeerConnectionClient.java b/talk/examples/android/src/org/appspot/apprtc/PeerConnectionClient.java
index 8340351..78fe176 100644
--- a/talk/examples/android/src/org/appspot/apprtc/PeerConnectionClient.java
+++ b/talk/examples/android/src/org/appspot/apprtc/PeerConnectionClient.java
@@ -68,8 +68,11 @@
   public static final String AUDIO_TRACK_ID = "ARDAMSa0";
   private static final String TAG = "PCRTCClient";
   private static final boolean PREFER_ISAC = false;
+  private static final boolean PREFER_H264 = false;
+  public static final String AUDIO_CODEC_ISAC = "ISAC";
   public static final String VIDEO_CODEC_VP8 = "VP8";
   public static final String VIDEO_CODEC_VP9 = "VP9";
+  public static final String VIDEO_CODEC_H264 = "H264";
   private static final String FIELD_TRIAL_VP9 = "WebRTC-SupportVP9/Enabled/";
   private static final String MAX_VIDEO_WIDTH_CONSTRAINT = "maxWidth";
   private static final String MIN_VIDEO_WIDTH_CONSTRAINT = "minWidth";
@@ -216,7 +219,7 @@
       // If HW video encoder is supported and video resolution is not
       // specified force it to HD.
       if ((videoWidth == 0 || videoHeight == 0) && videoCodecHwAcceleration &&
-          MediaCodecVideoEncoder.isPlatformSupported()) {
+          MediaCodecVideoEncoder.isVp8HwSupported()) {
         videoWidth = HD_VIDEO_WIDTH;
         videoHeight = HD_VIDEO_HEIGHT;
       }
@@ -477,7 +480,10 @@
         }
         String sdpDescription = sdp.description;
         if (PREFER_ISAC) {
-          sdpDescription = preferISAC(sdpDescription);
+          sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
+        }
+        if (PREFER_H264) {
+          sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
         }
         if (peerConnectionParameters.videoStartBitrate > 0) {
           sdpDescription = setStartBitrate(VIDEO_CODEC_VP8,
@@ -572,41 +578,47 @@
       if (i == lineIndex) {
         String bitrateSet = "a=fmtp:" + codecRtpMap
             + " x-google-start-bitrate=" + bitrateKbps;
-        Log.d(TAG, "Add remote SDP line: " + bitrateSet);
+        Log.d(TAG, "Add bitrate SDP line: " + bitrateSet);
         newSdpDescription.append(bitrateSet).append("\r\n");
       }
     }
     return newSdpDescription.toString();
   }
 
-  // Mangle SDP to prefer ISAC/16000 over any other audio codec.
-  private static String preferISAC(String sdpDescription) {
+  private static String preferCodec(
+      String sdpDescription, String codec, boolean isAudio) {
     String[] lines = sdpDescription.split("\r\n");
     int mLineIndex = -1;
-    String isac16kRtpMap = null;
-    Pattern isac16kPattern =
-        Pattern.compile("^a=rtpmap:(\\d+) ISAC/16000[\r]?$");
-    for (int i = 0;
-         (i < lines.length) && (mLineIndex == -1 || isac16kRtpMap == null);
-         ++i) {
-      if (lines[i].startsWith("m=audio ")) {
+    String codecRtpMap = null;
+    // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>]
+    String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$";
+    Pattern codecPattern = Pattern.compile(regex);
+    String mediaDescription = "m=video ";
+    if (isAudio) {
+      mediaDescription = "m=audio ";
+    }
+    for (int i = 0; (i < lines.length) &&
+        (mLineIndex == -1 || codecRtpMap == null); i++) {
+      if (lines[i].startsWith(mediaDescription)) {
         mLineIndex = i;
         continue;
       }
-      Matcher isac16kMatcher = isac16kPattern.matcher(lines[i]);
-      if (isac16kMatcher.matches()) {
-        isac16kRtpMap = isac16kMatcher.group(1);
+      Matcher codecMatcher = codecPattern.matcher(lines[i]);
+      if (codecMatcher.matches()) {
+        codecRtpMap = codecMatcher.group(1);
         continue;
       }
     }
     if (mLineIndex == -1) {
-      Log.d(TAG, "No m=audio line, so can't prefer iSAC");
+      Log.w(TAG, "No " + mediaDescription.trim() + " line, so can't prefer " +
+          codec);
       return sdpDescription;
     }
-    if (isac16kRtpMap == null) {
-      Log.d(TAG, "No ISAC/16000 line, so can't prefer iSAC");
+    if (codecRtpMap == null) {
+      Log.w(TAG, "No rtpmap for " + codec);
       return sdpDescription;
     }
+    Log.d(TAG, "Found " +  codec + " rtpmap " + codecRtpMap + ", prefer at " +
+        lines[mLineIndex]);
     String[] origMLineParts = lines[mLineIndex].split(" ");
     StringBuilder newMLine = new StringBuilder();
     int origPartIndex = 0;
@@ -614,13 +626,14 @@
     newMLine.append(origMLineParts[origPartIndex++]).append(" ");
     newMLine.append(origMLineParts[origPartIndex++]).append(" ");
     newMLine.append(origMLineParts[origPartIndex++]).append(" ");
-    newMLine.append(isac16kRtpMap);
-    for (; origPartIndex < origMLineParts.length; ++origPartIndex) {
-      if (!origMLineParts[origPartIndex].equals(isac16kRtpMap)) {
+    newMLine.append(codecRtpMap);
+    for (; origPartIndex < origMLineParts.length; origPartIndex++) {
+      if (!origMLineParts[origPartIndex].equals(codecRtpMap)) {
         newMLine.append(" ").append(origMLineParts[origPartIndex]);
       }
     }
     lines[mLineIndex] = newMLine.toString();
+    Log.d(TAG, "Change media description: " + lines[mLineIndex]);
     StringBuilder newSdpDescription = new StringBuilder();
     for (String line : lines) {
       newSdpDescription.append(line).append("\r\n");
@@ -758,7 +771,10 @@
       }
       String sdpDescription = origSdp.description;
       if (PREFER_ISAC) {
-        sdpDescription = preferISAC(sdpDescription);
+        sdpDescription = preferCodec(sdpDescription, AUDIO_CODEC_ISAC, true);
+      }
+      if (PREFER_H264) {
+        sdpDescription = preferCodec(sdpDescription, VIDEO_CODEC_H264, false);
       }
       final SessionDescription sdp = new SessionDescription(
           origSdp.type, sdpDescription);
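
For reference, the m-line rewrite performed by preferCodec(), as a worked
example with illustrative payload type numbers: given a description
containing

  m=video 9 RTP/SAVPF 100 116 117 107
  a=rtpmap:107 H264/90000

the H264 payload type is moved to the front of the m-line so it becomes the
preferred codec:

  m=video 9 RTP/SAVPF 107 100 116 117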