Reland "Prepare MediaCodecVideoEncoder for surface textures.""

This reverts commit 12f680214e28dc5f0a13ac8afc0d1445f89e67e6.
Original CL: https://codereview.webrtc.org/1396073003/
Prepare MediaCodecVideoEncoder for surface textures.
This refactors MediaCodecVideoEncoder to prepare for adding support for encoding from textures. The C++ layer has no functional changes.
- Moves ResetCodec (now ResetCodecOnCodecThread) so it always runs on the codec thread.
- Adds use of ThreadChecker.
- Changes Java MediaCodecVideoEncoder.initEncode() to return a boolean and introduces a getInputBuffers() method (see the sketch after this list).
- Adds a simple unit test for Java MediaCodecVideoEncoder.
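
For reference, a minimal sketch of the new Java call sequence (illustrative only, not part of this CL; it assumes a VP8-capable device and the package-private access the unit test below relies on):

  MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
  if (encoder.initEncode(
          MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30)) {
    // initEncode() no longer returns the input buffers; fetch them separately.
    java.nio.ByteBuffer[] inputBuffers = encoder.getInputBuffers();
    // ... dequeueInputBuffer() / encodeBuffer() / dequeueOutputBuffer() as in the unit test ...
    encoder.release();
  }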

The pure revert of the revert is in patchset 1.
Patchset 2 moves dequeuing the input buffer to before storing the pending timestamps etc. to fix b/24984012 (see the sketch below).
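
A minimal sketch of the resulting order, expressed against the Java encoder API (illustrative only; frameSize and timestampUs are placeholder variables, not names from this CL):

  int bufferIndex = encoder.dequeueInputBuffer();  // -1: no buffer available, -2: codec inoperable.
  if (bufferIndex >= 0) {
    // Only once a valid index is returned does the native layer record the frame's
    // pending timestamps; the buffer is then submitted:
    encoder.encodeBuffer(false /* isKeyframe */, bufferIndex, frameSize, timestampUs);
  }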

BUG=webrtc:4993 b/24984012

Review URL: https://codereview.webrtc.org/1406203002

Cr-Commit-Position: refs/heads/master@{#10622}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
new file mode 100644
index 0000000..29f3022
--- /dev/null
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -0,0 +1,95 @@
+/*
+ * libjingle
+ * Copyright 2015 Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package org.webrtc;
+
+import java.nio.ByteBuffer;
+
+import android.test.ActivityTestCase;
+import android.test.suitebuilder.annotation.SmallTest;
+import android.util.Log;
+
+import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
+
+public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
+  final static String TAG = "MediaCodecVideoEncoderTest";
+
+  @SmallTest
+  public static void testInitReleaseUsingByteBuffer() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+      Log.i(TAG,
+            "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testEncoderUsingByteBuffer() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingByteBuffer");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final int min_size = width * height * 3 / 2;
+    final long presentationTimestampUs = 2;
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30));
+    ByteBuffer[] inputBuffers = encoder.getInputBuffers();
+    assertNotNull(inputBuffers);
+    assertTrue(min_size <= inputBuffers[0].capacity());
+
+    int bufferIndex;
+    do {
+      Thread.sleep(10);
+      bufferIndex = encoder.dequeueInputBuffer();
+    } while (bufferIndex == -1); // |-1| is returned when there is no buffer available yet.
+
+    assertTrue(bufferIndex >= 0);
+    assertTrue(bufferIndex < inputBuffers.length);
+    assertTrue(encoder.encodeBuffer(true, bufferIndex, min_size, presentationTimestampUs));
+
+    OutputBufferInfo info;
+    do {
+      info = encoder.dequeueOutputBuffer();
+      Thread.sleep(10);
+    } while (info == null);
+    assertTrue(info.index >= 0);
+    assertEquals(presentationTimestampUs, info.presentationTimestampUs);
+    assertTrue(info.buffer.capacity() > 0);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+  }
+}
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index ac349e7..e1793b8 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -33,6 +33,7 @@
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
+#include "webrtc/base/thread_checker.h"
 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
 #include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
 #include "webrtc/modules/video_coding/utility/include/quality_scaler.h"
@@ -79,7 +80,8 @@
                                public rtc::MessageHandler {
  public:
   virtual ~MediaCodecVideoEncoder();
-  explicit MediaCodecVideoEncoder(JNIEnv* jni, VideoCodecType codecType);
+  MediaCodecVideoEncoder(JNIEnv* jni,
+                         VideoCodecType codecType);
 
   // webrtc::VideoEncoder implementation.  Everything trampolines to
   // |codec_thread_| for execution.
@@ -104,12 +106,10 @@
   int GetTargetFramerate() override;
 
  private:
-  // CHECK-fail if not running on |codec_thread_|.
-  void CheckOnCodecThread();
-
-  // Release() and InitEncode() in an attempt to restore the codec to an
+  // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
+  // InitEncodeOnCodecThread() in an attempt to restore the codec to an
   // operable state.  Necessary after all manner of OMX-layer errors.
-  void ResetCodec();
+  bool ResetCodecOnCodecThread();
 
   // Implementation of webrtc::VideoEncoder methods above, all running on the
   // codec thread exclusively.
@@ -118,9 +118,15 @@
   // previously-current values are reused instead of the passed parameters
   // (makes it easier to reason about thread-safety).
   int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
+  // Reconfigure to match |frame| in width and height. Returns false if
+  // reconfiguring fails.
+  bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
   int32_t EncodeOnCodecThread(
       const webrtc::VideoFrame& input_image,
       const std::vector<webrtc::FrameType>* frame_types);
+  bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+
   int32_t RegisterEncodeCompleteCallbackOnCodecThread(
       webrtc::EncodedImageCallback* callback);
   int32_t ReleaseOnCodecThread();
@@ -150,11 +156,13 @@
   // State that is constant for the lifetime of this object once the ctor
   // returns.
   scoped_ptr<Thread> codec_thread_;  // Thread on which to operate MediaCodec.
+  rtc::ThreadChecker codec_thread_checker_;
   ScopedGlobalRef<jclass> j_media_codec_video_encoder_class_;
   ScopedGlobalRef<jobject> j_media_codec_video_encoder_;
   jmethodID j_init_encode_method_;
+  jmethodID j_get_input_buffers_method_;
   jmethodID j_dequeue_input_buffer_method_;
-  jmethodID j_encode_method_;
+  jmethodID j_encode_buffer_method_;
   jmethodID j_release_method_;
   jmethodID j_set_rates_method_;
   jmethodID j_dequeue_output_buffer_method_;
@@ -239,19 +247,23 @@
   // thread.
   codec_thread_->SetName("MediaCodecVideoEncoder", NULL);
   RTC_CHECK(codec_thread_->Start()) << "Failed to start MediaCodecVideoEncoder";
-
+  codec_thread_checker_.DetachFromThread();
   jclass j_output_buffer_info_class =
       FindClass(jni, "org/webrtc/MediaCodecVideoEncoder$OutputBufferInfo");
   j_init_encode_method_ = GetMethodID(
       jni,
       *j_media_codec_video_encoder_class_,
       "initEncode",
-      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)"
-      "[Ljava/nio/ByteBuffer;");
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z");
+  j_get_input_buffers_method_ = GetMethodID(
+      jni,
+      *j_media_codec_video_encoder_class_,
+      "getInputBuffers",
+      "()[Ljava/nio/ByteBuffer;");
   j_dequeue_input_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
-  j_encode_method_ = GetMethodID(
-      jni, *j_media_codec_video_encoder_class_, "encode", "(ZIIJ)Z");
+  j_encode_buffer_method_ = GetMethodID(
+      jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
   j_set_rates_method_ = GetMethodID(
@@ -374,6 +386,7 @@
 }
 
 void MediaCodecVideoEncoder::OnMessage(rtc::Message* msg) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
@@ -381,7 +394,6 @@
   // functor), so expect no ID/data.
   RTC_CHECK(!msg->message_id) << "Unexpected message!";
   RTC_CHECK(!msg->pdata) << "Unexpected message!";
-  CheckOnCodecThread();
   if (!inited_) {
     return;
   }
@@ -393,26 +405,23 @@
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
 }
 
-void MediaCodecVideoEncoder::CheckOnCodecThread() {
-  RTC_CHECK(codec_thread_ == ThreadManager::Instance()->CurrentThread())
-      << "Running on wrong thread!";
-}
-
-void MediaCodecVideoEncoder::ResetCodec() {
-  ALOGE << "ResetCodec";
-  if (Release() != WEBRTC_VIDEO_CODEC_OK ||
-      codec_thread_->Invoke<int32_t>(Bind(
-          &MediaCodecVideoEncoder::InitEncodeOnCodecThread, this,
-          width_, height_, 0, 0)) != WEBRTC_VIDEO_CODEC_OK) {
+bool MediaCodecVideoEncoder::ResetCodecOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  ALOGE << "ResetOnCodecThread";
+  if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
+      InitEncodeOnCodecThread(width_, height_, 0, 0)
+          != WEBRTC_VIDEO_CODEC_OK) {
     // TODO(fischman): wouldn't it be nice if there was a way to gracefully
     // degrade to a SW encoder at this point?  There isn't one AFAICT :(
     // https://code.google.com/p/webrtc/issues/detail?id=2920
+    return false;
   }
+  return true;
 }
 
 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
     int width, int height, int kbps, int fps) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
@@ -449,23 +458,27 @@
   frame_rtc_times_ms_.clear();
   drop_next_input_frame_ = false;
   picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+
   // We enforce no extra stride/padding in the format creation step.
   jobject j_video_codec_enum = JavaEnumFromIndex(
       jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
+  const bool encode_status = jni->CallBooleanMethod(
+      *j_media_codec_video_encoder_, j_init_encode_method_,
+      j_video_codec_enum, width, height, kbps, fps);
+  if (!encode_status) {
+    ALOGE << "Failed to configure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  CHECK_EXCEPTION(jni);
+
   jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
       jni->CallObjectMethod(*j_media_codec_video_encoder_,
-                            j_init_encode_method_,
-                            j_video_codec_enum,
-                            width_,
-                            height_,
-                            kbps,
-                            fps));
+          j_get_input_buffers_method_));
   CHECK_EXCEPTION(jni);
   if (IsNull(jni, input_buffers)) {
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  inited_ = true;
   switch (GetIntField(jni, *j_media_codec_video_encoder_,
       j_color_format_field_)) {
     case COLOR_FormatYUV420Planar:
@@ -494,6 +507,8 @@
   }
   CHECK_EXCEPTION(jni);
 
+
+  inited_ = true;
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -501,21 +516,22 @@
 int32_t MediaCodecVideoEncoder::EncodeOnCodecThread(
     const webrtc::VideoFrame& frame,
     const std::vector<webrtc::FrameType>* frame_types) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
   }
+
   frames_received_++;
   if (!DeliverPendingOutputs(jni)) {
-    ResetCodec();
-    // Continue as if everything's fine.
+    if (!ResetCodecOnCodecThread())
+      return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
   if (drop_next_input_frame_) {
-    ALOGV("Encoder drop frame - failed callback.");
+    ALOGW << "Encoder drop frame - failed callback.";
     drop_next_input_frame_ = false;
     return WEBRTC_VIDEO_CODEC_OK;
   }
@@ -528,13 +544,9 @@
   const VideoFrame& input_frame =
       scale_ ? quality_scaler_.GetScaledFrame(frame) : frame;
 
-  if (input_frame.width() != width_ || input_frame.height() != height_) {
-    ALOGD << "Frame resolution change from " << width_ << " x " << height_ <<
-        " to " << input_frame.width() << " x " << input_frame.height();
-    width_ = input_frame.width();
-    height_ = input_frame.height();
-    ResetCodec();
-    return WEBRTC_VIDEO_CODEC_OK;
+  if (!MaybeReconfigureEncoderOnCodecThread(input_frame)) {
+    ALOGE << "Failed to reconfigure encoder.";
+    return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
   // Check if we accumulated too many frames in encoder input buffers
@@ -553,36 +565,23 @@
   }
 
   int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
-                                                j_dequeue_input_buffer_method_);
+      j_dequeue_input_buffer_method_);
   CHECK_EXCEPTION(jni);
   if (j_input_buffer_index == -1) {
     // Video codec falls behind - no input buffer available.
-    ALOGV("Encoder drop frame - no input buffers available");
+    ALOGW << "Encoder drop frame - no input buffers available";
     frames_dropped_++;
     // Report dropped frame to quality_scaler_.
     OnDroppedFrame();
     return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
   }
   if (j_input_buffer_index == -2) {
-    ResetCodec();
+    ResetCodecOnCodecThread();
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
 
-  ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
-      frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
-
-  jobject j_input_buffer = input_buffers_[j_input_buffer_index];
-  uint8_t* yuv_buffer =
-      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
-  CHECK_EXCEPTION(jni);
-  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
-  RTC_CHECK(!libyuv::ConvertFromI420(
-      input_frame.buffer(webrtc::kYPlane), input_frame.stride(webrtc::kYPlane),
-      input_frame.buffer(webrtc::kUPlane), input_frame.stride(webrtc::kUPlane),
-      input_frame.buffer(webrtc::kVPlane), input_frame.stride(webrtc::kVPlane),
-      yuv_buffer, width_, width_, height_, encoder_fourcc_))
-      << "ConvertFromI420 failed";
-  last_input_timestamp_ms_ = current_timestamp_us_ / 1000;
+  last_input_timestamp_ms_ =
+      current_timestamp_us_ / rtc::kNumMicrosecsPerMillisec;
   frames_in_queue_++;
 
   // Save input image timestamps for later output
@@ -590,27 +589,77 @@
   render_times_ms_.push_back(input_frame.render_time_ms());
   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
 
-  bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+  const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+  const bool encode_status =
+      EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+          j_input_buffer_index);
+
+  current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
+
+  if (!encode_status || !DeliverPendingOutputs(jni)) {
+    ALOGE << "Failed deliver pending outputs.";
+    ResetCodecOnCodecThread();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::MaybeReconfigureEncoderOnCodecThread(
+    const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+  const bool reconfigure_due_to_size =
+      frame.width() != width_ || frame.height() != height_;
+
+  if (reconfigure_due_to_size) {
+    ALOGD << "Reconfigure encoder due to frame resolution change from "
+        << width_ << " x " << height_ << " to " << frame.width() << " x "
+        << frame.height();
+    width_ = frame.width();
+    height_ = frame.height();
+  }
+
+  if (!reconfigure_due_to_size)
+    return true;
+
+  ReleaseOnCodecThread();
+
+  return InitEncodeOnCodecThread(width_, height_, 0, 0) ==
+      WEBRTC_VIDEO_CODEC_OK;
+}
+
+bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+
+  ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
+      frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
+
+  jobject j_input_buffer = input_buffers_[input_buffer_index];
+  uint8_t* yuv_buffer =
+      reinterpret_cast<uint8_t*>(jni->GetDirectBufferAddress(j_input_buffer));
+  CHECK_EXCEPTION(jni);
+  RTC_CHECK(yuv_buffer) << "Indirect buffer??";
+  RTC_CHECK(!libyuv::ConvertFromI420(
+      frame.buffer(webrtc::kYPlane), frame.stride(webrtc::kYPlane),
+      frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane),
+      frame.buffer(webrtc::kVPlane), frame.stride(webrtc::kVPlane),
+      yuv_buffer, width_, width_, height_, encoder_fourcc_))
+      << "ConvertFromI420 failed";
+
   bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
-                                              j_encode_method_,
+                                              j_encode_buffer_method_,
                                               key_frame,
-                                              j_input_buffer_index,
+                                              input_buffer_index,
                                               yuv_size_,
                                               current_timestamp_us_);
   CHECK_EXCEPTION(jni);
-  current_timestamp_us_ += 1000000 / last_set_fps_;
-
-  if (!encode_status || !DeliverPendingOutputs(jni)) {
-    ResetCodec();
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-
-  return WEBRTC_VIDEO_CODEC_OK;
+  return encode_status;
 }
 
 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
     webrtc::EncodedImageCallback* callback) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   callback_ = callback;
@@ -618,10 +667,10 @@
 }
 
 int32_t MediaCodecVideoEncoder::ReleaseOnCodecThread() {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   if (!inited_) {
     return WEBRTC_VIDEO_CODEC_OK;
   }
-  CheckOnCodecThread();
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ALOGD << "EncoderReleaseOnCodecThread: Frames received: " <<
       frames_received_ << ". Encoded: " << frames_encoded_ <<
@@ -640,7 +689,7 @@
 
 int32_t MediaCodecVideoEncoder::SetRatesOnCodecThread(uint32_t new_bit_rate,
                                                       uint32_t frame_rate) {
-  CheckOnCodecThread();
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   if (last_set_bitrate_kbps_ == new_bit_rate &&
       last_set_fps_ == frame_rate) {
     return WEBRTC_VIDEO_CODEC_OK;
@@ -659,7 +708,7 @@
                                        last_set_fps_);
   CHECK_EXCEPTION(jni);
   if (!ret) {
-    ResetCodec();
+    ResetCodecOnCodecThread();
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   return WEBRTC_VIDEO_CODEC_OK;
@@ -691,6 +740,7 @@
 }
 
 bool MediaCodecVideoEncoder::DeliverPendingOutputs(JNIEnv* jni) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   while (true) {
     jobject j_output_buffer_info = jni->CallObjectMethod(
         *j_media_codec_video_encoder_, j_dequeue_output_buffer_method_);
@@ -702,7 +752,7 @@
     int output_buffer_index =
         GetOutputBufferInfoIndex(jni, j_output_buffer_info);
     if (output_buffer_index == -1) {
-      ResetCodec();
+      ResetCodecOnCodecThread();
       return false;
     }
 
@@ -829,7 +879,7 @@
           ALOGE << "Data:" <<  image->_buffer[0] << " " << image->_buffer[1]
               << " " << image->_buffer[2] << " " << image->_buffer[3]
               << " " << image->_buffer[4] << " " << image->_buffer[5];
-          ResetCodec();
+          ResetCodecOnCodecThread();
           return false;
         }
         scPositions[scPositionsLength] = payload_size;
@@ -852,7 +902,7 @@
                                           output_buffer_index);
     CHECK_EXCEPTION(jni);
     if (!success) {
-      ResetCodec();
+      ResetCodecOnCodecThread();
       return false;
     }
 
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index 988d62a..0eac091 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -61,7 +61,7 @@
 
   private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; // Timeout for codec releasing.
   private static final int DEQUEUE_TIMEOUT = 0;  // Non-blocking, no wait.
-  // Active running encoder instance. Set in initDecode() (called from native code)
+  // Active running encoder instance. Set in initEncode() (called from native code)
   // and reset to null in release() call.
   private static MediaCodecVideoEncoder runningInstance = null;
   private static MediaCodecVideoEncoderErrorCallback errorCallback = null;
@@ -102,15 +102,12 @@
     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
   };
-  private int colorFormat;
-  // Video encoder type.
   private VideoCodecType type;
+  private int colorFormat;  // Used by native code.
+
   // SPS and PPS NALs (Config frame) for H.264.
   private ByteBuffer configData = null;
 
-  private MediaCodecVideoEncoder() {
-  }
-
   // MediaCodec error handler - invoked when critical error happens which may prevent
   // further use of media codec API. Now it means that one of media codec instances
   // is hanging and can no longer be used in the next call.
@@ -237,16 +234,14 @@
     }
   }
 
-  // Return the array of input buffers, or null on failure.
-  private ByteBuffer[] initEncode(
-      VideoCodecType type, int width, int height, int kbps, int fps) {
+  // Returns false if the hardware encoder currently can't be used.
+  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps) {
     Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
-        ". @ " + kbps + " kbps. Fps: " + fps +
-        ". Color: 0x" + Integer.toHexString(colorFormat));
+        ". @ " + kbps + " kbps. Fps: " + fps + ".");
+
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
-    this.type = type;
     EncoderProperties properties = null;
     String mime = null;
     int keyFrameIntervalSec = 0;
@@ -263,6 +258,9 @@
       throw new RuntimeException("Can not find HW encoder for " + type);
     }
     runningInstance = this; // Encoder is now running and can be queried for stack traces.
+    colorFormat = properties.colorFormat;
+    Logging.d(TAG, "Color format: " + colorFormat);
+
     mediaCodecThread = Thread.currentThread();
     try {
       MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
@@ -273,26 +271,32 @@
       format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, keyFrameIntervalSec);
       Logging.d(TAG, "  Format: " + format);
       mediaCodec = createByCodecName(properties.codecName);
+      this.type = type;
       if (mediaCodec == null) {
         Logging.e(TAG, "Can not create media encoder");
-        return null;
+        return false;
       }
       mediaCodec.configure(
           format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
+
       mediaCodec.start();
-      colorFormat = properties.colorFormat;
       outputBuffers = mediaCodec.getOutputBuffers();
-      ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
-      Logging.d(TAG, "Input buffers: " + inputBuffers.length +
-          ". Output buffers: " + outputBuffers.length);
-      return inputBuffers;
+      Logging.d(TAG, "Output buffers: " + outputBuffers.length);
+
     } catch (IllegalStateException e) {
       Logging.e(TAG, "initEncode failed", e);
-      return null;
+      return false;
     }
+    return true;
   }
 
-  private boolean encode(
+  ByteBuffer[] getInputBuffers() {
+    ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
+    Logging.d(TAG, "Input buffers: " + inputBuffers.length);
+    return inputBuffers;
+  }
+
+  boolean encodeBuffer(
       boolean isKeyframe, int inputBuffer, int size,
       long presentationTimestampUs) {
     checkOnMediaCodecThread();
@@ -312,12 +316,12 @@
       return true;
     }
     catch (IllegalStateException e) {
-      Logging.e(TAG, "encode failed", e);
+      Logging.e(TAG, "encodeBuffer failed", e);
       return false;
     }
   }
 
-  private void release() {
+  void release() {
     Logging.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
 
@@ -374,7 +378,7 @@
 
   // Dequeue an input buffer and return its index, -1 if no input buffer is
   // available, or -2 if the codec is no longer operative.
-  private int dequeueInputBuffer() {
+  int dequeueInputBuffer() {
     checkOnMediaCodecThread();
     try {
       return mediaCodec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
@@ -385,7 +389,7 @@
   }
 
   // Helper struct for dequeueOutputBuffer() below.
-  private static class OutputBufferInfo {
+  static class OutputBufferInfo {
     public OutputBufferInfo(
         int index, ByteBuffer buffer,
         boolean isKeyFrame, long presentationTimestampUs) {
@@ -395,15 +399,15 @@
       this.presentationTimestampUs = presentationTimestampUs;
     }
 
-    private final int index;
-    private final ByteBuffer buffer;
-    private final boolean isKeyFrame;
-    private final long presentationTimestampUs;
+    public final int index;
+    public final ByteBuffer buffer;
+    public final boolean isKeyFrame;
+    public final long presentationTimestampUs;
   }
 
   // Dequeue and return an output buffer, or null if no output is ready.  Return
   // a fake OutputBufferInfo with index -1 if the codec is no longer operable.
-  private OutputBufferInfo dequeueOutputBuffer() {
+  OutputBufferInfo dequeueOutputBuffer() {
     checkOnMediaCodecThread();
     try {
       MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
@@ -472,7 +476,7 @@
 
   // Release a dequeued output buffer back to the codec for re-use.  Return
   // false if the codec is no longer operable.
-  private boolean releaseOutputBuffer(int index) {
+  boolean releaseOutputBuffer(int index) {
     checkOnMediaCodecThread();
     try {
       mediaCodec.releaseOutputBuffer(index, false);