This CL adds support for encoding from textures to MediaCodecVideoEncoder.

Parts of this change were already reviewed in https://codereview.webrtc.org/1375953002/.

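API sketch, based on the new tests in this CL (eglContext, oesTextureId and
presentationTimestampUs are placeholders supplied by the caller):

  MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
  // Passing a non-null EGLContext selects the texture (input surface) path.
  encoder.initEncode(MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8,
      640, 480, 300, 30, eglContext);
  encoder.encodeTexture(false /* isKeyframe */, oesTextureId,
      RendererCommon.identityMatrix(), presentationTimestampUs);

Applications enable this path via the new two-context overload
PeerConnectionFactory.setVideoHwAccelerationOptions(localEGLContext,
remoteEGLContext); the local context is used for HW surface encoding.
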
BUG=webrtc:4993
TBR=glaznew@webrtc.org

Review URL: https://codereview.webrtc.org/1403713002

Cr-Commit-Position: refs/heads/master@{#10725}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
index 29f3022..ee62008 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/MediaCodecVideoEncoderTest.java
@@ -26,19 +26,26 @@
  */
 package org.webrtc;
 
-import java.nio.ByteBuffer;
-
+import android.annotation.TargetApi;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Build;
 import android.test.ActivityTestCase;
 import android.test.suitebuilder.annotation.SmallTest;
 import android.util.Log;
 
 import org.webrtc.MediaCodecVideoEncoder.OutputBufferInfo;
 
+import java.nio.ByteBuffer;
+
+import javax.microedition.khronos.egl.EGL10;
+
+@TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
 public final class MediaCodecVideoEncoderTest extends ActivityTestCase {
   final static String TAG = "MediaCodecVideoEncoderTest";
 
   @SmallTest
-  public static void testInitReleaseUsingByteBuffer() {
+  public static void testInitializeUsingByteBuffer() {
     if (!MediaCodecVideoEncoder.isVp8HwSupported()) {
       Log.i(TAG,
             "Hardware does not support VP8 encoding, skipping testInitReleaseUsingByteBuffer");
@@ -46,7 +53,37 @@
     }
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30, null));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        EGL10.EGL_NO_CONTEXT));
+    encoder.release();
+  }
+
+  @SmallTest
+  public static void testInitializeUsingByteBufferReInitializeUsingTextures() {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+      return;
+    }
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        null));
+    encoder.release();
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, 640, 480, 300, 30,
+        EGL10.EGL_NO_CONTEXT));
     encoder.release();
   }
 
@@ -65,7 +102,7 @@
     MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
 
     assertTrue(encoder.initEncode(
-        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30));
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30, null));
     ByteBuffer[] inputBuffers = encoder.getInputBuffers();
     assertNotNull(inputBuffers);
     assertTrue(min_size <= inputBuffers[0].capacity());
@@ -92,4 +129,49 @@
 
     encoder.release();
   }
+
+  @SmallTest
+  public static void testEncoderUsingTextures() throws InterruptedException {
+    if (!MediaCodecVideoEncoder.isVp8HwSupportedUsingTextures()) {
+      Log.i(TAG, "Hardware does not support VP8 encoding, skipping testEncoderUsingTextures");
+      return;
+    }
+
+    final int width = 640;
+    final int height = 480;
+    final long presentationTs = 2;
+
+    final EglBase eglOesBase = new EglBase(EGL10.EGL_NO_CONTEXT, EglBase.ConfigType.PIXEL_BUFFER);
+    eglOesBase.createDummyPbufferSurface();
+    eglOesBase.makeCurrent();
+    int oesTextureId = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
+
+    // TODO(perkj): This test is weak since we don't fill the texture with valid data of the
+    // correct width and height and verify the encoded data. Fill the OES texture and figure out
+    // a way to verify that the output makes sense.
+
+    MediaCodecVideoEncoder encoder = new MediaCodecVideoEncoder();
+
+    assertTrue(encoder.initEncode(
+        MediaCodecVideoEncoder.VideoCodecType.VIDEO_CODEC_VP8, width, height, 300, 30,
+        eglOesBase.getContext()));
+    assertTrue(encoder.encodeTexture(true, oesTextureId, RendererCommon.identityMatrix(),
+        presentationTs));
+    GlUtil.checkNoGLES2Error("encodeTexture");
+
+    // It should be OK to delete the texture after calling encodeTexture.
+    GLES20.glDeleteTextures(1, new int[] {oesTextureId}, 0);
+
+    OutputBufferInfo info = encoder.dequeueOutputBuffer();
+    // Poll until the encoder delivers the first encoded frame.
+    while (info == null) {
+      Thread.sleep(20);
+      info = encoder.dequeueOutputBuffer();
+    }
+    assertTrue(info.index != -1);
+    assertTrue(info.buffer.capacity() > 0);
+    encoder.releaseOutputBuffer(info.index);
+
+    encoder.release();
+    eglOesBase.release();
+  }
 }
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index 29bdbff..3fc1322 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -29,13 +29,14 @@
 #include "talk/app/webrtc/java/jni/androidmediaencoder_jni.h"
 #include "talk/app/webrtc/java/jni/classreferenceholder.h"
 #include "talk/app/webrtc/java/jni/androidmediacodeccommon.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
 #include "webrtc/base/bind.h"
 #include "webrtc/base/checks.h"
 #include "webrtc/base/logging.h"
 #include "webrtc/base/thread.h"
 #include "webrtc/base/thread_checker.h"
 #include "webrtc/modules/rtp_rtcp/source/h264_bitstream_parser.h"
-#include "webrtc/modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "webrtc/modules/video_coding/include/video_codec_interface.h"
 #include "webrtc/modules/video_coding/utility/quality_scaler.h"
 #include "webrtc/modules/video_coding/utility/vp8_header_parser.h"
 #include "webrtc/system_wrappers/include/field_trial.h"
@@ -82,7 +83,8 @@
  public:
   virtual ~MediaCodecVideoEncoder();
   MediaCodecVideoEncoder(JNIEnv* jni,
-                         VideoCodecType codecType);
+                         VideoCodecType codecType,
+                         jobject egl_context);
 
   // webrtc::VideoEncoder implementation.  Everything trampolines to
   // |codec_thread_| for execution.
@@ -106,6 +108,8 @@
 
   int GetTargetFramerate() override;
 
+  bool SupportsNativeHandle() const override { return true; }
+
  private:
   // ResetCodecOnCodecThread() calls ReleaseOnCodecThread() and
   // InitEncodeOnCodecThread() in an attempt to restore the codec to an
@@ -118,15 +122,19 @@
   // If width==0 then this is assumed to be a re-initialization and the
   // previously-current values are reused instead of the passed parameters
   // (makes it easier to reason about thread-safety).
-  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps);
-  // Reconfigure to match |frame| in width, height. Returns false if
-  // reconfiguring fails.
+  int32_t InitEncodeOnCodecThread(int width, int height, int kbps, int fps,
+      bool use_surface);
+  // Reconfigures the encoder to match |frame| in width and height. Also
+  // reconfigures if |frame| is a texture frame but the encoder was initialized
+  // for byte buffers, or vice versa. Returns false if reconfiguring fails.
   bool MaybeReconfigureEncoderOnCodecThread(const webrtc::VideoFrame& frame);
   int32_t EncodeOnCodecThread(
       const webrtc::VideoFrame& input_image,
       const std::vector<webrtc::FrameType>* frame_types);
   bool EncodeByteBufferOnCodecThread(JNIEnv* jni,
       bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index);
+  bool EncodeTextureOnCodecThread(JNIEnv* jni,
+      bool key_frame, const webrtc::VideoFrame& frame);
 
   int32_t RegisterEncodeCompleteCallbackOnCodecThread(
       webrtc::EncodedImageCallback* callback);
@@ -164,6 +172,7 @@
   jmethodID j_get_input_buffers_method_;
   jmethodID j_dequeue_input_buffer_method_;
   jmethodID j_encode_buffer_method_;
+  jmethodID j_encode_texture_method_;
   jmethodID j_release_method_;
   jmethodID j_set_rates_method_;
   jmethodID j_dequeue_output_buffer_method_;
@@ -179,6 +188,7 @@
   int width_;   // Frame width in pixels.
   int height_;  // Frame height in pixels.
   bool inited_;
+  bool use_surface_;
   uint16_t picture_id_;
   enum libyuv::FourCC encoder_fourcc_;  // Encoder color space format.
   int last_set_bitrate_kbps_;  // Last-requested bitrate in kbps.
@@ -220,6 +230,10 @@
                            // non-flexible VP9 mode.
   uint8_t tl0_pic_idx_;
   size_t gof_idx_;
+
+  // EGL context - owned by factory, should not be allocated/destroyed
+  // by MediaCodecVideoEncoder.
+  jobject egl_context_;
 };
 
 MediaCodecVideoEncoder::~MediaCodecVideoEncoder() {
@@ -228,10 +242,11 @@
 }
 
 MediaCodecVideoEncoder::MediaCodecVideoEncoder(
-    JNIEnv* jni, VideoCodecType codecType) :
+    JNIEnv* jni, VideoCodecType codecType, jobject egl_context) :
     codecType_(codecType),
     callback_(NULL),
     inited_(false),
+    use_surface_(false),
     picture_id_(0),
     codec_thread_(new Thread()),
     j_media_codec_video_encoder_class_(
@@ -243,7 +258,8 @@
                        GetMethodID(jni,
                                    *j_media_codec_video_encoder_class_,
                                    "<init>",
-                                   "()V"))) {
+                                   "()V"))),
+    egl_context_(egl_context) {
   ScopedLocalRefFrame local_ref_frame(jni);
   // It would be nice to avoid spinning up a new thread per MediaCodec, and
   // instead re-use e.g. the PeerConnectionFactory's |worker_thread_|, but bug
@@ -261,7 +277,8 @@
       jni,
       *j_media_codec_video_encoder_class_,
       "initEncode",
-      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;IIII)Z");
+      "(Lorg/webrtc/MediaCodecVideoEncoder$VideoCodecType;"
+      "IIIILjavax/microedition/khronos/egl/EGLContext;)Z");
   j_get_input_buffers_method_ = GetMethodID(
       jni,
       *j_media_codec_video_encoder_class_,
@@ -271,6 +288,9 @@
       jni, *j_media_codec_video_encoder_class_, "dequeueInputBuffer", "()I");
   j_encode_buffer_method_ = GetMethodID(
       jni, *j_media_codec_video_encoder_class_, "encodeBuffer", "(ZIIJ)Z");
+  j_encode_texture_method_ = GetMethodID(
+        jni, *j_media_codec_video_encoder_class_, "encodeTexture",
+        "(ZI[FJ)Z");
   j_release_method_ =
       GetMethodID(jni, *j_media_codec_video_encoder_class_, "release", "()V");
   j_set_rates_method_ = GetMethodID(
@@ -351,7 +371,8 @@
            codec_settings->width,
            codec_settings->height,
            codec_settings->startBitrate,
-           codec_settings->maxFramerate));
+           codec_settings->maxFramerate,
+           false /* use_surface */));
 }
 
 int32_t MediaCodecVideoEncoder::Encode(
@@ -417,8 +438,8 @@
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
   ALOGE << "ResetOnCodecThread";
   if (ReleaseOnCodecThread() != WEBRTC_VIDEO_CODEC_OK ||
-      InitEncodeOnCodecThread(width_, height_, 0, 0)
-          != WEBRTC_VIDEO_CODEC_OK) {
+      InitEncodeOnCodecThread(width_, height_, 0, 0, false) !=
+          WEBRTC_VIDEO_CODEC_OK) {
     // TODO(fischman): wouldn't it be nice if there was a way to gracefully
     // degrade to a SW encoder at this point?  There isn't one AFAICT :(
     // https://code.google.com/p/webrtc/issues/detail?id=2920
@@ -428,8 +449,9 @@
 }
 
 int32_t MediaCodecVideoEncoder::InitEncodeOnCodecThread(
-    int width, int height, int kbps, int fps) {
+    int width, int height, int kbps, int fps, bool use_surface) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface || egl_context_ != nullptr) << "EGL context not set.";
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
 
@@ -465,6 +487,7 @@
   render_times_ms_.clear();
   frame_rtc_times_ms_.clear();
   drop_next_input_frame_ = false;
+  use_surface_ = use_surface;
   picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
   gof_.SetGofInfoVP9(webrtc::TemporalStructureMode::kTemporalStructureMode1);
   tl0_pic_idx_ = static_cast<uint8_t>(rand());
@@ -475,49 +498,52 @@
       jni, "MediaCodecVideoEncoder$VideoCodecType", codecType_);
   const bool encode_status = jni->CallBooleanMethod(
       *j_media_codec_video_encoder_, j_init_encode_method_,
-      j_video_codec_enum, width, height, kbps, fps);
+      j_video_codec_enum, width, height, kbps, fps,
+      (use_surface ? egl_context_ : nullptr));
   if (!encode_status) {
     ALOGE << "Failed to configure encoder.";
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
   CHECK_EXCEPTION(jni);
 
-  jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
-      jni->CallObjectMethod(*j_media_codec_video_encoder_,
-          j_get_input_buffers_method_));
-  CHECK_EXCEPTION(jni);
-  if (IsNull(jni, input_buffers)) {
-    return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-
-  switch (GetIntField(jni, *j_media_codec_video_encoder_,
-      j_color_format_field_)) {
-    case COLOR_FormatYUV420Planar:
-      encoder_fourcc_ = libyuv::FOURCC_YU12;
-      break;
-    case COLOR_FormatYUV420SemiPlanar:
-    case COLOR_QCOM_FormatYUV420SemiPlanar:
-    case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
-      encoder_fourcc_ = libyuv::FOURCC_NV12;
-      break;
-    default:
-      LOG(LS_ERROR) << "Wrong color format.";
-      return WEBRTC_VIDEO_CODEC_ERROR;
-  }
-  size_t num_input_buffers = jni->GetArrayLength(input_buffers);
-  RTC_CHECK(input_buffers_.empty())
-      << "Unexpected double InitEncode without Release";
-  input_buffers_.resize(num_input_buffers);
-  for (size_t i = 0; i < num_input_buffers; ++i) {
-    input_buffers_[i] =
-        jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
-    int64_t yuv_buffer_capacity =
-        jni->GetDirectBufferCapacity(input_buffers_[i]);
+  if (use_surface) {
+    scale_ = false;  // TODO(perkj): Implement scaling when using textures.
+  } else {
+    jobjectArray input_buffers = reinterpret_cast<jobjectArray>(
+        jni->CallObjectMethod(*j_media_codec_video_encoder_,
+            j_get_input_buffers_method_));
     CHECK_EXCEPTION(jni);
-    RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
-  }
-  CHECK_EXCEPTION(jni);
+    if (IsNull(jni, input_buffers)) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
 
+    switch (GetIntField(jni, *j_media_codec_video_encoder_,
+        j_color_format_field_)) {
+      case COLOR_FormatYUV420Planar:
+        encoder_fourcc_ = libyuv::FOURCC_YU12;
+        break;
+      case COLOR_FormatYUV420SemiPlanar:
+      case COLOR_QCOM_FormatYUV420SemiPlanar:
+      case COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m:
+        encoder_fourcc_ = libyuv::FOURCC_NV12;
+        break;
+      default:
+        LOG(LS_ERROR) << "Wrong color format.";
+        return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    size_t num_input_buffers = jni->GetArrayLength(input_buffers);
+    RTC_CHECK(input_buffers_.empty())
+        << "Unexpected double InitEncode without Release";
+    input_buffers_.resize(num_input_buffers);
+    for (size_t i = 0; i < num_input_buffers; ++i) {
+      input_buffers_[i] =
+          jni->NewGlobalRef(jni->GetObjectArrayElement(input_buffers, i));
+      int64_t yuv_buffer_capacity =
+          jni->GetDirectBufferCapacity(input_buffers_[i]);
+      CHECK_EXCEPTION(jni);
+      RTC_CHECK(yuv_buffer_capacity >= yuv_size_) << "Insufficient capacity";
+    }
+  }
 
   inited_ = true;
   codec_thread_->PostDelayed(kMediaCodecPollMs, this);
@@ -575,18 +601,32 @@
     }
   }
 
-  int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
-      j_dequeue_input_buffer_method_);
-  CHECK_EXCEPTION(jni);
-  if (j_input_buffer_index == -1) {
-    // Video codec falls behind - no input buffer available.
-    ALOGW << "Encoder drop frame - no input buffers available";
-    frames_dropped_++;
-    // Report dropped frame to quality_scaler_.
-    OnDroppedFrame();
-    return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
+  const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
+  bool encode_status = true;
+  if (!input_frame.native_handle()) {
+    int j_input_buffer_index = jni->CallIntMethod(*j_media_codec_video_encoder_,
+        j_dequeue_input_buffer_method_);
+    CHECK_EXCEPTION(jni);
+    if (j_input_buffer_index == -1) {
+      // Video codec falls behind - no input buffer available.
+      ALOGW << "Encoder drop frame - no input buffers available";
+      frames_dropped_++;
+      // Report dropped frame to quality_scaler_.
+      OnDroppedFrame();
+      return WEBRTC_VIDEO_CODEC_OK;  // TODO(fischman): see webrtc bug 2887.
+    }
+    if (j_input_buffer_index == -2) {
+      ResetCodecOnCodecThread();
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    encode_status = EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
+        j_input_buffer_index);
+  } else {
+    encode_status = EncodeTextureOnCodecThread(jni, key_frame, input_frame);
   }
-  if (j_input_buffer_index == -2) {
+
+  if (!encode_status) {
+    ALOGE << "Failed encode frame with timestamp: " << input_frame.timestamp();
     ResetCodecOnCodecThread();
     return WEBRTC_VIDEO_CODEC_ERROR;
   }
@@ -599,15 +639,9 @@
   timestamps_.push_back(input_frame.timestamp());
   render_times_ms_.push_back(input_frame.render_time_ms());
   frame_rtc_times_ms_.push_back(GetCurrentTimeMs());
-
-  const bool key_frame = frame_types->front() != webrtc::kVideoFrameDelta;
-  const bool encode_status =
-      EncodeByteBufferOnCodecThread(jni, key_frame, input_frame,
-          j_input_buffer_index);
-
   current_timestamp_us_ += rtc::kNumMicrosecsPerSec / last_set_fps_;
 
-  if (!encode_status || !DeliverPendingOutputs(jni)) {
+  if (!DeliverPendingOutputs(jni)) {
     ALOGE << "Failed deliver pending outputs.";
     ResetCodecOnCodecThread();
     return WEBRTC_VIDEO_CODEC_ERROR;
@@ -619,9 +653,17 @@
     const webrtc::VideoFrame& frame) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
 
+  const bool is_texture_frame = frame.native_handle() != nullptr;
+  const bool reconfigure_due_to_format = is_texture_frame != use_surface_;
   const bool reconfigure_due_to_size =
       frame.width() != width_ || frame.height() != height_;
 
+  if (reconfigure_due_to_format) {
+      ALOGD << "Reconfigure encoder due to format change. "
+            << (use_surface_ ?
+                "Reconfiguring to encode from byte buffer." :
+                "Reconfiguring to encode from texture.");
+  }
   if (reconfigure_due_to_size) {
     ALOGD << "Reconfigure encoder due to frame resolution change from "
         << width_ << " x " << height_ << " to " << frame.width() << " x "
@@ -630,18 +672,19 @@
     height_ = frame.height();
   }
 
-  if (!reconfigure_due_to_size)
+  if (!reconfigure_due_to_format && !reconfigure_due_to_size)
     return true;
 
   ReleaseOnCodecThread();
 
-  return InitEncodeOnCodecThread(width_, height_, 0, 0) ==
+  return InitEncodeOnCodecThread(width_, height_, 0, 0, is_texture_frame) ==
       WEBRTC_VIDEO_CODEC_OK;
 }
 
 bool MediaCodecVideoEncoder::EncodeByteBufferOnCodecThread(JNIEnv* jni,
     bool key_frame, const webrtc::VideoFrame& frame, int input_buffer_index) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(!use_surface_);
 
   ALOGV("Encoder frame in # %d. TS: %lld. Q: %d",
       frames_received_ - 1, current_timestamp_us_ / 1000, frames_in_queue_);
@@ -668,6 +711,25 @@
   return encode_status;
 }
 
+bool MediaCodecVideoEncoder::EncodeTextureOnCodecThread(JNIEnv* jni,
+    bool key_frame, const webrtc::VideoFrame& frame) {
+  RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
+  RTC_CHECK(use_surface_);
+  NativeHandleImpl* handle =
+      static_cast<NativeHandleImpl*>(frame.native_handle());
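+  // Copy the 4x4 texture sampling matrix from the native handle into a Java
+  // float[16] for encodeTexture().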
+  jfloatArray sampling_matrix = jni->NewFloatArray(16);
+  jni->SetFloatArrayRegion(sampling_matrix, 0, 16, handle->sampling_matrix);
+
+  bool encode_status = jni->CallBooleanMethod(*j_media_codec_video_encoder_,
+                                              j_encode_texture_method_,
+                                              key_frame,
+                                              handle->oes_texture_id,
+                                              sampling_matrix,
+                                              current_timestamp_us_);
+  CHECK_EXCEPTION(jni);
+  return encode_status;
+}
+
 int32_t MediaCodecVideoEncoder::RegisterEncodeCompleteCallbackOnCodecThread(
     webrtc::EncodedImageCallback* callback) {
   RTC_DCHECK(codec_thread_checker_.CalledOnValidThread());
@@ -694,6 +756,7 @@
   CHECK_EXCEPTION(jni);
   rtc::MessageQueueManager::Clear(this);
   inited_ = false;
+  use_surface_ = false;
   ALOGD << "EncoderReleaseOnCodecThread done.";
   return WEBRTC_VIDEO_CODEC_OK;
 }
@@ -991,7 +1054,8 @@
   return scale_ ? quality_scaler_.GetTargetFramerate() : -1;
 }
 
-MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory() {
+MediaCodecVideoEncoderFactory::MediaCodecVideoEncoderFactory()
+    : egl_context_(nullptr) {
   JNIEnv* jni = AttachCurrentThreadIfNeeded();
   ScopedLocalRefFrame local_ref_frame(jni);
   jclass j_encoder_class = FindClass(jni, "org/webrtc/MediaCodecVideoEncoder");
@@ -1030,6 +1094,33 @@
 
 MediaCodecVideoEncoderFactory::~MediaCodecVideoEncoderFactory() {}
 
+void MediaCodecVideoEncoderFactory::SetEGLContext(
+    JNIEnv* jni, jobject render_egl_context) {
+  ALOGD << "MediaCodecVideoEncoderFactory::SetEGLContext";
+  if (egl_context_) {
+    jni->DeleteGlobalRef(egl_context_);
+    egl_context_ = NULL;
+  }
+  if (!IsNull(jni, render_egl_context)) {
+    egl_context_ = jni->NewGlobalRef(render_egl_context);
+    if (CheckException(jni)) {
+      ALOGE << "error calling NewGlobalRef for EGL Context.";
+      egl_context_ = NULL;
+    } else {
+      jclass j_egl_context_class =
+          FindClass(jni, "javax/microedition/khronos/egl/EGLContext");
+      if (!jni->IsInstanceOf(egl_context_, j_egl_context_class)) {
+        ALOGE << "Wrong EGL Context.";
+        jni->DeleteGlobalRef(egl_context_);
+        egl_context_ = NULL;
+      }
+    }
+  }
+  if (egl_context_ == NULL) {
+    ALOGW << "NULL VideoDecoder EGL context - HW surface encoding is disabled.";
+  }
+}
+
 webrtc::VideoEncoder* MediaCodecVideoEncoderFactory::CreateVideoEncoder(
     VideoCodecType type) {
   if (supported_codecs_.empty()) {
@@ -1041,7 +1132,8 @@
     if (it->type == type) {
       ALOGD << "Create HW video encoder for type " << (int)type <<
           " (" << it->name << ").";
-      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type);
+      return new MediaCodecVideoEncoder(AttachCurrentThreadIfNeeded(), type,
+          egl_context_);
     }
   }
   ALOGW << "Can not find HW video encoder for type " << (int)type;
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
index ff124aa..8ff8164 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.h
@@ -43,6 +43,8 @@
   MediaCodecVideoEncoderFactory();
   virtual ~MediaCodecVideoEncoderFactory();
 
+  void SetEGLContext(JNIEnv* jni, jobject render_egl_context);
+
   // WebRtcVideoEncoderFactory implementation.
   webrtc::VideoEncoder* CreateVideoEncoder(webrtc::VideoCodecType type)
       override;
@@ -50,6 +52,7 @@
   void DestroyVideoEncoder(webrtc::VideoEncoder* encoder) override;
 
  private:
+  jobject egl_context_;
   // Empty if platform support is lacking, const after ctor returns.
   std::vector<VideoCodec> supported_codecs_;
 };
diff --git a/talk/app/webrtc/java/jni/peerconnection_jni.cc b/talk/app/webrtc/java/jni/peerconnection_jni.cc
index 984227d..c79e3a5 100644
--- a/talk/app/webrtc/java/jni/peerconnection_jni.cc
+++ b/talk/app/webrtc/java/jni/peerconnection_jni.cc
@@ -1292,21 +1292,30 @@
 }
 
 JOW(void, PeerConnectionFactory_nativeSetVideoHwAccelerationOptions)(
-    JNIEnv* jni, jclass, jlong native_factory, jobject render_egl_context) {
+    JNIEnv* jni, jclass, jlong native_factory, jobject local_egl_context,
+    jobject remote_egl_context) {
 #if defined(ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
   OwnedFactoryAndThreads* owned_factory =
       reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+
+  MediaCodecVideoEncoderFactory* encoder_factory =
+      static_cast<MediaCodecVideoEncoderFactory*>
+          (owned_factory->encoder_factory());
+  if (encoder_factory) {
+    LOG(LS_INFO) << "Set EGL context for HW encoding.";
+    encoder_factory->SetEGLContext(jni, local_egl_context);
+  }
+
   MediaCodecVideoDecoderFactory* decoder_factory =
       static_cast<MediaCodecVideoDecoderFactory*>
           (owned_factory->decoder_factory());
   if (decoder_factory) {
-    LOG(LS_INFO) << "Set EGL context for HW acceleration.";
-    decoder_factory->SetEGLContext(jni, render_egl_context);
+    LOG(LS_INFO) << "Set EGL context for HW decoding.";
+    decoder_factory->SetEGLContext(jni, remote_egl_context);
   }
 #endif
 }
 
-
 static std::string
 GetJavaEnumName(JNIEnv* jni, const std::string& className, jobject j_enum) {
   jclass enumClass = FindClass(jni, className.c_str());
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index 8e8b211..47886ef 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -33,8 +33,10 @@
 import android.media.MediaCodecInfo;
 import android.media.MediaCodecList;
 import android.media.MediaFormat;
+import android.opengl.GLES20;
 import android.os.Build;
 import android.os.Bundle;
+import android.view.Surface;
 
 import org.webrtc.Logging;
 
@@ -43,6 +45,8 @@
 import java.util.List;
 import java.util.concurrent.CountDownLatch;
 
+import javax.microedition.khronos.egl.EGLContext;
+
 // Java-side of peerconnection_jni.cc:MediaCodecVideoEncoder.
 // This class is an implementation detail of the Java PeerConnection API.
 @TargetApi(19)
@@ -73,6 +77,9 @@
   private Thread mediaCodecThread;
   private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
+  private EglBase eglBase;
+  private Surface inputSurface;
+  private GlRectDrawer drawer;
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
   private static final String VP9_MIME_TYPE = "video/x-vnd.on2.vp9";
   private static final String H264_MIME_TYPE = "video/avc";
@@ -109,6 +116,9 @@
     CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
     COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
   };
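+  // Color formats that support encoding from a texture via an input Surface.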
+  private static final int[] supportedSurfaceColorList = {
+    CodecCapabilities.COLOR_FormatSurface
+  };
   private VideoCodecType type;
   private int colorFormat;  // Used by native code.
 
@@ -138,7 +148,7 @@
   }
 
   private static EncoderProperties findHwEncoder(
-      String mime, String[] supportedHwCodecPrefixes) {
+      String mime, String[] supportedHwCodecPrefixes, int[] colorList) {
     // MediaCodec.setParameters is missing for JB and below, so bitrate
     // can not be adjusted dynamically.
     if (Build.VERSION.SDK_INT < Build.VERSION_CODES.KITKAT) {
@@ -188,8 +198,7 @@
         Logging.v(TAG, "   Color: 0x" + Integer.toHexString(colorFormat));
       }
 
-      // Check if codec supports either yuv420 or nv12.
-      for (int supportedColorFormat : supportedColorList) {
+      for (int supportedColorFormat : colorList) {
         for (int codecColorFormat : capabilities.colorFormats) {
           if (codecColorFormat == supportedColorFormat) {
             // Found supported HW encoder.
@@ -204,14 +213,30 @@
   }
 
   public static boolean isVp8HwSupported() {
-    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes) != null;
+    return findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedColorList) != null;
   }
 
   public static boolean isVp9HwSupported() {
-    return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes) != null;
+    return findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedColorList) != null;
   }
+
   public static boolean isH264HwSupported() {
-    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes) != null;
+    return findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedColorList) != null;
+  }
+
+  public static boolean isVp8HwSupportedUsingTextures() {
+    return findHwEncoder(
+        VP8_MIME_TYPE, supportedVp8HwCodecPrefixes, supportedSurfaceColorList) != null;
+  }
+
+  public static boolean isVp9HwSupportedUsingTextures() {
+    return findHwEncoder(
+        VP9_MIME_TYPE, supportedVp9HwCodecPrefixes, supportedSurfaceColorList) != null;
+  }
+
+  public static boolean isH264HwSupportedUsingTextures() {
+    return findHwEncoder(
+        H264_MIME_TYPE, supportedH264HwCodecPrefixes, supportedSurfaceColorList) != null;
   }
 
   private void checkOnMediaCodecThread() {
@@ -244,10 +269,11 @@
     }
   }
 
-  // Returns false if the hardware encoder currently can't be used.
-  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps) {
+  // Returns false if the hardware encoder currently can't be used.
+  boolean initEncode(VideoCodecType type, int width, int height, int kbps, int fps,
+      EGLContext sharedContext) {
+    final boolean useSurface = sharedContext != null;
     Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
-        ". @ " + kbps + " kbps. Fps: " + fps + ".");
+        ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
 
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
@@ -257,15 +283,18 @@
     int keyFrameIntervalSec = 0;
     if (type == VideoCodecType.VIDEO_CODEC_VP8) {
       mime = VP8_MIME_TYPE;
-      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes);
+      properties = findHwEncoder(VP8_MIME_TYPE, supportedVp8HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
       keyFrameIntervalSec = 100;
     } else if (type == VideoCodecType.VIDEO_CODEC_VP9) {
       mime = VP9_MIME_TYPE;
-      properties = findHwEncoder(VP9_MIME_TYPE, supportedH264HwCodecPrefixes);
+      properties = findHwEncoder(VP9_MIME_TYPE, supportedVp9HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
       keyFrameIntervalSec = 100;
     } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
       mime = H264_MIME_TYPE;
-      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes);
+      properties = findHwEncoder(H264_MIME_TYPE, supportedH264HwCodecPrefixes,
+          useSurface ? supportedSurfaceColorList : supportedColorList);
       keyFrameIntervalSec = 20;
     }
     if (properties == null) {
@@ -293,6 +322,13 @@
       mediaCodec.configure(
           format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
 
+      if (useSurface) {
+        eglBase = new EglBase(sharedContext, EglBase.ConfigType.RECORDABLE);
+        // Create an input surface and keep a reference since we must release the surface when done.
+        inputSurface = mediaCodec.createInputSurface();
+        eglBase.createSurface(inputSurface);
+        drawer = new GlRectDrawer();
+      }
       mediaCodec.start();
       outputBuffers = mediaCodec.getOutputBuffers();
       Logging.d(TAG, "Output buffers: " + outputBuffers.length);
@@ -335,6 +371,29 @@
     }
   }
 
+  boolean encodeTexture(boolean isKeyframe, int oesTextureId, float[] transformationMatrix,
+      long presentationTimestampUs) {
+    checkOnMediaCodecThread();
+    try {
+      if (isKeyframe) {
+        Logging.d(TAG, "Sync frame request");
+        Bundle b = new Bundle();
+        b.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
+        mediaCodec.setParameters(b);
+      }
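+      // Render the texture onto the codec's input surface; swapBuffers() submits the frame.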
+      eglBase.makeCurrent();
+      drawer.drawOes(oesTextureId, transformationMatrix);
+      // TODO(perkj): Do we have to call EGLExt.eglPresentationTimeANDROID?
+      // If not, remove |presentationTimestampUs|.
+      eglBase.swapBuffers();
+      return true;
+    } catch (RuntimeException e) {
+      Logging.e(TAG, "encodeTexture failed", e);
+      return false;
+    }
+  }
+
   void release() {
     Logging.d(TAG, "Java releaseEncoder");
     checkOnMediaCodecThread();
@@ -370,6 +429,18 @@
 
     mediaCodec = null;
     mediaCodecThread = null;
+    if (drawer != null) {
+      drawer.release();
+      drawer = null;
+    }
+    if (eglBase != null) {
+      eglBase.release();
+      eglBase = null;
+    }
+    if (inputSurface != null) {
+      inputSurface.release();
+      inputSurface = null;
+    }
     runningInstance = null;
     Logging.d(TAG, "Java releaseEncoder done");
   }
diff --git a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
index 83999ec..70562c3 100644
--- a/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
+++ b/talk/app/webrtc/java/src/org/webrtc/PeerConnectionFactory.java
@@ -135,8 +135,22 @@
     nativeSetOptions(nativeFactory, options);
   }
 
+  @Deprecated
   public void setVideoHwAccelerationOptions(Object renderEGLContext) {
-    nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext);
+    nativeSetVideoHwAccelerationOptions(nativeFactory, renderEGLContext, renderEGLContext);
+  }
+
+  /** Sets the EGL context used by HW video encoding and decoding.
+   *
+   * @param localEGLContext   An instance of javax.microedition.khronos.egl.EGLContext.
+   *                          Must be the same as used by VideoCapturerAndroid and any local
+   *                          video renderer.
+   * @param remoteEGLContext  An instance of javax.microedition.khronos.egl.EGLContext.
+   *                          Must be the same as used by any remote video renderer.
+   */
+  public void setVideoHwAccelerationOptions(Object localEGLContext, Object remoteEGLContext) {
+    nativeSetVideoHwAccelerationOptions(nativeFactory, localEGLContext, remoteEGLContext);
   }
 
   public void dispose() {
@@ -204,7 +218,7 @@
   public native void nativeSetOptions(long nativeFactory, Options options);
 
   private static native void nativeSetVideoHwAccelerationOptions(
-      long nativeFactory, Object renderEGLContext);
+      long nativeFactory, Object localEGLContext, Object remoteEGLContext);
 
   private static native void nativeThreadsCallbacks(long nativeFactory);