Android: Refactor renderers to allow apps to inject custom shaders

This CL:
 * Abstracts the functions in GlRectDrawer to an interface.
 * Adds the viewport location as an argument to the draw() functions, because some shaders may need this information. This also moves the responsibility of calling GLES20.glViewport() to the drawer.
 * Moves uploadYuvData() into a separate helper class.
 * Adds a new SurfaceViewRenderer.init() function and a new VideoRendererGui.create() function that take a custom drawer as an argument (see the usage sketch below). Each YuvImageRenderer in VideoRendererGui now has its own drawer instead of a shared one.
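
A minimal usage sketch of the new API (hypothetical app code; CustomDrawer and the renderer,
sharedContext and rendererEvents names below are illustrative and not part of this CL):

    // A drawer that simply delegates to the default GlRectDrawer. A real app could
    // instead compile and use its own fragment shaders in these draw methods.
    class CustomDrawer implements RendererCommon.GlDrawer {
      private final GlRectDrawer delegate = new GlRectDrawer();

      @Override
      public void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height) {
        // Custom GL state or shader selection would go here.
        delegate.drawOes(oesTextureId, texMatrix, x, y, width, height);
      }

      @Override
      public void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height) {
        delegate.drawRgb(textureId, texMatrix, x, y, width, height);
      }

      @Override
      public void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height) {
        delegate.drawYuv(yuvTextures, texMatrix, x, y, width, height);
      }

      @Override
      public void release() {
        delegate.release();
      }
    }

    // Injecting the drawer. Each renderer takes ownership and calls release() on it.
    renderer.init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new CustomDrawer());
    VideoRendererGui.create(0, 0, 100, 100,
        RendererCommon.ScalingType.SCALE_ASPECT_FIT, false /* mirror */, new CustomDrawer());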

BUG=b/25694445
R=nisse@webrtc.org, perkj@webrtc.org

Review URL: https://codereview.webrtc.org/1520243003 .

Cr-Commit-Position: refs/heads/master@{#11031}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
index 005071b..63c05fb 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -115,7 +115,7 @@
 
     // Draw the RGB frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -162,7 +162,7 @@
 
     // Draw the YUV frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix());
+    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -250,7 +250,7 @@
         GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
             HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
         // Draw the RGB data onto the SurfaceTexture.
-        drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
+        drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix(), 0, 0, WIDTH, HEIGHT);
         eglBase.swapBuffers();
       }
 
@@ -288,7 +288,7 @@
     // Draw the OES texture on the pixel buffer.
     eglBase.makeCurrent();
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+    drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, WIDTH, HEIGHT);
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
index 1a6731b..9e0164d 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/SurfaceTextureHelperTest.java
@@ -149,7 +149,7 @@
       // Wait for an OES texture to arrive and draw it onto the pixel buffer.
       listener.waitForNewFrame();
       eglBase.makeCurrent();
-      drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+      drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
 
       surfaceTextureHelper.returnTextureFrame();
 
@@ -220,7 +220,7 @@
     // Draw the pending texture frame onto the pixel buffer.
     eglBase.makeCurrent();
     final GlRectDrawer drawer = new GlRectDrawer();
-    drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
+    drawer.drawOes(listener.oesTextureId, listener.transformMatrix, 0, 0, width, height);
     drawer.release();
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
diff --git a/talk/app/webrtc/java/android/org/webrtc/EglBase.java b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
index d502138..035645b 100644
--- a/talk/app/webrtc/java/android/org/webrtc/EglBase.java
+++ b/talk/app/webrtc/java/android/org/webrtc/EglBase.java
@@ -57,6 +57,14 @@
     EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
     EGL10.EGL_NONE
   };
+  public static final int[] CONFIG_RGBA = {
+    EGL10.EGL_RED_SIZE, 8,
+    EGL10.EGL_GREEN_SIZE, 8,
+    EGL10.EGL_BLUE_SIZE, 8,
+    EGL10.EGL_ALPHA_SIZE, 8,
+    EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+    EGL10.EGL_NONE
+  };
   public static final int[] CONFIG_PIXEL_BUFFER = {
     EGL10.EGL_RED_SIZE, 8,
     EGL10.EGL_GREEN_SIZE, 8,
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
index 2cb8af7..6d3d5d2 100644
--- a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
@@ -40,13 +40,13 @@
 import java.util.Map;
 
 /**
- * Helper class to draw a quad that covers the entire viewport. Rotation, mirror, and cropping is
- * specified using a 4x4 texture coordinate transform matrix. The frame input can either be an OES
- * texture or YUV textures in I420 format. The GL state must be preserved between draw calls, this
- * is intentional to maximize performance. The function release() must be called manually to free
- * the resources held by this object.
+ * Helper class to draw an opaque quad on the target viewport location. Rotation, mirror, and
+ * cropping are specified using a 4x4 texture coordinate transform matrix. The frame input can
+ * either be an OES texture or YUV textures in I420 format. The GL state must be preserved between
+ * draw calls; this is intentional to maximize performance. The function release() must be called
+ * manually to free the resources held by this object.
  */
-public class GlRectDrawer {
+public class GlRectDrawer implements RendererCommon.GlDrawer {
   // Simple vertex shader, used for both YUV and OES.
   private static final String VERTEX_SHADER_STRING =
         "varying vec2 interp_tc;\n"
@@ -118,67 +118,31 @@
             1.0f, 1.0f   // Top right.
           });
 
-  // The keys are one of the fragments shaders above.
-  private final Map<String, GlShader> shaders = new IdentityHashMap<String, GlShader>();
-  private GlShader currentShader;
-  private float[] currentTexMatrix;
-  private int texMatrixLocation;
-  // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
-  // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader that
-  // handles stride and compare performance with intermediate copy.
-  private ByteBuffer copyBuffer;
+  private static class Shader {
+    public final GlShader glShader;
+    public final int texMatrixLocation;
 
-  /**
-   * Upload |planes| into |outputYuvTextures|, taking stride into consideration. |outputYuvTextures|
-   * must have been generated in advance.
-   */
-  public void uploadYuvData(
-      int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
-    // Make a first pass to see if we need a temporary copy buffer.
-    int copyCapacityNeeded = 0;
-    for (int i = 0; i < 3; ++i) {
-      final int planeWidth = (i == 0) ? width : width / 2;
-      final int planeHeight = (i == 0) ? height : height / 2;
-      if (strides[i] > planeWidth) {
-        copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidth * planeHeight);
-      }
-    }
-    // Allocate copy buffer if necessary.
-    if (copyCapacityNeeded > 0
-        && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
-      copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
-    }
-    // Upload each plane.
-    for (int i = 0; i < 3; ++i) {
-      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
-      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
-      final int planeWidth = (i == 0) ? width : width / 2;
-      final int planeHeight = (i == 0) ? height : height / 2;
-      // GLES only accepts packed data, i.e. stride == planeWidth.
-      final ByteBuffer packedByteBuffer;
-      if (strides[i] == planeWidth) {
-        // Input is packed already.
-        packedByteBuffer = planes[i];
-      } else {
-        VideoRenderer.nativeCopyPlane(
-            planes[i], planeWidth, planeHeight, strides[i], copyBuffer, planeWidth);
-        packedByteBuffer = copyBuffer;
-      }
-      GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidth, planeHeight, 0,
-          GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+    public Shader(String fragmentShader) {
+      this.glShader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+      this.texMatrixLocation = glShader.getUniformLocation("texMatrix");
     }
   }
 
+  // Each key is one of the fragment shaders above.
+  private final Map<String, Shader> shaders = new IdentityHashMap<String, Shader>();
+
   /**
    * Draw an OES texture frame with specified texture transformation matrix. Required resources are
    * allocated at the first call to this function.
    */
-  public void drawOes(int oesTextureId, float[] texMatrix) {
-    prepareShader(OES_FRAGMENT_SHADER_STRING);
+  @Override
+  public void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height) {
+    prepareShader(OES_FRAGMENT_SHADER_STRING, texMatrix);
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
     // updateTexImage() may be called from another thread in another EGL context, so we need to
     // bind/unbind the texture in each draw call so that GLES understads it's a new texture.
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, oesTextureId);
-    drawRectangle(texMatrix);
+    drawRectangle(x, y, width, height);
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
   }
 
@@ -186,10 +150,12 @@
    * Draw a RGB(A) texture frame with specified texture transformation matrix. Required resources
    * are allocated at the first call to this function.
    */
-  public void drawRgb(int textureId, float[] texMatrix) {
-    prepareShader(RGB_FRAGMENT_SHADER_STRING);
+  @Override
+  public void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height) {
+    prepareShader(RGB_FRAGMENT_SHADER_STRING, texMatrix);
+    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
     GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
-    drawRectangle(texMatrix);
+    drawRectangle(x, y, width, height);
     // Unbind the texture as a precaution.
     GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, 0);
   }
@@ -198,14 +164,15 @@
    * Draw a YUV frame with specified texture transformation matrix. Required resources are
    * allocated at the first call to this function.
    */
-  public void drawYuv(int[] yuvTextures, float[] texMatrix) {
-    prepareShader(YUV_FRAGMENT_SHADER_STRING);
+  @Override
+  public void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height) {
+    prepareShader(YUV_FRAGMENT_SHADER_STRING, texMatrix);
     // Bind the textures.
     for (int i = 0; i < 3; ++i) {
       GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
       GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, yuvTextures[i]);
     }
-    drawRectangle(texMatrix);
+    drawRectangle(x, y, width, height);
     // Unbind the textures as a precaution..
     for (int i = 0; i < 3; ++i) {
       GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
@@ -213,60 +180,51 @@
     }
   }
 
-  private void drawRectangle(float[] texMatrix) {
-    // Try avoid uploading the texture if possible.
-    if (!Arrays.equals(currentTexMatrix, texMatrix)) {
-      currentTexMatrix = texMatrix.clone();
-      // Copy the texture transformation matrix over.
-      GLES20.glUniformMatrix4fv(texMatrixLocation, 1, false, texMatrix, 0);
-    }
+  private void drawRectangle(int x, int y, int width, int height) {
     // Draw quad.
+    GLES20.glViewport(x, y, width, height);
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
   }
 
-  private void prepareShader(String fragmentShader) {
-    // Lazy allocation.
-    if (!shaders.containsKey(fragmentShader)) {
-      final GlShader shader = new GlShader(VERTEX_SHADER_STRING, fragmentShader);
+  private void prepareShader(String fragmentShader, float[] texMatrix) {
+    final Shader shader;
+    if (shaders.containsKey(fragmentShader)) {
+      shader = shaders.get(fragmentShader);
+    } else {
+      // Lazy allocation.
+      shader = new Shader(fragmentShader);
       shaders.put(fragmentShader, shader);
-      shader.useProgram();
+      shader.glShader.useProgram();
       // Initialize fragment shader uniform values.
       if (fragmentShader == YUV_FRAGMENT_SHADER_STRING) {
-        GLES20.glUniform1i(shader.getUniformLocation("y_tex"), 0);
-        GLES20.glUniform1i(shader.getUniformLocation("u_tex"), 1);
-        GLES20.glUniform1i(shader.getUniformLocation("v_tex"), 2);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("y_tex"), 0);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("u_tex"), 1);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("v_tex"), 2);
       } else if (fragmentShader == RGB_FRAGMENT_SHADER_STRING) {
-        GLES20.glUniform1i(shader.getUniformLocation("rgb_tex"), 0);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("rgb_tex"), 0);
       } else if (fragmentShader == OES_FRAGMENT_SHADER_STRING) {
-        GLES20.glUniform1i(shader.getUniformLocation("oes_tex"), 0);
+        GLES20.glUniform1i(shader.glShader.getUniformLocation("oes_tex"), 0);
       } else {
         throw new IllegalStateException("Unknown fragment shader: " + fragmentShader);
       }
       GlUtil.checkNoGLES2Error("Initialize fragment shader uniform values.");
       // Initialize vertex shader attributes.
-      shader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
-      shader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
+      shader.glShader.setVertexAttribArray("in_pos", 2, FULL_RECTANGLE_BUF);
+      shader.glShader.setVertexAttribArray("in_tc", 2, FULL_RECTANGLE_TEX_BUF);
     }
-
-    // Update GLES state if shader is not already current.
-    final GlShader shader = shaders.get(fragmentShader);
-    if (currentShader != shader) {
-      currentShader = shader;
-      shader.useProgram();
-      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
-      currentTexMatrix = null;
-      texMatrixLocation = shader.getUniformLocation("texMatrix");
-    }
+    shader.glShader.useProgram();
+    // Copy the texture transformation matrix over.
+    GLES20.glUniformMatrix4fv(shader.texMatrixLocation, 1, false, texMatrix, 0);
   }
 
   /**
    * Release all GLES resources. This needs to be done manually, otherwise the resources are leaked.
    */
+  @Override
   public void release() {
-    for (GlShader shader : shaders.values()) {
-      shader.release();
+    for (Shader shader : shaders.values()) {
+      shader.glShader.release();
     }
     shaders.clear();
-    copyBuffer = null;
   }
 }
diff --git a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
index 22bb327..5ada4cc 100644
--- a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
+++ b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
@@ -28,8 +28,11 @@
 package org.webrtc;
 
 import android.graphics.Point;
+import android.opengl.GLES20;
 import android.opengl.Matrix;
 
+import java.nio.ByteBuffer;
+
 /**
  * Static helper functions for renderer implementations.
  */
@@ -47,6 +50,73 @@
     public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation);
   }
 
+  /** Interface for rendering frames on an EGLSurface. */
+  public static interface GlDrawer {
+    /**
+     * Functions for drawing frames with different sources. The rendering surface target is
+     * implied by the current EGL context of the calling thread and requires no explicit argument.
+     * The coordinates specify the viewport location on the surface target.
+     */
+    void drawOes(int oesTextureId, float[] texMatrix, int x, int y, int width, int height);
+    void drawRgb(int textureId, float[] texMatrix, int x, int y, int width, int height);
+    void drawYuv(int[] yuvTextures, float[] texMatrix, int x, int y, int width, int height);
+
+    /**
+     * Release all GL resources. This needs to be done manually, otherwise resources may leak.
+     */
+    void release();
+  }
+
+  /**
+   * Helper class for uploading YUV ByteBuffer frames to textures, handling strides larger than the
+   * plane width. An internal ByteBuffer is reused to avoid allocations for intermediate copies.
+   */
+  public static class YuvUploader {
+    // Intermediate copy buffer for uploading yuv frames that are not packed, i.e. stride > width.
+    // TODO(magjed): Investigate when GL_UNPACK_ROW_LENGTH is available, or make a custom shader
+    // that handles stride and compare performance with intermediate copy.
+    private ByteBuffer copyBuffer;
+
+    /**
+     * Upload |planes| into |outputYuvTextures|, taking stride into consideration.
+     * |outputYuvTextures| must have been generated in advance.
+     */
+    public void uploadYuvData(
+        int[] outputYuvTextures, int width, int height, int[] strides, ByteBuffer[] planes) {
+      final int[] planeWidths = new int[] {width, width / 2, width / 2};
+      final int[] planeHeights = new int[] {height, height / 2, height / 2};
+      // Make a first pass to see if we need a temporary copy buffer.
+      int copyCapacityNeeded = 0;
+      for (int i = 0; i < 3; ++i) {
+        if (strides[i] > planeWidths[i]) {
+          copyCapacityNeeded = Math.max(copyCapacityNeeded, planeWidths[i] * planeHeights[i]);
+        }
+      }
+      // Allocate copy buffer if necessary.
+      if (copyCapacityNeeded > 0
+          && (copyBuffer == null || copyBuffer.capacity() < copyCapacityNeeded)) {
+        copyBuffer = ByteBuffer.allocateDirect(copyCapacityNeeded);
+      }
+      // Upload each plane.
+      for (int i = 0; i < 3; ++i) {
+        GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i);
+        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, outputYuvTextures[i]);
+        // GLES only accepts packed data, i.e. stride == planeWidth.
+        final ByteBuffer packedByteBuffer;
+        if (strides[i] == planeWidths[i]) {
+          // Input is packed already.
+          packedByteBuffer = planes[i];
+        } else {
+          VideoRenderer.nativeCopyPlane(
+              planes[i], planeWidths[i], planeHeights[i], strides[i], copyBuffer, planeWidths[i]);
+          packedByteBuffer = copyBuffer;
+        }
+        GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, planeWidths[i],
+            planeHeights[i], 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, packedByteBuffer);
+      }
+    }
+  }
+
   // Types of video scaling:
   // SCALE_ASPECT_FIT - video frame is scaled to fit the size of the view by
   //    maintaining the aspect ratio (black borders may be displayed).
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index 452b3e3..fa199b3 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -66,7 +66,8 @@
   // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed
   // from the render thread.
   private EglBase eglBase;
-  private GlRectDrawer drawer;
+  private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+  private RendererCommon.GlDrawer drawer;
   // Texture ids for YUV frames. Allocated on first arrival of a YUV frame.
   private int[] yuvTextures = null;
 
@@ -154,16 +155,27 @@
    */
   public void init(
       EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) {
+    init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer());
+  }
+
+  /**
+   * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used
+   * for drawing frames on the EGLSurface. This class is responsible for calling release() on
+   * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous
+   * init()/release() cycle.
+   */
+  public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents,
+      int[] configAttributes, RendererCommon.GlDrawer drawer) {
     synchronized (handlerLock) {
       if (renderThreadHandler != null) {
         throw new IllegalStateException(getResourceName() + "Already initialized");
       }
       Logging.d(TAG, getResourceName() + "Initializing.");
       this.rendererEvents = rendererEvents;
+      this.drawer = drawer;
       renderThread = new HandlerThread(TAG);
       renderThread.start();
-      drawer = new GlRectDrawer();
-      eglBase = EglBase.create(sharedContext, EglBase.CONFIG_PLAIN);
+      eglBase = EglBase.create(sharedContext, configAttributes);
       renderThreadHandler = new Handler(renderThread.getLooper());
     }
     tryCreateEglSurface();
@@ -481,7 +493,6 @@
       texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
     }
 
-    GLES20.glViewport(0, 0, surfaceSize.x, surfaceSize.y);
     // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's
     // a workaround for bug 5147. Performance will be slightly worse.
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
@@ -493,11 +504,11 @@
           yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
         }
       }
-      drawer.uploadYuvData(
+      yuvUploader.uploadYuvData(
           yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
-      drawer.drawYuv(yuvTextures, texMatrix);
+      drawer.drawYuv(yuvTextures, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
     } else {
-      drawer.drawOes(frame.textureId, texMatrix);
+      drawer.drawOes(frame.textureId, texMatrix, 0, 0, surfaceSize.x, surfaceSize.y);
     }
 
     eglBase.swapBuffers();
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index 93d4c04..6eb9c2a 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -68,8 +68,6 @@
   private int screenHeight;
   // List of yuv renderers.
   private final ArrayList<YuvImageRenderer> yuvImageRenderers;
-  // |drawer| is synchronized on |yuvImageRenderers|.
-  private GlRectDrawer drawer;
   // Render and draw threads.
   private static Thread renderFrameThread;
   private static Thread drawThread;
@@ -98,6 +96,8 @@
     // currently leaking resources to avoid a rare crash in release() where the EGLContext has
     // become invalid beforehand.
     private int[] yuvTextures = { 0, 0, 0 };
+    private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader();
+    private final RendererCommon.GlDrawer drawer;
     // Resources for making a deep copy of incoming OES texture frame.
     private GlTextureFrameBuffer textureCopy;
 
@@ -156,12 +156,13 @@
     private YuvImageRenderer(
         GLSurfaceView surface, int id,
         int x, int y, int width, int height,
-        RendererCommon.ScalingType scalingType, boolean mirror) {
+        RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
       Logging.d(TAG, "YuvImageRenderer.Create id: " + id);
       this.surface = surface;
       this.id = id;
       this.scalingType = scalingType;
       this.mirror = mirror;
+      this.drawer = drawer;
       layoutInPercentage = new Rect(x, y, Math.min(100, x + width), Math.min(100, y + height));
       updateLayoutProperties = false;
       rotationDegree = 0;
@@ -173,6 +174,7 @@
 
     private synchronized void release() {
       surface = null;
+      drawer.release();
       synchronized (pendingFrameLock) {
         if (pendingFrame != null) {
           VideoRenderer.renderFrameDone(pendingFrame);
@@ -225,7 +227,7 @@
       }
     }
 
-    private void draw(GlRectDrawer drawer) {
+    private void draw() {
       if (!seenFrame) {
         // No frame received yet - nothing to render.
         return;
@@ -244,7 +246,7 @@
               pendingFrame.samplingMatrix, pendingFrame.rotationDegree);
           if (pendingFrame.yuvFrame) {
             rendererType = RendererType.RENDERER_YUV;
-            drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
+            yuvUploader.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
                 pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
           } else {
             rendererType = RendererType.RENDERER_TEXTURE;
@@ -257,8 +259,8 @@
             GlUtil.checkNoGLES2Error("glBindFramebuffer");
 
             // Copy the OES texture content. This will also normalize the sampling matrix.
-             GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
-             drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
+             drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix,
+                 0, 0, textureCopy.getWidth(), textureCopy.getHeight());
              rotatedSamplingMatrix = RendererCommon.identityMatrix();
 
              // Restore normal framebuffer.
@@ -271,17 +273,17 @@
         }
       }
 
-      // OpenGL defaults to lower left origin - flip vertically.
-      GLES20.glViewport(displayLayout.left, screenHeight - displayLayout.bottom,
-                        displayLayout.width(), displayLayout.height());
-
       updateLayoutMatrix();
       final float[] texMatrix =
           RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
+      // OpenGL defaults to lower left origin - flip viewport position vertically.
+      final int viewportY = screenHeight - displayLayout.bottom;
       if (rendererType == RendererType.RENDERER_YUV) {
-        drawer.drawYuv(yuvTextures, texMatrix);
+        drawer.drawYuv(yuvTextures, texMatrix,
+            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
       } else {
-        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
+        drawer.drawRgb(textureCopy.getTextureId(), texMatrix,
+            displayLayout.left, viewportY, displayLayout.width(), displayLayout.height());
       }
 
       if (isNewFrame) {
@@ -463,6 +465,16 @@
    */
   public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
       RendererCommon.ScalingType scalingType, boolean mirror) {
+    return create(x, y, width, height, scalingType, mirror, new GlRectDrawer());
+  }
+
+  /**
+   * Creates VideoRenderer.Callbacks with top left corner at (x, y) and resolution (width, height).
+   * All parameters are in percentage of screen resolution. The custom |drawer| will be used for
+   * drawing frames on the EGLSurface. This class is responsible for calling release() on |drawer|.
+   */
+  public static synchronized YuvImageRenderer create(int x, int y, int width, int height,
+      RendererCommon.ScalingType scalingType, boolean mirror, RendererCommon.GlDrawer drawer) {
     // Check display region parameters.
     if (x < 0 || x > 100 || y < 0 || y > 100 ||
         width < 0 || width > 100 || height < 0 || height > 100 ||
@@ -476,7 +488,7 @@
     }
     final YuvImageRenderer yuvImageRenderer = new YuvImageRenderer(
         instance.surface, instance.yuvImageRenderers.size(),
-        x, y, width, height, scalingType, mirror);
+        x, y, width, height, scalingType, mirror, drawer);
     synchronized (instance.yuvImageRenderers) {
       if (instance.onSurfaceCreatedCalled) {
         // onSurfaceCreated has already been called for VideoRendererGui -
@@ -600,8 +612,6 @@
     }
 
     synchronized (yuvImageRenderers) {
-      // Create drawer for YUV/OES frames.
-      drawer = new GlRectDrawer();
       // Create textures for all images.
       for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
         yuvImageRenderer.createTextures();
@@ -642,7 +652,7 @@
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
     synchronized (yuvImageRenderers) {
       for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
-        yuvImageRenderer.draw(drawer);
+        yuvImageRenderer.draw();
       }
     }
   }
diff --git a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
index 347a150..54d40cd 100644
--- a/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
+++ b/talk/app/webrtc/java/src/org/webrtc/MediaCodecVideoEncoder.java
@@ -76,6 +76,8 @@
   private MediaCodec mediaCodec;
   private ByteBuffer[] outputBuffers;
   private EglBase eglBase;
+  private int width;
+  private int height;
   private Surface inputSurface;
   private GlRectDrawer drawer;
   private static final String VP8_MIME_TYPE = "video/x-vnd.on2.vp8";
@@ -273,6 +275,8 @@
     Logging.d(TAG, "Java initEncode: " + type + " : " + width + " x " + height +
         ". @ " + kbps + " kbps. Fps: " + fps + ". Encode from texture : " + useSurface);
 
+    this.width = width;
+    this.height = height;
     if (mediaCodecThread != null) {
       throw new RuntimeException("Forgot to release()?");
     }
@@ -383,7 +387,7 @@
       // TODO(perkj): glClear() shouldn't be necessary since every pixel is covered anyway,
       // but it's a workaround for bug webrtc:5147.
       GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-      drawer.drawOes(oesTextureId, transformationMatrix);
+      drawer.drawOes(oesTextureId, transformationMatrix, 0, 0, width, height);
       // TODO(perkj): Do we have to call EGLExt.eglPresentationTimeANDROID ?
       // If not, remove |presentationTimestampUs|.
       eglBase.swapBuffers();