Android RendererCommon: Refactor getSamplingMatrix()

This CL refactors RendererCommon.getSamplingMatrix() so it does not have any dependency on SurfaceTexture. The purpose is to prepare for a change in how texture frames are represented - only the texture matrix will be exposed, not the SurfaceTexture itself. This CL also adds an extra test for RendererCommon.rotateTextureMatrix().

R=hbos@webrtc.org

Review URL: https://codereview.webrtc.org/1375593002 .

Cr-Commit-Position: refs/heads/master@{#10118}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
index 99f88a3..6fef81b 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/GlRectDrawerTest.java
@@ -119,9 +119,7 @@
 
     // Draw the RGB frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    final float[] identityMatrix = new float[16];
-    Matrix.setIdentityM(identityMatrix, 0);
-    drawer.drawRgb(rgbTexture, identityMatrix);
+    drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer rgbaData = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -168,9 +166,7 @@
 
     // Draw the YUV frame onto the pixel buffer.
     final GlRectDrawer drawer = new GlRectDrawer();
-    final float[] texMatrix = new float[16];
-    Matrix.setIdentityM(texMatrix, 0);
-    drawer.drawYuv(yuvTextures, texMatrix);
+    drawer.drawYuv(yuvTextures, RendererCommon.identityMatrix());
 
     // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
     final ByteBuffer data = ByteBuffer.allocateDirect(WIDTH * HEIGHT * 4);
@@ -257,9 +253,7 @@
         GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, WIDTH,
             HEIGHT, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, rgbPlane);
         // Draw the RGB data onto the SurfaceTexture.
-        final float[] identityMatrix = new float[16];
-        Matrix.setIdentityM(identityMatrix, 0);
-        drawer.drawRgb(rgbTexture, identityMatrix);
+        drawer.drawRgb(rgbTexture, RendererCommon.identityMatrix());
         eglBase.swapBuffers();
       }
 
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java
index cd8bfcb..cc73fa5 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/RendererCommonTest.java
@@ -36,64 +36,64 @@
 import static org.webrtc.RendererCommon.ScalingType.*;
 import static org.webrtc.RendererCommon.getDisplaySize;
 import static org.webrtc.RendererCommon.getLayoutMatrix;
-import static org.webrtc.RendererCommon.getSamplingMatrix;
+import static org.webrtc.RendererCommon.rotateTextureMatrix;
 
-public class RendererCommonTest extends ActivityTestCase {
+public final class RendererCommonTest extends ActivityTestCase {
   @SmallTest
   static public void testDisplaySizeNoFrame() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0), new Point(0, 0));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0), new Point(0, 0));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0), new Point(0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 0, 0));
   }
 
   @SmallTest
-  static public void testDisplaySizeDegenerateAspectRatio() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720), new Point(1280, 720));
+  public static void testDisplaySizeDegenerateAspectRatio() {
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 0.0f, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 0.0f, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 0.0f, 1280, 720));
   }
 
   @SmallTest
-  static public void testZeroDisplaySize() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0), new Point(0, 0));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0), new Point(0, 0));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0), new Point(0, 0));
+  public static void testZeroDisplaySize() {
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 0, 0));
+    assertEquals(new Point(0, 0), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 0, 0));
   }
 
   @SmallTest
-  static public void testDisplaySizePerfectFit() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280), new Point(720, 1280));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280), new Point(720, 1280));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280), new Point(720, 1280));
+  public static void testDisplaySizePerfectFit() {
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 1280, 720));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 720, 1280));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 720, 1280));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 720, 1280));
   }
 
   @SmallTest
-  static public void testLandscapeVideoInPortraitDisplay() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280), new Point(720, 405));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280), new Point(720, 1280));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280), new Point(720, 720));
+  public static void testLandscapeVideoInPortraitDisplay() {
+    assertEquals(new Point(720, 405), getDisplaySize(SCALE_ASPECT_FIT, 16.0f / 9, 720, 1280));
+    assertEquals(new Point(720, 1280), getDisplaySize(SCALE_ASPECT_FILL, 16.0f / 9, 720, 1280));
+    assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 16.0f / 9, 720, 1280));
   }
 
   @SmallTest
-  static public void testPortraitVideoInLandscapeDisplay() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720), new Point(405, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720), new Point(720, 720));
+  public static void testPortraitVideoInLandscapeDisplay() {
+    assertEquals(new Point(405, 720), getDisplaySize(SCALE_ASPECT_FIT, 9.0f / 16, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 9.0f / 16, 1280, 720));
+    assertEquals(new Point(720, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 9.0f / 16, 1280, 720));
   }
 
   @SmallTest
-  static public void testFourToThreeVideoInSixteenToNineDisplay() {
-    assertEquals(getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720), new Point(960, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720), new Point(1280, 720));
-    assertEquals(getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720), new Point(1280, 720));
+  public static void testFourToThreeVideoInSixteenToNineDisplay() {
+    assertEquals(new Point(960, 720), getDisplaySize(SCALE_ASPECT_FIT, 4.0f / 3, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_FILL, 4.0f / 3, 1280, 720));
+    assertEquals(new Point(1280, 720), getDisplaySize(SCALE_ASPECT_BALANCED, 4.0f / 3, 1280, 720));
   }
 
   // Only keep 2 rounded decimals to make float comparison robust.
-  static private double[] round(float[] array) {
-    assertEquals(array.length, 16);
+  private static double[] round(float[] array) {
+    assertEquals(16, array.length);
     final double[] doubleArray = new double[16];
     for (int i = 0; i < 16; ++i) {
       doubleArray[i] = Math.round(100 * array[i]) / 100.0;
@@ -108,69 +108,82 @@
   // v' = u * m[1] + v * m[5] + m[13].
 
   @SmallTest
-  static public void testLayoutMatrixDefault() {
+  public static void testLayoutMatrixDefault() {
     final float layoutMatrix[] = getLayoutMatrix(false, 1.0f, 1.0f);
     // Assert:
     // u' = u.
     // v' = v.
-    MoreAsserts.assertEquals(round(layoutMatrix), new double[]
-        {1, 0, 0, 0,
-         0, 1, 0, 0,
-         0, 0, 1, 0,
-         0, 0, 0, 1});
+    MoreAsserts.assertEquals(new double[] {
+        1, 0, 0, 0,
+        0, 1, 0, 0,
+        0, 0, 1, 0,
+        0, 0, 0, 1}, round(layoutMatrix));
   }
 
   @SmallTest
-  static public void testLayoutMatrixMirror() {
+  public static void testLayoutMatrixMirror() {
     final float layoutMatrix[] = getLayoutMatrix(true, 1.0f, 1.0f);
     // Assert:
     // u' = 1 - u.
     // v' = v.
-    MoreAsserts.assertEquals(round(layoutMatrix), new double[]
-        {-1, 0, 0, 0,
-          0, 1, 0, 0,
-          0, 0, 1, 0,
-          1, 0, 0, 1});
+    MoreAsserts.assertEquals(new double[] {
+        -1, 0, 0, 0,
+         0, 1, 0, 0,
+         0, 0, 1, 0,
+         1, 0, 0, 1}, round(layoutMatrix));
   }
 
   @SmallTest
-  static public void testLayoutMatrixScale() {
+  public static void testLayoutMatrixScale() {
     // Video has aspect ratio 2, but layout is square. This will cause only the center part of the
     // video to be visible, i.e. the u coordinate will go from 0.25 to 0.75 instead of from 0 to 1.
     final float layoutMatrix[] = getLayoutMatrix(false, 2.0f, 1.0f);
     // Assert:
     // u' = 0.25 + 0.5 u.
     // v' = v.
-    MoreAsserts.assertEquals(round(layoutMatrix), new double[]
-        { 0.5, 0, 0, 0,
-            0, 1, 0, 0,
-            0, 0, 1, 0,
-         0.25, 0, 0, 1});
+    MoreAsserts.assertEquals(new double[] {
+         0.5, 0, 0, 0,
+           0, 1, 0, 0,
+           0, 0, 1, 0,
+        0.25, 0, 0, 1}, round(layoutMatrix));
   }
 
   @SmallTest
-  static public void testSamplingMatrixDefault() {
-    final float samplingMatrix[] = getSamplingMatrix(null, 0);
+  public static void testRotateTextureMatrixDefault() {
+    // Test that rotation with 0 degrees returns an identical matrix.
+    final float[] matrix = new float[] {
+        1, 2, 3, 4,
+        5, 6, 7, 8,
+        9, 0, 1, 2,
+        3, 4, 5, 6
+    };
+    final float rotatedMatrix[] = rotateTextureMatrix(matrix, 0);
+    MoreAsserts.assertEquals(round(matrix), round(rotatedMatrix));
+  }
+
+  @SmallTest
+  public static void testRotateTextureMatrix90Deg() {
+    final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 90);
     // Assert:
-    // u' = u.
-    // v' = 1 - v.
-    MoreAsserts.assertEquals(round(samplingMatrix), new double[]
-        {1,  0, 0, 0,
-         0, -1, 0, 0,
-         0,  0, 1, 0,
-         0,  1, 0, 1});
+    // u' = 1 - v.
+    // v' = u.
+    MoreAsserts.assertEquals(new double[] {
+         0, 1, 0, 0,
+        -1, 0, 0, 0,
+         0, 0, 1, 0,
+         1, 0, 0, 1}, round(samplingMatrix));
   }
 
   @SmallTest
-  static public void testSamplingMatrixRotation90Deg() {
-    final float samplingMatrix[] = getSamplingMatrix(null, 90);
+  public static void testRotateTextureMatrix180Deg() {
+    final float samplingMatrix[] = rotateTextureMatrix(RendererCommon.identityMatrix(), 180);
     // Assert:
     // u' = 1 - u.
     // v' = 1 - v.
-    MoreAsserts.assertEquals(round(samplingMatrix), new double[]
-        { 0, -1, 0, 0,
-         -1,  0, 0, 0,
-          0,  0, 1, 0,
-          1,  1, 0, 1});
+    MoreAsserts.assertEquals(new double[] {
+        -1,  0, 0, 0,
+         0, -1, 0, 0,
+         0,  0, 1, 0,
+         1,  1, 0, 1}, round(samplingMatrix));
   }
 }
diff --git a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
index 6b30321..2cb8af7 100644
--- a/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/GlRectDrawer.java
@@ -99,6 +99,8 @@
       + "  gl_FragColor = texture2D(oes_tex, interp_tc);\n"
       + "}\n";
 
+  // Vertex coordinates in Normalized Device Coordinates, i.e. (-1, -1) is bottom-left and (1, 1) is
+  // top-right.
   private static final FloatBuffer FULL_RECTANGLE_BUF =
       GlUtil.createFloatBuffer(new float[] {
             -1.0f, -1.0f,  // Bottom left.
@@ -107,6 +109,7 @@
              1.0f,  1.0f,  // Top right.
           });
 
+  // Texture coordinates - (0, 0) is bottom-left and (1, 1) is top-right.
   private static final FloatBuffer FULL_RECTANGLE_TEX_BUF =
       GlUtil.createFloatBuffer(new float[] {
             0.0f, 0.0f,  // Bottom left.
diff --git a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
index 5195044..fec41c1 100644
--- a/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
+++ b/talk/app/webrtc/java/android/org/webrtc/RendererCommon.java
@@ -28,7 +28,6 @@
 package org.webrtc;
 
 import android.graphics.Point;
-import android.graphics.SurfaceTexture;
 import android.opengl.Matrix;
 
 /**
@@ -61,37 +60,40 @@
   // The minimum fraction of the frame content that will be shown for |SCALE_ASPECT_BALANCED|.
   // This limits excessive cropping when adjusting display size.
   private static float BALANCED_VISIBLE_FRACTION = 0.5625f;
+  public static final float[] identityMatrix() {
+    return new float[] {
+        1, 0, 0, 0,
+        0, 1, 0, 0,
+        0, 0, 1, 0,
+        0, 0, 0, 1};
+  }
   // Matrix with transform y' = 1 - y.
-  private static final float[] VERTICAL_FLIP = new float[] {
-      1,  0, 0, 0,
-      0, -1, 0, 0,
-      0,  0, 1, 0,
-      0,  1, 0, 1};
+  public static final float[] verticalFlipMatrix() {
+    return new float[] {
+        1,  0, 0, 0,
+        0, -1, 0, 0,
+        0,  0, 1, 0,
+        0,  1, 0, 1};
+  }
 
   /**
-   * Returns matrix that transforms standard coordinates to their proper sampling locations in
-   * the texture. This transform compensates for any properties of the video source that
-   * cause it to appear different from a normalized texture. If the video source is based on
-   * ByteBuffers, pass null in |surfaceTexture|.
+   * Returns texture matrix that will have the effect of rotating the frame |rotationDegree|
+   * clockwise when rendered.
    */
-  public static float[] getSamplingMatrix(SurfaceTexture surfaceTexture, float rotationDegree) {
-    final float[] samplingMatrix;
-    if (surfaceTexture == null) {
-      // For ByteBuffers, row 0 specifies the top row, but for a texture, row 0 specifies the
-      // bottom row. Flip the image vertically to compensate for this.
-      samplingMatrix = VERTICAL_FLIP;
-    } else {
-      samplingMatrix = new float[16];
-      surfaceTexture.getTransformMatrix(samplingMatrix);
-    }
-    // Clockwise rotation matrix in the XY-plane (around the Z-axis).
+  public static float[] rotateTextureMatrix(float[] textureMatrix, float rotationDegree) {
     final float[] rotationMatrix = new float[16];
     Matrix.setRotateM(rotationMatrix, 0, rotationDegree, 0, 0, 1);
     adjustOrigin(rotationMatrix);
-    // Multiply matrices together.
-    final float[] tmpMatrix = new float[16];
-    Matrix.multiplyMM(tmpMatrix, 0, samplingMatrix, 0, rotationMatrix, 0);
-    return tmpMatrix;
+    return multiplyMatrices(textureMatrix, rotationMatrix);
+  }
+
+  /**
+   * Returns new matrix with the result of a * b.
+   */
+  public static float[] multiplyMatrices(float[] a, float[] b) {
+    final float[] resultMatrix = new float[16];
+    Matrix.multiplyMM(resultMatrix, 0, a, 0, b, 0);
+    return resultMatrix;
   }
 
   /**
diff --git a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
index 7b3b3dd..85afdf7 100644
--- a/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
+++ b/talk/app/webrtc/java/android/org/webrtc/SurfaceViewRenderer.java
@@ -389,19 +389,28 @@
     }
 
     final long startTimeNs = System.nanoTime();
-    if (!frame.yuvFrame) {
+    final float[] samplingMatrix;
+    if (frame.yuvFrame) {
+      // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+      // top-left corner of the image, but in glTexImage2D() the first element corresponds to the
+      // bottom-left corner. We correct this discrepancy by setting a vertical flip as sampling
+      // matrix.
+      samplingMatrix = RendererCommon.verticalFlipMatrix();
+    } else {
       // TODO(magjed): Move updateTexImage() to the video source instead.
       SurfaceTexture surfaceTexture = (SurfaceTexture) frame.textureObject;
       surfaceTexture.updateTexImage();
+      samplingMatrix = new float[16];
+      surfaceTexture.getTransformMatrix(samplingMatrix);
     }
 
-    final float[] texMatrix = new float[16];
+    final float[] texMatrix;
     synchronized (layoutLock) {
-      final float[] samplingMatrix = RendererCommon.getSamplingMatrix(
-          (SurfaceTexture) frame.textureObject, frame.rotationDegree);
+      final float[] rotatedSamplingMatrix =
+          RendererCommon.rotateTextureMatrix(samplingMatrix, frame.rotationDegree);
       final float[] layoutMatrix = RendererCommon.getLayoutMatrix(
           mirror, frameAspectRatio(), (float) layoutWidth / layoutHeight);
-      Matrix.multiplyMM(texMatrix, 0, samplingMatrix, 0, layoutMatrix, 0);
+      texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
     }
 
     GLES20.glViewport(0, 0, surfaceWidth, surfaceHeight);
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
index 9909bff..e15b49f 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoRendererGui.java
@@ -143,7 +143,7 @@
     // |screenHeight|, |videoWidth|, |videoHeight|, |rotationDegree|, |scalingType|, and |mirror|.
     private final Object updateLayoutLock = new Object();
     // Texture sampling matrix.
-    private float[] samplingMatrix;
+    private float[] rotatedSamplingMatrix;
     // Viewport dimensions.
     private int screenWidth;
     private int screenHeight;
@@ -240,24 +240,29 @@
         }
 
         if (isNewFrame) {
+          final float[] samplingMatrix;
           if (pendingFrame.yuvFrame) {
             rendererType = RendererType.RENDERER_YUV;
             drawer.uploadYuvData(yuvTextures, pendingFrame.width, pendingFrame.height,
                 pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
+            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
+            // top-left corner of the image, but in glTexImage2D() the first element corresponds to
+            // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
+            // sampling matrix.
+            samplingMatrix = RendererCommon.verticalFlipMatrix();
           } else {
             rendererType = RendererType.RENDERER_TEXTURE;
             // External texture rendering. Copy texture id and update texture image to latest.
             // TODO(magjed): We should not make an unmanaged copy of texture id. Also, this is not
             // the best place to call updateTexImage.
             oesTexture = pendingFrame.textureId;
-            if (pendingFrame.textureObject instanceof SurfaceTexture) {
-              SurfaceTexture surfaceTexture =
-                  (SurfaceTexture) pendingFrame.textureObject;
-              surfaceTexture.updateTexImage();
-            }
+            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
+            surfaceTexture.updateTexImage();
+            samplingMatrix = new float[16];
+            surfaceTexture.getTransformMatrix(samplingMatrix);
           }
-          samplingMatrix = RendererCommon.getSamplingMatrix(
-              (SurfaceTexture) pendingFrame.textureObject, pendingFrame.rotationDegree);
+          rotatedSamplingMatrix = RendererCommon.rotateTextureMatrix(
+              samplingMatrix, pendingFrame.rotationDegree);
           copyTimeNs += (System.nanoTime() - now);
           VideoRenderer.renderFrameDone(pendingFrame);
           pendingFrame = null;
@@ -265,8 +270,8 @@
       }
 
       updateLayoutMatrix();
-      final float[] texMatrix = new float[16];
-      Matrix.multiplyMM(texMatrix, 0, samplingMatrix, 0, layoutMatrix, 0);
+      final float[] texMatrix =
+          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
       if (rendererType == RendererType.RENDERER_YUV) {
         drawer.drawYuv(yuvTextures, texMatrix);
       } else {