This CL changes VideoCapturerAndroid to handle CVO the same way when capturing to texture as when using ordinary byte buffers.

I.e., rotation is applied in C++ in the VideoFrameFactory if apply_rotation_ is set. If not, rotation is sent in RTP.

BUG=webrtc:4993
R=nisse@chromium.org

Review URL: https://codereview.webrtc.org/1493913007 .

Cr-Commit-Position: refs/heads/master@{#10986}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
index 0331250..5ac9077 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -132,7 +132,8 @@
     }
     @Override
     public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, long timeStamp) {
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timeStamp) {
       synchronized (frameLock) {
         ++framesCaptured;
         frameWidth = width;
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index c4c5a48..d8f1217 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -102,12 +102,17 @@
       int output_width,
       int output_height) const override {
     if (buffer_->native_handle() != nullptr) {
+      // TODO(perkj) Implement cropping.
+      RTC_CHECK_EQ(cropped_input_width, buffer_->width());
+      RTC_CHECK_EQ(cropped_input_height, buffer_->height());
       rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
           static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
-              ->CropAndScale(cropped_input_width, cropped_input_height,
-                             output_width, output_height));
+              ->ScaleAndRotate(output_width, output_height,
+                               apply_rotation_ ? input_frame->rotation :
+                                   webrtc::kVideoRotation_0));
       return new cricket::WebRtcVideoFrame(
-          scaled_buffer, input_frame->time_stamp, input_frame->rotation);
+          scaled_buffer, input_frame->time_stamp,
+          apply_rotation_ ? webrtc::kVideoRotation_0 : input_frame->rotation);
     }
     return VideoFrameFactory::CreateAliasedFrame(input_frame,
                                                  cropped_input_width,
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
index 9a3b5ca..d85f2c5 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -762,13 +762,10 @@
       transformMatrix =
           RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
     }
-    transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, rotation);
-
-    final int rotatedWidth = (rotation % 180 == 0) ? captureFormat.width : captureFormat.height;
-    final int rotatedHeight = (rotation % 180 == 0) ? captureFormat.height : captureFormat.width;
     cameraStatistics.addPendingFrame(timestampNs);
-    frameObserver.onTextureFrameCaptured(rotatedWidth, rotatedHeight, oesTextureId,
-        transformMatrix, timestampNs);
+
+    frameObserver.onTextureFrameCaptured(captureFormat.width, captureFormat.height, oesTextureId,
+        transformMatrix, rotation, timestampNs);
   }
 
   // Class used for allocating and bookkeeping video frames. All buffers are
@@ -894,7 +891,8 @@
     // Delivers a captured frame in a texture with id |oesTextureId|. Called on a Java thread
     // owned by VideoCapturerAndroid.
     abstract void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, long timestamp);
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timestamp);
 
     // Requests an output format from the video capturer. Captured frames
     // by the camera will be scaled/or dropped by the video capturer.
@@ -925,9 +923,10 @@
 
     @Override
     public void onTextureFrameCaptured(
-        int width, int height, int oesTextureId, float[] transformMatrix, long timestamp) {
+        int width, int height, int oesTextureId, float[] transformMatrix, int rotation,
+        long timestamp) {
       nativeOnTextureFrameCaptured(nativeCapturer, width, height, oesTextureId, transformMatrix,
-          timestamp);
+          rotation, timestamp);
     }
 
     @Override
@@ -940,7 +939,7 @@
     private native void nativeOnByteBufferFrameCaptured(long nativeCapturer,
         byte[] data, int length, int width, int height, int rotation, long timeStamp);
     private native void nativeOnTextureFrameCaptured(long nativeCapturer, int width, int height,
-        int oesTextureId, float[] transformMatrix, long timestamp);
+        int oesTextureId, float[] transformMatrix, int rotation, long timestamp);
     private native void nativeOnOutputFormatRequest(long nativeCapturer,
         int width, int height, int framerate);
   }
diff --git a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
index 92ec4f0..17b52b5 100644
--- a/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
+++ b/talk/app/webrtc/java/jni/androidmediaencoder_jni.cc
@@ -584,11 +584,10 @@
       if (frame.native_handle() != nullptr) {
         rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
             static_cast<AndroidTextureBuffer*>(
-                frame.video_frame_buffer().get())->CropAndScale(
-                    frame.width(),
-                    frame.height(),
+                frame.video_frame_buffer().get())->ScaleAndRotate(
                     scaled_resolution.width,
-                    scaled_resolution.height));
+                    scaled_resolution.height,
+                    webrtc::kVideoRotation_0));
         input_frame.set_video_frame_buffer(scaled_buffer);
       } else {
         input_frame = quality_scaler_.GetScaledFrame(frame);
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
index 9b3053c..0c23627 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.cc
@@ -185,6 +185,7 @@
 
 void AndroidVideoCapturerJni::OnTextureFrame(int width,
                                              int height,
+                                             int rotation,
                                              int64_t timestamp_ns,
                                              const NativeHandleImpl& handle) {
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
@@ -194,7 +195,7 @@
                     timestamp_ns)));
   AsyncCapturerInvoke("OnIncomingFrame",
                       &webrtc::AndroidVideoCapturer::OnIncomingFrame,
-                      buffer, 0, timestamp_ns);
+                      buffer, rotation, timestamp_ns);
 }
 
 void AndroidVideoCapturerJni::OnOutputFormatRequest(int width,
@@ -228,9 +229,9 @@
 JOW(void, VideoCapturerAndroid_00024NativeObserver_nativeOnTextureFrameCaptured)
     (JNIEnv* jni, jclass, jlong j_capturer, jint j_width, jint j_height,
         jint j_oes_texture_id, jfloatArray j_transform_matrix,
-        jlong j_timestamp) {
+        jint j_rotation, jlong j_timestamp) {
    reinterpret_cast<AndroidVideoCapturerJni*>(j_capturer)
-         ->OnTextureFrame(j_width, j_height, j_timestamp,
+         ->OnTextureFrame(j_width, j_height, j_rotation, j_timestamp,
                           NativeHandleImpl(jni, j_oes_texture_id,
                                            j_transform_matrix));
 }
diff --git a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
index 4c0b48c..0663f3a 100644
--- a/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
+++ b/talk/app/webrtc/java/jni/androidvideocapturer_jni.h
@@ -62,7 +62,7 @@
   void OnCapturerStarted(bool success);
   void OnMemoryBufferFrame(void* video_frame, int length, int width,
                            int height, int rotation, int64_t timestamp_ns);
-  void OnTextureFrame(int width, int height, int64_t timestamp_ns,
+  void OnTextureFrame(int width, int height, int rotation, int64_t timestamp_ns,
                       const NativeHandleImpl& handle);
   void OnOutputFormatRequest(int width, int height, int fps);
 
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index f589447..1757184 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -33,9 +33,50 @@
 #include "webrtc/base/keep_ref_until_done.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/base/scoped_ref_ptr.h"
+#include "webrtc/base/logging.h"
 
 using webrtc::NativeHandleBuffer;
 
+namespace {
+
+void RotateMatrix(float a[16], webrtc::VideoRotation rotation) {
+  // Texture coordinates are in the range 0 to 1. The transformation of the last
+  // row in each rotation matrix is needed for proper translation, e.g, to
+  // mirror x, we don't replace x by -x, but by 1-x.
+  switch (rotation) {
+    case webrtc::kVideoRotation_0:
+      break;
+    case webrtc::kVideoRotation_90: {
+      const float ROTATE_90[16] =
+          { a[4], a[5], a[6], a[7],
+            -a[0], -a[1], -a[2], -a[3],
+            a[8], a[9], a[10], a[11],
+            a[0] + a[12], a[1] + a[13], a[2] + a[14], a[3] + a[15]};
+      memcpy(a, ROTATE_90, sizeof(ROTATE_90));
+    } break;
+    case webrtc::kVideoRotation_180: {
+      const float ROTATE_180[16] =
+          { -a[0], -a[1], -a[2], -a[3],
+            -a[4], -a[5], -a[6], -a[7],
+            a[8], a[9], a[10], a[11],
+            a[0] + a[4] + a[12], a[1] + a[5] + a[13], a[2] + a[6] + a[14],
+            a[3] + a[7] + a[15]};
+        memcpy(a, ROTATE_180, sizeof(ROTATE_180));
+      }
+      break;
+    case webrtc::kVideoRotation_270: {
+      const float ROTATE_270[16] =
+          { -a[4], -a[5], -a[6], -a[7],
+            a[0], a[1], a[2], a[3],
+            a[8], a[9], a[10], a[11],
+            a[4] + a[12], a[5] + a[13], a[6] + a[14], a[7] + a[15]};
+        memcpy(a, ROTATE_270, sizeof(ROTATE_270));
+    } break;
+  }
+}
+
+}  // anonymous namespace
+
 namespace webrtc_jni {
 
 // Aligning pointer to 64 bytes for improved performance, e.g. use SIMD.
@@ -120,22 +161,26 @@
   return copy;
 }
 
-rtc::scoped_refptr<AndroidTextureBuffer> AndroidTextureBuffer::CropAndScale(
-    int cropped_input_width,
-    int cropped_input_height,
-    int dst_widht,
-    int dst_height) {
-  // TODO(perkj) Implement cropping.
-  RTC_CHECK_EQ(cropped_input_width, width_);
-  RTC_CHECK_EQ(cropped_input_height, height_);
+rtc::scoped_refptr<AndroidTextureBuffer>
+AndroidTextureBuffer::ScaleAndRotate(int dst_width,
+                                     int dst_height,
+                                     webrtc::VideoRotation rotation) {
+  if (width() == dst_width && height() == dst_height &&
+      rotation == webrtc::kVideoRotation_0) {
+    return this;
+  }
+  int rotated_width = (rotation % 180 == 0) ? dst_width : dst_height;
+  int rotated_height = (rotation % 180 == 0) ? dst_height : dst_width;
 
   // Here we use Bind magic to add a reference count to |this| until the newly
-  // created AndroidTextureBuffer is destructed. ScaledFrameNotInUse will be
-  // called that happens and when it finishes, the reference count to |this|
-  // will be decreased by one.
-  return new rtc::RefCountedObject<AndroidTextureBuffer>(
-      dst_widht, dst_height, native_handle_, surface_texture_helper_,
-      rtc::KeepRefUntilDone(this));
+  // created AndroidTextureBuffer is destructed
+  rtc::scoped_refptr<AndroidTextureBuffer> buffer(
+      new rtc::RefCountedObject<AndroidTextureBuffer>(
+          rotated_width, rotated_height, native_handle_,
+          surface_texture_helper_, rtc::KeepRefUntilDone(this)));
+
+  RotateMatrix(buffer->native_handle_.sampling_matrix, rotation);
+  return buffer;
 }
 
 }  // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index 2026486..1d0f601 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -32,6 +32,7 @@
 #include <jni.h>
 
 #include "webrtc/common_video/include/video_frame_buffer.h"
+#include "webrtc/common_video/rotation.h"
 
 namespace webrtc_jni {
 
@@ -55,11 +56,10 @@
   ~AndroidTextureBuffer();
   rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
 
-  rtc::scoped_refptr<AndroidTextureBuffer> CropAndScale(
-      int cropped_input_width,
-      int cropped_input_height,
+  rtc::scoped_refptr<AndroidTextureBuffer> ScaleAndRotate(
       int dst_widht,
-      int dst_height);
+      int dst_height,
+      webrtc::VideoRotation rotation);
 
  private:
   NativeHandleImpl native_handle_;