Support formats whose widths are not a multiple of 16 on Android.

This is an updated version of perkj's issue (https://webrtc-codereview.appspot.com/44129004/) which was reverted due to libjingle_peerconnection_android_unittest crashing on Nexus 9. It crashed because there was old test code still assuming the width was a multiple of 16 (which was only a problem on devices whose supported widths are not multiples of 16).

BUG=4522
R=glaznev@webrtc.org, magjed@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/45109004

Cr-Commit-Position: refs/heads/master@{#9029}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
index c7f3fff..32544a6 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -260,7 +260,7 @@
       assertTrue(observer.WaitForCapturerToStart());
       observer.WaitForNextCapturedFrame();
       // Check the frame size.
-      assertEquals((format.width*format.height*3)/2, observer.frameSize());
+      assertEquals(format.frameSize(), observer.frameSize());
       capturer.stopCapture();
     }
     capturer.dispose();
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index 6711b6e..89ab486 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -33,6 +33,7 @@
 #include "webrtc/base/json.h"
 #include "webrtc/base/timeutils.h"
 #include "webrtc/base/thread.h"
+#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
 namespace webrtc {
 
@@ -93,12 +94,16 @@
     if (!apply_rotation_ || captured_frame->rotation == kVideoRotation_0) {
       DCHECK(captured_frame->fourcc == cricket::FOURCC_YV12);
       const uint8_t* y_plane = static_cast<uint8_t*>(captured_frame_.data);
-      const int y_stride = captured_frame->width;
-      const uint8_t* v_plane = y_plane +
-          captured_frame->width * captured_frame->height;
-      const int uv_stride = (captured_frame->width + 1) / 2;
-      const int uv_height = (captured_frame->height + 1) / 2;
-      const uint8_t* u_plane = v_plane + uv_stride * uv_height;
+
+      // Android guarantees that the stride is a multiple of 16.
+      // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+      int y_stride;
+      int uv_stride;
+      webrtc::Calc16ByteAlignedStride(captured_frame->width, &y_stride,
+                                      &uv_stride);
+      const uint8_t* v_plane = y_plane + y_stride * captured_frame->height;
+      const uint8_t* u_plane =
+          v_plane + uv_stride * webrtc::AlignInt(captured_frame->height, 2) / 2;
 
       // Create a WrappedI420Buffer and bind the |no_longer_used| callback
       // to the static method ReturnFrame. The |delegate_| is bound as an
diff --git a/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java
index 0039000..ea4a512 100644
--- a/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/src/org/webrtc/VideoCapturerAndroid.java
@@ -28,6 +28,7 @@
 package org.webrtc;
 
 import static java.lang.Math.abs;
+import static java.lang.Math.ceil;
 
 import android.content.Context;
 import android.graphics.ImageFormat;
@@ -264,6 +265,11 @@
     public final int height;
     public final int maxFramerate;
     public final int minFramerate;
+    // TODO(hbos): If VideoCapturerAndroid.startCapture is updated to support
+    // other image formats then this needs to be updated and
+    // VideoCapturerAndroid.getSupportedFormats needs to return CaptureFormats of
+    // all imageFormats.
+    public final int imageFormat = ImageFormat.YV12;
 
     public CaptureFormat(int width, int height, int minFramerate,
         int maxFramerate) {
@@ -272,6 +278,33 @@
       this.minFramerate = minFramerate;
       this.maxFramerate = maxFramerate;
     }
+
+    // Calculates the frame size of this capture format.
+    public int frameSize() {
+      return frameSize(width, height, imageFormat);
+    }
+
+    // Calculates the frame size of the specified image format. Currently only
+    // supporting ImageFormat.YV12. The YV12 stride is the width rounded up to
+    // the closest multiple of 16, and width and height are always even.
+    // Android guarantees this:
+    // http://developer.android.com/reference/android/hardware/Camera.Parameters.html#setPreviewFormat%28int%29
+    public static int frameSize(int width, int height, int imageFormat) {
+      if (imageFormat != ImageFormat.YV12) {
+        throw new UnsupportedOperationException("Don't know how to calculate "
+            + "the frame size of non-YV12 image formats.");
+      }
+      int yStride = roundUp(width, 16);
+      int uvStride = roundUp(yStride / 2, 16);
+      int ySize = yStride * height;
+      int uvSize = uvStride * height / 2;
+      return ySize + uvSize * 2;
+    }
+
+    // Rounds up |x| to the closest value that is a multiple of |alignment|.
+    private static int roundUp(int x, int alignment) {
+      return (int)ceil(x / (double)alignment) * alignment;
+    }
   }
 
   private static String getSupportedFormatsAsJson(int id) throws JSONException {
@@ -312,13 +345,6 @@
 
       List<Camera.Size> supportedSizes = parameters.getSupportedPreviewSizes();
       for (Camera.Size size : supportedSizes) {
-        if (size.width % 16 != 0) {
-          // If the width is not a multiple of 16, the frames received from the
-          // camera will have a stride != width when YV12 is used. Since we
-          // currently only support tightly packed images, we simply ignore
-          // those resolutions.
-          continue;
-        }
         formatList.add(new CaptureFormat(size.width, size.height,
             range[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
             range[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]));
@@ -359,9 +385,6 @@
     if (frameObserver == null) {
       throw new RuntimeException("frameObserver not set.");
     }
-    if (width % 16 != 0) {
-      throw new RuntimeException("width must be a multiple of 16." );
-    }
     if (cameraThreadHandler != null) {
       throw new RuntimeException("Camera has already been started.");
     }
@@ -444,6 +467,9 @@
       Camera.Size pictureSize = getPictureSize(parameters, width, height);
       parameters.setPictureSize(pictureSize.width, pictureSize.height);
       parameters.setPreviewSize(width, height);
+      // TODO(hbos): If other ImageFormats are to be supported then
+      // CaptureFormat needs to be updated (currently hard-coded to say YV12,
+      // getSupportedFormats only returns YV12).
       int format = ImageFormat.YV12;
       parameters.setPreviewFormat(format);
       camera.setParameters(parameters);
@@ -685,14 +711,14 @@
         throw new RuntimeException("camera already set.");
 
       this.camera = camera;
-      int newframeSize =
-          width * height * ImageFormat.getBitsPerPixel(format) / 8;
+      int newFrameSize = CaptureFormat.frameSize(width, height, format);
+
       int numberOfEnquedCameraBuffers = 0;
-      if (newframeSize != frameSize) {
+      if (newFrameSize != frameSize) {
         // Create new frames and add to the camera.
         // The old frames will be released when frames are returned.
         for (int i = 0; i < numCaptureBuffers; ++i) {
-          Frame frame = new Frame(newframeSize);
+          Frame frame = new Frame(newFrameSize);
           cameraFrames.add(frame);
           this.camera.addCallbackBuffer(frame.data());
         }
@@ -706,7 +732,7 @@
           }
         }
       }
-      frameSize = newframeSize;
+      frameSize = newFrameSize;
       Log.d(TAG, "queueCameraBuffers enqued " + numberOfEnquedCameraBuffers
           + " buffers of size " + frameSize + ".");
     }