Add support for scaling textures in AndroidVideoCapturer.
The idea is to also reuse AndroidTextureBuffer::CropAndScale when scaling in the encoder.
BUG=webrtc:4993
R=magjed@webrtc.org
Review URL: https://codereview.webrtc.org/1471333003 .
Cr-Commit-Position: refs/heads/master@{#10802}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
index 1a6c1bc..d21e42f 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTest.java
@@ -299,4 +299,20 @@
VideoCapturerAndroidTestFixtures.cameraErrorEventOnBufferStarvation(capturer,
cameraEvents, getInstrumentation().getContext());
}
+
+ @MediumTest
+ // This tests that frames forwarded to a renderer are scaled if onOutputFormatRequest is
+ // called. This tests both the Java and C++ parts of the stack.
+ public void testScaleCameraOutput() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null);
+ VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+ }
+
+ @MediumTest
+ // This tests that frames forwarded to a renderer are scaled if onOutputFormatRequest is
+ // called. This tests both the Java and C++ parts of the stack.
+ public void testScaleCameraOutputUsingTextures() throws InterruptedException {
+ VideoCapturerAndroid capturer = VideoCapturerAndroid.create("", null, EGL10.EGL_NO_CONTEXT);
+ VideoCapturerAndroidTestFixtures.scaleCameraOutput(capturer);
+ }
}
diff --git a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
index da45028..2bd49be 100644
--- a/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
+++ b/talk/app/webrtc/androidtests/src/org/webrtc/VideoCapturerAndroidTestFixtures.java
@@ -43,16 +43,32 @@
static class RendererCallbacks implements VideoRenderer.Callbacks {
private int framesRendered = 0;
private Object frameLock = 0;
+ private int width = 0;
+ private int height = 0;
@Override
public void renderFrame(I420Frame frame) {
synchronized (frameLock) {
++framesRendered;
+ width = frame.rotatedWidth();
+ height = frame.rotatedHeight();
frameLock.notify();
}
VideoRenderer.renderFrameDone(frame);
}
+ public int frameWidth() {
+ synchronized (frameLock) {
+ return width;
+ }
+ }
+
+ public int frameHeight() {
+ synchronized (frameLock) {
+ return height;
+ }
+ }
+
public int WaitForNextFrameToRender() throws InterruptedException {
synchronized (frameLock) {
frameLock.wait();
@@ -541,4 +557,43 @@
capturer.dispose();
assertTrue(capturer.isReleased());
}
+
+ static public void scaleCameraOutput(VideoCapturerAndroid capturer) throws InterruptedException {
+ PeerConnectionFactory factory = new PeerConnectionFactory();
+ VideoSource source =
+ factory.createVideoSource(capturer, new MediaConstraints());
+ VideoTrack track = factory.createVideoTrack("dummy", source);
+ RendererCallbacks renderer = new RendererCallbacks();
+ track.addRenderer(new VideoRenderer(renderer));
+ assertTrue(renderer.WaitForNextFrameToRender() > 0);
+
+ final int startWidth = renderer.frameWidth();
+ final int startHeight = renderer.frameHeight();
+ final int frameRate = 30;
+ final int scaledWidth = startWidth / 2;
+ final int scaledHeight = startHeight / 2;
+
+ // Request the captured frames to be scaled.
+ capturer.onOutputFormatRequest(scaledWidth, scaledHeight, frameRate);
+
+ boolean gotExpectedResolution = false;
+ int numberOfInspectedFrames = 0;
+
+ do {
+ renderer.WaitForNextFrameToRender();
+ ++numberOfInspectedFrames;
+
+ gotExpectedResolution = (renderer.frameWidth() == scaledWidth
+ && renderer.frameHeight() == scaledHeight);
+ } while (!gotExpectedResolution && numberOfInspectedFrames < 30);
+
+ source.stop();
+ track.dispose();
+ source.dispose();
+ factory.dispose();
+ assertTrue(capturer.isReleased());
+
+ assertTrue(gotExpectedResolution);
+ }
+
}
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index d850ab8..c4c5a48 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -26,6 +26,7 @@
*/
#include "talk/app/webrtc/androidvideocapturer.h"
+#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "talk/media/webrtc/webrtcvideoframe.h"
#include "webrtc/base/common.h"
#include "webrtc/base/json.h"
@@ -101,10 +102,12 @@
int output_width,
int output_height) const override {
if (buffer_->native_handle() != nullptr) {
- // TODO(perkj): Implement CreateAliasedFrame properly for textures.
- rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
- buffer_, input_frame->time_stamp, input_frame->rotation));
- return frame.release();
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> scaled_buffer(
+ static_cast<webrtc_jni::AndroidTextureBuffer*>(buffer_.get())
+ ->CropAndScale(cropped_input_width, cropped_input_height,
+ output_width, output_height));
+ return new cricket::WebRtcVideoFrame(
+ scaled_buffer, input_frame->time_stamp, input_frame->rotation);
}
return VideoFrameFactory::CreateAliasedFrame(input_frame,
cropped_input_width,
diff --git a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
index 9e1f8cd..952bd08 100644
--- a/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
+++ b/talk/app/webrtc/java/android/org/webrtc/VideoCapturerAndroid.java
@@ -300,6 +300,8 @@
// Requests a new output format from the video capturer. Captured frames
// by the camera will be scaled/or dropped by the video capturer.
+ // It does not matter if width and height are flipped, i.e., |width| = 640, |height| = 480 produce
+ // the same result as |width| = 480, |height| = 640.
// TODO(magjed/perkj): Document what this function does. Change name?
public void onOutputFormatRequest(final int width, final int height, final int framerate) {
cameraThreadHandler.post(new Runnable() {
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.cc b/talk/app/webrtc/java/jni/native_handle_impl.cc
index ed9ad8e..bb31ba6 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.cc
+++ b/talk/app/webrtc/java/jni/native_handle_impl.cc
@@ -28,9 +28,17 @@
#include "talk/app/webrtc/java/jni/native_handle_impl.h"
#include "webrtc/base/checks.h"
+#include "webrtc/base/bind.h"
+
+using rtc::scoped_refptr;
+using webrtc::NativeHandleBuffer;
namespace webrtc_jni {
+namespace {
+void ScaledFrameNotInUse(scoped_refptr<NativeHandleBuffer> original) {}
+} // anonymous namespace
+
NativeHandleImpl::NativeHandleImpl(JNIEnv* jni,
jint j_oes_texture_id,
jfloatArray j_transform_matrix)
@@ -64,4 +72,22 @@
return nullptr;
}
+rtc::scoped_refptr<AndroidTextureBuffer> AndroidTextureBuffer::CropAndScale(
+ int cropped_input_width,
+ int cropped_input_height,
+ int dst_width,
+ int dst_height) {
+ // TODO(perkj) Implement cropping.
+ RTC_CHECK_EQ(cropped_input_width, width_);
+ RTC_CHECK_EQ(cropped_input_height, height_);
+
+ // Here we use Bind magic to add a reference count to |this| until the newly
+ // created AndroidTextureBuffer is destructed. ScaledFrameNotInUse will be
+ // called when that happens, and when it finishes, the reference count to
+ // |this| will be decreased by one.
+ return new rtc::RefCountedObject<AndroidTextureBuffer>(
+ dst_width, dst_height, native_handle_,
+ rtc::Bind(&ScaledFrameNotInUse, this));
+}
+
} // namespace webrtc_jni
diff --git a/talk/app/webrtc/java/jni/native_handle_impl.h b/talk/app/webrtc/java/jni/native_handle_impl.h
index 7b657d4..911a3c4 100644
--- a/talk/app/webrtc/java/jni/native_handle_impl.h
+++ b/talk/app/webrtc/java/jni/native_handle_impl.h
@@ -54,6 +54,12 @@
~AndroidTextureBuffer();
rtc::scoped_refptr<VideoFrameBuffer> NativeToI420Buffer() override;
+ rtc::scoped_refptr<AndroidTextureBuffer> CropAndScale(
+ int cropped_input_width,
+ int cropped_input_height,
+ int dst_width,
+ int dst_height);
+
private:
NativeHandleImpl native_handle_;
rtc::Callback0<void> no_longer_used_cb_;