Add support of texture frames for video capturer.

This is a reland of r6252. The video_capture_tests failure on the
builder "Android Chromium-APK Tests" is believed to be flaky.

- Add ViECapturer unittest.
- Add CloneFrame function in I420VideoFrame.
- Encoders do not support texture yet and texture frames
are dropped in ViEEncoder for now.

Corresponding CLs:
https://codereview.chromium.org/277943002
http://cl/66620352

BUG=chromium:362437
TEST=WebRTC video stream forwarding, video_engine_core_unittests,
     common_video_unittests and video_capture_tests_apk.
TBR=fischman@webrtc.org, perkj@webrtc.org, stefan@webrtc.org, wu@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/12609004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@6258 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc
index 5b9543b..fdc2bbc 100644
--- a/webrtc/common_video/i420_video_frame.cc
+++ b/webrtc/common_video/i420_video_frame.cc
@@ -10,6 +10,8 @@
 
 #include "webrtc/common_video/interface/i420_video_frame.h"
 
+#include <string.h>
+
 #include <algorithm>  // swap
 
 namespace webrtc {
@@ -78,6 +80,15 @@
   return 0;
 }
 
+I420VideoFrame* I420VideoFrame::CloneFrame() const {
+  scoped_ptr<I420VideoFrame> new_frame(new I420VideoFrame());
+  if (new_frame->CopyFrame(*this) == -1) {
+    // CopyFrame failed.
+    return NULL;
+  }
+  return new_frame.release();
+}
+
 void I420VideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   y_plane_.Swap(videoFrame->y_plane_);
   u_plane_.Swap(videoFrame->u_plane_);
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index 29578c7..ca01fd0 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -19,8 +19,8 @@
 
 namespace webrtc {
 
-bool EqualFrames(const I420VideoFrame& videoFrame1,
-                 const I420VideoFrame& videoFrame2);
+bool EqualFrames(const I420VideoFrame& frame1,
+                 const I420VideoFrame& frame2);
 bool EqualFramesExceptSize(const I420VideoFrame& frame1,
                            const I420VideoFrame& frame2);
 int ExpectedSize(int plane_stride, int image_height, PlaneType type);
@@ -122,6 +122,29 @@
   EXPECT_TRUE(EqualFrames(frame1, frame2));
 }
 
+TEST(TestI420VideoFrame, CloneFrame) {
+  I420VideoFrame frame1;
+  scoped_ptr<I420VideoFrame> frame2;
+  const int kSizeY = 225;
+  const int kSizeU = 80;
+  const int kSizeV = 80;
+  uint8_t buffer_y[kSizeY];
+  uint8_t buffer_u[kSizeU];
+  uint8_t buffer_v[kSizeV];
+  memset(buffer_y, 16, kSizeY);
+  memset(buffer_u, 8, kSizeU);
+  memset(buffer_v, 4, kSizeV);
+  frame1.CreateFrame(
+      kSizeY, buffer_y, kSizeU, buffer_u, kSizeV, buffer_v, 20, 20, 20, 10, 10);
+  frame1.set_timestamp(1);
+  frame1.set_ntp_time_ms(2);
+  frame1.set_render_time_ms(3);
+
+  frame2.reset(frame1.CloneFrame());
+  EXPECT_TRUE(frame2.get() != NULL);
+  EXPECT_TRUE(EqualFrames(frame1, *frame2));
+}
+
 TEST(TestI420VideoFrame, CopyBuffer) {
   I420VideoFrame frame1, frame2;
   int width = 15;
@@ -234,29 +257,24 @@
 
 bool EqualFrames(const I420VideoFrame& frame1,
                  const I420VideoFrame& frame2) {
-  if (!EqualFramesExceptSize(frame1, frame2))
-    return false;
-  // Compare allocated memory size.
-  bool ret = true;
-  ret |= (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane));
-  ret |= (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane));
-  ret |= (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane));
-  return ret;
+  return (EqualFramesExceptSize(frame1, frame2) &&
+          (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+          (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+          (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)));
 }
 
 bool EqualFramesExceptSize(const I420VideoFrame& frame1,
                            const I420VideoFrame& frame2) {
-  bool ret = true;
-  ret |= (frame1.width() == frame2.width());
-  ret |= (frame1.height() == frame2.height());
-  ret |= (frame1.stride(kYPlane) == frame2.stride(kYPlane));
-  ret |= (frame1.stride(kUPlane) == frame2.stride(kUPlane));
-  ret |= (frame1.stride(kVPlane) == frame2.stride(kVPlane));
-  ret |= (frame1.timestamp() == frame2.timestamp());
-  ret |= (frame1.ntp_time_ms() == frame2.ntp_time_ms());
-  ret |= (frame1.render_time_ms() == frame2.render_time_ms());
-  if (!ret)
+  if ((frame1.width() != frame2.width()) ||
+      (frame1.height() != frame2.height()) ||
+      (frame1.stride(kYPlane) != frame2.stride(kYPlane)) ||
+      (frame1.stride(kUPlane) != frame2.stride(kUPlane)) ||
+      (frame1.stride(kVPlane) != frame2.stride(kVPlane)) ||
+      (frame1.timestamp() != frame2.timestamp()) ||
+      (frame1.ntp_time_ms() != frame2.ntp_time_ms()) ||
+      (frame1.render_time_ms() != frame2.render_time_ms())) {
     return false;
+  }
   // Memory should be the equal for the minimum of the two sizes.
   int size_y = std::min(frame1.allocated_size(kYPlane),
                         frame2.allocated_size(kYPlane));
@@ -264,13 +282,9 @@
                         frame2.allocated_size(kUPlane));
   int size_v = std::min(frame1.allocated_size(kVPlane),
                         frame2.allocated_size(kVPlane));
-  int ret_val = 0;
-  ret_val += memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y);
-  ret_val += memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u);
-  ret_val += memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v);
-  if (ret_val == 0)
-    return true;
-  return false;
+  return (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane), size_y) == 0 &&
+          memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), size_u) == 0 &&
+          memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane), size_v) == 0);
 }
 
 int ExpectedSize(int plane_stride, int image_height, PlaneType type) {
diff --git a/webrtc/common_video/interface/i420_video_frame.h b/webrtc/common_video/interface/i420_video_frame.h
index 3f90a8e..5f7a572 100644
--- a/webrtc/common_video/interface/i420_video_frame.h
+++ b/webrtc/common_video/interface/i420_video_frame.h
@@ -51,13 +51,13 @@
   // on set dimensions - height and plane stride.
   // If required size is bigger than the allocated one, new buffers of adequate
   // size will be allocated.
-  // Return value: 0 on success ,-1 on error.
+  // Return value: 0 on success, -1 on error.
   virtual int CreateEmptyFrame(int width, int height,
                                int stride_y, int stride_u, int stride_v);
 
   // CreateFrame: Sets the frame's members and buffers. If required size is
   // bigger than allocated one, new buffers of adequate size will be allocated.
-  // Return value: 0 on success ,-1 on error.
+  // Return value: 0 on success, -1 on error.
   virtual int CreateFrame(int size_y, const uint8_t* buffer_y,
                           int size_u, const uint8_t* buffer_u,
                           int size_v, const uint8_t* buffer_v,
@@ -66,9 +66,13 @@
 
   // Copy frame: If required size is bigger than allocated one, new buffers of
   // adequate size will be allocated.
-  // Return value: 0 on success ,-1 on error.
+  // Return value: 0 on success, -1 on error.
   virtual int CopyFrame(const I420VideoFrame& videoFrame);
 
+  // Make a copy of |this|. The caller owns the returned frame.
+  // Return value: a new frame on success, NULL on error.
+  virtual I420VideoFrame* CloneFrame() const;
+
   // Swap Frame.
   virtual void SwapFrame(I420VideoFrame* videoFrame);
 
diff --git a/webrtc/common_video/interface/texture_video_frame.h b/webrtc/common_video/interface/texture_video_frame.h
index e905ea7..2c625ab 100644
--- a/webrtc/common_video/interface/texture_video_frame.h
+++ b/webrtc/common_video/interface/texture_video_frame.h
@@ -49,6 +49,7 @@
                           int stride_u,
                           int stride_v) OVERRIDE;
   virtual int CopyFrame(const I420VideoFrame& videoFrame) OVERRIDE;
+  virtual I420VideoFrame* CloneFrame() const OVERRIDE;
   virtual void SwapFrame(I420VideoFrame* videoFrame) OVERRIDE;
   virtual uint8_t* buffer(PlaneType type) OVERRIDE;
   virtual const uint8_t* buffer(PlaneType type) const OVERRIDE;
diff --git a/webrtc/common_video/texture_video_frame.cc b/webrtc/common_video/texture_video_frame.cc
index 2dd6cad..f301d19 100644
--- a/webrtc/common_video/texture_video_frame.cc
+++ b/webrtc/common_video/texture_video_frame.cc
@@ -57,6 +57,11 @@
   return -1;
 }
 
+I420VideoFrame* TextureVideoFrame::CloneFrame() const {
+  return new TextureVideoFrame(
+      handle_, width(), height(), timestamp(), render_time_ms());
+}
+
 void TextureVideoFrame::SwapFrame(I420VideoFrame* videoFrame) {
   assert(false);  // Should not be called.
 }
diff --git a/webrtc/common_video/texture_video_frame_unittest.cc b/webrtc/common_video/texture_video_frame_unittest.cc
index 04e09a6..408f5f6 100644
--- a/webrtc/common_video/texture_video_frame_unittest.cc
+++ b/webrtc/common_video/texture_video_frame_unittest.cc
@@ -8,9 +8,10 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
+#include "webrtc/common_video/interface/texture_video_frame.h"
+
 #include "testing/gtest/include/gtest/gtest.h"
 #include "webrtc/common_video/interface/native_handle.h"
-#include "webrtc/common_video/interface/texture_video_frame.h"
 
 namespace webrtc {
 
@@ -27,6 +28,9 @@
   int32_t ref_count_;
 };
 
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2);
+
 TEST(TestTextureVideoFrame, InitialValues) {
   NativeHandleImpl handle;
   TextureVideoFrame frame(&handle, 640, 480, 100, 10);
@@ -55,4 +59,21 @@
   EXPECT_EQ(0, handle.ref_count());
 }
 
+TEST(TestTextureVideoFrame, CloneFrame) {
+  NativeHandleImpl handle;
+  TextureVideoFrame frame1(&handle, 640, 480, 100, 200);
+  scoped_ptr<I420VideoFrame> frame2(frame1.CloneFrame());
+  EXPECT_TRUE(frame2.get() != NULL);
+  EXPECT_TRUE(EqualTextureFrames(frame1, *frame2));
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2) {
+  return ((frame1.native_handle() == frame2.native_handle()) &&
+          (frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
 }  // namespace webrtc
diff --git a/webrtc/modules/utility/interface/mock/mock_process_thread.h b/webrtc/modules/utility/interface/mock/mock_process_thread.h
new file mode 100644
index 0000000..fc0c1fb
--- /dev/null
+++ b/webrtc/modules/utility/interface/mock/mock_process_thread.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
+
+#include "webrtc/modules/utility/interface/process_thread.h"
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockProcessThread : public ProcessThread {
+ public:
+  MOCK_METHOD0(Start, int32_t());
+  MOCK_METHOD0(Stop, int32_t());
+  MOCK_METHOD1(RegisterModule, int32_t(Module* module));
+  MOCK_METHOD1(DeRegisterModule, int32_t(const Module* module));
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_UTILITY_INTERFACE_MOCK_PROCESS_THREAD_H_
diff --git a/webrtc/modules/utility/source/video_frames_queue.cc b/webrtc/modules/utility/source/video_frames_queue.cc
index 63afbe9..9ade8b5 100644
--- a/webrtc/modules/utility/source/video_frames_queue.cc
+++ b/webrtc/modules/utility/source/video_frames_queue.cc
@@ -38,12 +38,7 @@
 
 int32_t VideoFramesQueue::AddFrame(const I420VideoFrame& newFrame) {
   if (newFrame.native_handle() != NULL) {
-    _incomingFrames.push_back(new TextureVideoFrame(
-        static_cast<NativeHandle*>(newFrame.native_handle()),
-        newFrame.width(),
-        newFrame.height(),
-        newFrame.timestamp(),
-        newFrame.render_time_ms()));
+    _incomingFrames.push_back(newFrame.CloneFrame());
     return 0;
   }
 
diff --git a/webrtc/modules/video_capture/include/mock/mock_video_capture.h b/webrtc/modules/video_capture/include/mock/mock_video_capture.h
new file mode 100644
index 0000000..8ad74a2
--- /dev/null
+++ b/webrtc/modules/video_capture/include/mock/mock_video_capture.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
+
+#include "webrtc/modules/video_capture/include/video_capture.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockVideoCaptureModule : public VideoCaptureModule {
+ public:
+  // from Module
+  MOCK_METHOD0(TimeUntilNextProcess, int32_t());
+  MOCK_METHOD0(Process, int32_t());
+
+  // from RefCountedModule
+  MOCK_METHOD0(AddRef, int32_t());
+  MOCK_METHOD0(Release, int32_t());
+
+  // from VideoCaptureModule
+  MOCK_METHOD1(RegisterCaptureDataCallback,
+      void(VideoCaptureDataCallback& dataCallback));
+  MOCK_METHOD0(DeRegisterCaptureDataCallback, void());
+  MOCK_METHOD1(RegisterCaptureCallback, void(VideoCaptureFeedBack& callBack));
+  MOCK_METHOD0(DeRegisterCaptureCallback, void());
+  MOCK_METHOD1(StartCapture, int32_t(const VideoCaptureCapability& capability));
+  MOCK_METHOD0(StopCapture, int32_t());
+  MOCK_CONST_METHOD0(CurrentDeviceName, const char*());
+  MOCK_METHOD0(CaptureStarted, bool());
+  MOCK_METHOD1(CaptureSettings, int32_t(VideoCaptureCapability& settings));
+  MOCK_METHOD1(SetCaptureDelay, void(int32_t delayMS));
+  MOCK_METHOD0(CaptureDelay, int32_t());
+  MOCK_METHOD1(SetCaptureRotation, int32_t(VideoCaptureRotation rotation));
+  MOCK_METHOD1(GetEncodeInterface,
+               VideoCaptureEncodeInterface*(const VideoCodec& codec));
+  MOCK_METHOD1(EnableFrameRateCallback, void(const bool enable));
+  MOCK_METHOD1(EnableNoPictureAlarm, void(const bool enable));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_INCLUDE_MOCK_MOCK_VIDEO_CAPTURE_H_
diff --git a/webrtc/modules/video_capture/video_capture.gypi b/webrtc/modules/video_capture/video_capture.gypi
index 6df062a..b13b27b 100644
--- a/webrtc/modules/video_capture/video_capture.gypi
+++ b/webrtc/modules/video_capture/video_capture.gypi
@@ -60,6 +60,7 @@
               'link_settings': {
                 'xcode_settings': {
                   'OTHER_LDFLAGS': [
+                    '-framework CoreVideo',
                     '-framework QTKit',
                   ],
                 },
diff --git a/webrtc/modules/video_render/video_render_frames.cc b/webrtc/modules/video_render/video_render_frames.cc
index 7025d62..d790877 100644
--- a/webrtc/modules/video_render/video_render_frames.cc
+++ b/webrtc/modules/video_render/video_render_frames.cc
@@ -55,12 +55,7 @@
   }
 
   if (new_frame->native_handle() != NULL) {
-    incoming_frames_.push_back(new TextureVideoFrame(
-        static_cast<NativeHandle*>(new_frame->native_handle()),
-        new_frame->width(),
-        new_frame->height(),
-        new_frame->timestamp(),
-        new_frame->render_time_ms()));
+    incoming_frames_.push_back(new_frame->CloneFrame());
     return static_cast<int32_t>(incoming_frames_.size());
   }
 
diff --git a/webrtc/video_engine/encoder_state_feedback_unittest.cc b/webrtc/video_engine/encoder_state_feedback_unittest.cc
index f85d989..4e15752 100644
--- a/webrtc/video_engine/encoder_state_feedback_unittest.cc
+++ b/webrtc/video_engine/encoder_state_feedback_unittest.cc
@@ -17,26 +17,17 @@
 
 #include "webrtc/common.h"
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/video_engine/vie_encoder.h"
 
-namespace webrtc {
+using ::testing::NiceMock;
 
-// TODO(mflodman) Create a common mock in module utility.
-class TestProcessThread : public ProcessThread {
- public:
-  TestProcessThread() {}
-  ~TestProcessThread() {}
-  virtual int32_t Start() { return 0; }
-  virtual int32_t Stop() { return 0; }
-  virtual int32_t RegisterModule(Module* module) { return 0; }
-  virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
+namespace webrtc {
 
 class MockVieEncoder : public ViEEncoder {
  public:
-  explicit MockVieEncoder(TestProcessThread* process_thread)
+  explicit MockVieEncoder(ProcessThread* process_thread)
       : ViEEncoder(1, 1, 1, config_, *process_thread, NULL) {}
   ~MockVieEncoder() {}
 
@@ -55,10 +46,10 @@
 class VieKeyRequestTest : public ::testing::Test {
  protected:
   virtual void SetUp() {
-    process_thread_.reset(new TestProcessThread());
+    process_thread_.reset(new NiceMock<MockProcessThread>);
     encoder_state_feedback_.reset(new EncoderStateFeedback());
   }
-  scoped_ptr<TestProcessThread> process_thread_;
+  scoped_ptr<MockProcessThread> process_thread_;
   scoped_ptr<EncoderStateFeedback> encoder_state_feedback_;
 };
 
diff --git a/webrtc/video_engine/mock/mock_vie_frame_provider_base.h b/webrtc/video_engine/mock/mock_vie_frame_provider_base.h
new file mode 100644
index 0000000..d4e17f4
--- /dev/null
+++ b/webrtc/video_engine/mock/mock_vie_frame_provider_base.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
+
+#include "webrtc/video_engine/vie_frame_provider_base.h"
+#include "testing/gmock/include/gmock/gmock.h"
+
+namespace webrtc {
+
+class MockViEFrameCallback : public ViEFrameCallback {
+ public:
+  MOCK_METHOD4(DeliverFrame,
+               void(int id,
+                    I420VideoFrame* video_frame,
+                    int num_csrcs,
+                    const uint32_t CSRC[kRtpCsrcSize]));
+  MOCK_METHOD2(DelayChanged, void(int id, int frame_delay));
+  MOCK_METHOD3(GetPreferedFrameSettings,
+               int(int* width, int* height, int* frame_rate));
+  MOCK_METHOD1(ProviderDestroyed, void(int id));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_MOCK_MOCK_VIE_FRAME_PROVIDER_BASE_H_
diff --git a/webrtc/video_engine/video_engine_core.gypi b/webrtc/video_engine/video_engine_core.gypi
index 57cdecd..dfb48b4 100644
--- a/webrtc/video_engine/video_engine_core.gypi
+++ b/webrtc/video_engine/video_engine_core.gypi
@@ -131,6 +131,7 @@
             'encoder_state_feedback_unittest.cc',
             'overuse_frame_detector_unittest.cc',
             'stream_synchronization_unittest.cc',
+            'vie_capturer_unittest.cc',
             'vie_codec_unittest.cc',
             'vie_remb_unittest.cc',
           ],
diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc
index 867de9b..30d6633 100644
--- a/webrtc/video_engine/vie_capturer.cc
+++ b/webrtc/video_engine/vie_capturer.cc
@@ -10,6 +10,7 @@
 
 #include "webrtc/video_engine/vie_capturer.h"
 
+#include "webrtc/common_video/interface/texture_video_frame.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 #include "webrtc/modules/interface/module_common_types.h"
 #include "webrtc/modules/utility/interface/process_thread.h"
@@ -346,11 +347,16 @@
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
 
-  captured_frame_.SwapFrame(&video_frame);
+  if (video_frame.native_handle() != NULL) {
+    captured_frame_.reset(video_frame.CloneFrame());
+  } else {
+    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
+      captured_frame_.reset(new I420VideoFrame());
+    captured_frame_->SwapFrame(&video_frame);
+  }
   capture_event_.Set();
-  overuse_detector_->FrameCaptured(captured_frame_.width(),
-                                   captured_frame_.height());
-  return;
+  overuse_detector_->FrameCaptured(captured_frame_->width(),
+                                   captured_frame_->height());
 }
 
 void ViECapturer::OnCaptureDelayChanged(const int32_t id,
@@ -473,7 +479,9 @@
     deliver_cs_->Enter();
     if (SwapCapturedAndDeliverFrameIfAvailable()) {
       encode_start_time = Clock::GetRealTimeClock()->TimeInMilliseconds();
-      DeliverI420Frame(&deliver_frame_);
+      DeliverI420Frame(deliver_frame_.get());
+      if (deliver_frame_->native_handle() != NULL)
+        deliver_frame_.reset();  // Release the texture so it can be reused.
     }
     deliver_cs_->Leave();
     if (current_brightness_level_ != reported_brightness_level_) {
@@ -494,6 +502,11 @@
 }
 
 void ViECapturer::DeliverI420Frame(I420VideoFrame* video_frame) {
+  if (video_frame->native_handle() != NULL) {
+    ViEFrameProviderBase::DeliverFrame(video_frame);
+    return;
+  }
+
   // Apply image enhancement and effect filter.
   if (deflicker_frame_stats_) {
     if (image_proc_module_->GetFrameStats(deflicker_frame_stats_,
@@ -608,11 +621,21 @@
 
 bool ViECapturer::SwapCapturedAndDeliverFrameIfAvailable() {
   CriticalSectionScoped cs(capture_cs_.get());
-  if (captured_frame_.IsZeroSize())
+  if (captured_frame_ == NULL)
     return false;
 
-  deliver_frame_.SwapFrame(&captured_frame_);
-  captured_frame_.ResetSize();
+  if (captured_frame_->native_handle() != NULL) {
+    deliver_frame_.reset(captured_frame_.release());
+    return true;
+  }
+
+  if (captured_frame_->IsZeroSize())
+    return false;
+
+  if (deliver_frame_ == NULL)
+    deliver_frame_.reset(new I420VideoFrame());
+  deliver_frame_->SwapFrame(captured_frame_.get());
+  captured_frame_->ResetSize();
   return true;
 }
 
diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h
index 9ac5f83..8e89357 100644
--- a/webrtc/video_engine/vie_capturer.h
+++ b/webrtc/video_engine/vie_capturer.h
@@ -169,8 +169,8 @@
   EventWrapper& capture_event_;
   EventWrapper& deliver_event_;
 
-  I420VideoFrame captured_frame_;
-  I420VideoFrame deliver_frame_;
+  scoped_ptr<I420VideoFrame> captured_frame_;
+  scoped_ptr<I420VideoFrame> deliver_frame_;
 
   // Image processing.
   ViEEffectFilter* effect_filter_;
diff --git a/webrtc/video_engine/vie_capturer_unittest.cc b/webrtc/video_engine/vie_capturer_unittest.cc
new file mode 100644
index 0000000..edaf13b
--- /dev/null
+++ b/webrtc/video_engine/vie_capturer_unittest.cc
@@ -0,0 +1,263 @@
+/*
+ *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file includes unit tests for ViECapturer.
+
+#include "webrtc/video_engine/vie_capturer.h"
+
+#include <vector>
+
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "webrtc/common.h"
+#include "webrtc/common_video/interface/native_handle.h"
+#include "webrtc/common_video/interface/texture_video_frame.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
+#include "webrtc/modules/video_capture/include/mock/mock_video_capture.h"
+#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
+#include "webrtc/system_wrappers/interface/event_wrapper.h"
+#include "webrtc/system_wrappers/interface/ref_count.h"
+#include "webrtc/system_wrappers/interface/scoped_ptr.h"
+#include "webrtc/system_wrappers/interface/scoped_vector.h"
+#include "webrtc/video_engine/mock/mock_vie_frame_provider_base.h"
+
+using ::testing::_;
+using ::testing::Invoke;
+using ::testing::NiceMock;
+using ::testing::Return;
+using ::testing::WithArg;
+
+// If an output frame does not arrive in 500ms, the test will fail.
+#define FRAME_TIMEOUT_MS 500
+
+namespace webrtc {
+
+bool EqualFrames(const I420VideoFrame& frame1,
+                 const I420VideoFrame& frame2);
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2);
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+                       const I420VideoFrame& frame2);
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+                       const ScopedVector<I420VideoFrame>& frames2);
+I420VideoFrame* CreateI420VideoFrame(uint8_t length);
+
+class FakeNativeHandle : public NativeHandle {
+ public:
+  FakeNativeHandle() {}
+  virtual ~FakeNativeHandle() {}
+  virtual void* GetHandle() { return NULL; }
+};
+
+class ViECapturerTest : public ::testing::Test {
+ protected:
+  ViECapturerTest()
+      : mock_capture_module_(new NiceMock<MockVideoCaptureModule>()),
+        mock_process_thread_(new NiceMock<MockProcessThread>),
+        mock_frame_callback_(new NiceMock<MockViEFrameCallback>),
+        data_callback_(NULL),
+        output_frame_event_(EventWrapper::Create()) {
+  }
+
+  virtual void SetUp() {
+    EXPECT_CALL(*mock_capture_module_, RegisterCaptureDataCallback(_))
+        .WillRepeatedly(Invoke(this, &ViECapturerTest::SetCaptureDataCallback));
+    EXPECT_CALL(*mock_frame_callback_, DeliverFrame(_, _, _, _))
+        .WillRepeatedly(
+            WithArg<1>(Invoke(this, &ViECapturerTest::AddOutputFrame)));
+
+    Config config;
+    vie_capturer_.reset(
+        ViECapturer::CreateViECapture(
+            0, 0, config, mock_capture_module_.get(), *mock_process_thread_));
+    vie_capturer_->RegisterFrameCallback(0, mock_frame_callback_.get());
+  }
+
+  virtual void TearDown() {
+    // ViECapturer accesses |mock_process_thread_| in destructor and should
+    // be deleted first.
+    vie_capturer_.reset();
+  }
+
+  void SetCaptureDataCallback(VideoCaptureDataCallback& data_callback) {
+    data_callback_ = &data_callback;
+  }
+
+  void AddInputFrame(I420VideoFrame* frame) {
+    data_callback_->OnIncomingCapturedFrame(0, *frame);
+  }
+
+  void AddOutputFrame(I420VideoFrame* frame) {
+    if (frame->native_handle() == NULL)
+      output_frame_ybuffers_.push_back(frame->buffer(kYPlane));
+    // Clone the frames because ViECapturer owns the frames.
+    output_frames_.push_back(frame->CloneFrame());
+    output_frame_event_->Set();
+  }
+
+  void WaitOutputFrame() {
+    EXPECT_EQ(kEventSignaled, output_frame_event_->Wait(FRAME_TIMEOUT_MS));
+  }
+
+  scoped_ptr<MockVideoCaptureModule> mock_capture_module_;
+  scoped_ptr<MockProcessThread> mock_process_thread_;
+  scoped_ptr<MockViEFrameCallback> mock_frame_callback_;
+
+  // Used to send input capture frames to ViECapturer.
+  VideoCaptureDataCallback* data_callback_;
+
+  scoped_ptr<ViECapturer> vie_capturer_;
+
+  // Input capture frames of ViECapturer.
+  ScopedVector<I420VideoFrame> input_frames_;
+
+  // Indicate an output frame has arrived.
+  scoped_ptr<EventWrapper> output_frame_event_;
+
+  // Output delivered frames of ViECapturer.
+  ScopedVector<I420VideoFrame> output_frames_;
+
+  // The pointers of Y plane buffers of output frames. This is used to verify
+  // the frames are swapped and not copied.
+  std::vector<uint8_t*> output_frame_ybuffers_;
+};
+
+TEST_F(ViECapturerTest, TestTextureFrames) {
+  const int kNumFrame = 3;
+  for (int i = 0 ; i < kNumFrame; ++i) {
+    webrtc::RefCountImpl<FakeNativeHandle>* handle =
+              new webrtc::RefCountImpl<FakeNativeHandle>();
+    input_frames_.push_back(new TextureVideoFrame(handle, i, i, i, i));
+    AddInputFrame(input_frames_[i]);
+    WaitOutputFrame();
+  }
+
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestI420Frames) {
+  const int kNumFrame = 4;
+  ScopedVector<I420VideoFrame> copied_input_frames;
+  std::vector<uint8_t*> ybuffer_pointers;
+  for (int i = 0; i < kNumFrame; ++i) {
+    input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
+    ybuffer_pointers.push_back(input_frames_[i]->buffer(kYPlane));
+    // Copy input frames because the buffer data will be swapped.
+    copied_input_frames.push_back(input_frames_[i]->CloneFrame());
+    AddInputFrame(input_frames_[i]);
+    WaitOutputFrame();
+  }
+
+  EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
+  // Make sure the buffer is swapped and not copied.
+  for (int i = 0; i < kNumFrame; ++i)
+    EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
+  // The pipeline should be filled with frames with allocated buffers. Check
+  // the last input frame has the same allocated size after swapping.
+  EXPECT_EQ(input_frames_.back()->allocated_size(kYPlane),
+            copied_input_frames.back()->allocated_size(kYPlane));
+}
+
+TEST_F(ViECapturerTest, TestI420FrameAfterTextureFrame) {
+  webrtc::RefCountImpl<FakeNativeHandle>* handle =
+      new webrtc::RefCountImpl<FakeNativeHandle>();
+  input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+  AddInputFrame(input_frames_[0]);
+  WaitOutputFrame();
+
+  input_frames_.push_back(CreateI420VideoFrame(1));
+  scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[1]->CloneFrame());
+  AddInputFrame(copied_input_frame.get());
+  WaitOutputFrame();
+
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
+  input_frames_.push_back(CreateI420VideoFrame(1));
+  scoped_ptr<I420VideoFrame> copied_input_frame(input_frames_[0]->CloneFrame());
+  AddInputFrame(copied_input_frame.get());
+  WaitOutputFrame();
+
+  webrtc::RefCountImpl<FakeNativeHandle>* handle =
+      new webrtc::RefCountImpl<FakeNativeHandle>();
+  input_frames_.push_back(new TextureVideoFrame(handle, 1, 1, 1, 1));
+  AddInputFrame(input_frames_[1]);
+  WaitOutputFrame();
+
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+}
+
+bool EqualFrames(const I420VideoFrame& frame1,
+                 const I420VideoFrame& frame2) {
+  if (frame1.native_handle() != NULL || frame2.native_handle() != NULL)
+    return EqualTextureFrames(frame1, frame2);
+  return EqualBufferFrames(frame1, frame2);
+}
+
+bool EqualTextureFrames(const I420VideoFrame& frame1,
+                        const I420VideoFrame& frame2) {
+  return ((frame1.native_handle() == frame2.native_handle()) &&
+          (frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()));
+}
+
+bool EqualBufferFrames(const I420VideoFrame& frame1,
+                       const I420VideoFrame& frame2) {
+  return ((frame1.width() == frame2.width()) &&
+          (frame1.height() == frame2.height()) &&
+          (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
+          (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
+          (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
+          (frame1.timestamp() == frame2.timestamp()) &&
+          (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
+          (frame1.render_time_ms() == frame2.render_time_ms()) &&
+          (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
+          (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
+          (frame1.allocated_size(kVPlane) == frame2.allocated_size(kVPlane)) &&
+          (memcmp(frame1.buffer(kYPlane), frame2.buffer(kYPlane),
+                  frame1.allocated_size(kYPlane)) == 0) &&
+          (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane),
+                  frame1.allocated_size(kUPlane)) == 0) &&
+          (memcmp(frame1.buffer(kVPlane), frame2.buffer(kVPlane),
+                  frame1.allocated_size(kVPlane)) == 0));
+}
+
+bool EqualFramesVector(const ScopedVector<I420VideoFrame>& frames1,
+                       const ScopedVector<I420VideoFrame>& frames2) {
+  if (frames1.size() != frames2.size())
+    return false;
+  for (size_t i = 0; i < frames1.size(); ++i) {
+    if (!EqualFrames(*frames1[i], *frames2[i]))
+      return false;
+  }
+  return true;
+}
+
+I420VideoFrame* CreateI420VideoFrame(uint8_t data) {
+  I420VideoFrame* frame = new I420VideoFrame();
+  const int width = 36;
+  const int height = 24;
+  const int kSizeY = width * height * 2;
+  const int kSizeUV = width * height;
+  uint8_t buffer[kSizeY];
+  memset(buffer, data, kSizeY);
+  frame->CreateFrame(
+      kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
+      width / 2, width / 2);
+  frame->set_timestamp(data);
+  frame->set_ntp_time_ms(data);
+  frame->set_render_time_ms(data);
+  return frame;
+}
+
+}  // namespace webrtc
diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc
index afb6d0c..40a61de 100644
--- a/webrtc/video_engine/vie_encoder.cc
+++ b/webrtc/video_engine/vie_encoder.cc
@@ -487,6 +487,10 @@
     }
     encoder_paused_and_dropped_frame_ = false;
   }
+  if (video_frame->native_handle() != NULL) {
+    // TODO(wuchengli): add texture support. http://crbug.com/362437
+    return;
+  }
 
   // Convert render time, in ms, to RTP timestamp.
   const int kMsToRtpTimestamp = 90;
diff --git a/webrtc/video_engine/vie_remb_unittest.cc b/webrtc/video_engine/vie_remb_unittest.cc
index cdfe39c..1f0b70c 100644
--- a/webrtc/video_engine/vie_remb_unittest.cc
+++ b/webrtc/video_engine/vie_remb_unittest.cc
@@ -18,35 +18,26 @@
 
 #include "webrtc/modules/rtp_rtcp/interface/rtp_rtcp.h"
 #include "webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
-#include "webrtc/modules/utility/interface/process_thread.h"
+#include "webrtc/modules/utility/interface/mock/mock_process_thread.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 #include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/video_engine/vie_remb.h"
 
 using ::testing::_;
 using ::testing::AnyNumber;
+using ::testing::NiceMock;
 using ::testing::Return;
 
 namespace webrtc {
 
-class TestProcessThread : public ProcessThread {
- public:
-  explicit TestProcessThread() {}
-  ~TestProcessThread() {}
-  virtual int32_t Start() { return 0; }
-  virtual int32_t Stop() { return 0; }
-  virtual int32_t RegisterModule(Module* module) { return 0; }
-  virtual int32_t DeRegisterModule(const Module* module) { return 0; }
-};
-
 class ViERembTest : public ::testing::Test {
  protected:
   virtual void SetUp() {
     TickTime::UseFakeClock(12345);
-    process_thread_.reset(new TestProcessThread);
+    process_thread_.reset(new NiceMock<MockProcessThread>);
     vie_remb_.reset(new VieRemb());
   }
-  scoped_ptr<TestProcessThread> process_thread_;
+  scoped_ptr<MockProcessThread> process_thread_;
   scoped_ptr<VieRemb> vie_remb_;
 };