Make the entry point for video frames into webrtc a const ref I420VideoFrame.

This removes the non-const pointer entry point and SwapFrame.

Since frames delivered via VideoSendStream no longer go through the external capture module, VideoSendStream no longer receives an incoming frame rate callback. VideoSendStream now uses an rtc::RateTracker instead (see the sketch below).
The video engine must also ensure that timestamps are strictly increasing.
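As a rough sketch, the frame-rate bookkeeping now lives in SendStatisticsProxy (condensed from the send_statistics_proxy.cc change in this patch; locking members and the other stats fields are as in the patch):

  // Called from VideoSendStream::IncomingCapturedFrame for every frame.
  void SendStatisticsProxy::OnIncomingFrame() {
    CriticalSectionScoped lock(crit_.get());
    input_frame_rate_tracker_.Update(1);  // Count one captured frame.
  }

  VideoSendStream::Stats SendStatisticsProxy::GetStats() {
    CriticalSectionScoped lock(crit_.get());
    PurgeOldStats();
    // Derive the input frame rate from the tracker instead of the old
    // ViECaptureObserver::CapturedFrameRate callback.
    stats_.input_frame_rate =
        static_cast<int>(input_frame_rate_tracker_.units_second());
    return stats_;
  }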

With this change, timestamps (NTP, render time, and RTP) are checked and set in ViECapturer::OnIncomingCapturedFrame, as sketched below.
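Roughly, the timestamp handling in ViECapturer::OnIncomingCapturedFrame now works as follows (condensed from the vie_capturer.cc change in this patch; |frame| stands for the shallow copy the capturer makes of the incoming frame):

  if (frame.ntp_time_ms() != 0) {
    // An NTP capture time was supplied; derive the local render time from it.
    frame.set_render_time_ms(frame.ntp_time_ms() - delta_ntp_internal_ms_);
  } else {
    // No NTP time: fall back to the supplied render time or "now",
    // compensate for the capture-to-delivery delay, and derive NTP from it.
    int64_t render_time = frame.render_time_ms() != 0 ?
        frame.render_time_ms() : TickTime::MillisecondTimestamp();
    render_time -= FrameDelay();
    frame.set_render_time_ms(render_time);
    frame.set_ntp_time_ms(render_time + delta_ntp_internal_ms_);
  }
  if (frame.ntp_time_ms() <= last_captured_timestamp_)
    return;  // Timestamps must be strictly increasing; drop this frame.
  last_captured_timestamp_ = frame.ntp_time_ms();
  // The RTP timestamp is the NTP time in ms scaled to the 90 kHz clock.
  frame.set_timestamp(90 * static_cast<uint32_t>(frame.ntp_time_ms()));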

BUG=1128
R=magjed@webrtc.org, mflodman@webrtc.org, pbos@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/46429004

Cr-Commit-Position: refs/heads/master@{#8633}
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8633 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc
index aaa6f1e..0e8f56b 100644
--- a/talk/media/webrtc/webrtcvideocapturer.cc
+++ b/talk/media/webrtc/webrtcvideocapturer.cc
@@ -353,8 +353,9 @@
   return true;
 }
 
-void WebRtcVideoCapturer::OnIncomingCapturedFrame(const int32_t id,
-    webrtc::I420VideoFrame& sample) {
+void WebRtcVideoCapturer::OnIncomingCapturedFrame(
+    const int32_t id,
+    const webrtc::I420VideoFrame& sample) {
   // This would be a normal CritScope, except that it's possible that:
   // (1) whatever system component producing this frame has taken a lock, and
   // (2) Stop() probably calls back into that system component, which may take
@@ -395,7 +396,7 @@
 }
 
 void WebRtcVideoCapturer::SignalFrameCapturedOnStartThread(
-    webrtc::I420VideoFrame* frame) {
+    const webrtc::I420VideoFrame* frame) {
   DCHECK(start_thread_->IsCurrent());
   // Signal down stream components on captured frame.
   // The CapturedFrame class doesn't support planes. We have to ExtractBuffer
diff --git a/talk/media/webrtc/webrtcvideocapturer.h b/talk/media/webrtc/webrtcvideocapturer.h
index c0f7807..56896f9 100644
--- a/talk/media/webrtc/webrtcvideocapturer.h
+++ b/talk/media/webrtc/webrtcvideocapturer.h
@@ -81,7 +81,7 @@
  private:
   // Callback when a frame is captured by camera.
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       webrtc::I420VideoFrame& frame);
+                                       const webrtc::I420VideoFrame& frame);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
 
@@ -91,7 +91,7 @@
   // directly from OnIncomingCapturedFrame.
   // TODO(tommi): Remove this workaround when we've updated the WebRTC capturers
   // to follow the same contract.
-  void SignalFrameCapturedOnStartThread(webrtc::I420VideoFrame* frame);
+  void SignalFrameCapturedOnStartThread(const webrtc::I420VideoFrame* frame);
 
   rtc::scoped_ptr<WebRtcVcmFactoryInterface> factory_;
   webrtc::VideoCaptureModule* module_;
diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc
index af00928..e6f59d1 100644
--- a/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/talk/media/webrtc/webrtcvideoengine2.cc
@@ -1412,11 +1412,11 @@
   SetDimensions(
       video_frame_.width(), video_frame_.height(), capturer->IsScreencast());
 
-  LOG(LS_VERBOSE) << "SwapFrame: " << video_frame_.width() << "x"
+  LOG(LS_VERBOSE) << "IncomingCapturedFrame: " << video_frame_.width() << "x"
                   << video_frame_.height() << " -> (codec) "
                   << parameters_.encoder_config.streams.back().width << "x"
                   << parameters_.encoder_config.streams.back().height;
-  stream_->Input()->SwapFrame(&video_frame_);
+  stream_->Input()->IncomingCapturedFrame(video_frame_);
 }
 
 bool WebRtcVideoChannel2::WebRtcVideoSendStream::SetCapturer(
@@ -1436,7 +1436,7 @@
 
         CreateBlackFrame(&black_frame, last_dimensions_.width,
                          last_dimensions_.height);
-        stream_->Input()->SwapFrame(&black_frame);
+        stream_->Input()->IncomingCapturedFrame(black_frame);
       }
 
       capturer_ = NULL;
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index 637ecf3..b29fc51 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -139,9 +139,10 @@
   return last_frame_.height();
 }
 
-void FakeVideoSendStream::SwapFrame(webrtc::I420VideoFrame* frame) {
+void FakeVideoSendStream::IncomingCapturedFrame(
+    const webrtc::I420VideoFrame& frame) {
   ++num_swapped_frames_;
-  last_frame_.SwapFrame(frame);
+  last_frame_.ShallowCopy(frame);
 }
 
 void FakeVideoSendStream::SetStats(
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.h b/talk/media/webrtc/webrtcvideoengine2_unittest.h
index 1149949..7703535 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.h
@@ -54,7 +54,7 @@
   void SetStats(const webrtc::VideoSendStream::Stats& stats);
 
  private:
-  void SwapFrame(webrtc::I420VideoFrame* frame) override;
+  void IncomingCapturedFrame(const webrtc::I420VideoFrame& frame) override;
   webrtc::VideoSendStream::Stats GetStats() override;
 
   bool ReconfigureVideoEncoder(
diff --git a/webrtc/common_video/i420_video_frame.cc b/webrtc/common_video/i420_video_frame.cc
index 0afdf10..5b26cbb 100644
--- a/webrtc/common_video/i420_video_frame.cc
+++ b/webrtc/common_video/i420_video_frame.cc
@@ -139,6 +139,14 @@
   return 0;
 }
 
+void I420VideoFrame::ShallowCopy(const I420VideoFrame& videoFrame) {
+  video_frame_buffer_ = videoFrame.video_frame_buffer();
+  timestamp_ = videoFrame.timestamp_;
+  ntp_time_ms_ = videoFrame.ntp_time_ms_;
+  render_time_ms_ = videoFrame.render_time_ms_;
+  rotation_ = videoFrame.rotation_;
+}
+
 I420VideoFrame* I420VideoFrame::CloneFrame() const {
   rtc::scoped_ptr<I420VideoFrame> new_frame(new I420VideoFrame());
   if (new_frame->CopyFrame(*this) == -1) {
diff --git a/webrtc/common_video/i420_video_frame_unittest.cc b/webrtc/common_video/i420_video_frame_unittest.cc
index 013382e..e02dded 100644
--- a/webrtc/common_video/i420_video_frame_unittest.cc
+++ b/webrtc/common_video/i420_video_frame_unittest.cc
@@ -130,6 +130,63 @@
   EXPECT_TRUE(EqualFrames(small_frame, big_frame));
 }
 
+TEST(TestI420VideoFrame, ShallowCopy) {
+  uint32_t timestamp = 1;
+  int64_t ntp_time_ms = 2;
+  int64_t render_time_ms = 3;
+  int stride_y = 15;
+  int stride_u = 10;
+  int stride_v = 10;
+  int width = 15;
+  int height = 15;
+
+  const int kSizeY = 400;
+  const int kSizeU = 100;
+  const int kSizeV = 100;
+  const VideoRotation kRotation = kVideoRotation_270;
+  uint8_t buffer_y[kSizeY];
+  uint8_t buffer_u[kSizeU];
+  uint8_t buffer_v[kSizeV];
+  memset(buffer_y, 16, kSizeY);
+  memset(buffer_u, 8, kSizeU);
+  memset(buffer_v, 4, kSizeV);
+  I420VideoFrame frame1;
+  EXPECT_EQ(0, frame1.CreateFrame(kSizeY, buffer_y, kSizeU, buffer_u, kSizeV,
+                                  buffer_v, width, height, stride_y, stride_u,
+                                  stride_v, kRotation));
+  frame1.set_timestamp(timestamp);
+  frame1.set_ntp_time_ms(ntp_time_ms);
+  frame1.set_render_time_ms(render_time_ms);
+  I420VideoFrame frame2;
+  frame2.ShallowCopy(frame1);
+
+  // To be able to access the buffers, we need const pointers to the frames.
+  const I420VideoFrame* const_frame1_ptr = &frame1;
+  const I420VideoFrame* const_frame2_ptr = &frame2;
+
+  EXPECT_TRUE(const_frame1_ptr->buffer(kYPlane) ==
+              const_frame2_ptr->buffer(kYPlane));
+  EXPECT_TRUE(const_frame1_ptr->buffer(kUPlane) ==
+              const_frame2_ptr->buffer(kUPlane));
+  EXPECT_TRUE(const_frame1_ptr->buffer(kVPlane) ==
+              const_frame2_ptr->buffer(kVPlane));
+
+  EXPECT_EQ(frame2.timestamp(), frame1.timestamp());
+  EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());
+  EXPECT_EQ(frame2.render_time_ms(), frame1.render_time_ms());
+  EXPECT_EQ(frame2.rotation(), frame1.rotation());
+
+  frame2.set_timestamp(timestamp + 1);
+  frame2.set_ntp_time_ms(ntp_time_ms + 1);
+  frame2.set_render_time_ms(render_time_ms + 1);
+  frame2.set_rotation(kVideoRotation_90);
+
+  EXPECT_NE(frame2.timestamp(), frame1.timestamp());
+  EXPECT_NE(frame2.ntp_time_ms(), frame1.ntp_time_ms());
+  EXPECT_NE(frame2.render_time_ms(), frame1.render_time_ms());
+  EXPECT_NE(frame2.rotation(), frame1.rotation());
+}
+
 TEST(TestI420VideoFrame, CloneFrame) {
   I420VideoFrame frame1;
   rtc::scoped_ptr<I420VideoFrame> frame2;
diff --git a/webrtc/modules/video_capture/include/video_capture_defines.h b/webrtc/modules/video_capture/include/video_capture_defines.h
index 93a03f3..63a5b7a 100644
--- a/webrtc/modules/video_capture/include/video_capture_defines.h
+++ b/webrtc/modules/video_capture/include/video_capture_defines.h
@@ -86,9 +86,6 @@
                                   size_t videoFrameLength,
                                   const VideoCaptureCapability& frameInfo,
                                   int64_t captureTime = 0) = 0;
-    virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame,
-                                           int64_t captureTime = 0) = 0;
-
 protected:
     ~VideoCaptureExternal() {}
 };
@@ -98,7 +95,7 @@
 {
 public:
     virtual void OnIncomingCapturedFrame(const int32_t id,
-                                         I420VideoFrame& videoFrame) = 0;
+                                         const I420VideoFrame& videoFrame) = 0;
     virtual void OnCaptureDelayChanged(const int32_t id,
                                        const int32_t delay) = 0;
 protected:
diff --git a/webrtc/modules/video_capture/test/video_capture_unittest.cc b/webrtc/modules/video_capture/test/video_capture_unittest.cc
index 04a93a8..2470b2d 100644
--- a/webrtc/modules/video_capture/test/video_capture_unittest.cc
+++ b/webrtc/modules/video_capture/test/video_capture_unittest.cc
@@ -104,8 +104,9 @@
       printf("No of timing warnings %d\n", timing_warnings_);
   }
 
-  virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       webrtc::I420VideoFrame& videoFrame) {
+  virtual void OnIncomingCapturedFrame(
+      const int32_t id,
+      const webrtc::I420VideoFrame& videoFrame) {
     CriticalSectionScoped cs(capture_cs_.get());
     int height = videoFrame.height();
     int width = videoFrame.width();
@@ -479,76 +480,6 @@
   EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
 }
 
-// Test input of planar I420 frames.
-// NOTE: flaky, sometimes fails on the last CompareLastFrame.
-// http://code.google.com/p/webrtc/issues/detail?id=777
-TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
-  webrtc::I420VideoFrame frame_i420;
-  frame_i420.CopyFrame(test_frame_);
-
-  EXPECT_EQ(0,
-            capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420));
-
-  // Test with a frame with pitch not equal to width
-  memset(test_frame_.buffer(webrtc::kYPlane), 0xAA,
-         test_frame_.allocated_size(webrtc::kYPlane));
-  memset(test_frame_.buffer(webrtc::kUPlane), 0xAA,
-         test_frame_.allocated_size(webrtc::kUPlane));
-  memset(test_frame_.buffer(webrtc::kVPlane), 0xAA,
-         test_frame_.allocated_size(webrtc::kVPlane));
-  webrtc::I420VideoFrame aligned_test_frame;
-  int y_pitch = kTestWidth + 2;
-  int u_pitch = kTestWidth / 2 + 1;
-  int v_pitch = u_pitch;
-  aligned_test_frame.CreateEmptyFrame(kTestWidth, kTestHeight,
-                                      y_pitch, u_pitch, v_pitch);
-  memset(aligned_test_frame.buffer(webrtc::kYPlane), 0,
-         kTestWidth * kTestHeight);
-  memset(aligned_test_frame.buffer(webrtc::kUPlane), 0,
-         (kTestWidth + 1) / 2  * (kTestHeight + 1) / 2);
-  memset(aligned_test_frame.buffer(webrtc::kVPlane), 0,
-         (kTestWidth + 1) / 2  * (kTestHeight + 1) / 2);
-  // Copy the test_frame_ to aligned_test_frame.
-  int y_width = kTestWidth;
-  int uv_width = kTestWidth / 2;
-  int y_rows = kTestHeight;
-  int uv_rows = kTestHeight / 2;
-  const webrtc::I420VideoFrame& const_test_frame = test_frame_;
-  const unsigned char* y_plane = const_test_frame.buffer(webrtc::kYPlane);
-  const unsigned char* u_plane = const_test_frame.buffer(webrtc::kUPlane);
-  const unsigned char* v_plane = const_test_frame.buffer(webrtc::kVPlane);
-  // Copy Y
-  unsigned char* current_pointer = aligned_test_frame.buffer(webrtc::kYPlane);
-  for (int i = 0; i < y_rows; ++i) {
-    memcpy(current_pointer, y_plane, y_width);
-    // Remove the alignment which ViE doesn't support.
-    current_pointer += y_pitch;
-    y_plane += y_width;
-  }
-  // Copy U
-  current_pointer = aligned_test_frame.buffer(webrtc::kUPlane);
-  for (int i = 0; i < uv_rows; ++i) {
-    memcpy(current_pointer, u_plane, uv_width);
-    // Remove the alignment which ViE doesn't support.
-    current_pointer += u_pitch;
-    u_plane += uv_width;
-  }
-  // Copy V
-  current_pointer = aligned_test_frame.buffer(webrtc::kVPlane);
-  for (int i = 0; i < uv_rows; ++i) {
-    memcpy(current_pointer, v_plane, uv_width);
-    // Remove the alignment which ViE doesn't support.
-    current_pointer += v_pitch;
-    v_plane += uv_width;
-  }
-  frame_i420.CopyFrame(aligned_test_frame);
-
-  EXPECT_EQ(0,
-            capture_input_interface_->IncomingI420VideoFrame(&frame_i420, 0));
-  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
-}
-
 // Test frame rate and no picture alarm.
 // Flaky on Win32, see webrtc:3270.
 TEST_F(VideoCaptureExternalTest, DISABLED_ON_WIN(FrameRate)) {
diff --git a/webrtc/modules/video_capture/video_capture_impl.cc b/webrtc/modules/video_capture/video_capture_impl.cc
index 1b780c4..7202b69 100644
--- a/webrtc/modules/video_capture/video_capture_impl.cc
+++ b/webrtc/modules/video_capture/video_capture_impl.cc
@@ -173,10 +173,6 @@
       _captureCallBack(NULL),
       _lastProcessFrameCount(TickTime::Now()),
       _rotateFrame(kRotateNone),
-      last_capture_time_(0),
-      delta_ntp_internal_ms_(
-          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
-          TickTime::MillisecondTimestamp()),
       apply_rotation_(true) {
     _requestedCapability.width = kDefaultWidth;
     _requestedCapability.height = kDefaultHeight;
@@ -231,8 +227,7 @@
     return _setCaptureDelay;
 }
 
-int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame,
-                                               int64_t capture_time) {
+int32_t VideoCaptureImpl::DeliverCapturedFrame(I420VideoFrame& captureFrame) {
   UpdateFrameCount();  // frame count used for local frame rate callback.
 
   const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
@@ -241,19 +236,6 @@
       _setCaptureDelay = _captureDelay;
   }
 
-  // Set the capture time
-  if (capture_time != 0) {
-    captureFrame.set_render_time_ms(capture_time - delta_ntp_internal_ms_);
-  } else {
-    captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
-  }
-
-  if (captureFrame.render_time_ms() == last_capture_time_) {
-    // We don't allow the same capture time for two frames, drop this one.
-    return -1;
-  }
-  last_capture_time_ = captureFrame.render_time_ms();
-
   if (_dataCallBack) {
     if (callOnCaptureDelayChanged) {
       _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
@@ -337,8 +319,10 @@
         } else {
           _captureFrame.set_rotation(kVideoRotation_0);
         }
+        _captureFrame.set_ntp_time_ms(captureTime);
+        _captureFrame.set_render_time_ms(TickTime::MillisecondTimestamp());
 
-        DeliverCapturedFrame(_captureFrame, captureTime);
+        DeliverCapturedFrame(_captureFrame);
     }
     else // Encoded format
     {
@@ -349,16 +333,6 @@
     return 0;
 }
 
-int32_t VideoCaptureImpl::IncomingI420VideoFrame(I420VideoFrame* video_frame,
-                                                 int64_t captureTime) {
-
-  CriticalSectionScoped cs(&_apiCs);
-  CriticalSectionScoped cs2(&_callBackCs);
-  DeliverCapturedFrame(*video_frame, captureTime);
-
-  return 0;
-}
-
 int32_t VideoCaptureImpl::SetCaptureRotation(VideoRotation rotation) {
   CriticalSectionScoped cs(&_apiCs);
   CriticalSectionScoped cs2(&_callBackCs);
diff --git a/webrtc/modules/video_capture/video_capture_impl.h b/webrtc/modules/video_capture/video_capture_impl.h
index 01493bc..a4ac680 100644
--- a/webrtc/modules/video_capture/video_capture_impl.h
+++ b/webrtc/modules/video_capture/video_capture_impl.h
@@ -81,15 +81,12 @@
     virtual int32_t Process();
 
     // Implement VideoCaptureExternal
-    // |capture_time| must be specified in the NTP time format in milliseconds.
+    // |capture_time| must be specified in NTP time format in milliseconds.
     virtual int32_t IncomingFrame(uint8_t* videoFrame,
                                   size_t videoFrameLength,
                                   const VideoCaptureCapability& frameInfo,
                                   int64_t captureTime = 0);
 
-    virtual int32_t IncomingI420VideoFrame(I420VideoFrame* video_frame,
-                                           int64_t captureTime = 0);
-
     // Platform dependent
     virtual int32_t StartCapture(const VideoCaptureCapability& capability)
     {
@@ -106,8 +103,7 @@
 protected:
     VideoCaptureImpl(const int32_t id);
     virtual ~VideoCaptureImpl();
-    int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame,
-                                 int64_t capture_time);
+    int32_t DeliverCapturedFrame(I420VideoFrame& captureFrame);
 
     int32_t _id; // Module ID
     char* _deviceUniqueId; // current Device unique name;
@@ -136,12 +132,6 @@
 
     I420VideoFrame _captureFrame;
 
-    // Used to make sure incoming timestamp is increasing for every frame.
-    int64_t last_capture_time_;
-
-    // Delta used for translating between NTP and internal timestamps.
-    const int64_t delta_ntp_internal_ms_;
-
     // Indicate whether rotation should be applied before delivered externally.
     bool apply_rotation_;
 };
diff --git a/webrtc/test/frame_generator_capturer.cc b/webrtc/test/frame_generator_capturer.cc
index 721c29a..f78a597 100644
--- a/webrtc/test/frame_generator_capturer.cc
+++ b/webrtc/test/frame_generator_capturer.cc
@@ -114,11 +114,11 @@
     CriticalSectionScoped cs(lock_.get());
     if (sending_) {
       I420VideoFrame* frame = frame_generator_->NextFrame();
-      frame->set_render_time_ms(clock_->CurrentNtpInMilliseconds());
+      frame->set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
       if (first_frame_capture_time_ == -1) {
-        first_frame_capture_time_ = frame->render_time_ms();
+        first_frame_capture_time_ = frame->ntp_time_ms();
       }
-      input_->SwapFrame(frame);
+      input_->IncomingCapturedFrame(*frame);
     }
   }
   tick_->Wait(WEBRTC_EVENT_INFINITE);
diff --git a/webrtc/test/vcm_capturer.cc b/webrtc/test/vcm_capturer.cc
index a5820bf..f9976d2 100644
--- a/webrtc/test/vcm_capturer.cc
+++ b/webrtc/test/vcm_capturer.cc
@@ -87,9 +87,9 @@
 VcmCapturer::~VcmCapturer() { Destroy(); }
 
 void VcmCapturer::OnIncomingCapturedFrame(const int32_t id,
-                                          I420VideoFrame& frame) {
+                                          const I420VideoFrame& frame) {
   if (started_)
-    input_->SwapFrame(&frame);
+    input_->IncomingCapturedFrame(frame);
 }
 
 void VcmCapturer::OnCaptureDelayChanged(const int32_t id, const int32_t delay) {
diff --git a/webrtc/test/vcm_capturer.h b/webrtc/test/vcm_capturer.h
index 1cb5b4e..c73eeb1 100644
--- a/webrtc/test/vcm_capturer.h
+++ b/webrtc/test/vcm_capturer.h
@@ -28,7 +28,7 @@
   void Stop() override;
 
   void OnIncomingCapturedFrame(const int32_t id,
-                               I420VideoFrame& frame) override;  // NOLINT
+                               const I420VideoFrame& frame) override;  // NOLINT
   void OnCaptureDelayChanged(const int32_t id, const int32_t delay) override;
 
  private:
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index a8f6f09..c3f3fd5 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -170,7 +170,7 @@
   // check that the callbacks are done after processing video.
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(kWidth, kHeight));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
   EXPECT_EQ(kEventSignaled, pre_render_callback.Wait())
       << "Timed out while waiting for pre-render callback.";
   EXPECT_EQ(kEventSignaled, renderer.Wait())
@@ -218,7 +218,7 @@
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(
           encoder_config_.streams[0].width, encoder_config_.streams[0].height));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
 
   EXPECT_EQ(kEventSignaled, renderer.Wait())
       << "Timed out while waiting for the frame to render.";
@@ -833,7 +833,7 @@
   // check that the callbacks are done after processing video.
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(kWidth / 2, kHeight / 2));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
 
   EXPECT_EQ(kEventSignaled, pre_encode_callback.Wait())
       << "Timed out while waiting for pre-encode callback.";
@@ -1263,7 +1263,7 @@
   rtc::scoped_ptr<test::FrameGenerator> frame_generator(
       test::FrameGenerator::CreateChromaGenerator(
           encoder_config_.streams[0].width, encoder_config_.streams[0].height));
-  send_stream_->Input()->SwapFrame(frame_generator->NextFrame());
+  send_stream_->Input()->IncomingCapturedFrame(*frame_generator->NextFrame());
 
   EXPECT_EQ(kEventSignaled, post_encode_observer.Wait())
       << "Timed out while waiting for send-side encoded-frame callback.";
diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc
index 033bf4f..5dfd8e5 100644
--- a/webrtc/video/full_stack.cc
+++ b/webrtc/video/full_stack.cc
@@ -144,7 +144,7 @@
     return receiver_->DeliverPacket(packet, length);
   }
 
-  void SwapFrame(I420VideoFrame* video_frame) override {
+  void IncomingCapturedFrame(const I420VideoFrame& video_frame) override {
     I420VideoFrame* copy = NULL;
     {
       CriticalSectionScoped lock(crit_.get());
@@ -156,8 +156,8 @@
     if (copy == NULL)
       copy = new I420VideoFrame();
 
-    copy->CopyFrame(*video_frame);
-    copy->set_timestamp(copy->render_time_ms() * 90);
+    copy->CopyFrame(video_frame);
+    copy->set_timestamp(copy->ntp_time_ms() * 90);
 
     {
       CriticalSectionScoped lock(crit_.get());
@@ -167,7 +167,7 @@
       frames_.push_back(copy);
     }
 
-    input_->SwapFrame(video_frame);
+    input_->IncomingCapturedFrame(video_frame);
   }
 
   bool SendRtp(const uint8_t* packet, size_t length) override {
diff --git a/webrtc/video/send_statistics_proxy.cc b/webrtc/video/send_statistics_proxy.cc
index 33255f9..594cde5 100644
--- a/webrtc/video/send_statistics_proxy.cc
+++ b/webrtc/video/send_statistics_proxy.cc
@@ -50,15 +50,11 @@
   stats_.suspended = is_suspended;
 }
 
-void SendStatisticsProxy::CapturedFrameRate(const int capture_id,
-                                            const unsigned char frame_rate) {
-  CriticalSectionScoped lock(crit_.get());
-  stats_.input_frame_rate = frame_rate;
-}
-
 VideoSendStream::Stats SendStatisticsProxy::GetStats() {
   CriticalSectionScoped lock(crit_.get());
   PurgeOldStats();
+  stats_.input_frame_rate =
+      static_cast<int>(input_frame_rate_tracker_.units_second());
   return stats_;
 }
 
@@ -122,6 +118,11 @@
   update_times_[ssrc].resolution_update_ms = clock_->TimeInMilliseconds();
 }
 
+void SendStatisticsProxy::OnIncomingFrame() {
+  CriticalSectionScoped lock(crit_.get());
+  input_frame_rate_tracker_.Update(1);
+}
+
 void SendStatisticsProxy::RtcpPacketTypesCounterUpdated(
     uint32_t ssrc,
     const RtcpPacketTypeCounter& packet_counter) {
diff --git a/webrtc/video/send_statistics_proxy.h b/webrtc/video/send_statistics_proxy.h
index a60d495..0a048a5 100644
--- a/webrtc/video/send_statistics_proxy.h
+++ b/webrtc/video/send_statistics_proxy.h
@@ -13,6 +13,7 @@
 
 #include <string>
 
+#include "webrtc/base/ratetracker.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/base/thread_annotations.h"
 #include "webrtc/common_types.h"
@@ -35,7 +36,6 @@
                             public BitrateStatisticsObserver,
                             public FrameCountObserver,
                             public ViEEncoderObserver,
-                            public ViECaptureObserver,
                             public VideoEncoderRateObserver,
                             public SendSideDelayObserver {
  public:
@@ -48,6 +48,8 @@
 
   virtual void OnSendEncodedImage(const EncodedImage& encoded_image,
                                   const RTPVideoHeader* rtp_video_header);
+  // Used to update incoming frame rate.
+  void OnIncomingFrame();
 
   // From VideoEncoderRateObserver.
   void OnSetRates(uint32_t bitrate_bps, int framerate) override;
@@ -83,16 +85,6 @@
 
   void SuspendChange(int video_channel, bool is_suspended) override;
 
-  // From ViECaptureObserver.
-  void BrightnessAlarm(const int capture_id,
-                       const Brightness brightness) override {}
-
-  void CapturedFrameRate(const int capture_id,
-                         const unsigned char frame_rate) override;
-
-  void NoPictureAlarm(const int capture_id, const CaptureAlarm alarm) override {
-  }
-
   void SendSideDelayUpdated(int avg_delay_ms,
                             int max_delay_ms,
                             uint32_t ssrc) override;
@@ -110,6 +102,7 @@
   const VideoSendStream::Config config_;
   rtc::scoped_ptr<CriticalSectionWrapper> crit_;
   VideoSendStream::Stats stats_ GUARDED_BY(crit_);
+  rtc::RateTracker input_frame_rate_tracker_ GUARDED_BY(crit_);
   std::map<uint32_t, StatsUpdateTimes> update_times_ GUARDED_BY(crit_);
 };
 
diff --git a/webrtc/video/send_statistics_proxy_unittest.cc b/webrtc/video/send_statistics_proxy_unittest.cc
index c2ebf5f..0243add 100644
--- a/webrtc/video/send_statistics_proxy_unittest.cc
+++ b/webrtc/video/send_statistics_proxy_unittest.cc
@@ -128,16 +128,6 @@
   ExpectEqual(expected_, stats);
 }
 
-TEST_F(SendStatisticsProxyTest, CaptureFramerate) {
-  const int capture_fps = 31;
-
-  ViECaptureObserver* capture_observer = statistics_proxy_.get();
-  capture_observer->CapturedFrameRate(0, capture_fps);
-
-  VideoSendStream::Stats stats = statistics_proxy_->GetStats();
-  EXPECT_EQ(capture_fps, stats.input_frame_rate);
-}
-
 TEST_F(SendStatisticsProxyTest, EncodedBitrateAndFramerate) {
   const int media_bitrate_bps = 500;
   const int encode_fps = 29;
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index 881802a..b21b713 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -237,7 +237,6 @@
   rtp_rtcp_->RegisterSendFrameCountObserver(channel_, &stats_proxy_);
 
   codec_->RegisterEncoderObserver(channel_, stats_proxy_);
-  capture_->RegisterObserver(capture_id_, stats_proxy_);
 }
 
 VideoSendStream::~VideoSendStream() {
@@ -274,12 +273,13 @@
   rtp_rtcp_->Release();
 }
 
-void VideoSendStream::SwapFrame(I420VideoFrame* frame) {
+void VideoSendStream::IncomingCapturedFrame(const I420VideoFrame& frame) {
   // TODO(pbos): Local rendering should not be done on the capture thread.
   if (config_.local_renderer != NULL)
-    config_.local_renderer->RenderFrame(*frame, 0);
+    config_.local_renderer->RenderFrame(frame, 0);
 
-  external_capture_->SwapFrame(frame);
+  stats_proxy_.OnIncomingFrame();
+  external_capture_->IncomingFrame(frame);
 }
 
 VideoSendStreamInput* VideoSendStream::Input() { return this; }
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index 648a64e..898b810 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -63,7 +63,7 @@
   bool DeliverRtcp(const uint8_t* packet, size_t length);
 
   // From VideoSendStreamInput.
-  void SwapFrame(I420VideoFrame* frame) override;
+  void IncomingCapturedFrame(const I420VideoFrame& frame) override;
 
   // From webrtc::VideoSendStream.
   VideoSendStreamInput* Input() override;
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index e21474d..72fab4b 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -266,35 +266,6 @@
   StatisticianMap stats_map_;
 };
 
-TEST_F(VideoSendStreamTest, SwapsI420VideoFrames) {
-  static const size_t kWidth = 320;
-  static const size_t kHeight = 240;
-
-  test::NullTransport transport;
-  Call::Config call_config(&transport);
-  CreateSenderCall(call_config);
-
-  CreateSendConfig(1);
-  CreateStreams();
-  send_stream_->Start();
-
-  I420VideoFrame frame;
-  const int stride_uv = (kWidth + 1) / 2;
-  frame.CreateEmptyFrame(kWidth, kHeight, kWidth, stride_uv, stride_uv);
-  uint8_t* old_y_buffer = frame.buffer(kYPlane);
-  // Initialize memory to avoid DrMemory errors.
-  const int half_height = (kHeight + 1) / 2;
-  memset(frame.buffer(kYPlane), 0, kWidth * kHeight);
-  memset(frame.buffer(kUPlane), 0, stride_uv * half_height);
-  memset(frame.buffer(kVPlane), 0, stride_uv * half_height);
-
-  send_stream_->Input()->SwapFrame(&frame);
-
-  EXPECT_NE(frame.buffer(kYPlane), old_y_buffer);
-
-  DestroyStreams();
-}
-
 TEST_F(VideoSendStreamTest, SupportsFec) {
   class FecObserver : public test::SendTest {
    public:
@@ -1044,15 +1015,13 @@
       new webrtc::RefCountImpl<FakeNativeHandle>();
   input_frames.push_back(new I420VideoFrame(handle1, width, height, 1, 1));
   input_frames.push_back(new I420VideoFrame(handle2, width, height, 2, 2));
-  input_frames.push_back(CreateI420VideoFrame(width, height, 1));
-  input_frames.push_back(CreateI420VideoFrame(width, height, 2));
-  input_frames.push_back(new I420VideoFrame(handle3, width, height, 3, 3));
+  input_frames.push_back(CreateI420VideoFrame(width, height, 3));
+  input_frames.push_back(CreateI420VideoFrame(width, height, 4));
+  input_frames.push_back(new I420VideoFrame(handle3, width, height, 5, 5));
 
   send_stream_->Start();
   for (size_t i = 0; i < input_frames.size(); i++) {
-    // Make a copy of the input frame because the buffer will be swapped.
-    rtc::scoped_ptr<I420VideoFrame> frame(input_frames[i]->CloneFrame());
-    send_stream_->Input()->SwapFrame(frame.get());
+    send_stream_->Input()->IncomingCapturedFrame(*input_frames[i]);
     // Do not send the next frame too fast, so the frame dropper won't drop it.
     if (i < input_frames.size() - 1)
       SleepMs(1000 / encoder_config_.streams[0].max_framerate);
@@ -1082,6 +1051,7 @@
   EXPECT_EQ(frame1.native_handle(), frame2.native_handle());
   EXPECT_EQ(frame1.width(), frame2.width());
   EXPECT_EQ(frame1.height(), frame2.height());
+  EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms());
 }
 
 void ExpectEqualBufferFrames(const I420VideoFrame& frame1,
@@ -1091,7 +1061,7 @@
   EXPECT_EQ(frame1.stride(kYPlane), frame2.stride(kYPlane));
   EXPECT_EQ(frame1.stride(kUPlane), frame2.stride(kUPlane));
   EXPECT_EQ(frame1.stride(kVPlane), frame2.stride(kVPlane));
-  EXPECT_EQ(frame1.ntp_time_ms(), frame2.ntp_time_ms());
+  EXPECT_EQ(frame1.render_time_ms(), frame2.render_time_ms());
   ASSERT_EQ(frame1.allocated_size(kYPlane), frame2.allocated_size(kYPlane));
   EXPECT_EQ(0,
             memcmp(frame1.buffer(kYPlane),
@@ -1134,7 +1104,6 @@
                      width / 2,
                      width / 2);
   frame->set_timestamp(data);
-  frame->set_ntp_time_ms(data);
   frame->set_render_time_ms(data);
   return frame;
 }
diff --git a/webrtc/video_engine/include/vie_capture.h b/webrtc/video_engine/include/vie_capture.h
index caaeace..537e596 100644
--- a/webrtc/video_engine/include/vie_capture.h
+++ b/webrtc/video_engine/include/vie_capture.h
@@ -113,7 +113,7 @@
       const ViEVideoFrameI420& video_frame,
       unsigned long long capture_time = 0) = 0;
 
-  virtual void SwapFrame(I420VideoFrame* frame) {}
+  virtual void IncomingFrame(const I420VideoFrame& frame) {}
 };
 
 // This class declares an abstract interface for a user defined observer. It is
diff --git a/webrtc/video_engine/vie_capturer.cc b/webrtc/video_engine/vie_capturer.cc
index 4f88237..ce3b715 100644
--- a/webrtc/video_engine/vie_capturer.cc
+++ b/webrtc/video_engine/vie_capturer.cc
@@ -21,6 +21,7 @@
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/logging.h"
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
+#include "webrtc/system_wrappers/interface/tick_util.h"
 #include "webrtc/system_wrappers/interface/trace_event.h"
 #include "webrtc/video_engine/include/vie_image_process.h"
 #include "webrtc/video_engine/overuse_frame_detector.h"
@@ -78,6 +79,10 @@
       capture_event_(*EventWrapper::Create()),
       deliver_event_(*EventWrapper::Create()),
       stop_(0),
+      last_captured_timestamp_(0),
+      delta_ntp_internal_ms_(
+          Clock::GetRealTimeClock()->CurrentNtpInMilliseconds() -
+          TickTime::MillisecondTimestamp()),
       effect_filter_(NULL),
       image_proc_module_(NULL),
       image_proc_module_ref_counter_(0),
@@ -310,10 +315,6 @@
 
 int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
                                    unsigned long long capture_time) {  // NOLINT
-  if (!external_capture_module_) {
-    return -1;
-  }
-
   int size_y = video_frame.height * video_frame.y_pitch;
   int size_u = video_frame.u_pitch * ((video_frame.height + 1) / 2);
   int size_v = video_frame.v_pitch * ((video_frame.height + 1) / 2);
@@ -329,46 +330,61 @@
                                        video_frame.y_pitch,
                                        video_frame.u_pitch,
                                        video_frame.v_pitch);
-
   if (ret < 0) {
     LOG_F(LS_ERROR) << "Could not create I420Frame.";
     return -1;
   }
+  incoming_frame_.set_ntp_time_ms(capture_time);
 
-  return external_capture_module_->IncomingI420VideoFrame(&incoming_frame_,
-                                                          capture_time);
+  OnIncomingCapturedFrame(-1, incoming_frame_);
+  return 0;
 }
 
-void ViECapturer::SwapFrame(I420VideoFrame* frame) {
-  external_capture_module_->IncomingI420VideoFrame(frame,
-                                                   frame->render_time_ms());
-  frame->set_timestamp(0);
-  frame->set_ntp_time_ms(0);
-  frame->set_render_time_ms(0);
+void ViECapturer::IncomingFrame(const I420VideoFrame& frame) {
+  OnIncomingCapturedFrame(-1, frame);
 }
 
 void ViECapturer::OnIncomingCapturedFrame(const int32_t capture_id,
-                                          I420VideoFrame& video_frame) {
+                                          const I420VideoFrame& video_frame) {
   CriticalSectionScoped cs(capture_cs_.get());
-  // Make sure we render this frame earlier since we know the render time set
-  // is slightly off since it's being set when the frame has been received from
-  // the camera, and not when the camera actually captured the frame.
-  video_frame.set_render_time_ms(video_frame.render_time_ms() - FrameDelay());
+  captured_frame_.reset(new I420VideoFrame());
+  captured_frame_->ShallowCopy(video_frame);
 
-  overuse_detector_->FrameCaptured(video_frame.width(),
-                                   video_frame.height(),
-                                   video_frame.render_time_ms());
+  if (captured_frame_->ntp_time_ms() != 0) {
+    // If an ntp time stamp is set, this is the time stamp we will use.
+    captured_frame_->set_render_time_ms(
+        captured_frame_->ntp_time_ms() - delta_ntp_internal_ms_);
+  } else {  // ntp time stamp not set.
+    int64_t render_time = captured_frame_->render_time_ms() != 0 ?
+        captured_frame_->render_time_ms() : TickTime::MillisecondTimestamp();
+
+    // Make sure we render this frame earlier since we know the render time set
+    // is slightly off since it's being set when the frame was received
+    // from the camera, and not when the camera actually captured the frame.
+    render_time -= FrameDelay();
+    captured_frame_->set_render_time_ms(render_time);
+    captured_frame_->set_ntp_time_ms(
+        render_time + delta_ntp_internal_ms_);
+  }
+
+  if (captured_frame_->ntp_time_ms() <= last_captured_timestamp_) {
+    // We don't allow the same capture time for two frames, drop this one.
+    return;
+  }
+  last_captured_timestamp_ = captured_frame_->ntp_time_ms();
+
+  // Convert ntp time, in ms, to RTP timestamp.
+  const int kMsToRtpTimestamp = 90;
+  captured_frame_->set_timestamp(kMsToRtpTimestamp *
+      static_cast<uint32_t>(captured_frame_->ntp_time_ms()));
+
+  overuse_detector_->FrameCaptured(captured_frame_->width(),
+                                   captured_frame_->height(),
+                                   captured_frame_->render_time_ms());
 
   TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", video_frame.render_time_ms(),
                            "render_time", video_frame.render_time_ms());
 
-  if (video_frame.native_handle() != NULL) {
-    captured_frame_.reset(video_frame.CloneFrame());
-  } else {
-    if (captured_frame_ == NULL || captured_frame_->native_handle() != NULL)
-      captured_frame_.reset(new I420VideoFrame());
-    captured_frame_->SwapFrame(&video_frame);
-  }
   capture_event_.Set();
 }
 
diff --git a/webrtc/video_engine/vie_capturer.h b/webrtc/video_engine/vie_capturer.h
index 9f077e1..79a305e 100644
--- a/webrtc/video_engine/vie_capturer.h
+++ b/webrtc/video_engine/vie_capturer.h
@@ -69,17 +69,17 @@
   int FrameCallbackChanged();
 
   // Implements ExternalCapture.
-  virtual int IncomingFrame(unsigned char* video_frame,
-                            size_t video_frame_length,
-                            uint16_t width,
-                            uint16_t height,
-                            RawVideoType video_type,
-                            unsigned long long capture_time = 0);  // NOLINT
+  int IncomingFrame(unsigned char* video_frame,
+                    size_t video_frame_length,
+                    uint16_t width,
+                    uint16_t height,
+                    RawVideoType video_type,
+                    unsigned long long capture_time = 0) override;
 
-  virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
-                                unsigned long long capture_time = 0);  // NOLINT
+  int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
+                        unsigned long long capture_time = 0) override;
 
-  void SwapFrame(I420VideoFrame* frame) override;
+  void IncomingFrame(const I420VideoFrame& frame) override;
 
   // Start/Stop.
   int32_t Start(
@@ -123,7 +123,7 @@
 
   // Implements VideoCaptureDataCallback.
   virtual void OnIncomingCapturedFrame(const int32_t id,
-                                       I420VideoFrame& video_frame);
+                                       const I420VideoFrame& video_frame);
   virtual void OnCaptureDelayChanged(const int32_t id,
                                      const int32_t delay);
 
@@ -172,6 +172,10 @@
   volatile int stop_;
 
   rtc::scoped_ptr<I420VideoFrame> captured_frame_;
+  // Used to make sure incoming time stamp is increasing for every frame.
+  int64_t last_captured_timestamp_;
+  // Delta used for translating between NTP and internal timestamps.
+  const int64_t delta_ntp_internal_ms_;
   rtc::scoped_ptr<I420VideoFrame> deliver_frame_;
 
   // Image processing.
diff --git a/webrtc/video_engine/vie_capturer_unittest.cc b/webrtc/video_engine/vie_capturer_unittest.cc
index fa3a9d2..4ca95cb 100644
--- a/webrtc/video_engine/vie_capturer_unittest.cc
+++ b/webrtc/video_engine/vie_capturer_unittest.cc
@@ -129,13 +129,37 @@
   std::vector<const uint8_t*> output_frame_ybuffers_;
 };
 
+TEST_F(ViECapturerTest, TestNtpTimeStampSetIfRenderTimeSet) {
+  input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(0)));
+  input_frames_[0]->set_render_time_ms(5);
+  input_frames_[0]->set_ntp_time_ms(0);
+
+  AddInputFrame(input_frames_[0]);
+  WaitOutputFrame();
+  EXPECT_GT(output_frames_[0]->ntp_time_ms(),
+            input_frames_[0]->render_time_ms());
+}
+
+TEST_F(ViECapturerTest, TestRtpTimeStampSet) {
+  input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(0)));
+  input_frames_[0]->set_render_time_ms(0);
+  input_frames_[0]->set_ntp_time_ms(1);
+  input_frames_[0]->set_timestamp(0);
+
+  AddInputFrame(input_frames_[0]);
+  WaitOutputFrame();
+  EXPECT_EQ(output_frames_[0]->timestamp(),
+            input_frames_[0]->ntp_time_ms() * 90);
+}
+
 TEST_F(ViECapturerTest, TestTextureFrames) {
   const int kNumFrame = 3;
   for (int i = 0 ; i < kNumFrame; ++i) {
     webrtc::RefCountImpl<FakeNativeHandle>* handle =
               new webrtc::RefCountImpl<FakeNativeHandle>();
     // Add one to |i| so that width/height > 0.
-    input_frames_.push_back(new I420VideoFrame(handle, i + 1, i + 1, i, i));
+    input_frames_.push_back(
+        new I420VideoFrame(handle, i + 1, i + 1, i + 1, i + 1));
     AddInputFrame(input_frames_[i]);
     WaitOutputFrame();
   }
@@ -145,20 +169,17 @@
 
 TEST_F(ViECapturerTest, TestI420Frames) {
   const int kNumFrame = 4;
-  ScopedVector<I420VideoFrame> copied_input_frames;
   std::vector<const uint8_t*> ybuffer_pointers;
   for (int i = 0; i < kNumFrame; ++i) {
     input_frames_.push_back(CreateI420VideoFrame(static_cast<uint8_t>(i + 1)));
     const I420VideoFrame* const_input_frame = input_frames_[i];
     ybuffer_pointers.push_back(const_input_frame->buffer(kYPlane));
-    // Copy input frames because the buffer data will be swapped.
-    copied_input_frames.push_back(input_frames_[i]->CloneFrame());
     AddInputFrame(input_frames_[i]);
     WaitOutputFrame();
   }
 
-  EXPECT_TRUE(EqualFramesVector(copied_input_frames, output_frames_));
-  // Make sure the buffer is swapped and not copied.
+  EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
+  // Make sure the buffer is not copied.
   for (int i = 0; i < kNumFrame; ++i)
     EXPECT_EQ(ybuffer_pointers[i], output_frame_ybuffers_[i]);
 }
@@ -170,10 +191,8 @@
   AddInputFrame(input_frames_[0]);
   WaitOutputFrame();
 
-  input_frames_.push_back(CreateI420VideoFrame(1));
-  rtc::scoped_ptr<I420VideoFrame> copied_input_frame(
-      input_frames_[1]->CloneFrame());
-  AddInputFrame(copied_input_frame.get());
+  input_frames_.push_back(CreateI420VideoFrame(2));
+  AddInputFrame(input_frames_[1]);
   WaitOutputFrame();
 
   EXPECT_TRUE(EqualFramesVector(input_frames_, output_frames_));
@@ -181,14 +200,12 @@
 
 TEST_F(ViECapturerTest, TestTextureFrameAfterI420Frame) {
   input_frames_.push_back(CreateI420VideoFrame(1));
-  rtc::scoped_ptr<I420VideoFrame> copied_input_frame(
-      input_frames_[0]->CloneFrame());
-  AddInputFrame(copied_input_frame.get());
+  AddInputFrame(input_frames_[0]);
   WaitOutputFrame();
 
   webrtc::RefCountImpl<FakeNativeHandle>* handle =
       new webrtc::RefCountImpl<FakeNativeHandle>();
-  input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 1, 1));
+  input_frames_.push_back(new I420VideoFrame(handle, 1, 1, 2, 2));
   AddInputFrame(input_frames_[1]);
   WaitOutputFrame();
 
@@ -207,7 +224,6 @@
   return ((frame1.native_handle() == frame2.native_handle()) &&
           (frame1.width() == frame2.width()) &&
           (frame1.height() == frame2.height()) &&
-          (frame1.timestamp() == frame2.timestamp()) &&
           (frame1.render_time_ms() == frame2.render_time_ms()));
 }
 
@@ -218,8 +234,6 @@
           (frame1.stride(kYPlane) == frame2.stride(kYPlane)) &&
           (frame1.stride(kUPlane) == frame2.stride(kUPlane)) &&
           (frame1.stride(kVPlane) == frame2.stride(kVPlane)) &&
-          (frame1.timestamp() == frame2.timestamp()) &&
-          (frame1.ntp_time_ms() == frame2.ntp_time_ms()) &&
           (frame1.render_time_ms() == frame2.render_time_ms()) &&
           (frame1.allocated_size(kYPlane) == frame2.allocated_size(kYPlane)) &&
           (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) &&
@@ -254,8 +268,6 @@
   frame->CreateFrame(
       kSizeY, buffer, kSizeUV, buffer, kSizeUV, buffer, width, height, width,
       width / 2, width / 2);
-  frame->set_timestamp(data);
-  frame->set_ntp_time_ms(data);
   frame->set_render_time_ms(data);
   return frame;
 }
diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc
index f86beef..a3f63a9 100644
--- a/webrtc/video_engine/vie_encoder.cc
+++ b/webrtc/video_engine/vie_encoder.cc
@@ -582,16 +582,8 @@
     TraceFrameDropEnd();
   }
 
-  // Convert render time, in ms, to RTP timestamp.
-  const int kMsToRtpTimestamp = 90;
-  const uint32_t time_stamp =
-      kMsToRtpTimestamp *
-      static_cast<uint32_t>(video_frame->render_time_ms());
-
   TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame->render_time_ms(),
                           "Encode");
-  video_frame->set_timestamp(time_stamp);
-
   I420VideoFrame* decimated_frame = NULL;
   // TODO(wuchengli): support texture frames.
   if (video_frame->native_handle() == NULL) {
diff --git a/webrtc/video_frame.h b/webrtc/video_frame.h
index f207c1a..3859072 100644
--- a/webrtc/video_frame.h
+++ b/webrtc/video_frame.h
@@ -73,11 +73,15 @@
                   int stride_v,
                   VideoRotation rotation);
 
-  // Copy frame: If required size is bigger than allocated one, new buffers of
-  // adequate size will be allocated.
+  // Deep copy frame: If required size is bigger than allocated one, new
+  // buffers of adequate size will be allocated.
   // Return value: 0 on success, -1 on error.
   int CopyFrame(const I420VideoFrame& videoFrame);
 
+  // Creates a shallow copy of |videoFrame|, i.e., this object will retain a
+  // reference to the video buffer also retained by |videoFrame|.
+  void ShallowCopy(const I420VideoFrame& videoFrame);
+
   // Make a copy of |this|. The caller owns the returned frame.
   // Return value: a new frame on success, NULL on error.
   I420VideoFrame* CloneFrame() const;
diff --git a/webrtc/video_send_stream.h b/webrtc/video_send_stream.h
index c55d9e3..4ae0b6b 100644
--- a/webrtc/video_send_stream.h
+++ b/webrtc/video_send_stream.h
@@ -29,7 +29,7 @@
   // These methods do not lock internally and must be called sequentially.
   // If your application switches input sources synchronization must be done
   // externally to make sure that any old frames are not delivered concurrently.
-  virtual void SwapFrame(I420VideoFrame* video_frame) = 0;
+  virtual void IncomingCapturedFrame(const I420VideoFrame& video_frame) = 0;
 
  protected:
   virtual ~VideoSendStreamInput() {}
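
For reference, a minimal caller-side sketch of the new input path (a hypothetical capturer callback; the stream pointer and frame are assumed to exist):

  void OnFrameFromCapturer(webrtc::VideoSendStream* stream,
                           const webrtc::I420VideoFrame& frame) {
    // Frames are handed over by const reference; per the ViECapturer change
    // above, a shallow copy is made internally, so the caller keeps ownership
    // of its frame and its buffers are never swapped out from under it.
    stream->Input()->IncomingCapturedFrame(frame);
  }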