Remove cricket::VideoFrame::Set/GetElapsedTime()

This CL is a baby step towards consolidating the timestamps in cricket::VideoFrame and webrtc::VideoFrame, so that we can unify the frame classes in the future.

The elapsed time functionality is not really used. If a video sink wants to know the elapsed time since the first frame, it can store the first timestamp itself and calculate the time delta to later frames. This is already done in all video sinks that need the elapsed time. Having redundant timestamps in the frame classes is confusing and error-prone.

TBR=pthatcher@webrtc.org

Review URL: https://codereview.webrtc.org/1324263004

Cr-Commit-Position: refs/heads/master@{#10131}
diff --git a/talk/app/webrtc/androidvideocapturer.cc b/talk/app/webrtc/androidvideocapturer.cc
index 0312cd3..618fcb3 100644
--- a/talk/app/webrtc/androidvideocapturer.cc
+++ b/talk/app/webrtc/androidvideocapturer.cc
@@ -43,7 +43,7 @@
 class AndroidVideoCapturer::FrameFactory : public cricket::VideoFrameFactory {
  public:
   FrameFactory(const rtc::scoped_refptr<AndroidVideoCapturerDelegate>& delegate)
-      : start_time_(rtc::TimeNanos()), delegate_(delegate) {
+      : delegate_(delegate) {
     // Create a CapturedFrame that only contains header information, not the
     // actual pixel data.
     captured_frame_.pixel_height = 1;
@@ -60,7 +60,6 @@
     buffer_ = buffer;
     captured_frame_.width = buffer->width();
     captured_frame_.height = buffer->height();
-    captured_frame_.elapsed_time = rtc::TimeNanos() - start_time_;
     captured_frame_.time_stamp = time_stamp_in_ns;
     captured_frame_.rotation = rotation;
   }
@@ -69,7 +68,6 @@
     buffer_ = nullptr;
     captured_frame_.width = 0;
     captured_frame_.height = 0;
-    captured_frame_.elapsed_time = 0;
     captured_frame_.time_stamp = 0;
   }
 
@@ -85,8 +83,7 @@
     RTC_CHECK(captured_frame == &captured_frame_);
     rtc::scoped_ptr<cricket::VideoFrame> frame(new cricket::WebRtcVideoFrame(
         ShallowCenterCrop(buffer_, dst_width, dst_height),
-        captured_frame->elapsed_time, captured_frame->time_stamp,
-        captured_frame->GetRotation()));
+        captured_frame->time_stamp, captured_frame->GetRotation()));
     // Caller takes ownership.
     // TODO(magjed): Change CreateAliasedFrame() to return a rtc::scoped_ptr.
     return apply_rotation_ ? frame->GetCopyWithRotationApplied()->Copy()
@@ -94,7 +91,6 @@
   }
 
  private:
-  uint64 start_time_;
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
   cricket::CapturedFrame captured_frame_;
   rtc::scoped_refptr<AndroidVideoCapturerDelegate> delegate_;
diff --git a/talk/app/webrtc/objc/avfoundationvideocapturer.mm b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
index c47e36d..b4d7ee2 100644
--- a/talk/app/webrtc/objc/avfoundationvideocapturer.mm
+++ b/talk/app/webrtc/objc/avfoundationvideocapturer.mm
@@ -306,7 +306,7 @@
 namespace webrtc {
 
 AVFoundationVideoCapturer::AVFoundationVideoCapturer()
-    : _capturer(nil), _startThread(nullptr), _startTime(0) {
+    : _capturer(nil), _startThread(nullptr) {
   // Set our supported formats. This matches kDefaultPreset.
   std::vector<cricket::VideoFormat> supportedFormats;
   supportedFormats.push_back(cricket::VideoFormat(kDefaultFormat));
@@ -344,7 +344,6 @@
   // to spin up, and this call returns async.
   // TODO(tkchin): make this better.
   [_capturer startCaptureAsync];
-  _startTime = rtc::TimeNanos();
   SetCaptureState(cricket::CaptureState::CS_RUNNING);
 
   return cricket::CaptureState::CS_STARTING;
@@ -424,7 +423,6 @@
   frame.pixel_height = 1;
   frame.fourcc = static_cast<uint32>(cricket::FOURCC_NV12);
   frame.time_stamp = currentTime;
-  frame.elapsed_time = currentTime - _startTime;
   frame.data = yPlaneAddress;
   frame.data_size = frameSize;
 
diff --git a/talk/app/webrtc/videotrack_unittest.cc b/talk/app/webrtc/videotrack_unittest.cc
index 6db3c52..609ee80 100644
--- a/talk/app/webrtc/videotrack_unittest.cc
+++ b/talk/app/webrtc/videotrack_unittest.cc
@@ -82,7 +82,7 @@
   ASSERT_FALSE(renderer_input == NULL);
 
   cricket::WebRtcVideoFrame frame;
-  frame.InitToBlack(123, 123, 1, 1, 0, 0);
+  frame.InitToBlack(123, 123, 1, 1, 0);
   renderer_input->RenderFrame(&frame);
   EXPECT_EQ(1, renderer_1->num_rendered_frames());
 
diff --git a/talk/media/base/fakevideocapturer.h b/talk/media/base/fakevideocapturer.h
index 0909f84..7bf8952 100644
--- a/talk/media/base/fakevideocapturer.h
+++ b/talk/media/base/fakevideocapturer.h
@@ -114,7 +114,6 @@
     frame.height = height;
     frame.fourcc = fourcc;
     frame.data_size = size;
-    frame.elapsed_time = next_timestamp_;
     frame.time_stamp = initial_unix_timestamp_ + next_timestamp_;
     next_timestamp_ += timestamp_interval;
 
diff --git a/talk/media/base/fakevideorenderer.h b/talk/media/base/fakevideorenderer.h
index 23ae06d..9ceaac8 100644
--- a/talk/media/base/fakevideorenderer.h
+++ b/talk/media/base/fakevideorenderer.h
@@ -44,8 +44,7 @@
         height_(0),
         num_set_sizes_(0),
         num_rendered_frames_(0),
-        black_frame_(false),
-        last_frame_elapsed_time_ns_(-1) {
+        black_frame_(false) {
   }
 
   virtual bool SetSize(int width, int height, int reserved) {
@@ -76,7 +75,6 @@
       ++errors_;
       return false;
     }
-    last_frame_elapsed_time_ns_ = frame->GetElapsedTime();
     ++num_rendered_frames_;
     SignalRenderFrame(frame);
     return true;
@@ -104,11 +102,6 @@
     return black_frame_;
   }
 
-  int64_t last_frame_elapsed_time_ns() const {
-    rtc::CritScope cs(&crit_);
-    return last_frame_elapsed_time_ns_;
-  }
-
   sigslot::signal3<int, int, int> SignalSetSize;
   sigslot::signal1<const VideoFrame*> SignalRenderFrame;
 
@@ -167,7 +160,6 @@
   int num_set_sizes_;
   int num_rendered_frames_;
   bool black_frame_;
-  int64_t last_frame_elapsed_time_ns_;
   mutable rtc::CriticalSection crit_;
 };
 
diff --git a/talk/media/base/videocapturer.cc b/talk/media/base/videocapturer.cc
index c8fa26a..33cab71 100644
--- a/talk/media/base/videocapturer.cc
+++ b/talk/media/base/videocapturer.cc
@@ -80,7 +80,6 @@
       fourcc(0),
       pixel_width(0),
       pixel_height(0),
-      elapsed_time(0),
       time_stamp(0),
       data_size(0),
       rotation(0),
@@ -323,8 +322,7 @@
   }
 
   std::ostringstream ss;
-  ss << fourcc_name << captured_frame->width << "x" << captured_frame->height
-     << "x" << VideoFormat::IntervalToFpsFloat(captured_frame->elapsed_time);
+  ss << fourcc_name << captured_frame->width << "x" << captured_frame->height;
   return ss.str();
 }
 
diff --git a/talk/media/base/videocapturer.h b/talk/media/base/videocapturer.h
index 95cb9e9..dd89c44 100644
--- a/talk/media/base/videocapturer.h
+++ b/talk/media/base/videocapturer.h
@@ -90,8 +90,8 @@
   uint32 fourcc;        // compression
   uint32 pixel_width;   // width of a pixel, default is 1
   uint32 pixel_height;  // height of a pixel, default is 1
-  int64  elapsed_time;  // elapsed time since the creation of the frame
-                        // source (that is, the camera), in nanoseconds.
+  // TODO(magjed): |elapsed_time| is deprecated - remove once not used anymore.
+  int64  elapsed_time;
   int64  time_stamp;    // timestamp of when the frame was captured, in unix
                         // time with nanosecond units.
   uint32 data_size;     // number of bytes of the frame data
diff --git a/talk/media/base/videocapturer_unittest.cc b/talk/media/base/videocapturer_unittest.cc
index dbef1b9..8f529d2 100644
--- a/talk/media/base/videocapturer_unittest.cc
+++ b/talk/media/base/videocapturer_unittest.cc
@@ -55,7 +55,6 @@
       : capture_state_(cricket::CS_STOPPED),
         num_state_changes_(0),
         video_frames_received_(0),
-        last_frame_elapsed_time_(0),
         expects_rotation_applied_(true) {
     capturer_.SignalVideoFrame.connect(this, &VideoCapturerTest::OnVideoFrame);
     capturer_.SignalStateChange.connect(this,
@@ -69,7 +68,6 @@
  protected:
   void OnVideoFrame(cricket::VideoCapturer*, const cricket::VideoFrame* frame) {
     ++video_frames_received_;
-    last_frame_elapsed_time_ = frame->GetElapsedTime();
     if (expects_rotation_applied_) {
       EXPECT_EQ(webrtc::kVideoRotation_0, frame->GetRotation());
     } else {
@@ -87,13 +85,11 @@
   int video_frames_received() const {
     return video_frames_received_;
   }
-  int64 last_frame_elapsed_time() const { return last_frame_elapsed_time_; }
 
   cricket::FakeVideoCapturer capturer_;
   cricket::CaptureState capture_state_;
   int num_state_changes_;
   int video_frames_received_;
-  int64 last_frame_elapsed_time_;
   cricket::FakeVideoRenderer renderer_;
   bool expects_rotation_applied_;
 };
diff --git a/talk/media/base/videoframe.cc b/talk/media/base/videoframe.cc
index 7e79bf6..5994b07 100644
--- a/talk/media/base/videoframe.cc
+++ b/talk/media/base/videoframe.cc
@@ -201,7 +201,6 @@
                   dst->GetYPitch(), dst->GetUPitch(), dst->GetVPitch(),
                   dst->GetWidth(), dst->GetHeight(),
                   interpolate, vert_crop);
-  dst->SetElapsedTime(GetElapsedTime());
   dst->SetTimeStamp(GetTimeStamp());
   // Stretched frame should have the same rotation as the source.
   dst->SetRotation(GetVideoRotation());
@@ -212,7 +211,7 @@
   VideoFrame* dest = CreateEmptyFrame(static_cast<int>(dst_width),
                                       static_cast<int>(dst_height),
                                       GetPixelWidth(), GetPixelHeight(),
-                                      GetElapsedTime(), GetTimeStamp());
+                                      GetTimeStamp());
   if (dest) {
     StretchToFrame(dest, interpolate, vert_crop);
   }
diff --git a/talk/media/base/videoframe.h b/talk/media/base/videoframe.h
index ac53ca2..7199dcc 100644
--- a/talk/media/base/videoframe.h
+++ b/talk/media/base/videoframe.h
@@ -42,8 +42,7 @@
   virtual ~VideoFrame() {}
 
   virtual bool InitToBlack(int w, int h, size_t pixel_width,
-                           size_t pixel_height, int64_t elapsed_time,
-                           int64_t time_stamp) = 0;
+                           size_t pixel_height, int64_t time_stamp) = 0;
   // Creates a frame from a raw sample with FourCC |format| and size |w| x |h|.
   // |h| can be negative indicating a vertically flipped image.
   // |dw| is destination width; can be less than |w| if cropping is desired.
@@ -59,7 +58,6 @@
                      size_t sample_size,
                      size_t pixel_width,
                      size_t pixel_height,
-                     int64_t elapsed_time,
                      int64_t time_stamp,
                      webrtc::VideoRotation rotation,
                      bool apply_rotation) = 0;
@@ -74,11 +72,10 @@
                      size_t sample_size,
                      size_t pixel_width,
                      size_t pixel_height,
-                     int64_t elapsed_time,
                      int64_t time_stamp,
                      int rotation) {
     return Reset(fourcc, w, h, dw, dh, sample, sample_size, pixel_width,
-                 pixel_height, elapsed_time, time_stamp,
+                 pixel_height, time_stamp,
                  static_cast<webrtc::VideoRotation>(rotation), true);
   }
 
@@ -117,9 +114,7 @@
   virtual size_t GetPixelWidth() const = 0;
   virtual size_t GetPixelHeight() const = 0;
 
-  virtual int64_t GetElapsedTime() const = 0;
   virtual int64_t GetTimeStamp() const = 0;
-  virtual void SetElapsedTime(int64_t elapsed_time) = 0;
   virtual void SetTimeStamp(int64_t time_stamp) = 0;
 
   // Indicates the rotation angle in degrees.
@@ -218,7 +213,6 @@
   // Creates an empty frame.
   virtual VideoFrame *CreateEmptyFrame(int w, int h, size_t pixel_width,
                                        size_t pixel_height,
-                                       int64_t elapsed_time,
                                        int64_t time_stamp) const = 0;
   virtual void SetRotation(webrtc::VideoRotation rotation) = 0;
 };
diff --git a/talk/media/base/videoframe_unittest.h b/talk/media/base/videoframe_unittest.h
index 07b3910..92a278b 100644
--- a/talk/media/base/videoframe_unittest.h
+++ b/talk/media/base/videoframe_unittest.h
@@ -142,7 +142,7 @@
     bool ret = false;
     for (int i = 0; i < repeat_; ++i) {
       ret = frame->Init(format, width, height, dw, dh,
-                        sample, sample_size, 1, 1, 0, 0, rotation);
+                        sample, sample_size, 1, 1, 0, rotation);
     }
     return ret;
   }
@@ -280,7 +280,7 @@
 
     const uint8* start = reinterpret_cast<const uint8*>(ms->GetBuffer());
     int awidth = (width + 1) & ~1;
-    frame->InitToBlack(width, height, 1, 1, 0, 0);
+    frame->InitToBlack(width, height, 1, 1, 0);
     int stride_y = frame->GetYPitch();
     int stride_u = frame->GetUPitch();
     int stride_v = frame->GetVPitch();
@@ -322,7 +322,7 @@
       start = start + pitch * (height - 1);
       pitch = -pitch;
     }
-    frame->InitToBlack(width, height, 1, 1, 0, 0);
+    frame->InitToBlack(width, height, 1, 1, 0);
     int stride_y = frame->GetYPitch();
     int stride_u = frame->GetUPitch();
     int stride_v = frame->GetVPitch();
@@ -435,7 +435,7 @@
   static bool IsEqual(const cricket::VideoFrame& frame,
                       size_t width, size_t height,
                       size_t pixel_width, size_t pixel_height,
-                      int64 elapsed_time, int64 time_stamp,
+                      int64 time_stamp,
                       const uint8* y, uint32 ypitch,
                       const uint8* u, uint32 upitch,
                       const uint8* v, uint32 vpitch,
@@ -445,7 +445,6 @@
                   static_cast<uint32>(height)) &&
         frame.GetPixelWidth() == pixel_width &&
         frame.GetPixelHeight() == pixel_height &&
-        frame.GetElapsedTime() == elapsed_time &&
         frame.GetTimeStamp() == time_stamp &&
         IsPlaneEqual("y", frame.GetYPlane(), frame.GetYPitch(), y, ypitch,
                      static_cast<uint32>(width),
@@ -464,7 +463,7 @@
     return IsEqual(frame1,
                    frame2.GetWidth(), frame2.GetHeight(),
                    frame2.GetPixelWidth(), frame2.GetPixelHeight(),
-                   frame2.GetElapsedTime(), frame2.GetTimeStamp(),
+                   frame2.GetTimeStamp(),
                    frame2.GetYPlane(), frame2.GetYPitch(),
                    frame2.GetUPlane(), frame2.GetUPitch(),
                    frame2.GetVPlane(), frame2.GetVPitch(),
@@ -480,7 +479,7 @@
                    frame2.GetWidth() - hcrop * 2,
                    frame2.GetHeight() - vcrop * 2,
                    frame2.GetPixelWidth(), frame2.GetPixelHeight(),
-                   frame2.GetElapsedTime(), frame2.GetTimeStamp(),
+                   frame2.GetTimeStamp(),
                    frame2.GetYPlane() + vcrop * frame2.GetYPitch()
                        + hcrop,
                    frame2.GetYPitch(),
@@ -516,7 +515,7 @@
     const uint8* y = reinterpret_cast<uint8*>(ms.get()->GetBuffer());
     const uint8* u = y + kWidth * kHeight;
     const uint8* v = u + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0, 0,
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0,
                         y, kWidth, u, kWidth / 2, v, kWidth / 2, 0));
   }
 
@@ -531,7 +530,7 @@
     const uint8* y = reinterpret_cast<uint8*>(ms.get()->GetBuffer());
     const uint8* v = y + kWidth * kHeight;
     const uint8* u = v + kWidth * kHeight / 4;
-    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0, 0,
+    EXPECT_TRUE(IsEqual(frame, kWidth, kHeight, 1, 1, 0,
                         y, kWidth, u, kWidth / 2, v, kWidth / 2, 0));
   }
 
@@ -795,10 +794,10 @@
     EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
                             kHeight,                                           \
                             reinterpret_cast<uint8*>(ms->GetBuffer()),         \
-                            data_size, 1, 1, 0, 0, webrtc::kVideoRotation_0)); \
+                            data_size, 1, 1, 0, webrtc::kVideoRotation_0));    \
     int width_rotate = static_cast<int>(frame1.GetWidth());                    \
     int height_rotate = static_cast<int>(frame1.GetHeight());                  \
-    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0, 0));  \
+    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0));     \
     libyuv::I420Mirror(                                                        \
         frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(),            \
         frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(),            \
@@ -826,10 +825,10 @@
     EXPECT_TRUE(frame2.Init(cricket::FOURCC_##FOURCC, kWidth, kHeight, kWidth, \
                             kHeight,                                           \
                             reinterpret_cast<uint8*>(ms->GetBuffer()),         \
-                            data_size, 1, 1, 0, 0, webrtc::kVideoRotation_0)); \
+                            data_size, 1, 1, 0, webrtc::kVideoRotation_0));    \
     int width_rotate = static_cast<int>(frame1.GetWidth());                    \
     int height_rotate = static_cast<int>(frame1.GetHeight());                  \
-    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0, 0));  \
+    EXPECT_TRUE(frame3.InitToBlack(width_rotate, height_rotate, 1, 1, 0));     \
     libyuv::I420Rotate(                                                        \
         frame2.GetYPlane(), frame2.GetYPitch(), frame2.GetUPlane(),            \
         frame2.GetUPitch(), frame2.GetVPlane(), frame2.GetVPitch(),            \
@@ -935,13 +934,12 @@
     uint8 pixel[3] = { 1, 2, 3 };
     for (int i = 0; i < repeat_; ++i) {
       EXPECT_TRUE(frame.Init(cricket::FOURCC_I420, 1, 1, 1, 1, pixel,
-                             sizeof(pixel), 1, 1, 0, 0,
-                             webrtc::kVideoRotation_0));
+                             sizeof(pixel), 1, 1, 0, webrtc::kVideoRotation_0));
     }
     const uint8* y = pixel;
     const uint8* u = y + 1;
     const uint8* v = u + 1;
-    EXPECT_TRUE(IsEqual(frame, 1, 1, 1, 1, 0, 0,
+    EXPECT_TRUE(IsEqual(frame, 1, 1, 1, 1, 0,
                         y, 1, u, 1, v, 1, 0));
   }
 
@@ -952,7 +950,7 @@
     memset(pixels5x5, 1, 5 * 5 + ((5 + 1) / 2 * (5 + 1) / 2) *  2);
     for (int i = 0; i < repeat_; ++i) {
       EXPECT_TRUE(frame.Init(cricket::FOURCC_I420, 5, 5, 5, 5, pixels5x5,
-                             sizeof(pixels5x5), 1, 1, 0, 0,
+                             sizeof(pixels5x5), 1, 1, 0,
                              webrtc::kVideoRotation_0));
     }
     EXPECT_EQ(5u, frame.GetWidth());
@@ -968,7 +966,7 @@
     uint8 pixel[4] = { 64, 128, 192, 255 };
     for (int i = 0; i < repeat_; ++i) {
       EXPECT_TRUE(frame.Init(cricket::FOURCC_ARGB, 1, 1, 1, 1, pixel,
-                             sizeof(pixel), 1, 1, 0, 0,
+                             sizeof(pixel), 1, 1, 0,
                              webrtc::kVideoRotation_0));
     }
     // Convert back to ARGB.
@@ -1005,7 +1003,7 @@
 
     for (int i = 0; i < repeat_; ++i) {
       EXPECT_TRUE(frame.Init(cricket::FOURCC_ARGB, 10, 1, 10, 1, pixel,
-                             sizeof(pixel), 1, 1, 0, 0,
+                             sizeof(pixel), 1, 1, 0,
                              webrtc::kVideoRotation_0));
     }
     // Convert back to ARGB
@@ -1314,7 +1312,7 @@
   void ConstructBlack() {
     T frame;
     for (int i = 0; i < repeat_; ++i) {
-      EXPECT_TRUE(frame.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
+      EXPECT_TRUE(frame.InitToBlack(kWidth, kHeight, 1, 1, 0));
     }
     EXPECT_TRUE(IsSize(frame, kWidth, kHeight));
     EXPECT_TRUE(IsBlack(frame));
@@ -1380,13 +1378,13 @@
     ASSERT_TRUE(ms.get() != NULL);
     size_t data_size;
     ms->GetSize(&data_size);
-    EXPECT_TRUE(frame1.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
-    EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
+    EXPECT_TRUE(frame1.InitToBlack(kWidth, kHeight, 1, 1, 0));
+    EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0));
     EXPECT_TRUE(IsBlack(frame1));
     EXPECT_TRUE(IsEqual(frame1, frame2, 0));
     EXPECT_TRUE(frame1.Reset(cricket::FOURCC_I420, kWidth, kHeight, kWidth,
                              kHeight, reinterpret_cast<uint8*>(ms->GetBuffer()),
-                             data_size, 1, 1, 0, 0, rotation,
+                             data_size, 1, 1, 0, rotation,
                              apply_rotation));
     if (apply_rotation)
       EXPECT_EQ(webrtc::kVideoRotation_0, frame1.GetVideoRotation());
@@ -1450,7 +1448,7 @@
                                                     out,
                                                     out_size, stride));
     }
-    EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0, 0));
+    EXPECT_TRUE(frame2.InitToBlack(kWidth, kHeight, 1, 1, 0));
     for (int i = 0; i < repeat_from; ++i) {
       EXPECT_EQ(0, RGBToI420(out, stride,
                              frame2.GetYPlane(), frame2.GetYPitch(),
@@ -1767,7 +1765,7 @@
                                       kWidth, kHeight));
     }
     EXPECT_TRUE(frame2.Init(cricket::FOURCC_I422, kWidth, kHeight, kWidth,
-                            kHeight, y, out_size, 1, 1, 0, 0,
+                            kHeight, y, out_size, 1, 1, 0,
                             webrtc::kVideoRotation_0));
     EXPECT_TRUE(IsEqual(frame1, frame2, 1));
   }
@@ -1871,7 +1869,7 @@
     uint8 pixel[3] = { 1, 2, 3 };
     T frame;
     EXPECT_TRUE(frame.Init(cricket::FOURCC_I420, 1, 1, 1, 1, pixel,
-                           sizeof(pixel), 1, 1, 0, 0,
+                           sizeof(pixel), 1, 1, 0,
                            webrtc::kVideoRotation_0));
     for (int i = 0; i < repeat_; ++i) {
       EXPECT_EQ(out_size, frame.CopyToBuffer(out.get(), out_size));
@@ -1885,7 +1883,7 @@
   void StretchToFrame() {
     // Create the source frame as a black frame.
     T source;
-    EXPECT_TRUE(source.InitToBlack(kWidth * 2, kHeight * 2, 1, 1, 0, 0));
+    EXPECT_TRUE(source.InitToBlack(kWidth * 2, kHeight * 2, 1, 1, 0));
     EXPECT_TRUE(IsSize(source, kWidth * 2, kHeight * 2));
 
     // Create the target frame by loading from a file.
@@ -1902,7 +1900,6 @@
     ASSERT_TRUE(LoadFrameNoRepeat(&target2));
     source.StretchToFrame(&target2, true, true);
     EXPECT_TRUE(IsBlack(target2));
-    EXPECT_EQ(source.GetElapsedTime(), target2.GetElapsedTime());
     EXPECT_EQ(source.GetTimeStamp(), target2.GetTimeStamp());
   }
 
diff --git a/talk/media/base/videoframefactory.cc b/talk/media/base/videoframefactory.cc
index ceef757..dfd97c6 100644
--- a/talk/media/base/videoframefactory.cc
+++ b/talk/media/base/videoframefactory.cc
@@ -70,7 +70,6 @@
     }
   } else {
     cropped_input_frame->StretchToFrame(output_frame_.get(), true, true);
-    output_frame_->SetElapsedTime(cropped_input_frame->GetElapsedTime());
     output_frame_->SetTimeStamp(cropped_input_frame->GetTimeStamp());
   }
   return output_frame_->Copy();
diff --git a/talk/media/devices/filevideocapturer.cc b/talk/media/devices/filevideocapturer.cc
index 6025d6d..489be53 100644
--- a/talk/media/devices/filevideocapturer.cc
+++ b/talk/media/devices/filevideocapturer.cc
@@ -74,7 +74,9 @@
     buffer.WriteUInt32(frame.fourcc);
     buffer.WriteUInt32(frame.pixel_width);
     buffer.WriteUInt32(frame.pixel_height);
-    buffer.WriteUInt64(frame.elapsed_time);
+    // Elapsed time is deprecated.
+    const uint64_t dummy_elapsed_time = 0;
+    buffer.WriteUInt64(dummy_elapsed_time);
     buffer.WriteUInt64(frame.time_stamp);
     buffer.WriteUInt32(size);
 
@@ -163,7 +165,6 @@
     : frame_buffer_size_(0),
       file_read_thread_(NULL),
       repeat_(0),
-      start_time_ns_(0),
       last_frame_timestamp_ns_(0),
       ignore_framerate_(false) {
 }
@@ -243,8 +244,6 @@
   SetCaptureFormat(&capture_format);
   // Create a thread to read the file.
   file_read_thread_ = new FileReadThread(this);
-  start_time_ns_ = kNumNanoSecsPerMilliSec *
-      static_cast<int64>(rtc::Time());
   bool ret = file_read_thread_->Start();
   if (ret) {
     LOG(LS_INFO) << "File video capturer '" << GetId() << "' started";
@@ -302,7 +301,9 @@
     buffer.ReadUInt32(&frame->fourcc);
     buffer.ReadUInt32(&frame->pixel_width);
     buffer.ReadUInt32(&frame->pixel_height);
-    buffer.ReadUInt64(reinterpret_cast<uint64*>(&frame->elapsed_time));
+    // Elapsed time is deprecated.
+    uint64 dummy_elapsed_time;
+    buffer.ReadUInt64(&dummy_elapsed_time);
     buffer.ReadUInt64(reinterpret_cast<uint64*>(&frame->time_stamp));
     buffer.ReadUInt32(&frame->data_size);
   }
@@ -318,7 +319,6 @@
   if (!first_frame) {
     captured_frame_.time_stamp = kNumNanoSecsPerMilliSec *
         static_cast<int64>(start_read_time_ms);
-    captured_frame_.elapsed_time = captured_frame_.time_stamp - start_time_ns_;
     SignalFrameCaptured(this, &captured_frame_);
   }
 
diff --git a/talk/media/devices/filevideocapturer.h b/talk/media/devices/filevideocapturer.h
index 3ef82ef..f72c638 100644
--- a/talk/media/devices/filevideocapturer.h
+++ b/talk/media/devices/filevideocapturer.h
@@ -149,7 +149,6 @@
   uint32 frame_buffer_size_;
   FileReadThread* file_read_thread_;
   int repeat_;  // How many times to repeat the file.
-  int64 start_time_ns_;  // Time when the file video capturer starts.
   int64 last_frame_timestamp_ns_;  // Timestamp of last read frame.
   bool ignore_framerate_;
 
diff --git a/talk/media/webrtc/webrtcvideocapturer.cc b/talk/media/webrtc/webrtcvideocapturer.cc
index 60b8422..f64786f 100644
--- a/talk/media/webrtc/webrtcvideocapturer.cc
+++ b/talk/media/webrtc/webrtcvideocapturer.cc
@@ -434,8 +434,7 @@
   pixel_width = 1;
   pixel_height = 1;
   // Convert units from VideoFrame RenderTimeMs to CapturedFrame (nanoseconds).
-  elapsed_time = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec;
-  time_stamp = elapsed_time;
+  time_stamp = sample.render_time_ms() * rtc::kNumNanosecsPerMillisec;
   data_size = rtc::checked_cast<uint32>(length);
   data = buffer;
   rotation = sample.rotation();
diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc
index b94d24d..7b29001 100644
--- a/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/talk/media/webrtc/webrtcvideoengine2.cc
@@ -2537,7 +2537,6 @@
 
   const WebRtcVideoFrame render_frame(
       frame.video_frame_buffer(),
-      elapsed_time_ms * rtc::kNumNanosecsPerMillisec,
       frame.render_time_ms() * rtc::kNumNanosecsPerMillisec, frame.rotation());
   renderer_->RenderFrame(&render_frame);
 }
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index 247ba96..ee24fe5 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -492,7 +492,6 @@
   rtc::scoped_ptr<char[]> data(new char[frame.data_size]);
   frame.data = data.get();
   memset(frame.data, 1, frame.data_size);
-  frame.elapsed_time = 0;
   const int kInitialTimestamp = 123456;
   frame.time_stamp = kInitialTimestamp;
 
@@ -1810,7 +1809,7 @@
   EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
 }
 
-TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
+TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeCorrectly) {
   // Start at last timestamp to verify that wraparounds are estimated correctly.
   static const uint32_t kInitialTimestamp = 0xFFFFFFFFu;
   static const int64_t kInitialNtpTimeMs = 1247891230;
@@ -1829,7 +1828,6 @@
   stream->InjectFrame(video_frame, 0);
 
   EXPECT_EQ(1, renderer.num_rendered_frames());
-  EXPECT_EQ(0, renderer.last_frame_elapsed_time_ns());
 
   // This timestamp is kInitialTimestamp (-1) + kFrameOffsetMs * 90, which
   // triggers a constant-overflow warning, hence we're calculating it explicitly
@@ -1839,8 +1837,6 @@
   stream->InjectFrame(video_frame, 0);
 
   EXPECT_EQ(2, renderer.num_rendered_frames());
-  EXPECT_EQ(kFrameOffsetMs * rtc::kNumNanosecsPerMillisec,
-            renderer.last_frame_elapsed_time_ns());
 
   // Verify that NTP time has been correctly deduced.
   cricket::VideoMediaInfo info;
diff --git a/talk/media/webrtc/webrtcvideoframe.cc b/talk/media/webrtc/webrtcvideoframe.cc
index 932bf3c..2bc97d9 100644
--- a/talk/media/webrtc/webrtcvideoframe.cc
+++ b/talk/media/webrtc/webrtcvideoframe.cc
@@ -42,19 +42,16 @@
 WebRtcVideoFrame::WebRtcVideoFrame():
     pixel_width_(0),
     pixel_height_(0),
-    elapsed_time_ns_(0),
     time_stamp_ns_(0),
     rotation_(webrtc::kVideoRotation_0) {}
 
 WebRtcVideoFrame::WebRtcVideoFrame(
     const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-    int64_t elapsed_time_ns,
     int64_t time_stamp_ns,
     webrtc::VideoRotation rotation)
     : video_frame_buffer_(buffer),
       pixel_width_(1),
       pixel_height_(1),
-      elapsed_time_ns_(elapsed_time_ns),
       time_stamp_ns_(time_stamp_ns),
       rotation_(rotation) {
 }
@@ -66,7 +63,6 @@
     : video_frame_buffer_(buffer),
       pixel_width_(1),
       pixel_height_(1),
-      elapsed_time_ns_(elapsed_time_ns),
       time_stamp_ns_(time_stamp_ns),
       rotation_(webrtc::kVideoRotation_0) {
 }
@@ -82,11 +78,10 @@
                             size_t sample_size,
                             size_t pixel_width,
                             size_t pixel_height,
-                            int64_t elapsed_time_ns,
                             int64_t time_stamp_ns,
                             webrtc::VideoRotation rotation) {
   return Reset(format, w, h, dw, dh, sample, sample_size, pixel_width,
-               pixel_height, elapsed_time_ns, time_stamp_ns, rotation,
+               pixel_height, time_stamp_ns, rotation,
                true /*apply_rotation*/);
 }
 
@@ -94,17 +89,21 @@
                             bool apply_rotation) {
   return Reset(frame->fourcc, frame->width, frame->height, dw, dh,
                static_cast<uint8*>(frame->data), frame->data_size,
-               frame->pixel_width, frame->pixel_height, frame->elapsed_time,
+               frame->pixel_width, frame->pixel_height,
                frame->time_stamp,
                frame->GetRotation(),
                apply_rotation);
 }
 
 bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
-                                   size_t pixel_height, int64_t elapsed_time_ns,
+                                   size_t pixel_height, int64_t,
                                    int64_t time_stamp_ns) {
-  InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time_ns,
-                    time_stamp_ns);
+  return InitToBlack(w, h, pixel_width, pixel_height, time_stamp_ns);
+}
+
+bool WebRtcVideoFrame::InitToBlack(int w, int h, size_t pixel_width,
+                                   size_t pixel_height, int64_t time_stamp_ns) {
+  InitToEmptyBuffer(w, h, pixel_width, pixel_height, time_stamp_ns);
   return SetToBlack();
 }
 
@@ -170,7 +169,7 @@
 
 VideoFrame* WebRtcVideoFrame::Copy() const {
   WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(
-      video_frame_buffer_, elapsed_time_ns_, time_stamp_ns_, rotation_);
+      video_frame_buffer_, time_stamp_ns_, rotation_);
   new_frame->pixel_width_ = pixel_width_;
   new_frame->pixel_height_ = pixel_height_;
   return new_frame;
@@ -216,7 +215,6 @@
                              size_t sample_size,
                              size_t pixel_width,
                              size_t pixel_height,
-                             int64_t elapsed_time_ns,
                              int64_t time_stamp_ns,
                              webrtc::VideoRotation rotation,
                              bool apply_rotation) {
@@ -237,7 +235,7 @@
   }
 
   InitToEmptyBuffer(new_width, new_height, pixel_width, pixel_height,
-                    elapsed_time_ns, time_stamp_ns);
+                    time_stamp_ns);
   rotation_ = apply_rotation ? webrtc::kVideoRotation_0 : rotation;
 
   int horiz_crop = ((w - dw) / 2) & ~1;
@@ -267,21 +265,18 @@
 
 VideoFrame* WebRtcVideoFrame::CreateEmptyFrame(
     int w, int h, size_t pixel_width, size_t pixel_height,
-    int64_t elapsed_time_ns, int64_t time_stamp_ns) const {
+    int64_t time_stamp_ns) const {
   WebRtcVideoFrame* frame = new WebRtcVideoFrame();
-  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, elapsed_time_ns,
-                           time_stamp_ns);
+  frame->InitToEmptyBuffer(w, h, pixel_width, pixel_height, time_stamp_ns);
   return frame;
 }
 
 void WebRtcVideoFrame::InitToEmptyBuffer(int w, int h, size_t pixel_width,
                                          size_t pixel_height,
-                                         int64_t elapsed_time_ns,
                                          int64_t time_stamp_ns) {
   video_frame_buffer_ = new rtc::RefCountedObject<webrtc::I420Buffer>(w, h);
   pixel_width_ = pixel_width;
   pixel_height_ = pixel_height;
-  elapsed_time_ns_ = elapsed_time_ns;
   time_stamp_ns_ = time_stamp_ns;
   rotation_ = webrtc::kVideoRotation_0;
 }
@@ -315,7 +310,7 @@
 
   rotated_frame_.reset(CreateEmptyFrame(rotated_width, rotated_height,
                                         GetPixelWidth(), GetPixelHeight(),
-                                        GetElapsedTime(), GetTimeStamp()));
+                                        GetTimeStamp()));
 
   // TODO(guoweis): Add a function in webrtc_libyuv.cc to convert from
   // VideoRotation to libyuv::RotationMode.
diff --git a/talk/media/webrtc/webrtcvideoframe.h b/talk/media/webrtc/webrtcvideoframe.h
index 7900472..f173c96 100644
--- a/talk/media/webrtc/webrtcvideoframe.h
+++ b/talk/media/webrtc/webrtcvideoframe.h
@@ -43,7 +43,6 @@
  public:
   WebRtcVideoFrame();
   WebRtcVideoFrame(const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& buffer,
-                   int64_t elapsed_time_ns,
                    int64_t time_stamp_ns,
                    webrtc::VideoRotation rotation);
 
@@ -67,18 +66,21 @@
             size_t sample_size,
             size_t pixel_width,
             size_t pixel_height,
-            int64_t elapsed_time_ns,
             int64_t time_stamp_ns,
             webrtc::VideoRotation rotation);
 
   bool Init(const CapturedFrame* frame, int dw, int dh, bool apply_rotation);
 
   void InitToEmptyBuffer(int w, int h, size_t pixel_width, size_t pixel_height,
-                         int64_t elapsed_time_ns, int64_t time_stamp_ns);
+                         int64_t time_stamp_ns);
 
+  // TODO(magjed): Remove once Chromium is updated.
   bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
                    int64_t elapsed_time_ns, int64_t time_stamp_ns);
 
+  bool InitToBlack(int w, int h, size_t pixel_width, size_t pixel_height,
+                   int64_t time_stamp_ns);
+
   // From base class VideoFrame.
   virtual bool Reset(uint32 format,
                      int w,
@@ -89,7 +91,6 @@
                      size_t sample_size,
                      size_t pixel_width,
                      size_t pixel_height,
-                     int64_t elapsed_time_ns,
                      int64_t time_stamp_ns,
                      webrtc::VideoRotation rotation,
                      bool apply_rotation);
@@ -111,11 +112,7 @@
 
   virtual size_t GetPixelWidth() const { return pixel_width_; }
   virtual size_t GetPixelHeight() const { return pixel_height_; }
-  virtual int64_t GetElapsedTime() const { return elapsed_time_ns_; }
   virtual int64_t GetTimeStamp() const { return time_stamp_ns_; }
-  virtual void SetElapsedTime(int64_t elapsed_time_ns) {
-    elapsed_time_ns_ = elapsed_time_ns;
-  }
   virtual void SetTimeStamp(int64_t time_stamp_ns) {
     time_stamp_ns_ = time_stamp_ns;
   }
@@ -138,14 +135,12 @@
  private:
   virtual VideoFrame* CreateEmptyFrame(int w, int h, size_t pixel_width,
                                        size_t pixel_height,
-                                       int64_t elapsed_time_ns,
                                        int64_t time_stamp_ns) const;
 
   // An opaque reference counted handle that stores the pixel data.
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> video_frame_buffer_;
   size_t pixel_width_;
   size_t pixel_height_;
-  int64_t elapsed_time_ns_;
   int64_t time_stamp_ns_;
   webrtc::VideoRotation rotation_;
 
diff --git a/talk/media/webrtc/webrtcvideoframe_unittest.cc b/talk/media/webrtc/webrtcvideoframe_unittest.cc
index daa8ffa..6868c2c 100644
--- a/talk/media/webrtc/webrtcvideoframe_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoframe_unittest.cc
@@ -41,11 +41,9 @@
                                        int h,
                                        size_t pixel_width,
                                        size_t pixel_height,
-                                       int64_t elapsed_time,
                                        int64_t time_stamp) const override {
     WebRtcVideoTestFrame* frame = new WebRtcVideoTestFrame();
-    frame->InitToBlack(w, h, pixel_width, pixel_height, elapsed_time,
-                       time_stamp);
+    frame->InitToBlack(w, h, pixel_width, pixel_height, time_stamp);
     return frame;
   }
 };
@@ -68,7 +66,6 @@
     captured_frame.fourcc = cricket::FOURCC_I420;
     captured_frame.pixel_width = 1;
     captured_frame.pixel_height = 1;
-    captured_frame.elapsed_time = 1234;
     captured_frame.time_stamp = 5678;
     captured_frame.rotation = frame_rotation;
     captured_frame.width = frame_width;
@@ -90,7 +87,6 @@
     // Verify the new frame.
     EXPECT_EQ(1u, frame.GetPixelWidth());
     EXPECT_EQ(1u, frame.GetPixelHeight());
-    EXPECT_EQ(1234, frame.GetElapsedTime());
     EXPECT_EQ(5678, frame.GetTimeStamp());
     if (apply_rotation)
       EXPECT_EQ(webrtc::kVideoRotation_0, frame.GetRotation());
@@ -303,14 +299,11 @@
   webrtc::NativeHandleBuffer* buffer =
       new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
           dummy_handle, 640, 480);
-  cricket::WebRtcVideoFrame frame(buffer, 100, 200, webrtc::kVideoRotation_0);
+  cricket::WebRtcVideoFrame frame(buffer, 200, webrtc::kVideoRotation_0);
   EXPECT_EQ(dummy_handle, frame.GetNativeHandle());
   EXPECT_EQ(640u, frame.GetWidth());
   EXPECT_EQ(480u, frame.GetHeight());
-  EXPECT_EQ(100, frame.GetElapsedTime());
   EXPECT_EQ(200, frame.GetTimeStamp());
-  frame.SetElapsedTime(300);
-  EXPECT_EQ(300, frame.GetElapsedTime());
   frame.SetTimeStamp(400);
   EXPECT_EQ(400, frame.GetTimeStamp());
 }
@@ -321,12 +314,11 @@
   webrtc::NativeHandleBuffer* buffer =
       new rtc::RefCountedObject<webrtc::test::FakeNativeHandleBuffer>(
           dummy_handle, 640, 480);
-  cricket::WebRtcVideoFrame frame1(buffer, 100, 200, webrtc::kVideoRotation_0);
+  cricket::WebRtcVideoFrame frame1(buffer, 200, webrtc::kVideoRotation_0);
   cricket::VideoFrame* frame2 = frame1.Copy();
   EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle());
   EXPECT_EQ(frame1.GetWidth(), frame2->GetWidth());
   EXPECT_EQ(frame1.GetHeight(), frame2->GetHeight());
-  EXPECT_EQ(frame1.GetElapsedTime(), frame2->GetElapsedTime());
   EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp());
   delete frame2;
 }
diff --git a/talk/media/webrtc/webrtcvideoframefactory_unittest.cc b/talk/media/webrtc/webrtcvideoframefactory_unittest.cc
index 4dbad05..00c13b5 100644
--- a/talk/media/webrtc/webrtcvideoframefactory_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoframefactory_unittest.cc
@@ -44,7 +44,6 @@
     captured_frame_.fourcc = cricket::FOURCC_I420;
     captured_frame_.pixel_width = 1;
     captured_frame_.pixel_height = 1;
-    captured_frame_.elapsed_time = 1234;
     captured_frame_.time_stamp = 5678;
     captured_frame_.rotation = frame_rotation;
     captured_frame_.width = frame_width;