Reland of: "Implement elapsed time and capture start NTP time estimation." revision @8139
Link to original CL: https://review.webrtc.org/36909004/
R=pbos@webrtc.org
TBR=pthatcher@webrtc.org
BUG=4227
Review URL: https://webrtc-codereview.appspot.com/39669004
git-svn-id: http://webrtc.googlecode.com/svn/trunk@8162 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/media/base/fakevideorenderer.h b/talk/media/base/fakevideorenderer.h
index 9ceaac8..23ae06d 100644
--- a/talk/media/base/fakevideorenderer.h
+++ b/talk/media/base/fakevideorenderer.h
@@ -44,7 +44,8 @@
height_(0),
num_set_sizes_(0),
num_rendered_frames_(0),
- black_frame_(false) {
+ black_frame_(false),
+ last_frame_elapsed_time_ns_(-1) {
}
virtual bool SetSize(int width, int height, int reserved) {
@@ -75,6 +76,7 @@
++errors_;
return false;
}
+ last_frame_elapsed_time_ns_ = frame->GetElapsedTime();
++num_rendered_frames_;
SignalRenderFrame(frame);
return true;
@@ -102,6 +104,11 @@
return black_frame_;
}
+ int64_t last_frame_elapsed_time_ns() const {
+ rtc::CritScope cs(&crit_);
+ return last_frame_elapsed_time_ns_;
+ }
+
sigslot::signal3<int, int, int> SignalSetSize;
sigslot::signal1<const VideoFrame*> SignalRenderFrame;
@@ -160,6 +167,7 @@
int num_set_sizes_;
int num_rendered_frames_;
bool black_frame_;
+ int64_t last_frame_elapsed_time_ns_;
mutable rtc::CriticalSection crit_;
};
diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc
index 33fefdf..7b60526 100644
--- a/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/talk/media/webrtc/webrtcvideoengine2.cc
@@ -1870,7 +1870,9 @@
external_decoder_factory_(external_decoder_factory),
renderer_(NULL),
last_width_(-1),
- last_height_(-1) {
+ last_height_(-1),
+ first_frame_timestamp_(-1),
+ estimated_remote_start_ntp_time_ms_(0) {
config_.renderer = this;
// SetRecvCodecs will also reset (start) the VideoReceiveStream.
SetRecvCodecs(recv_codecs);
@@ -1973,6 +1975,17 @@
const webrtc::I420VideoFrame& frame,
int time_to_render_ms) {
rtc::CritScope crit(&renderer_lock_);
+
+ if (first_frame_timestamp_ < 0)
+ first_frame_timestamp_ = frame.timestamp();
+ int64_t rtp_time_elapsed_since_first_frame =
+ (timestamp_wraparound_handler_.Unwrap(frame.timestamp()) -
+ first_frame_timestamp_);
+ int64_t elapsed_time_ms = rtp_time_elapsed_since_first_frame /
+ (cricket::kVideoCodecClockrate / 1000);
+ if (frame.ntp_time_ms() > 0)
+ estimated_remote_start_ntp_time_ms_ = frame.ntp_time_ms() - elapsed_time_ms;
+
if (renderer_ == NULL) {
LOG(LS_WARNING) << "VideoReceiveStream not connected to a VideoRenderer.";
return;
@@ -1985,7 +1998,7 @@
LOG(LS_VERBOSE) << "RenderFrame: (" << frame.width() << "x" << frame.height()
<< ")";
- const WebRtcVideoRenderFrame render_frame(&frame);
+ const WebRtcVideoRenderFrame render_frame(&frame, elapsed_time_ms);
renderer_->RenderFrame(&render_frame);
}
@@ -2032,6 +2045,7 @@
rtc::CritScope frame_cs(&renderer_lock_);
info.frame_width = last_width_;
info.frame_height = last_height_;
+ info.capture_start_ntp_time_ms = estimated_remote_start_ntp_time_ms_;
// TODO(pbos): Support or remove the following stats.
info.packets_concealed = -1;
diff --git a/talk/media/webrtc/webrtcvideoengine2.h b/talk/media/webrtc/webrtcvideoengine2.h
index 6ffff2b..bb23659 100644
--- a/talk/media/webrtc/webrtcvideoengine2.h
+++ b/talk/media/webrtc/webrtcvideoengine2.h
@@ -48,12 +48,8 @@
#include "webrtc/video_send_stream.h"
namespace webrtc {
-class VideoCaptureModule;
class VideoDecoder;
class VideoEncoder;
-class VideoRender;
-class VideoSendStreamInput;
-class VideoReceiveStream;
}
namespace rtc {
@@ -79,8 +75,6 @@
struct CapturedFrame;
struct Device;
-class WebRtcVideoRenderer;
-
class UnsignalledSsrcHandler {
public:
enum Action {
@@ -117,7 +111,6 @@
// WebRtcVideoEngine2 is used for the new native WebRTC Video API (webrtc:1667).
class WebRtcVideoEngine2 : public sigslot::has_slots<> {
public:
- // Creates the WebRtcVideoEngine2 with internal VideoCaptureModule.
WebRtcVideoEngine2();
virtual ~WebRtcVideoEngine2();
@@ -444,6 +437,14 @@
cricket::VideoRenderer* renderer_ GUARDED_BY(renderer_lock_);
int last_width_ GUARDED_BY(renderer_lock_);
int last_height_ GUARDED_BY(renderer_lock_);
+ // Expands remote RTP timestamps to int64_t to be able to estimate how long
+ // the stream has been running.
+ rtc::TimestampWrapAroundHandler timestamp_wraparound_handler_
+ GUARDED_BY(renderer_lock_);
+ int64_t first_frame_timestamp_ GUARDED_BY(renderer_lock_);
+ // Start NTP time is estimated as current remote NTP time (estimated from
+ // RTCP) minus the elapsed time, as soon as remote NTP time is available.
+ int64_t estimated_remote_start_ntp_time_ms_ GUARDED_BY(renderer_lock_);
};
void Construct(webrtc::Call* call, WebRtcVideoEngine2* engine);
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index 1796500..116949e 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -73,6 +73,19 @@
cricket::kRtcpFbParamCcm, cricket::kRtcpFbCcmParamFir)));
}
+static void CreateBlackFrame(webrtc::I420VideoFrame* video_frame,
+ int width,
+ int height) {
+ video_frame->CreateEmptyFrame(
+ width, height, width, (width + 1) / 2, (width + 1) / 2);
+ memset(video_frame->buffer(webrtc::kYPlane), 16,
+ video_frame->allocated_size(webrtc::kYPlane));
+ memset(video_frame->buffer(webrtc::kUPlane), 128,
+ video_frame->allocated_size(webrtc::kUPlane));
+ memset(video_frame->buffer(webrtc::kVPlane), 128,
+ video_frame->allocated_size(webrtc::kVPlane));
+}
+
} // namespace
namespace cricket {
@@ -176,6 +189,11 @@
return receiving_;
}
+void FakeVideoReceiveStream::InjectFrame(const webrtc::I420VideoFrame& frame,
+ int time_to_render_ms) {
+ config_.renderer->RenderFrame(frame, time_to_render_ms);
+}
+
webrtc::VideoReceiveStream::Stats FakeVideoReceiveStream::GetStats() const {
return webrtc::VideoReceiveStream::Stats();
}
@@ -1583,12 +1601,44 @@
EXPECT_TRUE(channel_->SetCapturer(last_ssrc_, NULL));
}
-TEST_F(WebRtcVideoChannel2Test, DISABLED_WebRtcShouldLog) {
- FAIL() << "Not implemented."; // TODO(pbos): Implement.
-}
+TEST_F(WebRtcVideoChannel2Test, EstimatesNtpStartTimeAndElapsedTimeCorrectly) {
+ // Start at last timestamp to verify that wraparounds are estimated correctly.
+ static const uint32_t kInitialTimestamp = 0xFFFFFFFFu;
+ static const int64_t kInitialNtpTimeMs = 1247891230;
+ static const int kFrameOffsetMs = 20;
+ EXPECT_TRUE(channel_->SetRecvCodecs(engine_.codecs()));
-TEST_F(WebRtcVideoChannel2Test, DISABLED_WebRtcShouldNotLog) {
- FAIL() << "Not implemented."; // TODO(pbos): Implement.
+ FakeVideoReceiveStream* stream = AddRecvStream();
+ cricket::FakeVideoRenderer renderer;
+ EXPECT_TRUE(channel_->SetRenderer(last_ssrc_, &renderer));
+ EXPECT_TRUE(channel_->SetRender(true));
+
+ webrtc::I420VideoFrame video_frame;
+ CreateBlackFrame(&video_frame, 4, 4);
+ video_frame.set_timestamp(kInitialTimestamp);
+ // The remote NTP start time is not available on the first frame (no NTP
+ // timestamp has been received yet), but it should still be estimable later.
+ stream->InjectFrame(video_frame, 0);
+
+ EXPECT_EQ(1, renderer.num_rendered_frames());
+ EXPECT_EQ(0, renderer.last_frame_elapsed_time_ns());
+
+ // kInitialTimestamp is 0xFFFFFFFF, so kInitialTimestamp + kFrameOffsetMs * 90
+ // wraps around to kFrameOffsetMs * 90 - 1. Writing the sum directly triggers
+ // a constant-overflow warning, so we use the wrapped value explicitly here.
+ video_frame.set_timestamp(kFrameOffsetMs * 90 - 1);
+ video_frame.set_ntp_time_ms(kInitialNtpTimeMs + kFrameOffsetMs);
+ stream->InjectFrame(video_frame, 0);
+
+ EXPECT_EQ(2, renderer.num_rendered_frames());
+ EXPECT_EQ(kFrameOffsetMs * rtc::kNumNanosecsPerMillisec,
+ renderer.last_frame_elapsed_time_ns());
+
+ // Verify that NTP time has been correctly deduced.
+ cricket::VideoMediaInfo info;
+ ASSERT_TRUE(channel_->GetStats(cricket::StatsOptions(), &info));
+ ASSERT_EQ(1u, info.receivers.size());
+ EXPECT_EQ(kInitialNtpTimeMs, info.receivers[0].capture_start_ntp_time_ms);
}
TEST_F(WebRtcVideoChannel2Test, SetDefaultSendCodecs) {
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.h b/talk/media/webrtc/webrtcvideoengine2_unittest.h
index e53a30b..72c52f9 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.h
@@ -84,6 +84,8 @@
bool IsReceiving() const;
+ void InjectFrame(const webrtc::I420VideoFrame& frame, int time_to_render_ms);
+
private:
virtual webrtc::VideoReceiveStream::Stats GetStats() const OVERRIDE;
diff --git a/talk/media/webrtc/webrtcvideoframe.cc b/talk/media/webrtc/webrtcvideoframe.cc
index 7a548f3..19566d8 100644
--- a/talk/media/webrtc/webrtcvideoframe.cc
+++ b/talk/media/webrtc/webrtcvideoframe.cc
@@ -360,8 +360,9 @@
}
WebRtcVideoRenderFrame::WebRtcVideoRenderFrame(
- const webrtc::I420VideoFrame* frame)
- : frame_(frame) {
+ const webrtc::I420VideoFrame* frame,
+ int64_t elapsed_time_ms)
+ : frame_(frame), elapsed_time_ms_(elapsed_time_ms) {
}
bool WebRtcVideoRenderFrame::InitToBlack(int w,
@@ -442,12 +443,10 @@
}
int64_t WebRtcVideoRenderFrame::GetElapsedTime() const {
- // Convert millisecond render time to ns timestamp.
- return frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec;
+ return elapsed_time_ms_ * rtc::kNumNanosecsPerMillisec;
}
int64_t WebRtcVideoRenderFrame::GetTimeStamp() const {
- // Convert 90K rtp timestamp to ns timestamp.
- return (frame_->timestamp() / 90) * rtc::kNumNanosecsPerMillisec;
+ return frame_->render_time_ms() * rtc::kNumNanosecsPerMillisec;
}
void WebRtcVideoRenderFrame::SetElapsedTime(int64_t elapsed_time) {
UNIMPLEMENTED;
diff --git a/talk/media/webrtc/webrtcvideoframe.h b/talk/media/webrtc/webrtcvideoframe.h
index bc4622f..c6a7d70 100644
--- a/talk/media/webrtc/webrtcvideoframe.h
+++ b/talk/media/webrtc/webrtcvideoframe.h
@@ -139,7 +139,8 @@
// be written to.
class WebRtcVideoRenderFrame : public VideoFrame {
public:
- explicit WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame);
+ WebRtcVideoRenderFrame(const webrtc::I420VideoFrame* frame,
+ int64_t elapsed_time_ms);
virtual bool InitToBlack(int w,
int h,
@@ -192,6 +193,7 @@
private:
const webrtc::I420VideoFrame* const frame_;
+ const int64_t elapsed_time_ms_;
};
} // namespace cricket