Have the ViE sender also use the timestamp of the last encoded frame when determining whether the video stream is paused/muted for padding purposes.

Without this, external encoders with internal sources (i.e., ones that bypass the normal camera capture path) never trigger ViEEncoder::DeliverFrame, so time_of_last_incoming_frame_ms_ stays at 0 and padding is incorrectly cut off.
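
For illustration, a minimal standalone sketch of the rule this change relies on (not the actual ViEEncoder code; the PaddingTracker type, the helper names, and the 2000 ms threshold are assumptions for the example): padding decays to zero once neither an input frame nor an encoded frame has been seen within the stop-padding threshold, so the activity timestamp has to be bumped on both the capture path and the encoded-frame callback path.

  #include <cstdint>

  namespace {

  // Assumed threshold for this sketch only.
  constexpr int64_t kStopPaddingThresholdMs = 2000;

  struct PaddingTracker {
    int64_t time_of_last_frame_activity_ms = 0;

    // Called both when a raw frame is delivered for encoding and when an
    // encoded frame comes back (e.g. from an external encoder with an
    // internal source).
    void OnFrameActivity(int64_t now_ms) {
      time_of_last_frame_activity_ms = now_ms;
    }

    // Returns the padding target, decayed to zero when there has been no
    // frame activity within the threshold.
    int GetPaddingNeededBps(int64_t now_ms, int pad_up_to_bitrate_bps) const {
      if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
        return 0;
      return pad_up_to_bitrate_bps;
    }
  };

  }  // namespace

  int main() {
    PaddingTracker tracker;
    // Encoded-frame activity at t=1000 ms keeps padding alive shortly after,
    // but it decays to zero once the activity is older than the threshold.
    tracker.OnFrameActivity(1000);
    bool ok = tracker.GetPaddingNeededBps(1500, 300000) > 0 &&
              tracker.GetPaddingNeededBps(4000, 300000) == 0;
    return ok ? 0 : 1;
  }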

BUG=
R=stefan@webrtc.org

Review URL: https://webrtc-codereview.appspot.com/44099004

Cr-Commit-Position: refs/heads/master@{#9010}
diff --git a/webrtc/video_engine/vie_encoder.cc b/webrtc/video_engine/vie_encoder.cc
index d54e22b..2b1674d 100644
--- a/webrtc/video_engine/vie_encoder.cc
+++ b/webrtc/video_engine/vie_encoder.cc
@@ -122,7 +122,7 @@
       pacer_(pacer),
       bitrate_allocator_(bitrate_allocator),
       bitrate_controller_(bitrate_controller),
-      time_of_last_incoming_frame_ms_(0),
+      time_of_last_frame_activity_ms_(0),
       send_padding_(false),
       min_transmit_bitrate_kbps_(0),
       last_observed_bitrate_bps_(0),
@@ -398,7 +398,7 @@
 }
 
 int ViEEncoder::GetPaddingNeededBps(int bitrate_bps) const {
-  int64_t time_of_last_incoming_frame_ms;
+  int64_t time_of_last_frame_activity_ms;
   int min_transmit_bitrate_bps;
   {
     CriticalSectionScoped cs(data_cs_.get());
@@ -406,7 +406,7 @@
         send_padding_ || video_suspended_ || min_transmit_bitrate_kbps_ > 0;
     if (!send_padding)
       return 0;
-    time_of_last_incoming_frame_ms = time_of_last_incoming_frame_ms_;
+    time_of_last_frame_activity_ms = time_of_last_frame_activity_ms_;
     min_transmit_bitrate_bps = 1000 * min_transmit_bitrate_kbps_;
   }
 
@@ -441,9 +441,9 @@
     pad_up_to_bitrate_bps = 0;
 
   // The amount of padding should decay to zero if no frames are being
-  // captured unless a min-transmit bitrate is used.
+  // captured/encoded unless a min-transmit bitrate is used.
   int64_t now_ms = TickTime::MillisecondTimestamp();
-  if (now_ms - time_of_last_incoming_frame_ms > kStopPaddingThresholdMs)
+  if (now_ms - time_of_last_frame_activity_ms > kStopPaddingThresholdMs)
     pad_up_to_bitrate_bps = 0;
 
   // Pad up to min bitrate.
@@ -508,7 +508,7 @@
   }
   {
     CriticalSectionScoped cs(data_cs_.get());
-    time_of_last_incoming_frame_ms_ = TickTime::MillisecondTimestamp();
+    time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
     if (EncoderPaused()) {
       TraceFrameDropStart();
       return;
@@ -713,6 +713,11 @@
   DCHECK(send_payload_router_ != NULL);
 
   {
+    CriticalSectionScoped cs(data_cs_.get());
+    time_of_last_frame_activity_ms_ = TickTime::MillisecondTimestamp();
+  }
+
+  {
     CriticalSectionScoped cs(callback_cs_.get());
     if (send_statistics_proxy_ != NULL)
       send_statistics_proxy_->OnSendEncodedImage(encoded_image, rtp_video_hdr);
diff --git a/webrtc/video_engine/vie_encoder.h b/webrtc/video_engine/vie_encoder.h
index a3b7252..074aa61 100644
--- a/webrtc/video_engine/vie_encoder.h
+++ b/webrtc/video_engine/vie_encoder.h
@@ -210,7 +210,10 @@
   BitrateAllocator* const bitrate_allocator_;
   BitrateController* const bitrate_controller_;
 
-  int64_t time_of_last_incoming_frame_ms_ GUARDED_BY(data_cs_);
+  // The time we last received an input frame or encoded frame. This is used to
+  // track when video is stopped long enough that we also want to stop sending
+  // padding.
+  int64_t time_of_last_frame_activity_ms_ GUARDED_BY(data_cs_);
   bool send_padding_ GUARDED_BY(data_cs_);
   int min_transmit_bitrate_kbps_ GUARDED_BY(data_cs_);
   uint32_t last_observed_bitrate_bps_ GUARDED_BY(data_cs_);