Full stack graphs

Updating the full stack test to optionally record per-frame metadata and
write it to a file with a given filename (controlled from the new
full_stack_quality_sampler executable).
Adding a Python script that reads the output generated by the full stack
test and plots the graph(s).
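
Example usage (paths are illustrative; flag names as defined in
full_stack_quality_sampler.cc):
  ./full_stack_quality_sampler --codec=VP9 --test_duration=60 \
      --filename=vp9_data.txt
  ./full_stack_plot.py -df psnr vp9_data.txt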

Review URL: https://codereview.webrtc.org/1289933003

Cr-Commit-Position: refs/heads/master@{#9874}
diff --git a/webrtc/video/full_stack.cc b/webrtc/video/full_stack.cc
index 3995f8a..45c28ad 100644
--- a/webrtc/video/full_stack.cc
+++ b/webrtc/video/full_stack.cc
@@ -14,6 +14,7 @@
 
 #include "testing/gtest/include/gtest/gtest.h"
 
+#include "webrtc/base/format_macros.h"
 #include "webrtc/base/scoped_ptr.h"
 #include "webrtc/base/thread_annotations.h"
 #include "webrtc/call.h"
@@ -25,8 +26,6 @@
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/sleep.h"
-#include "webrtc/test/call_test.h"
-#include "webrtc/test/direct_transport.h"
 #include "webrtc/test/encoder_settings.h"
 #include "webrtc/test/fake_encoder.h"
 #include "webrtc/test/frame_generator.h"
@@ -34,41 +33,13 @@
 #include "webrtc/test/statistics.h"
 #include "webrtc/test/testsupport/fileutils.h"
 #include "webrtc/typedefs.h"
+#include "webrtc/video/full_stack.h"
 
 namespace webrtc {
 
 static const int kFullStackTestDurationSecs = 60;
 static const int kSendStatsPollingIntervalMs = 1000;
 
-enum class ContentMode {
-  kRealTimeVideo,
-  kScreensharingStaticImage,
-  kScreensharingScrollingImage,
-};
-
-struct FullStackTestParams {
-  const char* test_label;
-  struct {
-    const char* name;
-    size_t width, height;
-    int fps;
-  } clip;
-  ContentMode mode;
-  int min_bitrate_bps;
-  int target_bitrate_bps;
-  int max_bitrate_bps;
-  double avg_psnr_threshold;
-  double avg_ssim_threshold;
-  int test_durations_secs;
-  std::string codec;
-  FakeNetworkPipe::Config link;
-};
-
-class FullStackTest : public test::CallTest {
- protected:
-  void RunTest(const FullStackTestParams& params);
-};
-
 class VideoAnalyzer : public PacketReceiver,
                       public newapi::Transport,
                       public VideoRenderer,
@@ -80,12 +51,14 @@
                 const char* test_label,
                 double avg_psnr_threshold,
                 double avg_ssim_threshold,
-                int duration_frames)
+                int duration_frames,
+                const std::string& graph_data_output_filename)
       : input_(input),
         transport_(transport),
         receiver_(nullptr),
         send_stream_(nullptr),
         test_label_(test_label),
+        graph_data_output_filename_(graph_data_output_filename),
         frames_to_process_(duration_frames),
         frames_recorded_(0),
         frames_processed_(0),
@@ -177,8 +150,11 @@
             header.timestamp - first_send_frame_.timestamp();
         first_send_frame_.Reset();
       }
-      send_times_[header.timestamp - rtp_timestamp_delta_] =
+      uint32_t timestamp = header.timestamp - rtp_timestamp_delta_;
+      send_times_[timestamp] =
           Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
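+      // Sum payload bytes (excluding RTP header and padding) over all
+      // packets carrying this frame's timestamp.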
+      encoded_frame_sizes_[timestamp] +=
+          length - (header.headerLength + header.paddingLength);
     }
 
     return transport_->SendRtp(packet, length);
@@ -271,20 +247,26 @@
  private:
   struct FrameComparison {
     FrameComparison()
-        : dropped(false), send_time_ms(0), recv_time_ms(0), render_time_ms(0) {}
+        : dropped(false),
+          send_time_ms(0),
+          recv_time_ms(0),
+          render_time_ms(0),
+          encoded_frame_size(0) {}
 
     FrameComparison(const VideoFrame& reference,
                     const VideoFrame& render,
                     bool dropped,
                     int64_t send_time_ms,
                     int64_t recv_time_ms,
-                    int64_t render_time_ms)
+                    int64_t render_time_ms,
+                    size_t encoded_frame_size)
         : reference(reference),
           render(render),
           dropped(dropped),
           send_time_ms(send_time_ms),
           recv_time_ms(recv_time_ms),
-          render_time_ms(render_time_ms) {}
+          render_time_ms(render_time_ms),
+          encoded_frame_size(encoded_frame_size) {}
 
     VideoFrame reference;
     VideoFrame render;
@@ -292,6 +274,35 @@
     int64_t send_time_ms;
     int64_t recv_time_ms;
     int64_t render_time_ms;
+    size_t encoded_frame_size;
+  };
+
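+  // Per-frame metadata recorded when graph output is enabled. All fields
+  // are doubles so PrintSamplesToFile() can write them in a uniform format.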
+  struct Sample {
+    Sample(double dropped,
+           double input_time_ms,
+           double send_time_ms,
+           double recv_time_ms,
+           double encoded_frame_size,
+           double psnr,
+           double ssim,
+           double render_time_ms)
+        : dropped(dropped),
+          input_time_ms(input_time_ms),
+          send_time_ms(send_time_ms),
+          recv_time_ms(recv_time_ms),
+          encoded_frame_size(encoded_frame_size),
+          psnr(psnr),
+          ssim(ssim),
+          render_time_ms(render_time_ms) {}
+
+    double dropped;
+    double input_time_ms;
+    double send_time_ms;
+    double recv_time_ms;
+    double encoded_frame_size;
+    double psnr;
+    double ssim;
+    double render_time_ms;
   };
 
   void AddFrameComparison(const VideoFrame& reference,
@@ -304,10 +315,18 @@
     int64_t recv_time_ms = recv_times_[reference.timestamp()];
     recv_times_.erase(reference.timestamp());
 
+    size_t encoded_size = encoded_frame_sizes_[reference.timestamp()];
+    encoded_frame_sizes_.erase(reference.timestamp());
+
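+    // Deep-copy both frames so the queued comparison owns its pixel data
+    // rather than sharing buffers with the capture/render path.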
+    VideoFrame reference_copy;
+    VideoFrame render_copy;
+    reference_copy.CopyFrame(reference);
+    render_copy.CopyFrame(render);
+
     rtc::CritScope crit(&comparison_lock_);
-    comparisons_.push_back(FrameComparison(reference, render, dropped,
+    comparisons_.push_back(FrameComparison(reference_copy, render_copy, dropped,
                                            send_time_ms, recv_time_ms,
-                                           render_time_ms));
+                                           render_time_ms, encoded_size));
     comparison_available_event_->Set();
   }
 
@@ -365,6 +384,8 @@
 
     if (FrameProcessed()) {
       PrintResults();
+      if (!graph_data_output_filename_.empty())
+        PrintSamplesToFile();
       done_->Set();
       comparison_available_event_->Set();
       return false;
@@ -437,9 +458,18 @@
     double psnr = I420PSNR(&comparison.reference, &comparison.render);
     double ssim = I420SSIM(&comparison.reference, &comparison.render);
 
+    int64_t input_time_ms = comparison.reference.ntp_time_ms();
+
     rtc::CritScope crit(&comparison_lock_);
+    if (!graph_data_output_filename_.empty()) {
+      samples_.push_back(Sample(
+          comparison.dropped, input_time_ms, comparison.send_time_ms,
+          comparison.recv_time_ms, comparison.encoded_frame_size, psnr, ssim,
+          comparison.render_time_ms));
+    }
     psnr_.AddSample(psnr);
     ssim_.AddSample(ssim);
+
     if (comparison.dropped) {
       ++dropped_frames_;
       return;
@@ -448,11 +478,11 @@
       rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_);
     last_render_time_ = comparison.render_time_ms;
 
-    int64_t input_time_ms = comparison.reference.ntp_time_ms();
     sender_time_.AddSample(comparison.send_time_ms - input_time_ms);
     receiver_time_.AddSample(comparison.render_time_ms -
                              comparison.recv_time_ms);
     end_to_end_.AddSample(comparison.render_time_ms - input_time_ms);
+    encoded_frame_size_.AddSample(comparison.encoded_frame_size);
   }
 
   void PrintResult(const char* result_type,
@@ -466,7 +496,39 @@
            unit);
   }
 
+  void PrintSamplesToFile() {
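+    // Output format (consumed by full_stack_plot.py): test label, sample
+    // count, a field-name header, then one whitespace-separated row per
+    // frame, sorted by input time.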
+    FILE* out = fopen(graph_data_output_filename_.c_str(), "w");
+    CHECK(out != nullptr)
+        << "Couldn't open file: " << graph_data_output_filename_;
+
+    rtc::CritScope crit(&comparison_lock_);
+    std::sort(samples_.begin(), samples_.end(),
+              [](const Sample& A, const Sample& B)
+                  -> bool { return A.input_time_ms < B.input_time_ms; });
+
+    fprintf(out, "%s\n", test_label_);
+    fprintf(out, "%" PRIuS "\n", samples_.size());
+    fprintf(out,
+            "dropped "
+            "input_time_ms "
+            "send_time_ms "
+            "recv_time_ms "
+            "encoded_frame_size "
+            "psnr "
+            "ssim "
+            "render_time_ms\n");
+    for (const Sample& sample : samples_) {
+      fprintf(out, "%lf %lf %lf %lf %lf %lf %lf %lf\n", sample.dropped,
+              sample.input_time_ms, sample.send_time_ms, sample.recv_time_ms,
+              sample.encoded_frame_size, sample.psnr, sample.ssim,
+              sample.render_time_ms);
+    }
+    fclose(out);
+  }
+
   const char* const test_label_;
+  std::string graph_data_output_filename_;
+  std::vector<Sample> samples_ GUARDED_BY(comparison_lock_);
   test::Statistics sender_time_ GUARDED_BY(comparison_lock_);
   test::Statistics receiver_time_ GUARDED_BY(comparison_lock_);
   test::Statistics psnr_ GUARDED_BY(comparison_lock_);
@@ -488,9 +550,11 @@
 
   rtc::CriticalSection crit_;
   std::deque<VideoFrame> frames_ GUARDED_BY(crit_);
+  std::deque<VideoSendStream::Stats> send_stats_ GUARDED_BY(crit_);
   VideoFrame last_rendered_frame_ GUARDED_BY(crit_);
   std::map<uint32_t, int64_t> send_times_ GUARDED_BY(crit_);
   std::map<uint32_t, int64_t> recv_times_ GUARDED_BY(crit_);
+  std::map<uint32_t, size_t> encoded_frame_sizes_ GUARDED_BY(crit_);
   VideoFrame first_send_frame_ GUARDED_BY(crit_);
   const double avg_psnr_threshold_;
   const double avg_ssim_threshold_;
@@ -504,11 +568,16 @@
 };
 
 void FullStackTest::RunTest(const FullStackTestParams& params) {
+  // TODO(ivica): Add num_temporal_layers as a param.
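+  // (Assumption: graph runs use a single temporal layer so that per-frame
+  // encoded sizes are directly comparable; regular runs keep two layers.)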
+  unsigned char num_temporal_layers =
+      params.graph_data_output_filename.empty() ? 2 : 1;
+
   test::DirectTransport send_transport(params.link);
   test::DirectTransport recv_transport(params.link);
   VideoAnalyzer analyzer(nullptr, &send_transport, params.test_label,
                          params.avg_psnr_threshold, params.avg_ssim_threshold,
-                         params.test_durations_secs * params.clip.fps);
+                         params.test_durations_secs * params.clip.fps,
+                         params.graph_data_output_filename);
 
   CreateCalls(Call::Config(), Call::Config());
 
@@ -557,18 +626,21 @@
       vp8_settings = VideoEncoder::GetDefaultVp8Settings();
       vp8_settings.denoisingOn = false;
       vp8_settings.frameDroppingOn = false;
-      vp8_settings.numberOfTemporalLayers = 2;
+      vp8_settings.numberOfTemporalLayers = num_temporal_layers;
       encoder_config_.encoder_specific_settings = &vp8_settings;
     } else if (params.codec == "VP9") {
       vp9_settings = VideoEncoder::GetDefaultVp9Settings();
       vp9_settings.denoisingOn = false;
       vp9_settings.frameDroppingOn = false;
-      vp9_settings.numberOfTemporalLayers = 2;
+      vp9_settings.numberOfTemporalLayers = num_temporal_layers;
       encoder_config_.encoder_specific_settings = &vp9_settings;
     }
 
     stream->temporal_layer_thresholds_bps.clear();
-    stream->temporal_layer_thresholds_bps.push_back(stream->target_bitrate_bps);
+    if (num_temporal_layers > 1) {
+      stream->temporal_layer_thresholds_bps.push_back(
+          stream->target_bitrate_bps);
+    }
   }
 
   CreateMatchingReceiveConfigs(&recv_transport);
diff --git a/webrtc/video/full_stack.h b/webrtc/video/full_stack.h
new file mode 100644
index 0000000..56ef4b3
--- /dev/null
+++ b/webrtc/video/full_stack.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_VIDEO_FULL_STACK_H_
+#define WEBRTC_VIDEO_FULL_STACK_H_
+
+#include <string>
+
+#include "webrtc/test/call_test.h"
+#include "webrtc/test/direct_transport.h"
+
+namespace webrtc {
+
+enum class ContentMode {
+  kRealTimeVideo,
+  kScreensharingStaticImage,
+  kScreensharingScrollingImage,
+};
+
+struct FullStackTestParams {
+  const char* test_label;
+  struct {
+    const char* name;
+    size_t width, height;
+    int fps;
+  } clip;
+  ContentMode mode;
+  int min_bitrate_bps;
+  int target_bitrate_bps;
+  int max_bitrate_bps;
+  double avg_psnr_threshold;
+  double avg_ssim_threshold;
+  int test_durations_secs;
+  std::string codec;
+  FakeNetworkPipe::Config link;
+  std::string graph_data_output_filename;
+};
+
+class FullStackTest : public test::CallTest {
+ protected:
+  void RunTest(const FullStackTestParams& params);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_FULL_STACK_H_
diff --git a/webrtc/video/full_stack_plot.py b/webrtc/video/full_stack_plot.py
new file mode 100755
index 0000000..92b50a4
--- /dev/null
+++ b/webrtc/video/full_stack_plot.py
@@ -0,0 +1,414 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""Generate graphs for data generated by full_stack_quality_sampler.cc.
+
+Usage examples:
+  Show end-to-end time for a single full stack test.
+  ./full_stack_plot.py -df end_to_end -o 600 --frames 1000 vp9_data.txt
+
+  Show PSNR and encoded frame size simultaneously for two different runs of
+  the full stack test, averaged over a cycle of 200 frames. Useful e.g. for
+  the screenshare slides test.
+  ./full_stack_plot.py -c 200 -df psnr -drf encoded_frame_size \\
+                       before.txt after.txt
+
+  Similar to the previous test, but multiple graphs.
+  ./full_stack_plot.py -c 200 -df psnr vp8.txt vp9.txt --next \\
+                       -c 200 -df sender_time vp8.txt vp9.txt --next \\
+                       -c 200 -df end_to_end vp8.txt vp9.txt
+"""
+
+import argparse
+from collections import defaultdict
+import itertools
+import sys
+import matplotlib.pyplot as plt
+import numpy
+
+# Fields
+DROPPED = 0
+INPUT_TIME = 1              # ms
+SEND_TIME = 2               # ms
+RECV_TIME = 3               # ms
+ENCODED_FRAME_SIZE = 4      # bytes
+PSNR = 5
+SSIM = 6
+RENDER_TIME = 7             # ms
+
+TOTAL_RAW_FIELDS = 8
+
+SENDER_TIME = TOTAL_RAW_FIELDS + 0
+RECEIVER_TIME = TOTAL_RAW_FIELDS + 1
+END_TO_END = TOTAL_RAW_FIELDS + 2
+RENDERED_DELTA = TOTAL_RAW_FIELDS + 3
+
+FIELD_MASK = 255
+
+# Options
+HIDE_DROPPED = 256
+RIGHT_Y_AXIS = 512
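+# A field specifier is a field id OR'ed with option bits: the low byte
+# (FIELD_MASK) selects the field, the higher bits toggle per-line options.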
+
+# internal field id, field name, title
+_fields = [
+    # Raw
+    (DROPPED, "dropped", "dropped"),
+    (INPUT_TIME, "input_time_ms", "input time"),
+    (SEND_TIME, "send_time_ms", "send time"),
+    (RECV_TIME, "recv_time_ms", "recv time"),
+    (ENCODED_FRAME_SIZE, "encoded_frame_size", "encoded frame size"),
+    (PSNR, "psnr", "PSNR"),
+    (SSIM, "ssim", "SSIM"),
+    (RENDER_TIME, "render_time_ms", "render time"),
+    # Auto-generated
+    (SENDER_TIME, "sender_time", "sender time"),
+    (RECEIVER_TIME, "receiver_time", "receiver time"),
+    (END_TO_END, "end_to_end", "end to end"),
+    (RENDERED_DELTA, "rendered_delta", "rendered delta"),
+]
+
+name_to_id = {field[1]: field[0] for field in _fields}
+id_to_title = {field[0]: field[2] for field in _fields}
+
+
+def field_arg_to_id(arg):
+  if arg == "none":
+    return None
+  if arg in name_to_id:
+    return name_to_id[arg]
+  if arg + "_ms" in name_to_id:
+    return name_to_id[arg + "_ms"]
+  raise Exception("Unrecognized field name \"{}\"".format(arg))
+
+
+class PlotLine(object):
+  """Data for a single graph line."""
+
+  def __init__(self, label, values, flags):
+    self.label = label
+    self.values = values
+    self.flags = flags
+
+
+class Data(object):
+  """Object representing one full stack test."""
+
+  def __init__(self, filename):
+    self.title = ""
+    self.length = 0
+    self.samples = defaultdict(list)
+
+    self._read_samples(filename)
+
+  def _read_samples(self, filename):
+    """Reads graph data from the given file."""
+    f = open(filename)
+    it = iter(f)
+
+    self.title = it.next().strip()
+    self.length = int(it.next())
+    field_names = [name.strip() for name in it.next().split()]
+    field_ids = [name_to_id[name] for name in field_names]
+
+    for field_id in field_ids:
+      self.samples[field_id] = [0.0] * self.length
+
+    for sample_id in xrange(self.length):
+      for col, value in enumerate(it.next().split()):
+        self.samples[field_ids[col]][sample_id] = float(value)
+
+    self._subtract_first_input_time()
+    self._generate_additional_data()
+
+    f.close()
+
+  def _subtract_first_input_time(self):
+    offset = self.samples[INPUT_TIME][0]
+    for field in [INPUT_TIME, SEND_TIME, RECV_TIME, RENDER_TIME]:
+      if field in self.samples:
+        self.samples[field] = [x - offset for x in self.samples[field]]
+
+  def _generate_additional_data(self):
+    """Calculates sender time, receiver time etc. from the raw data."""
+    s = self.samples
+    last_render_time = 0
+    for field_id in [SENDER_TIME, RECEIVER_TIME, END_TO_END, RENDERED_DELTA]:
+      s[field_id] = [0] * self.length
+
+    for k in range(self.length):
+      s[SENDER_TIME][k] = s[SEND_TIME][k] - s[INPUT_TIME][k]
+
+      decoded_time = s[RENDER_TIME][k]
+      s[RECEIVER_TIME][k] = decoded_time - s[RECV_TIME][k]
+      s[END_TO_END][k] = decoded_time - s[INPUT_TIME][k]
+      if not s[DROPPED][k]:
+        if k > 0:
+          s[RENDERED_DELTA][k] = decoded_time - last_render_time
+        last_render_time = decoded_time
+
+  def _hide(self, values):
+    """
+    Replaces values for dropped frames with None.
+    These values are then skipped by the plot() method.
+    """
+
+    return [None if self.samples[DROPPED][k] else values[k]
+            for k in range(len(values))]
+
+  def add_samples(self, config, target_lines_list):
+    """Creates graph lines from the current data set with given config."""
+    for field in config.fields:
+      # field is None means the user wants just to skip the color.
+      if field is None:
+        target_lines_list.append(None)
+        continue
+
+      field_id = field & FIELD_MASK
+      values = self.samples[field_id]
+
+      if field & HIDE_DROPPED:
+        values = self._hide(values)
+
+      target_lines_list.append(PlotLine(
+          self.title + " " + id_to_title[field_id],
+          values, field & ~FIELD_MASK))
+
+
+def average_over_cycle(values, length):
+  """
+  Returns the list:
+    [
+        avg(values[0], values[length], ...),
+        avg(values[1], values[length + 1], ...),
+        ...
+        avg(values[length - 1], values[2 * length - 1], ...),
+    ]
+
+  Skips None values when calculating the average value.
+  """
+
+  total = [0.0] * length
+  count = [0] * length
+  for k in range(len(values)):
+    if values[k] is not None:
+      total[k % length] += values[k]
+      count[k % length] += 1
+
+  result = [0.0] * length
+  for k in range(length):
+    result[k] = total[k] / count[k] if count[k] else None
+  return result
+
+
+class PlotConfig(object):
+  """Object representing a single graph."""
+
+  def __init__(self, fields, data_list, cycle_length=None, frames=None,
+               offset=0, output_filename=None, title="Graph"):
+    self.fields = fields
+    self.data_list = data_list
+    self.cycle_length = cycle_length
+    self.frames = frames
+    self.offset = offset
+    self.output_filename = output_filename
+    self.title = title
+
+  def plot(self, ax1):
+    lines = []
+    for data in self.data_list:
+      if not data:
+        # Add None lines to skip the colors.
+        lines.extend([None] * len(self.fields))
+      else:
+        data.add_samples(self, lines)
+
+    def _slice_values(values):
+      if self.offset:
+        values = values[self.offset:]
+      if self.frames:
+        values = values[:self.frames]
+      return values
+
+    length = None
+    for line in lines:
+      if line is None:
+        continue
+
+      line.values = _slice_values(line.values)
+      if self.cycle_length:
+        line.values = average_over_cycle(line.values, self.cycle_length)
+
+      if length is None:
+        length = len(line.values)
+      elif length != len(line.values):
+        raise Exception("All arrays should have the same length!")
+
+    ax1.set_xlabel("Frame", fontsize="large")
+    if any(line.flags & RIGHT_Y_AXIS for line in lines if line):
+      ax2 = ax1.twinx()
+      ax2.set_xlabel("Frame", fontsize="large")
+    else:
+      ax2 = None
+
+    # Have to implement color_cycle manually, due to two scales in a graph.
+    color_cycle = ["b", "r", "g", "c", "m", "y", "k"]
+    color_iter = itertools.cycle(color_cycle)
+
+    for line in lines:
+      if not line:
+        color_iter.next()
+        continue
+
+      if self.cycle_length:
+        x = numpy.array(range(self.cycle_length))
+      else:
+        x = numpy.array(range(self.offset, self.offset + len(line.values)))
+      y = numpy.array(line.values)
+      ax = ax2 if line.flags & RIGHT_Y_AXIS else ax1
+      ax.plot(x, y, "o-", label=line.label, markersize=3.0, linewidth=1.0,
+              color=color_iter.next())
+
+    ax1.grid(True)
+    if ax2:
+      ax1.legend(loc="upper left", shadow=True, fontsize="large")
+      ax2.legend(loc="upper right", shadow=True, fontsize="large")
+    else:
+      ax1.legend(loc="best", shadow=True, fontsize="large")
+
+
+def load_files(filenames):
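+  """Parses each file at most once, caching the result across graphs."""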
+  result = []
+  for filename in filenames:
+    if filename in load_files.cache:
+      result.append(load_files.cache[filename])
+    else:
+      data = Data(filename)
+      load_files.cache[filename] = data
+      result.append(data)
+  return result
+load_files.cache = {}
+
+
+def get_parser():
+  class CustomAction(argparse.Action):
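+    """argparse action that records (name, value) pairs in order."""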
+
+    def __call__(self, parser, namespace, values, option_string=None):
+      if "ordered_args" not in namespace:
+        namespace.ordered_args = []
+      namespace.ordered_args.append((self.dest, values))
+
+  parser = argparse.ArgumentParser(
+      description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter)
+
+  parser.add_argument(
+      "-c", "--cycle_length", nargs=1, action=CustomAction,
+      type=int, help="Cycle length over which to average the values.")
+  parser.add_argument(
+      "-f", "--field", nargs=1, action=CustomAction,
+      help="Name of the field to show. Use 'none' to skip a color.")
+  parser.add_argument("-r", "--right", nargs=0, action=CustomAction,
+                      help="Use right Y axis for given field.")
+  parser.add_argument("-d", "--drop", nargs=0, action=CustomAction,
+                      help="Hide values for dropped frames.")
+  parser.add_argument("-o", "--offset", nargs=1, action=CustomAction, type=int,
+                      help="Frame offset.")
+  parser.add_argument("-n", "--next", nargs=0, action=CustomAction,
+                      help="Separator for multiple graphs.")
+  parser.add_argument(
+      "--frames", nargs=1, action=CustomAction, type=int,
+      help="Frame count to show or take into account while averaging.")
+  parser.add_argument("-t", "--title", nargs=1, action=CustomAction,
+                      help="Title of the graph.")
+  parser.add_argument(
+      "-O", "--output_filename", nargs=1, action=CustomAction,
+      help="Use to save the graph into a file. "
+           "Otherwise, a window will be shown.")
+  parser.add_argument(
+      "files", nargs="+", action=CustomAction,
+      help="List of text-based files generated by full_stack.cc")
+  return parser
+
+
+def _plot_config_from_args(args, graph_num):
+  # Pylint complains about using kwargs, so have to do it this way.
+  cycle_length = None
+  frames = None
+  offset = 0
+  output_filename = None
+  title = "Graph"
+
+  fields = []
+  files = []
+  mask = 0
+  for key, values in args:
+    if key == "cycle_length":
+      cycle_length = values[0]
+    elif key == "frames":
+      frames = values[0]
+    elif key == "offset":
+      offset = values[0]
+    elif key == "output_filename":
+      output_filename = values[0]
+    elif key == "title":
+      title = values[0]
+    elif key == "drop":
+      mask |= HIDE_DROPPED
+    elif key == "right":
+      mask |= RIGHT_Y_AXIS
+    elif key == "field":
+      field_id = field_arg_to_id(values[0])
+      fields.append(field_id | mask if field_id is not None else None)
+      mask = 0  # Reset mask after the field argument.
+    elif key == "files":
+      files.extend(values)
+
+  if not files:
+    raise Exception("Missing file argument(s) for graph #{}".format(graph_num))
+  if not fields:
+    raise Exception("Missing field argument(s) for graph #{}".format(graph_num))
+
+  return PlotConfig(fields, load_files(files), cycle_length=cycle_length,
+      frames=frames, offset=offset, output_filename=output_filename,
+      title=title)
+
+
+def plot_configs_from_args(args):
+  """Generates plot configs for given command line arguments."""
+  # The way it works:
+  #   First we detect separators -n/--next and split arguments into groups, one
+  #   for each plot. For each group, we partially parse it with
+  #   argparse.ArgumentParser, modified to remember the order of arguments.
+  #   Then we traverse the argument list and fill the PlotConfig.
+  args = itertools.groupby(args, lambda x: x in ["-n", "--next"])
+  args = list(list(group) for match, group in args if not match)
+
+  parser = get_parser()
+  plot_configs = []
+  for index, raw_args in enumerate(args):
+    graph_args = parser.parse_args(raw_args).ordered_args
+    plot_configs.append(_plot_config_from_args(graph_args, index))
+  return plot_configs
+
+
+def show_or_save_plots(plot_configs):
+  for config in plot_configs:
+    fig = plt.figure(figsize=(14.0, 10.0))
+    ax = fig.add_subplot(1, 1, 1)
+
+    plt.title(config.title)
+    config.plot(ax)
+    if config.output_filename:
+      print "Saving to", config.output_filename
+      fig.savefig(config.output_filename)
+      plt.close(fig)
+
+  plt.show()
+
+if __name__ == "__main__":
+  show_or_save_plots(plot_configs_from_args(sys.argv[1:]))
diff --git a/webrtc/video/full_stack_quality_sampler.cc b/webrtc/video/full_stack_quality_sampler.cc
new file mode 100644
index 0000000..a179756
--- /dev/null
+++ b/webrtc/video/full_stack_quality_sampler.cc
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gflags/gflags.h"
+#include "webrtc/test/field_trial.h"
+#include "webrtc/test/run_test.h"
+#include "webrtc/video/full_stack.h"
+
+namespace webrtc {
+namespace flags {
+
+DEFINE_string(title, "Full stack graph", "Graph title.");
+std::string Title() {
+  return static_cast<std::string>(FLAGS_title);
+}
+
+DEFINE_string(filename, "graph_data.txt", "Name of a target graph data file.");
+std::string Filename() {
+  return static_cast<std::string>(FLAGS_filename);
+}
+
+DEFINE_string(clip_name, "screenshare_slides", "Clip name, resource name.");
+std::string ClipName() {
+  return static_cast<std::string>(FLAGS_clip_name);
+}
+
+DEFINE_int32(width, 1850, "Video width (crops source).");
+size_t Width() {
+  return static_cast<size_t>(FLAGS_width);
+}
+
+DEFINE_int32(height, 1110, "Video height (crops source).");
+size_t Height() {
+  return static_cast<size_t>(FLAGS_height);
+}
+
+DEFINE_int32(fps, 5, "Frames per second.");
+int Fps() {
+  return static_cast<int>(FLAGS_fps);
+}
+
+DEFINE_int32(
+    content_mode,
+    1,
+    "0 - real time video, 1 - screenshare static, 2 - screenshare scrolling.");
+ContentMode ContentModeFlag() {
+  switch (FLAGS_content_mode) {
+    case 0:
+      return ContentMode::kRealTimeVideo;
+    case 1:
+      return ContentMode::kScreensharingStaticImage;
+    case 2:
+      return ContentMode::kScreensharingScrollingImage;
+    default:
+      RTC_NOTREACHED() << "Unknown content mode!";
+      return ContentMode::kScreensharingStaticImage;
+  }
+}
+
+DEFINE_int32(test_duration, 60, "Duration of the test in seconds.");
+int TestDuration() {
+  return static_cast<int>(FLAGS_test_duration);
+}
+
+DEFINE_int32(min_bitrate, 50000, "Minimum video bitrate.");
+int MinBitrate() {
+  return static_cast<int>(FLAGS_min_bitrate);
+}
+
+DEFINE_int32(target_bitrate,
+             500000,
+             "Target video bitrate. (Default value here different than in full "
+             "stack tests!)");
+int TargetBitrate() {
+  return static_cast<int>(FLAGS_target_bitrate);
+}
+
+DEFINE_int32(max_bitrate,
+             500000,
+             "Maximum video bitrate. (Default value here different than in "
+             "full stack tests!)");
+int MaxBitrate() {
+  return static_cast<int>(FLAGS_max_bitrate);
+}
+
+DEFINE_string(codec, "VP9", "Video codec to use.");
+std::string Codec() {
+  return static_cast<std::string>(FLAGS_codec);
+}
+
+DEFINE_string(
+    force_fieldtrials,
+    "",
+    "Field trials control experimental feature code which can be forced. "
+    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
+    " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
+    "trials are separated by \"/\"");
+}  // namespace flags
+
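+// Runs the full stack test once, with parameters taken from command-line
+// flags; per-frame graph data is written to the file given by --filename.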
+class FullStackGenGraph : public FullStackTest {
+ public:
+  void TestBody() override {
+    std::string title = flags::Title();
+    std::string clip_name = flags::ClipName();
+    FullStackTestParams params = {
+        title.c_str(),
+        {clip_name.c_str(), flags::Width(), flags::Height(), flags::Fps()},
+        flags::ContentModeFlag(),
+        flags::MinBitrate(),
+        flags::TargetBitrate(),
+        flags::MaxBitrate(),
+        0.0,  // avg_psnr_threshold
+        0.0,  // avg_ssim_threshold
+        flags::TestDuration(),
+        flags::Codec()};
+    params.graph_data_output_filename = flags::Filename();
+
+    RunTest(params);
+  }
+};
+
+void FullStackRun() {
+  FullStackGenGraph full_stack;
+  full_stack.TestBody();
+}
+}  // namespace webrtc
+
+int main(int argc, char* argv[]) {
+  ::testing::InitGoogleTest(&argc, argv);
+  google::ParseCommandLineFlags(&argc, &argv, true);
+  webrtc::test::InitFieldTrialsFromString(
+      webrtc::flags::FLAGS_force_fieldtrials);
+  webrtc::test::RunTest(webrtc::FullStackRun);
+  return 0;
+}
diff --git a/webrtc/video/video_receive_stream.cc b/webrtc/video/video_receive_stream.cc
index 52e37b7..5737ca9 100644
--- a/webrtc/video/video_receive_stream.cc
+++ b/webrtc/video/video_receive_stream.cc
@@ -103,6 +103,8 @@
   strcpy(codec.plName, decoder.payload_name.c_str());
   if (decoder.payload_name == "VP8") {
     codec.codecType = kVideoCodecVP8;
+  } else if (decoder.payload_name == "VP9") {
+    codec.codecType = kVideoCodecVP9;
   } else if (decoder.payload_name == "H264") {
     codec.codecType = kVideoCodecH264;
   } else {
@@ -111,6 +113,8 @@
 
   if (codec.codecType == kVideoCodecVP8) {
     codec.codecSpecific.VP8 = VideoEncoder::GetDefaultVp8Settings();
+  } else if (codec.codecType == kVideoCodecVP9) {
+    codec.codecSpecific.VP9 = VideoEncoder::GetDefaultVp9Settings();
   } else if (codec.codecType == kVideoCodecH264) {
     codec.codecSpecific.H264 = VideoEncoder::GetDefaultH264Settings();
   }
diff --git a/webrtc/webrtc_tests.gypi b/webrtc/webrtc_tests.gypi
index 489df75..8186415 100644
--- a/webrtc/webrtc_tests.gypi
+++ b/webrtc/webrtc_tests.gypi
@@ -45,6 +45,32 @@
       ],
     },
     {
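+      # Standalone binary that runs the full stack test once and writes
+      # per-frame graph data for full_stack_plot.py.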
+      'target_name': 'full_stack_quality_sampler',
+      'type': 'executable',
+      'sources': [
+        'test/mac/run_test.mm',
+        'test/run_test.cc',
+        'test/run_test.h',
+        'video/full_stack.cc',
+        'video/full_stack_quality_sampler.cc',
+      ],
+      'conditions': [
+        ['OS=="mac"', {
+          'sources!': [
+            'test/run_test.cc',
+          ],
+        }],
+      ],
+      'dependencies': [
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(DEPTH)/third_party/gflags/gflags.gyp:gflags',
+        '<(webrtc_root)/modules/modules.gyp:video_capture',
+        'test/webrtc_test_common.gyp:webrtc_test_common',
+        'test/test.gyp:test_main',
+        'webrtc',
+      ],
+    },
+    {
       'target_name': 'loopback_base',
       'type': 'static_library',
       'sources': [