Implement settable min/start/max bitrates in Call.

These parameters are set via the x-google-*-bitrate SDP parameters. They
are implemented at the Call level rather than per-stream (as in the
current underlying VideoEngine implementation), so that this refactoring
can adjust bandwidth-estimator parameters instead of reconfiguring the
VideoCodec.
Also implements SetMaxSendBandwidth in WebRtcVideoEngine2, since it is
an SDP parameter, and allows it to be readjusted dynamically in Call.
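
For reference, a minimal usage sketch of the Call-level API added below
(illustrative values; 'call' is assumed to be an existing webrtc::Call*):

  webrtc::Call::Config::BitrateConfig bitrate_config;
  bitrate_config.min_bitrate_bps = 30000;    // Illustrative floor.
  bitrate_config.start_bitrate_bps = -1;     // <= 0 keeps current start bitrate.
  bitrate_config.max_bitrate_bps = 2000000;  // -1 means no max cap.
  call->SetBitrateConfig(bitrate_config);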

R=mflodman@webrtc.org, stefan@webrtc.org
BUG=1788

Review URL: https://webrtc-codereview.appspot.com/26199004

git-svn-id: http://webrtc.googlecode.com/svn/trunk@7746 4adac7df-926f-26a2-2b94-8c16560cd09d
diff --git a/talk/media/webrtc/webrtcvideoengine2.cc b/talk/media/webrtc/webrtcvideoengine2.cc
index 608b807..06dee05 100644
--- a/talk/media/webrtc/webrtcvideoengine2.cc
+++ b/talk/media/webrtc/webrtcvideoengine2.cc
@@ -200,18 +200,8 @@
   stream.max_framerate =
       codec.framerate != 0 ? codec.framerate : kDefaultVideoMaxFramerate;
 
-  int min_bitrate = kMinVideoBitrate;
-  codec.GetParam(kCodecParamMinBitrate, &min_bitrate);
-  // Clamp the min video bitrate, this is set from JavaScript directly and needs
-  // to be sanitized.
-  if (min_bitrate < kMinVideoBitrate) {
-    min_bitrate = kMinVideoBitrate;
-  }
-
-  int max_bitrate = kMaxVideoBitrate;
-  codec.GetParam(kCodecParamMaxBitrate, &max_bitrate);
-  stream.min_bitrate_bps = min_bitrate * 1000;
-  stream.target_bitrate_bps = stream.max_bitrate_bps = max_bitrate * 1000;
+  stream.min_bitrate_bps = kMinVideoBitrate * 1000;
+  stream.target_bitrate_bps = stream.max_bitrate_bps = kMaxVideoBitrate * 1000;
 
   int max_qp = kDefaultQpMax;
   codec.GetParam(kCodecParamMaxQuantization, &max_qp);
@@ -703,11 +693,6 @@
     config.voice_engine = voice_engine->voe()->engine();
   }
 
-  // Set start bitrate for the call. A default is provided by SetDefaultOptions.
-  int start_bitrate_kbps;
-  options_.video_start_bitrate.Get(&start_bitrate_kbps);
-  config.stream_start_bitrate_bps = start_bitrate_kbps * 1000;
-
   call_.reset(call_factory->CreateCall(config));
 
   rtcp_receiver_report_ssrc_ = kDefaultRtcpReceiverReportSsrc;
@@ -721,8 +706,6 @@
   options_.suspend_below_min_bitrate.Set(false);
   options_.use_payload_padding.Set(false);
   options_.video_noise_reduction.Set(true);
-  options_.video_start_bitrate.Set(
-      webrtc::Call::Config::kDefaultStartBitrateBps / 1000);
   options_.screencast_min_bitrate.Set(0);
 }
 
@@ -841,6 +824,29 @@
     it->second->SetCodec(supported_codecs.front());
   }
 
+  VideoCodec codec = supported_codecs.front().codec;
+  int bitrate_kbps;
+  if (codec.GetParam(kCodecParamMinBitrate, &bitrate_kbps) &&
+      bitrate_kbps > 0) {
+    bitrate_config_.min_bitrate_bps = bitrate_kbps * 1000;
+  } else {
+    bitrate_config_.min_bitrate_bps = 0;
+  }
+  if (codec.GetParam(kCodecParamStartBitrate, &bitrate_kbps) &&
+      bitrate_kbps > 0) {
+    bitrate_config_.start_bitrate_bps = bitrate_kbps * 1000;
+  } else {
+    // Do not reconfigure start bitrate unless it's specified and positive.
+    bitrate_config_.start_bitrate_bps = -1;
+  }
+  if (codec.GetParam(kCodecParamMaxBitrate, &bitrate_kbps) &&
+      bitrate_kbps > 0) {
+    bitrate_config_.max_bitrate_bps = bitrate_kbps * 1000;
+  } else {
+    bitrate_config_.max_bitrate_bps = -1;
+  }
+  call_->SetBitrateConfig(bitrate_config_);
+
   return true;
 }
 
@@ -1276,9 +1282,19 @@
   return true;
 }
 
-bool WebRtcVideoChannel2::SetMaxSendBandwidth(int bps) {
-  // TODO(pbos): Implement.
-  LOG(LS_VERBOSE) << "SetMaxSendBandwidth: " << bps;
+bool WebRtcVideoChannel2::SetMaxSendBandwidth(int max_bitrate_bps) {
+  LOG(LS_INFO) << "SetMaxSendBandwidth: " << max_bitrate_bps << "bps.";
+  if (max_bitrate_bps <= 0) {
+    // Unsetting max bitrate.
+    max_bitrate_bps = -1;
+  }
+  bitrate_config_.start_bitrate_bps = -1;
+  bitrate_config_.max_bitrate_bps = max_bitrate_bps;
+  if (max_bitrate_bps > 0 &&
+      bitrate_config_.min_bitrate_bps > max_bitrate_bps) {
+    bitrate_config_.min_bitrate_bps = max_bitrate_bps;
+  }
+  call_->SetBitrateConfig(bitrate_config_);
   return true;
 }
 
diff --git a/talk/media/webrtc/webrtcvideoengine2.h b/talk/media/webrtc/webrtcvideoengine2.h
index 9a5fe65..8ce6f36 100644
--- a/talk/media/webrtc/webrtcvideoengine2.h
+++ b/talk/media/webrtc/webrtcvideoengine2.h
@@ -491,6 +491,7 @@
   WebRtcVideoEncoderFactory2* const encoder_factory_;
   std::vector<VideoCodecSettings> recv_codecs_;
   std::vector<webrtc::RtpExtension> recv_rtp_extensions_;
+  webrtc::Call::Config::BitrateConfig bitrate_config_;
   VideoOptions options_;
 };
 
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.cc b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
index 8e0a785..6af5540 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.cc
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.cc
@@ -299,6 +299,11 @@
   return stats;
 }
 
+void FakeCall::SetBitrateConfig(
+    const webrtc::Call::Config::BitrateConfig& bitrate_config) {
+  config_.stream_bitrates = bitrate_config;
+}
+
 void FakeCall::SignalNetworkState(webrtc::Call::NetworkState state) {
   network_state_ = state;
 }
@@ -350,8 +355,6 @@
       cricket::WebRtcVideoDecoderFactory* decoder_factory,
       const std::vector<VideoCodec>& codecs);
 
-  void TestStartBitrate(bool override_start_bitrate, int start_bitrate_bps);
-
   WebRtcVideoEngine2 engine_;
   VideoCodec default_codec_;
   VideoCodec default_red_codec_;
@@ -480,39 +483,6 @@
   FAIL() << "Absolute Sender Time extension not in header-extension list.";
 }
 
-void WebRtcVideoEngine2Test::TestStartBitrate(bool override_start_bitrate,
-                                              int start_bitrate_bps) {
-  FakeCallFactory call_factory;
-  engine_.SetCallFactory(&call_factory);
-
-  engine_.Init(rtc::Thread::Current());
-
-  cricket::VideoOptions options;
-  if (override_start_bitrate) {
-    options.video_start_bitrate.Set(start_bitrate_bps / 1000);
-  }
-
-  rtc::scoped_ptr<VideoMediaChannel> channel(
-      engine_.CreateChannel(options, NULL));
-
-  EXPECT_EQ(override_start_bitrate
-                ? start_bitrate_bps
-                : webrtc::Call::Config::kDefaultStartBitrateBps,
-            call_factory.GetCall()->GetConfig().stream_start_bitrate_bps);
-}
-
-TEST_F(WebRtcVideoEngine2Test, UsesCorrectDefaultStartBitrate) {
-  TestStartBitrate(false, -1);
-}
-
-TEST_F(WebRtcVideoEngine2Test, CreateChannelCanUseIncreasedStartBitrate) {
-  TestStartBitrate(true, 2 * webrtc::Call::Config::kDefaultStartBitrateBps);
-}
-
-TEST_F(WebRtcVideoEngine2Test, CreateChannelCanUseDecreasedStartBitrate) {
-  TestStartBitrate(true, webrtc::Call::Config::kDefaultStartBitrateBps / 2);
-}
-
 TEST_F(WebRtcVideoEngine2Test, SetSendFailsBeforeSettingCodecs) {
   engine_.Init(rtc::Thread::Current());
   rtc::scoped_ptr<VideoMediaChannel> channel(
@@ -877,25 +847,25 @@
     return streams[streams.size() - 1];
   }
 
-  void SetSendCodecsShouldWorkForBitrates(const char* min_bitrate,
-                                          const char* max_bitrate) {
+  void SetSendCodecsShouldWorkForBitrates(const char* min_bitrate_kbps,
+                                          int expected_min_bitrate_bps,
+                                          const char* start_bitrate_kbps,
+                                          int expected_start_bitrate_bps,
+                                          const char* max_bitrate_kbps,
+                                          int expected_max_bitrate_bps) {
     std::vector<VideoCodec> codecs;
     codecs.push_back(kVp8Codec);
-    codecs[0].params[kCodecParamMinBitrate] = min_bitrate;
-    codecs[0].params[kCodecParamMaxBitrate] = max_bitrate;
+    codecs[0].params[kCodecParamMinBitrate] = min_bitrate_kbps;
+    codecs[0].params[kCodecParamStartBitrate] = start_bitrate_kbps;
+    codecs[0].params[kCodecParamMaxBitrate] = max_bitrate_kbps;
     EXPECT_TRUE(channel_->SetSendCodecs(codecs));
 
-    FakeVideoSendStream* stream = AddSendStream();
-
-    std::vector<webrtc::VideoStream> video_streams = stream->GetVideoStreams();
-    ASSERT_EQ(1u, video_streams.size());
-    EXPECT_EQ(atoi(min_bitrate), video_streams.back().min_bitrate_bps / 1000);
-    EXPECT_EQ(atoi(max_bitrate), video_streams.back().max_bitrate_bps / 1000);
-
-    VideoCodec codec;
-    EXPECT_TRUE(channel_->GetSendCodec(&codec));
-    EXPECT_EQ(min_bitrate, codec.params[kCodecParamMinBitrate]);
-    EXPECT_EQ(max_bitrate, codec.params[kCodecParamMaxBitrate]);
+    EXPECT_EQ(expected_min_bitrate_bps,
+              fake_call_->GetConfig().stream_bitrates.min_bitrate_bps);
+    EXPECT_EQ(expected_start_bitrate_bps,
+              fake_call_->GetConfig().stream_bitrates.start_bitrate_bps);
+    EXPECT_EQ(expected_max_bitrate_bps,
+              fake_call_->GetConfig().stream_bitrates.max_bitrate_bps);
   }
 
   void TestSetSendRtpHeaderExtensions(const std::string& cricket_ext,
@@ -1630,8 +1600,19 @@
   EXPECT_EQ(kVp8Codec360p.height, streams[0].height);
 }
 
-TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithMinMaxBitrate) {
-  SetSendCodecsShouldWorkForBitrates("100", "200");
+TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithBitrates) {
+  SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
+                                     200000);
+}
+
+TEST_F(WebRtcVideoChannel2Test,
+       SetSendCodecsWithoutBitratesUsesCorrectDefaults) {
+  SetSendCodecsShouldWorkForBitrates(
+      "", 0, "", -1, "", -1);
+}
+
+TEST_F(WebRtcVideoChannel2Test, SetSendCodecsCapsMinAndStartBitrate) {
+  SetSendCodecsShouldWorkForBitrates("-1", 0, "-100", -1, "", -1);
 }
 
 TEST_F(WebRtcVideoChannel2Test, SetSendCodecsRejectsMaxLessThanMinBitrate) {
@@ -1641,8 +1622,25 @@
   EXPECT_FALSE(channel_->SetSendCodecs(video_codecs));
 }
 
-TEST_F(WebRtcVideoChannel2Test, SetSendCodecsAcceptLargeMinMaxBitrate) {
-  SetSendCodecsShouldWorkForBitrates("1000", "2000");
+TEST_F(WebRtcVideoChannel2Test,
+       SetMaxSendBandwidthShouldPreserveOtherBitrates) {
+  SetSendCodecsShouldWorkForBitrates("100", 100000, "150", 150000, "200",
+                                     200000);
+  channel_->SetMaxSendBandwidth(300000);
+  EXPECT_EQ(100000, fake_call_->GetConfig().stream_bitrates.min_bitrate_bps)
+      << "Setting max bitrate should keep previous min bitrate.";
+  EXPECT_EQ(-1, fake_call_->GetConfig().stream_bitrates.start_bitrate_bps)
+      << "Setting max bitrate should not reset start bitrate.";
+  EXPECT_EQ(300000, fake_call_->GetConfig().stream_bitrates.max_bitrate_bps);
+}
+
+TEST_F(WebRtcVideoChannel2Test, SetMaxSendBandwidthShouldBeRemovable) {
+  channel_->SetMaxSendBandwidth(300000);
+  EXPECT_EQ(300000, fake_call_->GetConfig().stream_bitrates.max_bitrate_bps);
+  // <= 0 means disable (infinite) max bitrate.
+  channel_->SetMaxSendBandwidth(0);
+  EXPECT_EQ(-1, fake_call_->GetConfig().stream_bitrates.max_bitrate_bps)
+      << "Setting zero max bitrate did not reset start bitrate.";
 }
 
 TEST_F(WebRtcVideoChannel2Test, SetSendCodecsWithMaxQuantization) {
diff --git a/talk/media/webrtc/webrtcvideoengine2_unittest.h b/talk/media/webrtc/webrtcvideoengine2_unittest.h
index 48c4f64..1826e9c 100644
--- a/talk/media/webrtc/webrtcvideoengine2_unittest.h
+++ b/talk/media/webrtc/webrtcvideoengine2_unittest.h
@@ -129,9 +129,11 @@
 
   virtual webrtc::Call::Stats GetStats() const OVERRIDE;
 
+  virtual void SetBitrateConfig(
+      const webrtc::Call::Config::BitrateConfig& bitrate_config) OVERRIDE;
   virtual void SignalNetworkState(webrtc::Call::NetworkState state) OVERRIDE;
 
-  const webrtc::Call::Config config_;
+  webrtc::Call::Config config_;
   webrtc::Call::NetworkState network_state_;
   std::vector<webrtc::VideoCodec> codecs_;
   std::vector<FakeVideoSendStream*> video_send_streams_;
diff --git a/webrtc/call.h b/webrtc/call.h
index c6596f8..920c006 100644
--- a/webrtc/call.h
+++ b/webrtc/call.h
@@ -65,8 +65,7 @@
         : webrtc_config(NULL),
           send_transport(send_transport),
           voice_engine(NULL),
-          overuse_callback(NULL),
-          stream_start_bitrate_bps(kDefaultStartBitrateBps) {}
+          overuse_callback(NULL) {}
 
     static const int kDefaultStartBitrateBps;
 
@@ -81,11 +80,20 @@
     // captured frames. 'NULL' disables the callback.
     LoadObserver* overuse_callback;
 
-    // Start bitrate used before a valid bitrate estimate is calculated.
+    // Bitrate config used until valid bitrate estimates are calculated. Also
+    // used to cap total bitrate used.
     // Note: This is currently set only for video and is per-stream rather of
     // for the entire link.
     // TODO(pbos): Set start bitrate for entire Call.
-    int stream_start_bitrate_bps;
+    struct BitrateConfig {
+      BitrateConfig()
+          : min_bitrate_bps(0),
+            start_bitrate_bps(kDefaultStartBitrateBps),
+            max_bitrate_bps(-1) {}
+      int min_bitrate_bps;
+      int start_bitrate_bps;
+      int max_bitrate_bps;
+    } stream_bitrates;
   };
 
   struct Stats {
@@ -121,6 +129,13 @@
   // pacing delay, etc.
   virtual Stats GetStats() const = 0;
 
+  // TODO(pbos): Like BitrateConfig above this is currently per-stream instead
+  // of maximum for entire Call. This should be fixed along with the above.
+  // Specifying a start bitrate (>0) will currently reset the current bitrate
+  // estimate. This is due to how the 'x-google-start-bitrate' flag is currently
+  // implemented.
+  virtual void SetBitrateConfig(
+      const Config::BitrateConfig& bitrate_config) = 0;
   virtual void SignalNetworkState(NetworkState state) = 0;
 
   virtual ~Call() {}
diff --git a/webrtc/video/call.cc b/webrtc/video/call.cc
index 2b4f76f..512e82c 100644
--- a/webrtc/video/call.cc
+++ b/webrtc/video/call.cc
@@ -119,6 +119,8 @@
   virtual DeliveryStatus DeliverPacket(const uint8_t* packet,
                                        size_t length) OVERRIDE;
 
+  virtual void SetBitrateConfig(
+      const webrtc::Call::Config::BitrateConfig& bitrate_config) OVERRIDE;
   virtual void SignalNetworkState(NetworkState state) OVERRIDE;
 
  private:
@@ -176,6 +178,14 @@
   assert(video_engine != NULL);
   assert(config.send_transport != NULL);
 
+  assert(config.stream_bitrates.min_bitrate_bps >= 0);
+  assert(config.stream_bitrates.start_bitrate_bps >=
+         config.stream_bitrates.min_bitrate_bps);
+  if (config.stream_bitrates.max_bitrate_bps != -1) {
+    assert(config.stream_bitrates.max_bitrate_bps >=
+           config.stream_bitrates.start_bitrate_bps);
+  }
+
   if (config.overuse_callback) {
     overuse_observer_proxy_.reset(
         new CpuOveruseObserverProxy(config.overuse_callback));
@@ -213,15 +223,10 @@
 
   // TODO(mflodman): Base the start bitrate on a current bandwidth estimate, if
   // the call has already started.
-  VideoSendStream* send_stream =
-      new VideoSendStream(config_.send_transport,
-                          overuse_observer_proxy_.get(),
-                          video_engine_,
-                          config,
-                          encoder_config,
-                          suspended_send_ssrcs_,
-                          base_channel_id_,
-                          config_.stream_start_bitrate_bps);
+  VideoSendStream* send_stream = new VideoSendStream(
+      config_.send_transport, overuse_observer_proxy_.get(), video_engine_,
+      config, encoder_config, suspended_send_ssrcs_, base_channel_id_,
+      config_.stream_bitrates);
 
   // This needs to be taken before send_crit_ as both locks need to be held
   // while changing network state.
@@ -342,6 +347,30 @@
   return stats;
 }
 
+void Call::SetBitrateConfig(
+    const webrtc::Call::Config::BitrateConfig& bitrate_config) {
+  assert(bitrate_config.min_bitrate_bps >= 0);
+  assert(bitrate_config.max_bitrate_bps == -1 ||
+         bitrate_config.max_bitrate_bps > 0);
+  if (config_.stream_bitrates.min_bitrate_bps ==
+          bitrate_config.min_bitrate_bps &&
+      (bitrate_config.start_bitrate_bps <= 0 ||
+       config_.stream_bitrates.start_bitrate_bps ==
+           bitrate_config.start_bitrate_bps) &&
+      config_.stream_bitrates.max_bitrate_bps ==
+          bitrate_config.max_bitrate_bps) {
+    // Nothing new to set, early abort to avoid encoder reconfigurations.
+    return;
+  }
+  config_.stream_bitrates = bitrate_config;
+  ReadLockScoped read_lock(*send_crit_);
+  for (std::map<uint32_t, VideoSendStream*>::const_iterator it =
+           send_ssrcs_.begin();
+       it != send_ssrcs_.end(); ++it) {
+    it->second->SetBitrateConfig(bitrate_config);
+  }
+}
+
 void Call::SignalNetworkState(NetworkState state) {
   // Take crit for entire function, it needs to be held while updating streams
   // to guarantee a consistent state across streams.
diff --git a/webrtc/video/call_perf_tests.cc b/webrtc/video/call_perf_tests.cc
index a6e619e..c898e25 100644
--- a/webrtc/video/call_perf_tests.cc
+++ b/webrtc/video/call_perf_tests.cc
@@ -640,7 +640,7 @@
 
     Call::Config GetSenderCallConfig() OVERRIDE {
       Call::Config config = EndToEndTest::GetSenderCallConfig();
-      config.stream_start_bitrate_bps = kInitialBitrateKbps * 1000;
+      config.stream_bitrates.start_bitrate_bps = kInitialBitrateKbps * 1000;
       return config;
     }
 
diff --git a/webrtc/video/end_to_end_tests.cc b/webrtc/video/end_to_end_tests.cc
index 769d3b1..0871da4 100644
--- a/webrtc/video/end_to_end_tests.cc
+++ b/webrtc/video/end_to_end_tests.cc
@@ -1566,7 +1566,7 @@
 
     Call::Config GetSenderCallConfig() OVERRIDE {
       Call::Config config = EndToEndTest::GetSenderCallConfig();
-      config.stream_start_bitrate_bps = kStartBitrateBps;
+      config.stream_bitrates.start_bitrate_bps = kStartBitrateBps;
       return config;
     }
 
diff --git a/webrtc/video/loopback.cc b/webrtc/video/loopback.cc
index 8013833..a1ebed1 100644
--- a/webrtc/video/loopback.cc
+++ b/webrtc/video/loopback.cc
@@ -123,8 +123,12 @@
   pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
   test::DirectTransport transport(pipe_config);
   Call::Config call_config(&transport);
-  call_config.stream_start_bitrate_bps =
+  call_config.stream_bitrates.min_bitrate_bps =
+      static_cast<int>(flags::MinBitrate()) * 1000;
+  call_config.stream_bitrates.start_bitrate_bps =
       static_cast<int>(flags::StartBitrate()) * 1000;
+  call_config.stream_bitrates.max_bitrate_bps =
+      static_cast<int>(flags::MaxBitrate()) * 1000;
   scoped_ptr<Call> call(Call::Create(call_config));
 
   // Loopback, call sends to itself.
@@ -157,9 +161,9 @@
   VideoStream* stream = &encoder_config.streams[0];
   stream->width = flags::Width();
   stream->height = flags::Height();
-  stream->min_bitrate_bps = static_cast<int>(flags::MinBitrate()) * 1000;
-  stream->target_bitrate_bps = static_cast<int>(flags::MaxBitrate()) * 1000;
-  stream->max_bitrate_bps = static_cast<int>(flags::MaxBitrate()) * 1000;
+  stream->min_bitrate_bps = call_config.stream_bitrates.min_bitrate_bps;
+  stream->target_bitrate_bps = call_config.stream_bitrates.max_bitrate_bps;
+  stream->max_bitrate_bps = call_config.stream_bitrates.max_bitrate_bps;
   stream->max_framerate = 30;
   stream->max_qp = 56;
 
diff --git a/webrtc/video/rampup_tests.cc b/webrtc/video/rampup_tests.cc
index 59f0fd4..5e73abf 100644
--- a/webrtc/video/rampup_tests.cc
+++ b/webrtc/video/rampup_tests.cc
@@ -403,7 +403,7 @@
 
   Call::Config call_config(&stream_observer);
   if (start_bitrate_bps != 0) {
-    call_config.stream_start_bitrate_bps = start_bitrate_bps;
+    call_config.stream_bitrates.start_bitrate_bps = start_bitrate_bps;
     stream_observer.set_start_bitrate_bps(start_bitrate_bps);
   }
 
diff --git a/webrtc/video/video_send_stream.cc b/webrtc/video/video_send_stream.cc
index f9727fa..5da2669 100644
--- a/webrtc/video/video_send_stream.cc
+++ b/webrtc/video/video_send_stream.cc
@@ -115,20 +115,26 @@
     const VideoEncoderConfig& encoder_config,
     const std::map<uint32_t, RtpState>& suspended_ssrcs,
     int base_channel,
-    int start_bitrate_bps)
+    Call::Config::BitrateConfig bitrate_config)
     : transport_adapter_(transport),
       encoded_frame_proxy_(config.post_encode_callback),
       config_(config),
-      start_bitrate_bps_(start_bitrate_bps),
+      bitrate_config_(bitrate_config),
       suspended_ssrcs_(suspended_ssrcs),
       external_codec_(NULL),
       channel_(-1),
-      use_default_bitrate_(true),
+      use_config_bitrate_(true),
       stats_proxy_(config) {
+  // Duplicate assert checks of the bitrate config. These are also checked
+  // in Call, but are repeated here for explicitness.
+  assert(bitrate_config.min_bitrate_bps >= 0);
+  assert(bitrate_config.start_bitrate_bps >= bitrate_config.min_bitrate_bps);
+  if (bitrate_config.max_bitrate_bps != -1)
+    assert(bitrate_config.max_bitrate_bps >= bitrate_config.start_bitrate_bps);
+
   video_engine_base_ = ViEBase::GetInterface(video_engine);
   video_engine_base_->CreateChannel(channel_, base_channel);
   assert(channel_ != -1);
-  assert(start_bitrate_bps_ > 0);
 
   rtp_rtcp_ = ViERTP_RTCP::GetInterface(video_engine);
   assert(rtp_rtcp_ != NULL);
@@ -385,10 +391,20 @@
     video_codec.qpMax = std::max(video_codec.qpMax,
                                  static_cast<unsigned int>(streams[i].max_qp));
   }
+  // Clamp bitrates to the bitrate config.
+  if (video_codec.minBitrate <
+      static_cast<unsigned int>(bitrate_config_.min_bitrate_bps / 1000)) {
+    video_codec.minBitrate = bitrate_config_.min_bitrate_bps / 1000;
+  }
+  if (bitrate_config_.max_bitrate_bps != -1 &&
+      video_codec.maxBitrate >
+          static_cast<unsigned int>(bitrate_config_.max_bitrate_bps / 1000)) {
+    video_codec.maxBitrate = bitrate_config_.max_bitrate_bps / 1000;
+  }
   unsigned int start_bitrate_bps;
   if (codec_->GetCodecTargetBitrate(channel_, &start_bitrate_bps) != 0 ||
-      use_default_bitrate_) {
-    start_bitrate_bps = start_bitrate_bps_;
+      use_config_bitrate_) {
+    start_bitrate_bps = bitrate_config_.start_bitrate_bps;
   }
   video_codec.startBitrate =
       static_cast<unsigned int>(start_bitrate_bps) / 1000;
@@ -417,7 +433,8 @@
   rtp_rtcp_->SetMinTransmitBitrate(channel_,
                                    config.min_transmit_bitrate_bps / 1000);
 
-  use_default_bitrate_ = false;
+  encoder_config_ = config;
+  use_config_bitrate_ = false;
   return true;
 }
 
@@ -480,6 +497,19 @@
   return rtp_states;
 }
 
+void VideoSendStream::SetBitrateConfig(
+    const Call::Config::BitrateConfig& bitrate_config) {
+  int last_start_bitrate_bps = bitrate_config_.start_bitrate_bps;
+  bitrate_config_ = bitrate_config;
+  if (bitrate_config_.start_bitrate_bps <= 0) {
+    bitrate_config_.start_bitrate_bps = last_start_bitrate_bps;
+  } else {
+    // Override start bitrate with bitrate from config.
+    use_config_bitrate_ = true;
+  }
+  ReconfigureVideoEncoder(encoder_config_);
+}
+
 void VideoSendStream::SignalNetworkState(Call::NetworkState state) {
   // When network goes up, enable RTCP status before setting transmission state.
   // When it goes down, disable RTCP afterwards. This ensures that any packets
diff --git a/webrtc/video/video_send_stream.h b/webrtc/video/video_send_stream.h
index 873785d..56d0d36 100644
--- a/webrtc/video/video_send_stream.h
+++ b/webrtc/video/video_send_stream.h
@@ -49,7 +49,7 @@
                   const VideoEncoderConfig& encoder_config,
                   const std::map<uint32_t, RtpState>& suspended_ssrcs,
                   int base_channel,
-                  int start_bitrate);
+                  Call::Config::BitrateConfig bitrate_config);
 
   virtual ~VideoSendStream();
 
@@ -72,6 +72,7 @@
   typedef std::map<uint32_t, RtpState> RtpStateMap;
   RtpStateMap GetRtpStates() const;
 
+  void SetBitrateConfig(const Call::Config::BitrateConfig& bitrate_config);
   void SignalNetworkState(Call::NetworkState state);
 
   int GetPacerQueuingDelayMs() const;
@@ -81,7 +82,8 @@
   TransportAdapter transport_adapter_;
   EncodedFrameCallbackAdapter encoded_frame_proxy_;
   const VideoSendStream::Config config_;
-  const int start_bitrate_bps_;
+  VideoEncoderConfig encoder_config_;
+  Call::Config::BitrateConfig bitrate_config_;
   std::map<uint32_t, RtpState> suspended_ssrcs_;
 
   ViEBase* video_engine_base_;
@@ -99,7 +101,7 @@
   // Used as a workaround to indicate that we should be using the configured
   // start bitrate initially, instead of the one reported by VideoEngine (which
   // defaults to too high).
-  bool use_default_bitrate_;
+  bool use_config_bitrate_;
 
   SendStatisticsProxy stats_proxy_;
 };
diff --git a/webrtc/video/video_send_stream_tests.cc b/webrtc/video/video_send_stream_tests.cc
index 9646f2b..6fedaf7 100644
--- a/webrtc/video/video_send_stream_tests.cc
+++ b/webrtc/video/video_send_stream_tests.cc
@@ -1583,4 +1583,102 @@
 
   RunBaseTest(&test);
 }
+
+TEST_F(VideoSendStreamTest, UsesCallStreamBitratesAndCanReconfigureBitrates) {
+  // These are chosen to be "kind of odd" so that they don't accidentally
+  // match any default values.
+  static const int kMinBitrateKbps = 137;
+  static const int kStartBitrateKbps = 345;
+  static const int kLowerMaxBitrateKbps = 312;
+  static const int kMaxBitrateKbps = 413;
+  static const int kIncreasedStartBitrateKbps = 451;
+  static const int kIncreasedMaxBitrateKbps = 597;
+  class EncoderBitrateThresholdObserver : public test::SendTest,
+                                          public test::FakeEncoder {
+   public:
+    EncoderBitrateThresholdObserver()
+        : SendTest(kDefaultTimeoutMs),
+          FakeEncoder(Clock::GetRealTimeClock()),
+          num_initializations_(0) {}
+
+   private:
+    virtual int32_t InitEncode(const VideoCodec* codecSettings,
+                               int32_t numberOfCores,
+                               size_t maxPayloadSize) OVERRIDE {
+      if (num_initializations_ == 0) {
+        EXPECT_EQ(static_cast<unsigned int>(kMinBitrateKbps),
+                  codecSettings->minBitrate);
+        EXPECT_EQ(static_cast<unsigned int>(kStartBitrateKbps),
+                  codecSettings->startBitrate);
+        EXPECT_EQ(static_cast<unsigned int>(kMaxBitrateKbps),
+                  codecSettings->maxBitrate);
+        observation_complete_->Set();
+      } else if (num_initializations_ == 1) {
+        EXPECT_EQ(static_cast<unsigned int>(kLowerMaxBitrateKbps),
+                  codecSettings->maxBitrate);
+        // The start bitrate should be kept (-1) and capped to the max bitrate.
+        // Since this is not an end-to-end call, no receiver should have been
+        // returning a REMB that could lower this estimate.
+        EXPECT_EQ(codecSettings->startBitrate, codecSettings->maxBitrate);
+      } else if (num_initializations_ == 2) {
+        EXPECT_EQ(static_cast<unsigned int>(kIncreasedMaxBitrateKbps),
+                  codecSettings->maxBitrate);
+        EXPECT_EQ(static_cast<unsigned int>(kIncreasedStartBitrateKbps),
+                  codecSettings->startBitrate);
+      }
+      ++num_initializations_;
+      return FakeEncoder::InitEncode(codecSettings, numberOfCores,
+                                     maxPayloadSize);
+    }
+
+    virtual Call::Config GetSenderCallConfig() OVERRIDE {
+      Call::Config config(SendTransport());
+      config.stream_bitrates.min_bitrate_bps = kMinBitrateKbps * 1000;
+      config.stream_bitrates.start_bitrate_bps = kStartBitrateKbps * 1000;
+      config.stream_bitrates.max_bitrate_bps = kMaxBitrateKbps * 1000;
+      return config;
+    }
+
+    virtual void ModifyConfigs(
+        VideoSendStream::Config* send_config,
+        std::vector<VideoReceiveStream::Config>* receive_configs,
+        VideoEncoderConfig* encoder_config) OVERRIDE {
+      send_config->encoder_settings.encoder = this;
+      // Set bitrates lower/higher than min/max to make sure they are properly
+      // capped.
+      encoder_config->streams.front().min_bitrate_bps =
+          (kMinBitrateKbps - 10) * 1000;
+      encoder_config->streams.front().max_bitrate_bps =
+          (kIncreasedMaxBitrateKbps + 10) * 1000;
+    }
+
+    virtual void OnCallsCreated(Call* sender_call,
+                                Call* receiver_call) OVERRIDE {
+      call_ = sender_call;
+    }
+
+
+    virtual void PerformTest() OVERRIDE {
+      EXPECT_EQ(kEventSignaled, Wait())
+          << "Timed out while waiting encoder to be configured.";
+      Call::Config::BitrateConfig bitrate_config;
+      bitrate_config.min_bitrate_bps = 0;
+      bitrate_config.start_bitrate_bps = -1;
+      bitrate_config.max_bitrate_bps = kLowerMaxBitrateKbps * 1000;
+      call_->SetBitrateConfig(bitrate_config);
+      EXPECT_EQ(2, num_initializations_)
+          << "Encoder should have been reconfigured with the new value.";
+      bitrate_config.start_bitrate_bps = kIncreasedStartBitrateKbps * 1000;
+      bitrate_config.max_bitrate_bps = kIncreasedMaxBitrateKbps * 1000;
+      call_->SetBitrateConfig(bitrate_config);
+      EXPECT_EQ(3, num_initializations_)
+          << "Encoder should have been reconfigured with the new value.";
+    }
+
+    int num_initializations_;
+    webrtc::Call* call_;
+  } test;
+
+  RunBaseTest(&test);
+}
 }  // namespace webrtc